From a3f6ee86dc63ff5d22c08ec226e55f43e1848204 Mon Sep 17 00:00:00 2001
From: Yibing Liu
Date: Mon, 22 Apr 2019 17:20:13 +0800
Subject: [PATCH] Init paddle-nlp (#2112)

* init paddle-nlp tools for QA test
* Fix paragraph extraction bug
* Update download links
* first update LAC README.md
* rename EmoTect as emotion_detection
* download data from bos
* Update README.md
* Rename project
* second add code
* modify downloads.sh for lac
* rename LAC to lexical_analysis
* update lac readme
* Update README.md
* Update README.md
* Update README.md
* add struct.jpg
* Update README.md
* Update README.md
* update README
* Update README.md
* update emotion_detection README
* add download_data.sh and download_model.sh
* first commit ADE
* dialogue_model_toolkit_update
* update emotion_detection model bos url
* update README
* update readme
* update readme
* update download file
* first commit DAM
* add readme
* fix readme
* fix readme
* fix readme
* fix readme
* fix readme
* rename
* rename again
* 1. add gradient_clip for ernie_lac 2. delete LARK config
* fix download.sh
* Rename MRC task
* fix logger
* fix to douban
* fix final
* update readme
* update readme
* update readme
* fix batch is null
* fix typo
* fix typo
* fix typo
* update ernie config
* update readme
* add AI platform url in readme
* update readme subtitlestyle
* update
* Update README.md
* Update README.md
* update
* Create README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* update batch size
* adapt to samll data size
* update ERNIE bcebos url
* add language model
* modify readme
* update
* update
* Update README.md
* Update README.md
* fix readme
* fix max_step, update run.sh and run_ernie.sh
* add finetuned model for lac
* fix bug
* Update README.md
* update
* Update README.md
* add ERNIE pretrained model, and update README
* update readme
* add CPU
* update infer in run.sh and run_ernie.sh
* Update README.md
* Update README.md
* Delete test.py
* fix bug
* fix run.sh infer bug & add ernie infer code
* fix cpu mode
* Update README.md
* fix bug for python3
* fix CPU and GPU diff result bug
* Update README.md
* update readme
* Update run_classifier.py
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update run.sh
* Update run_ernie.sh
* modify dir
* Update README.md
* modify dir too
* modify path
* Update README.md
* PaddleNLP modules backup to old/, rm links-LAC,Senta,SimNet
* mv all modules out of paddle-nlp, rm Senta, auto_dialog_eval, deep_match
* mv models/classify to models/classification, models/seq_lab to models/sequence_labeling
* update readme for models/classification
* update sentiment_classification and rm README
* Add Transformer into paddle-nlp
* change seq_lab to sequence labeling
* Rename old as unarchived in PaddleNLP
* add LARK
* Update README, add paddlehub
* add paddlehub
* Add tmp readme
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update run_ernie.sh
* Update run_ernie.sh
* Update README.md
* Update run_ernie_classifier.py
* Update README.md
* Update README.md
* Update run.sh
* Update run_ernie_classifier.py
* update
* fix chunk_evaluator bug
* change names
* Update README
* add gitmodules
* add install code
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update READMEs
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* README
* Update README.md
* update emotion_detection README
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* update REAME, add finetune doc
* update emotion_detection readme
* change run.sh
* Update README.md
* Update the link in fluid dir
* update readme
* update README for markdown style
* Update README.md
* Update README.md
---
 .gitmodules | 6 +
 PaddleNLP/LAC | 1 -
 PaddleNLP/LARK | 1 -
 PaddleNLP/README.md | 6 +-
 PaddleNLP/Senta | 1 -
 PaddleNLP/SimNet | 1 -
 .../auto_dialogue_evaluation/README.md | 153 +
 .../auto_dialogue_evaluation/config.py | 77 +
 .../auto_dialogue_evaluation/download_data.sh | 2 +
 .../auto_dialogue_evaluation/evaluation.py | 57 +
 .../auto_dialogue_evaluation/init.py | 53 +
 .../auto_dialogue_evaluation/main.py | 461 +
 .../model_files/download_model.sh | 11 +
 .../auto_dialogue_evaluation/reader.py | 76 +
 .../auto_dialogue_evaluation/run.sh | 64 +
 .../auto_dialogue_evaluation/run_CPU.sh | 59 +
 .../deep_attention_matching/README.md | 90 +
 .../deep_attention_matching/config.py | 123 +
 .../data/download_data.sh | 40 +
 .../deep_attention_matching/evaluation.py | 153 +
 .../images/Figure1.png | Bin
 .../images/Figure2.png | Bin
 .../deep_attention_matching/main.py | 465 +
 .../deep_attention_matching/reader.py | 267 +
 .../deep_attention_matching/run.sh | 49 +
 .../deep_attention_matching/run_CPU.sh | 49 +
 .../deep_attention_matching/util.py | 41 +
 .../dialogue_general_understanding/README.md | 276 +
 .../batching.py | 204 +
 .../define_predict_pack.py | 85 +
 .../download_data.sh | 3 +
 .../download_models.sh | 3 +
 .../download_pretrain_model.sh | 3 +
 .../eval_metrics.py | 369 +
 .../finetune_args.py | 74 +
 .../optimization.py | 139 +
 .../dialogue_general_understanding/predict.py | 164 +
 .../reader}/__init__.py | 0
 .../reader/data_reader.py | 794 +
 .../run_eval_metrics.sh | 12 +
 .../run_predict.sh | 57 +
 .../run_train.sh | 100 +
 .../scripts/README.md | 20 +
 .../scripts/build_atis_dataset.py | 155 +
 .../scripts/build_dstc2_dataset.py | 149 +
 .../scripts/build_mrda_dataset.py | 165 +
 .../scripts/build_swda_dataset.py | 231 +
 .../scripts/commonlib.py | 74 +
 .../scripts/conf/dstc2.conf | 3235 +++
 .../scripts/conf/mrda.conf | 73 +
 .../scripts/conf/multi-woz.conf | 2000 ++
 .../scripts/conf/swda.conf | 1134 +
 .../scripts/run_build_data.sh | 25 +
 .../tokenization.py | 370 +
 .../dialogue_general_understanding/train.py | 381 +
 .../utils/__init__.py | 0
 .../utils/args.py | 48 +
 .../utils/fp16.py | 97 +
 .../utils/init.py | 81 +
 PaddleNLP/emotion_detection/README.md | 172 +
 PaddleNLP/emotion_detection/config.json | 4 +
 PaddleNLP/emotion_detection/config.py | 37 +
 PaddleNLP/emotion_detection/download_data.sh | 8 +
 PaddleNLP/emotion_detection/download_model.sh | 16 +
 PaddleNLP/emotion_detection/reader.py | 90 +
 PaddleNLP/emotion_detection/run.sh | 78 +
 PaddleNLP/emotion_detection/run_classifier.py | 344 +
 PaddleNLP/emotion_detection/run_ernie.sh | 85 +
 .../emotion_detection/run_ernie_classifier.py | 377 +
 PaddleNLP/emotion_detection/utils.py | 142 +
 ...ven-dialogue => knowledge_driven_dialogue} | 0
 PaddleNLP/language_model/README.md | 125 +
 PaddleNLP/language_model/{lstm => }/args.py | 0
 .../{lstm => }/data/download_data.sh | 0
PaddleNLP/language_model/{lstm => }/reader.py | 0 PaddleNLP/language_model/run.sh | 12 + PaddleNLP/language_model/{lstm => }/train.py | 3 +- PaddleNLP/language_representations_kit | 1 + PaddleNLP/lexical_analysis/README.md | 149 + .../conf/customization.dic} | 0 .../conf/customization.dic.example | 3 + .../lexical_analysis/conf/label_map.json | 1 + PaddleNLP/lexical_analysis/conf/q2b.dic | 172 + .../lexical_analysis/conf/strong_punc.dic | 5 + PaddleNLP/lexical_analysis/conf/tag.dic | 57 + PaddleNLP/lexical_analysis/conf/word.dic | 20940 ++++++++++++++++ PaddleNLP/lexical_analysis/downloads.sh | 43 + PaddleNLP/lexical_analysis/evaluate.py | 266 + PaddleNLP/lexical_analysis/gru-crf-model.png | Bin 0 -> 62360 bytes PaddleNLP/lexical_analysis/reader.py | 121 + PaddleNLP/lexical_analysis/run.sh | 98 + PaddleNLP/lexical_analysis/run_ernie.sh | 111 + .../run_ernie_sequence_labeling.py | 382 + .../lexical_analysis/run_sequence_labeling.py | 308 + PaddleNLP/lexical_analysis/utils.py | 138 + .../models/test.py => models/__init__.py} | 0 PaddleNLP/models/classification/__init__.py | 0 PaddleNLP/models/classification/nets.py | 283 + .../auto_dialogue_evaluation/__init__.py | 0 .../auto_dialogue_evaluation/net.py | 138 + .../deep_attention_matching/__init__.py | 0 .../deep_attention_matching/layers.py | 207 + .../deep_attention_matching/net.py | 226 + .../__init__.py | 0 .../dialogue_general_understanding/bert.py | 231 + .../create_model.py | 78 + .../define_paradigm.py | 178 + PaddleNLP/models/language_model/__init__.py | 0 .../language_model}/lm_model.py | 0 PaddleNLP/models/matching/__init__.py | 0 PaddleNLP/models/matching/bow.py | 52 + PaddleNLP/models/matching/cnn.py | 52 + PaddleNLP/models/matching/gru.py | 53 + PaddleNLP/models/matching/losses/__init__.py | 0 .../models/matching/losses/hinge_loss.py | 37 + PaddleNLP/models/matching/losses/log_loss.py | 29 + .../losses/softmax_cross_entropy_loss.py | 30 + PaddleNLP/models/matching/lstm.py | 52 + PaddleNLP/models/matching/mm_dnn.py | 162 + .../models/matching/optimizers/__init__.py | 0 .../matching/optimizers/paddle_optimizers.py | 47 + PaddleNLP/models/matching/paddle_layers.py | 428 + .../transformer/__init__.py | 0 .../transformer/desc.py | 92 + .../transformer/model.py | 909 + .../models/reading_comprehension/__init__.py | 0 .../reading_comprehension/bidaf_model.py} | 0 PaddleNLP/models/representation/__init__.py | 0 PaddleNLP/models/representation/ernie.py | 268 + .../models/sequence_labeling/__init__.py | 0 PaddleNLP/models/sequence_labeling/nets.py | 108 + PaddleNLP/models/transformer_encoder.py | 329 + .../transformer/README.md | 195 +- .../transformer/config.py | 90 - .../transformer/infer.py | 15 +- .../transformer/train.py | 3 + PaddleNLP/preprocess/__init__.py | 0 PaddleNLP/preprocess/ernie/__init__.py | 0 PaddleNLP/preprocess/ernie/task_reader.py | 360 + PaddleNLP/preprocess/ernie/tokenization.py | 370 + PaddleNLP/preprocess/padding.py | 65 + PaddleNLP/preprocess/tokenizer/README | 1 + .../tokenizer/conf/customization.dic | 0 .../tokenizer/conf/customization.dic.example | 3 + .../preprocess/tokenizer/conf/model/__model__ | Bin 0 -> 4544 bytes .../preprocess/tokenizer/conf/model/crfw | Bin 0 -> 13478 bytes .../preprocess/tokenizer/conf/model/fc_0.b_0 | Bin 0 -> 3097 bytes .../preprocess/tokenizer/conf/model/fc_0.w_0 | Bin 0 -> 393244 bytes .../preprocess/tokenizer/conf/model/fc_1.b_0 | Bin 0 -> 3097 bytes .../preprocess/tokenizer/conf/model/fc_1.w_0 | Bin 0 -> 393244 bytes .../preprocess/tokenizer/conf/model/fc_2.b_0 | Bin 0 -> 3097 
bytes .../preprocess/tokenizer/conf/model/fc_2.w_0 | Bin 0 -> 1572892 bytes .../preprocess/tokenizer/conf/model/fc_3.b_0 | Bin 0 -> 3097 bytes .../preprocess/tokenizer/conf/model/fc_3.w_0 | Bin 0 -> 1572892 bytes .../preprocess/tokenizer/conf/model/fc_4.b_0 | Bin 0 -> 252 bytes .../preprocess/tokenizer/conf/model/fc_4.w_0 | Bin 0 -> 116763 bytes .../preprocess/tokenizer/conf/model/gru_0.b_0 | Bin 0 -> 3099 bytes .../preprocess/tokenizer/conf/model/gru_0.w_0 | Bin 0 -> 786460 bytes .../preprocess/tokenizer/conf/model/gru_1.b_0 | Bin 0 -> 3099 bytes .../preprocess/tokenizer/conf/model/gru_1.w_0 | Bin 0 -> 786460 bytes .../preprocess/tokenizer/conf/model/gru_2.b_0 | Bin 0 -> 3099 bytes .../preprocess/tokenizer/conf/model/gru_2.w_0 | Bin 0 -> 786460 bytes .../preprocess/tokenizer/conf/model/gru_3.b_0 | Bin 0 -> 3099 bytes .../preprocess/tokenizer/conf/model/gru_3.w_0 | Bin 0 -> 786460 bytes .../preprocess/tokenizer/conf/model/word_emb | Bin 0 -> 10721821 bytes PaddleNLP/preprocess/tokenizer/conf/q2b.dic | 172 + .../preprocess/tokenizer/conf/strong_punc.dic | 5 + PaddleNLP/preprocess/tokenizer/conf/tag.dic | 57 + PaddleNLP/preprocess/tokenizer/conf/word.dic | 20940 ++++++++++++++++ PaddleNLP/preprocess/tokenizer/reader.py | 123 + PaddleNLP/preprocess/tokenizer/test.txt.utf8 | 5 + PaddleNLP/preprocess/tokenizer/tokenizer.py | 171 + PaddleNLP/reading_comprehension/LICENSE | 202 + PaddleNLP/reading_comprehension/README.md | 255 + .../reading_comprehension/data/BiDAF.png | Bin 0 -> 166542 bytes .../data/demo/devset/search.dev.json | 100 + .../data/demo/testset/search.test.json | 100 + .../data/demo/trainset/search.train.json | 100 + .../reading_comprehension/data/download.sh | 30 + .../reading_comprehension/data/md5sum.txt | 2 + .../reading_comprehension/src/UPDATES.md | 28 + .../src}/args.py | 12 +- .../src}/dataset.py | 0 .../src}/paragraph_extraction.py | 2 +- .../src}/preprocess.py | 0 .../src}/run.py | 7 +- PaddleNLP/reading_comprehension/src/run.sh | 52 + .../src}/vocab.py | 0 .../reading_comprehension/utils/__init__.py | 36 + .../utils/download_thirdparty.sh | 0 .../utils/dureader_eval.py | 546 + .../reading_comprehension/utils/get_vocab.py | 67 + .../utils/marco_tokenize_data.py | 43 + .../utils/marcov1_to_dureader.py | 0 .../utils/marcov2_to_v1_tojsonl.py | 14 + .../reading_comprehension/utils/preprocess.py | 218 + .../utils/run_marco2dureader_preprocess.sh | 0 PaddleNLP/sentiment_classification/README.md | 184 + PaddleNLP/sentiment_classification/config.py | 41 + PaddleNLP/sentiment_classification/reader.py | 80 + PaddleNLP/sentiment_classification/run.sh | 85 + .../run_classifier.py | 369 + .../sentiment_classification/run_ernie.sh | 112 + .../run_ernie_classifier.py | 410 + .../senta_config.json | 4 + PaddleNLP/sentiment_classification/utils.py | 150 + PaddleNLP/similarity_net/README.md | 169 + PaddleNLP/similarity_net/config.py | 44 + .../similarity_net/config/bow_pairwise.json | 23 + .../similarity_net/config/bow_pointwise.json | 18 + .../similarity_net/config/cnn_pairwise.json | 24 + .../similarity_net/config/cnn_pointwise.json | 20 + .../similarity_net/config/gru_pairwise.json | 23 + .../similarity_net/config/gru_pointwise.json | 19 + .../similarity_net/config/lstm_pairwise.json | 23 + .../similarity_net/config/lstm_pointwise.json | 19 + .../config/mmdnn_pointwise.json | 32 + PaddleNLP/similarity_net/download.sh | 16 + .../similarity_net/evaluate/evaluate_ecom.sh | 27 + .../similarity_net/evaluate/evaluate_qqsim.sh | 27 + .../evaluate/evaluate_unicom.sh | 30 + 
.../evaluate/evaluate_zhidao.sh | 27 + .../evaluate/unicom_compute_pos_neg.py | 44 + .../similarity_net/evaluate/unicom_split.py | 11 + PaddleNLP/similarity_net/reader.py | 182 + PaddleNLP/similarity_net/run.sh | 99 + PaddleNLP/similarity_net/run_classifier.py | 354 + PaddleNLP/similarity_net/struct.jpg | Bin 0 -> 38115 bytes PaddleNLP/similarity_net/utils.py | 278 + .../{ => unarchived}/chinese_ner/.run_ce.sh | 0 .../{ => unarchived}/chinese_ner/README.md | 0 PaddleNLP/unarchived/chinese_ner/__init__.py | 0 PaddleNLP/{ => unarchived}/chinese_ner/_ce.py | 24 +- .../chinese_ner/data/label_dict | 0 .../chinese_ner/data/test_files/test_part_1 | 0 .../chinese_ner/data/test_files/test_part_2 | 0 .../chinese_ner/data/train_files/train_part_1 | 0 .../chinese_ner/data/train_files/train_part_2 | 0 .../{ => unarchived}/chinese_ner/infer.py | 0 .../{ => unarchived}/chinese_ner/reader.py | 0 .../chinese_ner/scripts/README.md | 0 .../chinese_ner/scripts/infer.sh | 0 .../chinese_ner/scripts/train.sh | 0 .../{ => unarchived}/chinese_ner/train.py | 13 +- .../deep_attention_matching_net/.run_ce.sh | 0 .../deep_attention_matching_net/README.md | 0 .../deep_attention_matching_net/_ce.py | 0 .../douban/download_data.sh | 0 .../douban/test.sh | 0 .../douban/train.sh | 0 .../images/Figure1.png | Bin 0 -> 721875 bytes .../images/Figure2.png | Bin 0 -> 214153 bytes .../deep_attention_matching_net/model.py | 0 .../test_and_evaluate.py | 0 .../train_and_evaluate.py | 15 +- .../ubuntu/download_data.sh | 0 .../ubuntu/test.sh | 0 .../ubuntu/train.sh | 0 .../utils/__init__.py | 0 .../utils/douban_evaluation.py | 0 .../utils/evaluation.py | 0 .../utils/layers.py | 0 .../utils/reader.py | 0 .../deep_attention_matching_net/utils/util.py | 0 .../language_model/gru/.run_ce.sh | 0 .../language_model/gru/README.md | 0 .../language_model/gru/_ce.py | 0 .../language_model/gru/infer.py | 0 .../language_model/gru/train.py | 0 .../language_model/gru/train_on_cloud.py | 0 .../language_model/gru/utils.py | 0 .../language_model/lstm/.run_ce.sh | 0 .../language_model/lstm/README.md | 0 .../language_model/lstm/_ce.py | 0 .../unarchived/language_model/lstm/args.py | 45 + .../language_model/lstm/data/download_data.sh | 4 + .../language_model/lstm/lm_model.py | 299 + .../unarchived/language_model/lstm/reader.py | 108 + .../unarchived/language_model/lstm/train.py | 307 + .../machine_reading_comprehension/.run_ce.sh | 0 .../machine_reading_comprehension/README.md | 2 +- .../machine_reading_comprehension/_ce.py | 0 .../machine_reading_comprehension/args.py | 145 + .../data/download.sh | 0 .../data/md5sum.txt | 0 .../machine_reading_comprehension/dataset.py | 244 + .../paragraph_extraction.py | 200 + .../preprocess.py | 219 + .../machine_reading_comprehension/rc_model.py | 331 + .../machine_reading_comprehension/run.py | 655 + .../machine_reading_comprehension/run.sh | 0 .../utils/__init__.py | 0 .../utils/download_thirdparty.sh | 48 + .../utils/dureader_eval.py | 0 .../utils/get_vocab.py | 0 .../utils/marco_tokenize_data.py | 0 .../utils/marcov1_to_dureader.py | 37 + .../utils/marcov2_to_v1_tojsonl.py | 0 .../utils/preprocess.py | 0 .../utils/run_marco2dureader_preprocess.sh | 22 + .../machine_reading_comprehension/vocab.py | 201 + .../neural_machine_translation/README.md | 0 .../rnn_search/.run_ce.sh | 0 .../rnn_search/README.md | 2 +- .../rnn_search/_ce.py | 0 .../rnn_search/args.py | 0 .../rnn_search/attention_model.py | 0 .../rnn_search/images/bi_rnn.png | Bin .../rnn_search/images/decoder_attention.png | Bin 
.../rnn_search/images/encoder_attention.png | Bin .../rnn_search/infer.py | 0 .../rnn_search/no_attention_model.py | 0 .../rnn_search/train.py | 0 .../transformer/.gitignore | 0 .../transformer/.run_ce.sh | 0 .../transformer/README.md | 23 + .../transformer/README_cn.md | 0 .../transformer/_ce.py | 0 .../transformer/config.py | 201 + .../transformer/gen_data.sh | 220 + .../transformer/images/attention_formula.png | Bin .../images/multi_head_attention.png | Bin 0 -> 107036 bytes .../images/transformer_network.png | Bin 0 -> 265291 bytes .../transformer/infer.py | 324 + .../transformer/local_dist.sh | 0 .../transformer/model.py | 0 .../transformer/optim.py | 0 .../transformer/profile.py | 0 .../transformer/reader.py | 335 + .../transformer/train.py | 771 + .../sequence_tagging_for_ner/.run_ce.sh | 0 .../sequence_tagging_for_ner/README.md | 0 .../sequence_tagging_for_ner/_ce.py | 0 .../sequence_tagging_for_ner/data/download.sh | 0 .../sequence_tagging_for_ner/data/target.txt | 0 .../sequence_tagging_for_ner/data/test | 0 .../sequence_tagging_for_ner/data/train | 0 .../imgs/convergence_curve.png | Bin .../sequence_tagging_for_ner/infer.py | 13 +- .../sequence_tagging_for_ner/network_conf.py | 0 .../sequence_tagging_for_ner/reader.py | 0 .../sequence_tagging_for_ner/train.py | 0 .../sequence_tagging_for_ner/utils.py | 0 .../sequence_tagging_for_ner/utils_extend.py | 0 .../text_classification/.run_ce.sh | 0 .../text_classification/README.md | 0 .../text_classification/_ce.py | 0 .../async_executor/README.md | 0 .../async_executor/data_generator.sh | 0 .../async_executor/data_generator/IMDB.py | 0 .../data_generator/build_raw_data.py | 0 .../data_generator/data_generator.py | 0 .../data_generator/splitfile.py | 0 .../async_executor/data_reader.py | 0 .../async_executor/infer.py | 0 .../async_executor/train.py | 0 .../clouds/scdb_parallel_executor.py | 0 .../clouds/scdb_single_card.py | 0 .../text_classification/infer.py | 0 .../text_classification/nets.py | 0 .../text_classification/train.py | 0 .../text_classification/utils.py | 0 .../text_matching_on_quora/.run_ce.sh | 0 .../text_matching_on_quora/README.md | 12 +- .../text_matching_on_quora/__init__.py | 0 .../text_matching_on_quora/_ce.py | 21 +- .../text_matching_on_quora/cdssm_base.log | 0 .../configs/__init__.py | 0 .../configs/basic_config.py | 9 +- .../text_matching_on_quora/configs/cdssm.py | 7 +- .../text_matching_on_quora/configs/dec_att.py | 18 +- .../configs/infer_sent.py | 8 +- .../text_matching_on_quora/configs/sse.py | 5 +- .../data/prepare_quora_data.sh | 0 .../text_matching_on_quora/imgs/README.md | 0 .../imgs/models_test_acc.png | Bin .../text_matching_on_quora/metric.py | 2 +- .../text_matching_on_quora/models/__init__.py | 0 .../text_matching_on_quora/models/cdssm.py | 44 +- .../text_matching_on_quora/models/dec_att.py | 102 +- .../models/infer_sent.py | 30 +- .../models/match_layers.py | 14 +- .../models/my_layers.py | 37 +- .../text_matching_on_quora/models/pwim.py | 0 .../text_matching_on_quora/models/sse.py | 32 +- .../text_matching_on_quora/models/test.py | 0 .../pretrained_word2vec.py | 24 +- .../quora_question_pairs.py | 21 +- .../train_and_evaluate.py | 193 +- .../text_matching_on_quora/utils.py | 89 +- README.md | 7 +- fluid/PaddleNLP/chinese_ner/README.md | 2 +- .../deep_attention_matching_net/README.md | 2 +- fluid/PaddleNLP/language_model/gru/README.md | 2 +- fluid/PaddleNLP/language_model/lstm/README.md | 2 +- .../machine_reading_comprehension/README.md | 2 +- .../neural_machine_translation/README.md | 2 +- 
.../sequence_tagging_for_ner/README.md | 2 +- fluid/PaddleNLP/text_classification/README.md | 2 +- .../text_matching_on_quora/README.md | 2 +- 400 files changed, 73466 insertions(+), 472 deletions(-) delete mode 160000 PaddleNLP/LAC delete mode 160000 PaddleNLP/LARK delete mode 160000 PaddleNLP/Senta delete mode 160000 PaddleNLP/SimNet create mode 100644 PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/README.md create mode 100755 PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/config.py create mode 100755 PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/download_data.sh create mode 100755 PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/evaluation.py create mode 100755 PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/init.py create mode 100755 PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/main.py create mode 100644 PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/model_files/download_model.sh create mode 100755 PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/reader.py create mode 100755 PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/run.sh create mode 100755 PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/run_CPU.sh create mode 100755 PaddleNLP/dialogue_model_toolkit/deep_attention_matching/README.md create mode 100644 PaddleNLP/dialogue_model_toolkit/deep_attention_matching/config.py create mode 100755 PaddleNLP/dialogue_model_toolkit/deep_attention_matching/data/download_data.sh create mode 100755 PaddleNLP/dialogue_model_toolkit/deep_attention_matching/evaluation.py rename PaddleNLP/{deep_attention_matching_net => dialogue_model_toolkit/deep_attention_matching}/images/Figure1.png (100%) mode change 100644 => 100755 rename PaddleNLP/{deep_attention_matching_net => dialogue_model_toolkit/deep_attention_matching}/images/Figure2.png (100%) mode change 100644 => 100755 create mode 100755 PaddleNLP/dialogue_model_toolkit/deep_attention_matching/main.py create mode 100755 PaddleNLP/dialogue_model_toolkit/deep_attention_matching/reader.py create mode 100755 PaddleNLP/dialogue_model_toolkit/deep_attention_matching/run.sh create mode 100755 PaddleNLP/dialogue_model_toolkit/deep_attention_matching/run_CPU.sh create mode 100755 PaddleNLP/dialogue_model_toolkit/deep_attention_matching/util.py create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/README.md create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/batching.py create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/define_predict_pack.py create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/download_data.sh create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/download_models.sh create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/download_pretrain_model.sh create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/eval_metrics.py create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/finetune_args.py create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/optimization.py create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/predict.py rename PaddleNLP/{chinese_ner => dialogue_model_toolkit/dialogue_general_understanding/reader}/__init__.py (100%) create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/reader/data_reader.py create mode 
100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/run_eval_metrics.sh create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/run_predict.sh create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/run_train.sh create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/README.md create mode 100755 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_atis_dataset.py create mode 100755 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_dstc2_dataset.py create mode 100755 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_mrda_dataset.py create mode 100755 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_swda_dataset.py create mode 100755 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/commonlib.py create mode 100755 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/dstc2.conf create mode 100755 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/mrda.conf create mode 100755 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/multi-woz.conf create mode 100755 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/swda.conf create mode 100755 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/run_build_data.sh create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/tokenization.py create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/train.py rename PaddleNLP/{deep_attention_matching_net => dialogue_model_toolkit/dialogue_general_understanding}/utils/__init__.py (100%) create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/args.py create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/fp16.py create mode 100644 PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/init.py create mode 100644 PaddleNLP/emotion_detection/README.md create mode 100644 PaddleNLP/emotion_detection/config.json create mode 100644 PaddleNLP/emotion_detection/config.py create mode 100644 PaddleNLP/emotion_detection/download_data.sh create mode 100644 PaddleNLP/emotion_detection/download_model.sh create mode 100644 PaddleNLP/emotion_detection/reader.py create mode 100644 PaddleNLP/emotion_detection/run.sh create mode 100644 PaddleNLP/emotion_detection/run_classifier.py create mode 100644 PaddleNLP/emotion_detection/run_ernie.sh create mode 100644 PaddleNLP/emotion_detection/run_ernie_classifier.py create mode 100644 PaddleNLP/emotion_detection/utils.py rename PaddleNLP/{knowledge-driven-dialogue => knowledge_driven_dialogue} (100%) create mode 100644 PaddleNLP/language_model/README.md rename PaddleNLP/language_model/{lstm => }/args.py (100%) rename PaddleNLP/language_model/{lstm => }/data/download_data.sh (100%) rename PaddleNLP/language_model/{lstm => }/reader.py (100%) create mode 100644 PaddleNLP/language_model/run.sh rename PaddleNLP/language_model/{lstm => }/train.py (99%) create mode 160000 PaddleNLP/language_representations_kit create mode 100644 PaddleNLP/lexical_analysis/README.md rename PaddleNLP/{text_matching_on_quora/__init__.py => lexical_analysis/conf/customization.dic} (100%) create mode 100644 PaddleNLP/lexical_analysis/conf/customization.dic.example create mode 100644 PaddleNLP/lexical_analysis/conf/label_map.json 
create mode 100644 PaddleNLP/lexical_analysis/conf/q2b.dic create mode 100644 PaddleNLP/lexical_analysis/conf/strong_punc.dic create mode 100644 PaddleNLP/lexical_analysis/conf/tag.dic create mode 100644 PaddleNLP/lexical_analysis/conf/word.dic create mode 100644 PaddleNLP/lexical_analysis/downloads.sh create mode 100644 PaddleNLP/lexical_analysis/evaluate.py create mode 100644 PaddleNLP/lexical_analysis/gru-crf-model.png create mode 100644 PaddleNLP/lexical_analysis/reader.py create mode 100644 PaddleNLP/lexical_analysis/run.sh create mode 100644 PaddleNLP/lexical_analysis/run_ernie.sh create mode 100644 PaddleNLP/lexical_analysis/run_ernie_sequence_labeling.py create mode 100644 PaddleNLP/lexical_analysis/run_sequence_labeling.py create mode 100644 PaddleNLP/lexical_analysis/utils.py rename PaddleNLP/{text_matching_on_quora/models/test.py => models/__init__.py} (100%) create mode 100644 PaddleNLP/models/classification/__init__.py create mode 100644 PaddleNLP/models/classification/nets.py create mode 100755 PaddleNLP/models/dialogue_model_toolkit/auto_dialogue_evaluation/__init__.py create mode 100755 PaddleNLP/models/dialogue_model_toolkit/auto_dialogue_evaluation/net.py create mode 100644 PaddleNLP/models/dialogue_model_toolkit/deep_attention_matching/__init__.py create mode 100755 PaddleNLP/models/dialogue_model_toolkit/deep_attention_matching/layers.py create mode 100755 PaddleNLP/models/dialogue_model_toolkit/deep_attention_matching/net.py create mode 100644 PaddleNLP/models/dialogue_model_toolkit/dialogue_general_understanding/__init__.py create mode 100644 PaddleNLP/models/dialogue_model_toolkit/dialogue_general_understanding/bert.py create mode 100644 PaddleNLP/models/dialogue_model_toolkit/dialogue_general_understanding/create_model.py create mode 100644 PaddleNLP/models/dialogue_model_toolkit/dialogue_general_understanding/define_paradigm.py create mode 100644 PaddleNLP/models/language_model/__init__.py rename PaddleNLP/{language_model/lstm => models/language_model}/lm_model.py (100%) create mode 100644 PaddleNLP/models/matching/__init__.py create mode 100644 PaddleNLP/models/matching/bow.py create mode 100644 PaddleNLP/models/matching/cnn.py create mode 100644 PaddleNLP/models/matching/gru.py create mode 100644 PaddleNLP/models/matching/losses/__init__.py create mode 100644 PaddleNLP/models/matching/losses/hinge_loss.py create mode 100644 PaddleNLP/models/matching/losses/log_loss.py create mode 100644 PaddleNLP/models/matching/losses/softmax_cross_entropy_loss.py create mode 100644 PaddleNLP/models/matching/lstm.py create mode 100644 PaddleNLP/models/matching/mm_dnn.py create mode 100644 PaddleNLP/models/matching/optimizers/__init__.py create mode 100644 PaddleNLP/models/matching/optimizers/paddle_optimizers.py create mode 100644 PaddleNLP/models/matching/paddle_layers.py create mode 100644 PaddleNLP/models/neural_machine_translation/transformer/__init__.py create mode 100644 PaddleNLP/models/neural_machine_translation/transformer/desc.py create mode 100644 PaddleNLP/models/neural_machine_translation/transformer/model.py create mode 100644 PaddleNLP/models/reading_comprehension/__init__.py rename PaddleNLP/{machine_reading_comprehension/rc_model.py => models/reading_comprehension/bidaf_model.py} (100%) create mode 100644 PaddleNLP/models/representation/__init__.py create mode 100644 PaddleNLP/models/representation/ernie.py create mode 100644 PaddleNLP/models/sequence_labeling/__init__.py create mode 100644 PaddleNLP/models/sequence_labeling/nets.py create mode 100644 
PaddleNLP/models/transformer_encoder.py create mode 100644 PaddleNLP/preprocess/__init__.py create mode 100644 PaddleNLP/preprocess/ernie/__init__.py create mode 100644 PaddleNLP/preprocess/ernie/task_reader.py create mode 100644 PaddleNLP/preprocess/ernie/tokenization.py create mode 100644 PaddleNLP/preprocess/padding.py create mode 100644 PaddleNLP/preprocess/tokenizer/README create mode 100644 PaddleNLP/preprocess/tokenizer/conf/customization.dic create mode 100644 PaddleNLP/preprocess/tokenizer/conf/customization.dic.example create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/__model__ create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/crfw create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/fc_0.b_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/fc_0.w_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/fc_1.b_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/fc_1.w_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/fc_2.b_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/fc_2.w_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/fc_3.b_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/fc_3.w_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/fc_4.b_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/fc_4.w_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/gru_0.b_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/gru_0.w_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/gru_1.b_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/gru_1.w_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/gru_2.b_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/gru_2.w_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/gru_3.b_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/gru_3.w_0 create mode 100644 PaddleNLP/preprocess/tokenizer/conf/model/word_emb create mode 100644 PaddleNLP/preprocess/tokenizer/conf/q2b.dic create mode 100644 PaddleNLP/preprocess/tokenizer/conf/strong_punc.dic create mode 100644 PaddleNLP/preprocess/tokenizer/conf/tag.dic create mode 100644 PaddleNLP/preprocess/tokenizer/conf/word.dic create mode 100644 PaddleNLP/preprocess/tokenizer/reader.py create mode 100644 PaddleNLP/preprocess/tokenizer/test.txt.utf8 create mode 100644 PaddleNLP/preprocess/tokenizer/tokenizer.py create mode 100644 PaddleNLP/reading_comprehension/LICENSE create mode 100644 PaddleNLP/reading_comprehension/README.md create mode 100644 PaddleNLP/reading_comprehension/data/BiDAF.png create mode 100644 PaddleNLP/reading_comprehension/data/demo/devset/search.dev.json create mode 100644 PaddleNLP/reading_comprehension/data/demo/testset/search.test.json create mode 100644 PaddleNLP/reading_comprehension/data/demo/trainset/search.train.json create mode 100644 PaddleNLP/reading_comprehension/data/download.sh create mode 100644 PaddleNLP/reading_comprehension/data/md5sum.txt create mode 100644 PaddleNLP/reading_comprehension/src/UPDATES.md rename PaddleNLP/{machine_reading_comprehension => reading_comprehension/src}/args.py (90%) rename PaddleNLP/{machine_reading_comprehension => reading_comprehension/src}/dataset.py (100%) rename PaddleNLP/{machine_reading_comprehension => reading_comprehension/src}/paragraph_extraction.py (99%) rename PaddleNLP/{machine_reading_comprehension => reading_comprehension/src}/preprocess.py (100%) rename PaddleNLP/{machine_reading_comprehension => 
reading_comprehension/src}/run.py (99%) create mode 100644 PaddleNLP/reading_comprehension/src/run.sh rename PaddleNLP/{machine_reading_comprehension => reading_comprehension/src}/vocab.py (100%) create mode 100644 PaddleNLP/reading_comprehension/utils/__init__.py rename PaddleNLP/{machine_reading_comprehension => reading_comprehension}/utils/download_thirdparty.sh (100%) create mode 100644 PaddleNLP/reading_comprehension/utils/dureader_eval.py create mode 100644 PaddleNLP/reading_comprehension/utils/get_vocab.py create mode 100644 PaddleNLP/reading_comprehension/utils/marco_tokenize_data.py rename PaddleNLP/{machine_reading_comprehension => reading_comprehension}/utils/marcov1_to_dureader.py (100%) create mode 100644 PaddleNLP/reading_comprehension/utils/marcov2_to_v1_tojsonl.py create mode 100644 PaddleNLP/reading_comprehension/utils/preprocess.py rename PaddleNLP/{machine_reading_comprehension => reading_comprehension}/utils/run_marco2dureader_preprocess.sh (100%) create mode 100644 PaddleNLP/sentiment_classification/README.md create mode 100644 PaddleNLP/sentiment_classification/config.py create mode 100644 PaddleNLP/sentiment_classification/reader.py create mode 100644 PaddleNLP/sentiment_classification/run.sh create mode 100644 PaddleNLP/sentiment_classification/run_classifier.py create mode 100644 PaddleNLP/sentiment_classification/run_ernie.sh create mode 100644 PaddleNLP/sentiment_classification/run_ernie_classifier.py create mode 100644 PaddleNLP/sentiment_classification/senta_config.json create mode 100644 PaddleNLP/sentiment_classification/utils.py create mode 100644 PaddleNLP/similarity_net/README.md create mode 100644 PaddleNLP/similarity_net/config.py create mode 100644 PaddleNLP/similarity_net/config/bow_pairwise.json create mode 100644 PaddleNLP/similarity_net/config/bow_pointwise.json create mode 100644 PaddleNLP/similarity_net/config/cnn_pairwise.json create mode 100644 PaddleNLP/similarity_net/config/cnn_pointwise.json create mode 100644 PaddleNLP/similarity_net/config/gru_pairwise.json create mode 100644 PaddleNLP/similarity_net/config/gru_pointwise.json create mode 100644 PaddleNLP/similarity_net/config/lstm_pairwise.json create mode 100644 PaddleNLP/similarity_net/config/lstm_pointwise.json create mode 100644 PaddleNLP/similarity_net/config/mmdnn_pointwise.json create mode 100644 PaddleNLP/similarity_net/download.sh create mode 100644 PaddleNLP/similarity_net/evaluate/evaluate_ecom.sh create mode 100644 PaddleNLP/similarity_net/evaluate/evaluate_qqsim.sh create mode 100644 PaddleNLP/similarity_net/evaluate/evaluate_unicom.sh create mode 100644 PaddleNLP/similarity_net/evaluate/evaluate_zhidao.sh create mode 100644 PaddleNLP/similarity_net/evaluate/unicom_compute_pos_neg.py create mode 100644 PaddleNLP/similarity_net/evaluate/unicom_split.py create mode 100644 PaddleNLP/similarity_net/reader.py create mode 100644 PaddleNLP/similarity_net/run.sh create mode 100644 PaddleNLP/similarity_net/run_classifier.py create mode 100644 PaddleNLP/similarity_net/struct.jpg create mode 100644 PaddleNLP/similarity_net/utils.py rename PaddleNLP/{ => unarchived}/chinese_ner/.run_ce.sh (100%) rename PaddleNLP/{ => unarchived}/chinese_ner/README.md (100%) create mode 100644 PaddleNLP/unarchived/chinese_ner/__init__.py rename PaddleNLP/{ => unarchived}/chinese_ner/_ce.py (71%) rename PaddleNLP/{ => unarchived}/chinese_ner/data/label_dict (100%) rename PaddleNLP/{ => unarchived}/chinese_ner/data/test_files/test_part_1 (100%) rename PaddleNLP/{ => 
unarchived}/chinese_ner/data/test_files/test_part_2 (100%) rename PaddleNLP/{ => unarchived}/chinese_ner/data/train_files/train_part_1 (100%) rename PaddleNLP/{ => unarchived}/chinese_ner/data/train_files/train_part_2 (100%) rename PaddleNLP/{ => unarchived}/chinese_ner/infer.py (100%) rename PaddleNLP/{ => unarchived}/chinese_ner/reader.py (100%) rename PaddleNLP/{ => unarchived}/chinese_ner/scripts/README.md (100%) rename PaddleNLP/{ => unarchived}/chinese_ner/scripts/infer.sh (100%) rename PaddleNLP/{ => unarchived}/chinese_ner/scripts/train.sh (100%) rename PaddleNLP/{ => unarchived}/chinese_ner/train.py (98%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/.run_ce.sh (100%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/README.md (100%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/_ce.py (100%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/douban/download_data.sh (100%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/douban/test.sh (100%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/douban/train.sh (100%) create mode 100644 PaddleNLP/unarchived/deep_attention_matching_net/images/Figure1.png create mode 100644 PaddleNLP/unarchived/deep_attention_matching_net/images/Figure2.png rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/model.py (100%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/test_and_evaluate.py (100%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/train_and_evaluate.py (97%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/ubuntu/download_data.sh (100%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/ubuntu/test.sh (100%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/ubuntu/train.sh (100%) create mode 100644 PaddleNLP/unarchived/deep_attention_matching_net/utils/__init__.py rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/utils/douban_evaluation.py (100%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/utils/evaluation.py (100%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/utils/layers.py (100%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/utils/reader.py (100%) rename PaddleNLP/{ => unarchived}/deep_attention_matching_net/utils/util.py (100%) rename PaddleNLP/{ => unarchived}/language_model/gru/.run_ce.sh (100%) rename PaddleNLP/{ => unarchived}/language_model/gru/README.md (100%) rename PaddleNLP/{ => unarchived}/language_model/gru/_ce.py (100%) rename PaddleNLP/{ => unarchived}/language_model/gru/infer.py (100%) rename PaddleNLP/{ => unarchived}/language_model/gru/train.py (100%) rename PaddleNLP/{ => unarchived}/language_model/gru/train_on_cloud.py (100%) rename PaddleNLP/{ => unarchived}/language_model/gru/utils.py (100%) rename PaddleNLP/{ => unarchived}/language_model/lstm/.run_ce.sh (100%) rename PaddleNLP/{ => unarchived}/language_model/lstm/README.md (100%) rename PaddleNLP/{ => unarchived}/language_model/lstm/_ce.py (100%) create mode 100644 PaddleNLP/unarchived/language_model/lstm/args.py create mode 100644 PaddleNLP/unarchived/language_model/lstm/data/download_data.sh create mode 100644 PaddleNLP/unarchived/language_model/lstm/lm_model.py create mode 100644 PaddleNLP/unarchived/language_model/lstm/reader.py create mode 100644 PaddleNLP/unarchived/language_model/lstm/train.py rename PaddleNLP/{ => unarchived}/machine_reading_comprehension/.run_ce.sh (100%) rename PaddleNLP/{ => 
unarchived}/machine_reading_comprehension/README.md (99%) rename PaddleNLP/{ => unarchived}/machine_reading_comprehension/_ce.py (100%) create mode 100644 PaddleNLP/unarchived/machine_reading_comprehension/args.py rename PaddleNLP/{ => unarchived}/machine_reading_comprehension/data/download.sh (100%) rename PaddleNLP/{ => unarchived}/machine_reading_comprehension/data/md5sum.txt (100%) create mode 100644 PaddleNLP/unarchived/machine_reading_comprehension/dataset.py create mode 100644 PaddleNLP/unarchived/machine_reading_comprehension/paragraph_extraction.py create mode 100644 PaddleNLP/unarchived/machine_reading_comprehension/preprocess.py create mode 100644 PaddleNLP/unarchived/machine_reading_comprehension/rc_model.py create mode 100644 PaddleNLP/unarchived/machine_reading_comprehension/run.py rename PaddleNLP/{ => unarchived}/machine_reading_comprehension/run.sh (100%) rename PaddleNLP/{ => unarchived}/machine_reading_comprehension/utils/__init__.py (100%) create mode 100755 PaddleNLP/unarchived/machine_reading_comprehension/utils/download_thirdparty.sh rename PaddleNLP/{ => unarchived}/machine_reading_comprehension/utils/dureader_eval.py (100%) rename PaddleNLP/{ => unarchived}/machine_reading_comprehension/utils/get_vocab.py (100%) rename PaddleNLP/{ => unarchived}/machine_reading_comprehension/utils/marco_tokenize_data.py (100%) create mode 100644 PaddleNLP/unarchived/machine_reading_comprehension/utils/marcov1_to_dureader.py rename PaddleNLP/{ => unarchived}/machine_reading_comprehension/utils/marcov2_to_v1_tojsonl.py (100%) rename PaddleNLP/{ => unarchived}/machine_reading_comprehension/utils/preprocess.py (100%) create mode 100644 PaddleNLP/unarchived/machine_reading_comprehension/utils/run_marco2dureader_preprocess.sh create mode 100644 PaddleNLP/unarchived/machine_reading_comprehension/vocab.py rename PaddleNLP/{ => unarchived}/neural_machine_translation/README.md (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/rnn_search/.run_ce.sh (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/rnn_search/README.md (99%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/rnn_search/_ce.py (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/rnn_search/args.py (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/rnn_search/attention_model.py (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/rnn_search/images/bi_rnn.png (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/rnn_search/images/decoder_attention.png (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/rnn_search/images/encoder_attention.png (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/rnn_search/infer.py (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/rnn_search/no_attention_model.py (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/rnn_search/train.py (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/transformer/.gitignore (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/transformer/.run_ce.sh (100%) create mode 100644 PaddleNLP/unarchived/neural_machine_translation/transformer/README.md rename PaddleNLP/{ => unarchived}/neural_machine_translation/transformer/README_cn.md (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/transformer/_ce.py (100%) create mode 100644 PaddleNLP/unarchived/neural_machine_translation/transformer/config.py create mode 100644 
PaddleNLP/unarchived/neural_machine_translation/transformer/gen_data.sh rename PaddleNLP/{ => unarchived}/neural_machine_translation/transformer/images/attention_formula.png (100%) create mode 100644 PaddleNLP/unarchived/neural_machine_translation/transformer/images/multi_head_attention.png create mode 100644 PaddleNLP/unarchived/neural_machine_translation/transformer/images/transformer_network.png create mode 100644 PaddleNLP/unarchived/neural_machine_translation/transformer/infer.py rename PaddleNLP/{ => unarchived}/neural_machine_translation/transformer/local_dist.sh (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/transformer/model.py (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/transformer/optim.py (100%) rename PaddleNLP/{ => unarchived}/neural_machine_translation/transformer/profile.py (100%) create mode 100644 PaddleNLP/unarchived/neural_machine_translation/transformer/reader.py create mode 100644 PaddleNLP/unarchived/neural_machine_translation/transformer/train.py rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/.run_ce.sh (100%) rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/README.md (100%) rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/_ce.py (100%) rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/data/download.sh (100%) rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/data/target.txt (100%) rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/data/test (100%) rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/data/train (100%) rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/imgs/convergence_curve.png (100%) rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/infer.py (86%) rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/network_conf.py (100%) rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/reader.py (100%) rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/train.py (100%) rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/utils.py (100%) rename PaddleNLP/{ => unarchived}/sequence_tagging_for_ner/utils_extend.py (100%) rename PaddleNLP/{ => unarchived}/text_classification/.run_ce.sh (100%) rename PaddleNLP/{ => unarchived}/text_classification/README.md (100%) rename PaddleNLP/{ => unarchived}/text_classification/_ce.py (100%) rename PaddleNLP/{ => unarchived}/text_classification/async_executor/README.md (100%) rename PaddleNLP/{ => unarchived}/text_classification/async_executor/data_generator.sh (100%) rename PaddleNLP/{ => unarchived}/text_classification/async_executor/data_generator/IMDB.py (100%) rename PaddleNLP/{ => unarchived}/text_classification/async_executor/data_generator/build_raw_data.py (100%) rename PaddleNLP/{ => unarchived}/text_classification/async_executor/data_generator/data_generator.py (100%) rename PaddleNLP/{ => unarchived}/text_classification/async_executor/data_generator/splitfile.py (100%) rename PaddleNLP/{ => unarchived}/text_classification/async_executor/data_reader.py (100%) rename PaddleNLP/{ => unarchived}/text_classification/async_executor/infer.py (100%) rename PaddleNLP/{ => unarchived}/text_classification/async_executor/train.py (100%) rename PaddleNLP/{ => unarchived}/text_classification/clouds/scdb_parallel_executor.py (100%) rename PaddleNLP/{ => unarchived}/text_classification/clouds/scdb_single_card.py (100%) rename PaddleNLP/{ => unarchived}/text_classification/infer.py (100%) rename PaddleNLP/{ => unarchived}/text_classification/nets.py (100%) rename PaddleNLP/{ => 
unarchived}/text_classification/train.py (100%) rename PaddleNLP/{ => unarchived}/text_classification/utils.py (100%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/.run_ce.sh (100%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/README.md (97%) create mode 100644 PaddleNLP/unarchived/text_matching_on_quora/__init__.py rename PaddleNLP/{ => unarchived}/text_matching_on_quora/_ce.py (76%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/cdssm_base.log (100%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/configs/__init__.py (100%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/configs/basic_config.py (94%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/configs/cdssm.py (93%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/configs/dec_att.py (87%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/configs/infer_sent.py (93%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/configs/sse.py (96%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/data/prepare_quora_data.sh (100%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/imgs/README.md (100%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/imgs/models_test_acc.png (100%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/metric.py (100%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/models/__init__.py (100%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/models/cdssm.py (63%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/models/dec_att.py (52%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/models/infer_sent.py (70%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/models/match_layers.py (84%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/models/my_layers.py (59%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/models/pwim.py (100%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/models/sse.py (67%) create mode 100644 PaddleNLP/unarchived/text_matching_on_quora/models/test.py rename PaddleNLP/{ => unarchived}/text_matching_on_quora/pretrained_word2vec.py (78%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/quora_question_pairs.py (93%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/train_and_evaluate.py (63%) rename PaddleNLP/{ => unarchived}/text_matching_on_quora/utils.py (65%) diff --git a/.gitmodules b/.gitmodules index 100d42ff..3079d76d 100644 --- a/.gitmodules +++ b/.gitmodules @@ -13,3 +13,9 @@ [submodule "PaddleNLP/knowledge-driven-dialogue"] path = PaddleNLP/knowledge-driven-dialogue url = https://github.com/baidu/knowledge-driven-dialogue +[submodule "PaddleNLP/language_representations_kit"] + path = PaddleNLP/language_representations_kit + url = https://github.com/PaddlePaddle/LARK +[submodule "PaddleNLP/knowledge_driven_dialogue"] + path = PaddleNLP/knowledge_driven_dialogue + url = https://github.com/baidu/knowledge-driven-dialogue/ diff --git a/PaddleNLP/LAC b/PaddleNLP/LAC deleted file mode 160000 index a4eb73b2..00000000 --- a/PaddleNLP/LAC +++ /dev/null @@ -1 +0,0 @@ -Subproject commit a4eb73b2fb64d8aab8499a1184edf4fc386f8268 diff --git a/PaddleNLP/LARK b/PaddleNLP/LARK deleted file mode 160000 index 77ab80a7..00000000 --- a/PaddleNLP/LARK +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 77ab80a7061024c4b28f0b41fdd6ba42d5e6d9e1 diff --git a/PaddleNLP/README.md b/PaddleNLP/README.md index fa81f6a2..7132e149 100644 --- a/PaddleNLP/README.md +++ b/PaddleNLP/README.md @@ -9,7 +9,7 @@ Machine Translation, NMT)等阶段。在 NMT 
成熟后,机器翻译才真正
 本实例所实现的 Transformer 就是一个基于自注意力机制的机器翻译模型,其中不再有RNN或CNN结构,而是完全利用 Attention 学习语言中的上下文依赖。相较于RNN/CNN, 这种结构在单层内计算复杂度更低、易于并行化、对长程依赖更易建模,最终在多种语言之间取得了最好的翻译效果。
 
-- [Transformer](https://github.com/PaddlePaddle/models/blob/develop/PaddleNLP/neural_machine_translation/transformer/README_cn.md)
+- [Transformer](https://github.com/PaddlePaddle/models/blob/develop/PaddleNLP/neural_machine_translation/transformer/README.md)
 
 中文词法分析
@@ -35,7 +35,7 @@ Machine Translation, NMT)等阶段。在 NMT 成熟后,机器翻译才真正
 本例所开放的DAM (Deep Attention Matching Network)为百度自然语言处理部发表于ACL-2018的工作,用于检索式聊天机器人多轮对话中应答的选择。DAM受Transformer的启发,其网络结构完全基于注意力(attention)机制,利用栈式的self-attention结构分别学习不同粒度下应答和语境的语义表示,然后利用cross-attention获取应答与语境之间的相关性,在两个大规模多轮对话数据集上的表现均好于其它模型。
 
-- [Deep Attention Matching Network](https://github.com/PaddlePaddle/models/tree/develop/PaddleNLP/deep_attention_matching_net)
+- [Deep Attention Matching Network](https://github.com/PaddlePaddle/models/tree/develop/PaddleNLP/dialogue_model_toolkit/deep_attention_matching)
 
 AnyQ
 ----
@@ -53,4 +53,4 @@ SimNet是百度自然语言处理部于2013年自主研发的语义匹配框架
 百度阅读理解数据集是由百度自然语言处理部开源的一个真实世界数据集,所有的问题、原文都来源于实际数据(百度搜索引擎数据和百度知道问答社区),答案是由人类回答的。每个问题都对应多个答案,数据集包含200k问题、1000k原文和420k答案,是目前最大的中文MRC数据集。百度同时开源了对应的阅读理解模型,称为DuReader,采用当前通用的网络分层结构,通过双向attention机制捕捉问题和原文之间的交互关系,生成query-aware的原文表示,最终基于query-aware的原文表示通过point network预测答案范围。
 
-- [DuReader in PaddlePaddle Fluid](https://github.com/PaddlePaddle/models/blob/develop/PaddleNLP/machine_reading_comprehension/README.md)
+- [DuReader in PaddlePaddle Fluid](https://github.com/PaddlePaddle/models/blob/develop/PaddleNLP/reading_comprehension)
\ No newline at end of file
diff --git a/PaddleNLP/Senta b/PaddleNLP/Senta
deleted file mode 160000
index dc1af6a8..00000000
--- a/PaddleNLP/Senta
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit dc1af6a83dd1372055158ac6d17f6d14b3a0f0f8
diff --git a/PaddleNLP/SimNet b/PaddleNLP/SimNet
deleted file mode 160000
index b3e096b9..00000000
--- a/PaddleNLP/SimNet
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit b3e096b92f26720f6e3b020b374e11aa0748c032
diff --git a/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/README.md b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/README.md
new file mode 100644
index 00000000..8888a05d
--- /dev/null
+++ b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/README.md
@@ -0,0 +1,153 @@
+# Auto Dialogue Evaluation
+## Introduction
+### Task description
+Auto Dialogue Evaluation measures the response quality of open-domain dialogue systems; it helps companies or individuals evaluate a dialogue system's responses quickly and reduces the cost of human evaluation.
+1. Without any labeled data, a matching model trained by negative sampling serves as the evaluation tool and can rank the response quality of multiple dialogue systems;
+2. With a small amount of labeled data (human scores for a specific dialogue system or scenario), fine-tuning on top of the matching model can significantly improve the evaluation quality for that system or scenario.
+
+### Results
+As an example, we evaluate four different dialogue systems (seq2seq\_naive/seq2seq\_att/keywords/human) with the auto dialogue evaluation tool.
+1. Without labeled data, evaluating directly with the pre-trained tool gives the following Spearman correlations between the automatic scores and the human scores on the four systems:
+
+   /|seq2seq\_naive|seq2seq\_att|keywords|human
+   --|:--:|:--:|:--:|:--:
+   cor|0.361|0.343|0.324|0.288
+
+   Ranking the four systems by average score:
+
+   Human evaluation|k(0.591)
 test.txt.utf8.seg
+```
+
+### Code structure
+main.py: entry point of the project, wrapping training, prediction, and evaluation
+
+config.py: configuration of the model, including the concrete model type and its hyperparameters
+
+reader.py: data reading and vocabulary loading
+
+evaluation.py: evaluation functions
+
+init.py: model loading
+
+run.sh: the script that runs training, prediction, and evaluation
+
+## Others
+How to contribute code
+
+If you can fix an issue or add a new feature, feel free to send us a pull request. If the PR is accepted, we will score the contribution by quality and difficulty (0-5, higher is better); once you accumulate 10 points you may contact us for an interview opportunity or a recommendation letter.
diff --git a/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/config.py b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/config.py
new file mode 100755
index 00000000..4d1ae4dc
--- /dev/null
+++ b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/config.py
@@ -0,0 +1,77 @@
+"""
+Auto Dialogue Evaluation.
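+
+Typical usage (illustrative only; the flags below are the ones defined in
+parse_args(), and run.sh carries the maintained commands):
+
+    python main.py --do_train True --loss_type CLS --use_cuda \
+        --train_path <training file> --save_path <output dir>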
+""" + +import argparse +import six + +def parse_args(): + """ + Auto Dialogue Evaluation Config + """ + parser = argparse.ArgumentParser('Automatic Dialogue Evaluation.') + parser.add_argument( + '--do_train', type=bool, default=False, help='Whether to perform training.') + parser.add_argument( + '--do_val', type=bool, default=False, help='Whether to perform evaluation.') + parser.add_argument( + '--do_infer', type=bool, default=False, help='Whether to perform inference.') + parser.add_argument( + '--loss_type', type=str, default='CLS', help='Loss type, CLS or L2.') + + #data path + parser.add_argument( + '--train_path', type=str, default=None, help='Path of training data') + parser.add_argument( + '--val_path', type=str, default=None, help='Path of validation data') + parser.add_argument( + '--test_path', type=str, default=None, help='Path of validation data') + parser.add_argument( + '--save_path', type=str, default='tmp', help='Save path') + + #step fit for data size + parser.add_argument( + '--print_step', type=int, default=50, help='Print step') + parser.add_argument( + '--save_step', type=int, default=400, help='Save step') + parser.add_argument( + '--num_scan_data', type=int, default=20, help='Save step') + + parser.add_argument( + '--word_emb_init', type=str, default=None, help='Path to the initial word embedding') + parser.add_argument( + '--init_model', type=str, default=None, help='Path to the init model') + + parser.add_argument( + '--use_cuda', + action='store_true', + help='If set, use cuda for training.') + parser.add_argument( + '--batch_size', type=int, default=256, help='Batch size') + parser.add_argument( + '--hidden_size', type=int, default=256, help='Hidden size') + parser.add_argument( + '--emb_size', type=int, default=256, help='Embedding size') + parser.add_argument( + '--vocab_size', type=int, default=484016, help='Vocabulary size') + parser.add_argument( + '--learning_rate', type=float, default=0.001, help='Learning rate') + parser.add_argument( + '--sample_pro', type=float, default=0.1, help='Sample probability for training data') + parser.add_argument( + '--max_len', type=int, default=50, help='Max length for sentences') + + args = parser.parse_args() + return args + + +def print_arguments(args): + """ + Print Config + """ + print('----------- Configuration Arguments -----------') + for arg, value in sorted(six.iteritems(vars(args))): + print('%s: %s' % (arg, value)) + print('------------------------------------------------') + + diff --git a/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/download_data.sh b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/download_data.sh new file mode 100755 index 00000000..7f97c75e --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/download_data.sh @@ -0,0 +1,2 @@ +wget --no-check-certificate https://baidu-nlp.bj.bcebos.com/auto_dialogue_evaluation_dataset-1.0.0.tar.gz +tar -xzf auto_dialogue_evaluation_dataset-1.0.0.tar.gz diff --git a/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/evaluation.py b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/evaluation.py new file mode 100755 index 00000000..8c8a7041 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/evaluation.py @@ -0,0 +1,57 @@ +""" +Evaluation for auto dialogue evaluation +""" + +import sys +import numpy as np +import pandas as pd + +def get_p_at_n_in_m(data, n, m, ind): + """ + Get n in m + """ + pos_score = data[ind][0] + curr = data[ind : ind+m] + curr = sorted(curr, key 
diff --git a/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/evaluation.py b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/evaluation.py new file mode 100755 index 00000000..8c8a7041 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/evaluation.py @@ -0,0 +1,57 @@ +""" +Evaluation for auto dialogue evaluation +""" + +import six +import pandas as pd + +def get_p_at_n_in_m(data, n, m, ind): + """ + Return 1 if the positive response at position ind ranks within the top n + of the m candidates starting at ind, else 0 + """ + pos_score = data[ind][0] + curr = data[ind : ind+m] + curr = sorted(curr, key=lambda x: x[0], reverse=True) + + if curr[n-1][0] <= pos_score: + return 1 + return 0 + + +def evaluate_Recall(data): + """ + Evaluate recall (R_n@m) over groups of 10 candidates + """ + data = list(data) #accept iterators such as zip() objects under python 3 + p_at_1_in_2 = 0.0 + p_at_1_in_10 = 0.0 + p_at_2_in_10 = 0.0 + p_at_5_in_10 = 0.0 + + length = len(data) // 10 + + for i in six.moves.xrange(0, length): + ind = i * 10 + assert data[ind][1] == 1 + + p_at_1_in_2 += get_p_at_n_in_m(data, 1, 2, ind) + p_at_1_in_10 += get_p_at_n_in_m(data, 1, 10, ind) + p_at_2_in_10 += get_p_at_n_in_m(data, 2, 10, ind) + p_at_5_in_10 += get_p_at_n_in_m(data, 5, 10, ind) + + recall_dict = { + '1_in_2': p_at_1_in_2 / length, + '1_in_10': p_at_1_in_10 / length, + '2_in_10': p_at_2_in_10 / length, + '5_in_10': p_at_5_in_10 / length} + + return recall_dict + + +def evaluate_cor(pred, true): + """ + Evaluate the Spearman correlation between predicted and reference scores + """ + df = pd.DataFrame({'pred': pred, 'true': true}) + cor_matrix = df.corr('spearman') + return cor_matrix['pred']['true'] diff --git a/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/init.py b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/init.py new file mode 100755 index 00000000..d56fb34b --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/init.py @@ -0,0 +1,53 @@ +""" +Init for pretrained parameters +""" + +# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os + +import paddle.fluid as fluid + +def init_pretraining_params(exe, + pretraining_params_path, + main_program): + """ + Init pretraining params + """ + assert os.path.exists(pretraining_params_path + ), "[%s] can't be found."
% pretraining_params_path + + def existed_params(var): + """ + Test existed + """ + if not isinstance(var, fluid.framework.Parameter): + return False + return os.path.exists(os.path.join(pretraining_params_path, var.name)) + + fluid.io.load_vars( + exe, + pretraining_params_path, + main_program=main_program, + predicate=existed_params) + print("Load pretraining parameters from {}.".format( + pretraining_params_path)) + diff --git a/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/main.py b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/main.py new file mode 100755 index 00000000..ad04cd8b --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/main.py @@ -0,0 +1,461 @@ +""" +Auto dialogue evaluation task +""" + +import os +import sys +import six +import numpy as np +import time +import multiprocessing +import paddle +import paddle.fluid as fluid +import reader as reader +import evaluation as eva +import init as init + +try: + import cPickle as pickle #python 2 +except ImportError as e: + import pickle #python 3 + +sys.path.append('../../models/dialogue_model_toolkit/auto_dialogue_evaluation/') +from net import Network +import config + +def train(args): + """Train + """ + if not os.path.exists(args.save_path): + os.makedirs(args.save_path) + + net = Network(args.vocab_size, args.emb_size, args.hidden_size) + + train_program = fluid.Program() + train_startup = fluid.Program() + if "CE_MODE_X" in os.environ: + train_program.random_seed = 110 + train_startup.random_seed = 110 + with fluid.program_guard(train_program, train_startup): + with fluid.unique_name.guard(): + logits, loss = net.network(args.loss_type) + loss.persistable = True + logits.persistable = True + # gradient clipping + fluid.clip.set_gradient_clip(clip=fluid.clip.GradientClipByValue( + max=1.0, min=-1.0)) + + optimizer = fluid.optimizer.Adam( + learning_rate=args.learning_rate) + optimizer.minimize(loss) + print("begin memory optimization ...") + fluid.memory_optimize(train_program) + print("end memory optimization ...") + + test_program = fluid.Program() + test_startup = fluid.Program() + if "CE_MODE_X" in os.environ: + test_program.random_seed = 110 + test_startup.random_seed = 110 + with fluid.program_guard(test_program, test_startup): + with fluid.unique_name.guard(): + logits, loss = net.network(args.loss_type) + loss.persistable = True + logits.persistable = True + + test_program = test_program.clone(for_test=True) + if args.use_cuda: + place = fluid.CUDAPlace(0) + dev_count = fluid.core.get_cuda_device_count() + else: + place = fluid.CPUPlace() + dev_count = int(os.environ.get('CPU_NUM', multiprocessing.cpu_count())) + + print("device count %d" % dev_count) + print("theoretical memory usage: ") + print( + fluid.contrib.memory_usage( + program=train_program, batch_size=args.batch_size)) + + exe = fluid.Executor(place) + exe.run(train_startup) + exe.run(test_startup) + + train_exe = fluid.ParallelExecutor( + use_cuda=args.use_cuda, loss_name=loss.name, main_program=train_program) + + test_exe = fluid.ParallelExecutor( + use_cuda=args.use_cuda, + main_program=test_program, + share_vars_from=train_exe) + + if args.word_emb_init is not None: + print("start loading word embedding init ...") + if six.PY2: + word_emb = np.array(pickle.load(open(args.word_emb_init, + 'rb'))).astype('float32') + else: + word_emb = np.array( + pickle.load( + open(args.word_emb_init, 'rb'), encoding="bytes")).astype( + 'float32') + net.set_word_embedding(word_emb, place) + print("finish init word embedding 
...") + + print("start loading data ...") + + def train_with_feed(batch_data): + """ + Train on one batch + """ + #to do get_feed_names + feed_dict = dict(zip(net.get_feed_names(), batch_data)) + + cost = train_exe.run(feed=feed_dict, fetch_list=[loss.name]) + return cost[0] + + def test_with_feed(batch_data): + """ + Test on one batch + """ + feed_dict = dict(zip(net.get_feed_names(), batch_data)) + + score = test_exe.run(feed=feed_dict, fetch_list=[logits.name]) + return score[0] + + def evaluate(): + """ + Evaluate to choose model + """ + val_batches = reader.batch_reader( + args.val_path, args.batch_size, place, args.max_len, 1) + scores = [] + labels = [] + for batch in val_batches: + scores.extend(test_with_feed(batch)) + labels.extend([x[0] for x in batch[2]]) + + return eva.evaluate_Recall(zip(scores, labels)) + + def save_exe(step, best_recall): + """ + Save exe conditional + """ + recall_dict = evaluate() + print('evaluation recall result:') + print('1_in_2: %s\t1_in_10: %s\t2_in_10: %s\t5_in_10: %s' % ( + recall_dict['1_in_2'], recall_dict['1_in_10'], + recall_dict['2_in_10'], recall_dict['5_in_10'])) + + if recall_dict['1_in_10'] > best_recall and step != 0: + fluid.io.save_inference_model(args.save_path, + net.get_feed_inference_names(), + logits, exe, main_program=train_program) + + print("Save model at step %d ... " % step) + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))) + best_recall = recall_dict['1_in_10'] + return best_recall + + # train over different epoches + global_step, train_time = 0, 0.0 + best_recall = 0 + for epoch in six.moves.xrange(args.num_scan_data): + train_batches = reader.batch_reader( + args.train_path, args.batch_size, place, + args.max_len, args.sample_pro) + + begin_time = time.time() + sum_cost = 0 + for batch in train_batches: + if (args.save_path is not None) and (global_step % args.save_step == 0): + best_recall = save_exe(global_step, best_recall) + + cost = train_with_feed(batch) + global_step += 1 + sum_cost += cost.mean() + + if global_step % args.print_step == 0: + print('training step %s avg loss %s' % (global_step, sum_cost / args.print_step)) + sum_cost = 0 + + pass_time_cost = time.time() - begin_time + train_time += pass_time_cost + print("Pass {0}, pass_time_cost {1}" + .format(epoch, "%2.2f sec" % pass_time_cost)) + + +def finetune(args): + """ + Finetune + """ + if not os.path.exists(args.save_path): + os.makedirs(args.save_path) + + net = Network(args.vocab_size, args.emb_size, args.hidden_size) + + train_program = fluid.Program() + train_startup = fluid.Program() + if "CE_MODE_X" in os.environ: + train_program.random_seed = 110 + train_startup.random_seed = 110 + with fluid.program_guard(train_program, train_startup): + with fluid.unique_name.guard(): + logits, loss = net.network(args.loss_type) + loss.persistable = True + logits.persistable = True + # gradient clipping + fluid.clip.set_gradient_clip(clip=fluid.clip.GradientClipByValue( + max=1.0, min=-1.0)) + + optimizer = fluid.optimizer.Adam( + learning_rate=fluid.layers.exponential_decay( + learning_rate=args.learning_rate, + decay_steps=400, + decay_rate=0.9, + staircase=True)) + optimizer.minimize(loss) + print("begin memory optimization ...") + fluid.memory_optimize(train_program) + print("end memory optimization ...") + + test_program = fluid.Program() + test_startup = fluid.Program() + if "CE_MODE_X" in os.environ: + test_program.random_seed = 110 + test_startup.random_seed = 110 + with fluid.program_guard(test_program, test_startup): + with 
fluid.unique_name.guard(): + logits, loss = net.network(args.loss_type) + loss.persistable = True + logits.persistable = True + + test_program = test_program.clone(for_test=True) + if args.use_cuda: + place = fluid.CUDAPlace(0) + dev_count = fluid.core.get_cuda_device_count() + else: + place = fluid.CPUPlace() + dev_count = int(os.environ.get('CPU_NUM', multiprocessing.cpu_count())) + + print("device count %d" % dev_count) + print("theoretical memory usage: ") + print( + fluid.contrib.memory_usage( + program=train_program, batch_size=args.batch_size)) + + exe = fluid.Executor(place) + exe.run(train_startup) + exe.run(test_startup) + + train_exe = fluid.ParallelExecutor( + use_cuda=args.use_cuda, loss_name=loss.name, main_program=train_program) + + test_exe = fluid.ParallelExecutor( + use_cuda=args.use_cuda, + main_program=test_program, + share_vars_from=train_exe) + + if args.init_model: + init.init_pretraining_params( + exe, + args.init_model, + main_program=train_startup) + print('successfully initialized parameters from %s' % args.init_model) + + print("start loading data ...") + + def train_with_feed(batch_data): + """ + Train on one batch + """ + # map the network's feed names onto the tensors of the current batch + feed_dict = dict(zip(net.get_feed_names(), batch_data)) + + cost = train_exe.run(feed=feed_dict, fetch_list=[loss.name]) + return cost[0] + + def test_with_feed(batch_data): + """ + Test on one batch + """ + feed_dict = dict(zip(net.get_feed_names(), batch_data)) + + score = test_exe.run(feed=feed_dict, fetch_list=[logits.name]) + return score[0] + + def evaluate(): + """ + Evaluate on the validation set for model selection + """ + val_batches = reader.batch_reader( + args.val_path, args.batch_size, place, args.max_len, 1) + scores = [] + labels = [] + for batch in val_batches: + scores.extend(test_with_feed(batch)) + labels.extend([x[0] for x in batch[2]]) + scores = [x[0] for x in scores] + return eva.evaluate_cor(scores, labels) + + def save_exe(step, best_cor): + """ + Save the inference model when validation correlation improves + """ + cor = evaluate() + print('evaluation Spearman correlation: %s' % cor) + if cor > best_cor and step != 0: + fluid.io.save_inference_model(args.save_path, + net.get_feed_inference_names(), logits, + exe, main_program=train_program) + print("Save model at step %d ...
" % step) + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))) + best_cor = cor + return best_cor + + # train over different epoches + global_step, train_time = 0, 0.0 + best_cor = 0.0 + pre_index = -1 + for epoch in six.moves.xrange(args.num_scan_data): + train_batches = reader.batch_reader( + args.train_path, + args.batch_size, place, + args.max_len, args.sample_pro) + + begin_time = time.time() + sum_cost = 0 + for batch in train_batches: + if (args.save_path is not None) and (global_step % args.save_step == 0): + best_cor = save_exe(global_step, best_cor) + + cost = train_with_feed(batch) + global_step += 1 + sum_cost += cost.mean() + + if global_step % args.print_step == 0: + print('training step %s avg loss %s' % (global_step, sum_cost / args.print_step)) + sum_cost = 0 + + pass_time_cost = time.time() - begin_time + train_time += pass_time_cost + print("Pass {0}, pass_time_cost {1}" + .format(epoch, "%2.2f sec" % pass_time_cost)) + + +def evaluate(args): + """ + Evaluate model for both pretrained and finetuned + """ + place = fluid.CUDAPlace(0) if args.use_cuda else fluid.CPUPlace() + exe = fluid.Executor(place) + + t0 = time.time() + + with fluid.scope_guard(fluid.core.Scope()): + infer_program, feed_target_names, fetch_vars = fluid.io.load_inference_model( + args.init_model, exe) + + global_step, infer_time = 0, 0.0 + test_batches = reader.batch_reader( + args.test_path, args.batch_size, place, + args.max_len, 1) + scores = [] + labels = [] + for batch in test_batches: + logits = exe.run( + infer_program, + feed = { + 'context_wordseq': batch[0], + 'response_wordseq': batch[1]}, + fetch_list = fetch_vars) + logits = [x[0] for x in logits[0]] + + scores.extend(logits) + labels.extend([x[0] for x in batch[2]]) + + mean_score = sum(scores)/len(scores) + if args.loss_type == 'CLS': + recall_dict = eva.evaluate_Recall(zip(scores, labels)) + print('mean score: %s' % mean_score) + print('evaluation recall result:') + print('1_in_2: %s\t1_in_10: %s\t2_in_10: %s\t5_in_10: %s' % ( + recall_dict['1_in_2'], recall_dict['1_in_10'], + recall_dict['2_in_10'], recall_dict['5_in_10'])) + elif args.loss_type == 'L2': + cor = eva.evaluate_cor(scores, labels) + print('mean score: %s\nevaluation cor resuls:%s' % (mean_score, cor)) + else: + raise ValueError + + t1 = time.time() + print("finish evaluate model:%s on data:%s time_cost(s):%.2f" % + (args.init_model, args.test_path, t1 - t0)) + + +def infer(args): + """ + Inference function + """ + place = fluid.CUDAPlace(0) if args.use_cuda else fluid.CPUPlace() + exe = fluid.Executor(place) + + t0 = time.time() + + with fluid.scope_guard(fluid.core.Scope()): + infer_program, feed_target_names, fetch_vars = fluid.io.load_inference_model( + args.init_model, exe) + + global_step, infer_time = 0, 0.0 + test_batches = reader.batch_reader( + args.test_path, args.batch_size, place, + args.max_len, 1) + scores = [] + for batch in test_batches: + logits = exe.run( + infer_program, + feed = { + 'context_wordseq': batch[0], + 'response_wordseq': batch[1]}, + fetch_list = fetch_vars) + logits = [x[0] for x in logits[0]] + + scores.extend(logits) + + in_file = open(args.test_path, 'r') + out_path = args.test_path + '.infer' + out_file = open(out_path, 'w') + for line, s in zip(in_file, scores): + out_file.write('%s\t%s\n' % (line.strip(), s)) + + in_file.close() + out_file.close() + + t1 = time.time() + print("finish infer model:%s out file: %s time_cost(s):%.2f" % + (args.init_model, out_path, t1 - t0)) + + +def main(): + """ + main + """ + args 
= config.parse_args() + config.print_arguments(args) + + if args.do_train: + if args.loss_type == 'CLS': + train(args) + elif args.loss_type == 'L2': + finetune(args) + else: + raise ValueError('loss_type must be CLS or L2, got %s' % args.loss_type) + elif args.do_val: + evaluate(args) + elif args.do_infer: + infer(args) + else: + raise ValueError('one of --do_train, --do_val and --do_infer must be set to True') + +if __name__ == '__main__': + main() diff --git a/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/model_files/download_model.sh b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/model_files/download_model.sh new file mode 100644 index 00000000..30057083 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/model_files/download_model.sh @@ -0,0 +1,11 @@ +#matching pretrained +wget --no-check-certificate https://baidu-nlp.bj.bcebos.com/auto_dialogue_evaluation_matching_pretrained-1.0.0.tar.gz +tar -xzf auto_dialogue_evaluation_matching_pretrained-1.0.0.tar.gz + +#finetuned +for task in seq2seq_naive seq2seq_att keywords human +do + wget --no-check-certificate https://baidu-nlp.bj.bcebos.com/auto_dialogue_evaluation_${task}_finetuned-1.0.0.tar.gz + tar -xzf auto_dialogue_evaluation_${task}_finetuned-1.0.0.tar.gz +done + diff --git a/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/reader.py b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/reader.py new file mode 100755 index 00000000..bcf15523 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/reader.py @@ -0,0 +1,76 @@ +""" +Reader for auto dialogue evaluation +""" + +import random + +import numpy as np + +import paddle.fluid as fluid + +def to_lodtensor(data, place): + """ + Pack a list of id sequences into one flat LoDTensor with offset-style LoD + """ + seq_lens = [len(seq) for seq in data] + cur_len = 0 + lod = [cur_len] + for l in seq_lens: + cur_len += l + lod.append(cur_len) + flattened_data = np.concatenate(data, axis=0).astype("int64") + flattened_data = flattened_data.reshape([len(flattened_data), 1]) + res = fluid.LoDTensor() + res.set(flattened_data, place) + res.set_lod([lod]) + return res + + +def reshape_batch(batch, place): + """ + Reshape a batch into (context, response, label) feed structures + """ + context_reshape = to_lodtensor([dat[0] for dat in batch], place) + response_reshape = to_lodtensor([dat[1] for dat in batch], place) + label_reshape = [dat[2] for dat in batch] + return (context_reshape, response_reshape, label_reshape) + + +def batch_reader(data_path, + batch_size, + place, + max_len=50, + sample_pro=1): + """ + Yield batches of size batch_size; a final incomplete batch is dropped + """ + batch = [] + with open(data_path, 'r') as f: + for line in f: + #randomly subsample training data with probability sample_pro + if sample_pro < 1: + if random.random() > sample_pro: + continue + + tokens = line.strip().split('\t') + assert len(tokens) == 3 + context = [int(x) for x in tokens[0].split()[:max_len]] + response = [int(x) for x in tokens[1].split()[:max_len]] + + label = [int(tokens[2])] + instance = (context, response, label) + + if len(batch) < batch_size: + batch.append(instance) + else: + if len(batch) == batch_size: + yield reshape_batch(batch, place) + batch = [instance] + + if len(batch) == batch_size: + yield reshape_batch(batch, place) +
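For readers unfamiliar with Fluid's LoD (level-of-detail) representation, here is a standalone trace of the offset computation performed by `to_lodtensor` in reader.py above — an illustrative sketch only, not part of the patch:

```python
import numpy as np

# Three id sequences of lengths 3, 1 and 2, as to_lodtensor() would receive.
data = [[1, 2, 3], [4], [5, 6]]
lod = [0]
for seq in data:
    lod.append(lod[-1] + len(seq))       # cumulative offsets per sequence
print(lod)                                # [0, 3, 4, 6]
flattened = np.concatenate(data).astype("int64").reshape([-1, 1])
print(flattened.ravel())                  # [1 2 3 4 5 6]
```

The offsets say that rows 0:3 of the flat tensor belong to the first sequence, 3:4 to the second, and 4:6 to the third.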
diff --git a/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/run.sh b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/run.sh new file mode 100755 index 00000000..d1bc8957 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/run.sh @@ -0,0 +1,64 @@ +export CUDA_VISIBLE_DEVICES=4 +export FLAGS_eager_delete_tensor_gb=0.0 + +#pretrain +python -u main.py \ + --do_train True \ + --use_cuda \ + --save_path model_files_tmp/matching_pretrained \ + --train_path data/unlabel_data/train.ids \ + --val_path data/unlabel_data/val.ids + +#finetune based on one task +TASK=human +python -u main.py \ + --do_train True \ + --loss_type L2 \ + --use_cuda \ + --save_path model_files_tmp/${TASK}_finetuned \ + --init_model model_files/matching_pretrained \ + --train_path data/label_data/$TASK/train.ids \ + --val_path data/label_data/$TASK/val.ids \ + --print_step 1 \ + --save_step 1 \ + --num_scan_data 50 + +#evaluate pretrained model by recall +python -u main.py \ + --do_val True \ + --use_cuda \ + --test_path data/unlabel_data/test.ids \ + --init_model model_files/matching_pretrained \ + --loss_type CLS + +#evaluate pretrained model by correlation +for task in seq2seq_naive seq2seq_att keywords human +do + echo $task + python -u main.py \ + --do_val True \ + --use_cuda \ + --test_path data/label_data/$task/test.ids \ + --init_model model_files/matching_pretrained \ + --loss_type L2 +done + +#evaluate finetuned model by correlation +for task in seq2seq_naive seq2seq_att keywords human +do + echo $task + python -u main.py \ + --do_val True \ + --use_cuda \ + --test_path data/label_data/$task/test.ids \ + --init_model model_files/${task}_finetuned \ + --loss_type L2 +done + +#infer +TASK=human +python -u main.py \ + --do_infer True \ + --use_cuda \ + --test_path data/label_data/$TASK/test.ids \ + --init_model model_files/${TASK}_finetuned diff --git a/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/run_CPU.sh b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/run_CPU.sh new file mode 100755 index 00000000..31c44803 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/auto_dialogue_evaluation/run_CPU.sh @@ -0,0 +1,59 @@ +export FLAGS_eager_delete_tensor_gb=0.0 + +#pretrain +python -u main.py \ + --do_train True \ + --sample_pro 0.9 \ + --batch_size 64 \ + --save_path model_files_tmp/matching_pretrained \ + --train_path data/unlabel_data/train.ids \ + --val_path data/unlabel_data/val.ids + +#finetune based on one task +TASK=human +python -u main.py \ + --do_train True \ + --loss_type L2 \ + --save_path model_files_tmp/${TASK}_finetuned \ + --init_model model_files/matching_pretrained \ + --train_path data/label_data/$TASK/train.ids \ + --val_path data/label_data/$TASK/val.ids \ + --print_step 1 \ + --save_step 1 \ + --num_scan_data 50 + +#evaluate pretrained model by recall +python -u main.py \ + --do_val True \ + --test_path data/unlabel_data/test.ids \ + --init_model model_files/matching_pretrained \ + --loss_type CLS + +#evaluate pretrained model by correlation +for task in seq2seq_naive seq2seq_att keywords human +do + echo $task + python -u main.py \ + --do_val True \ + --test_path data/label_data/$task/test.ids \ + --init_model model_files/matching_pretrained \ + --loss_type L2 +done + +#evaluate finetuned model by correlation +for task in seq2seq_naive seq2seq_att keywords human +do + echo $task + python -u main.py \ + --do_val True \ + --test_path data/label_data/$task/test.ids \ + --init_model model_files/${task}_finetuned \ + --loss_type L2 +done + +#infer +TASK=human +python -u main.py \ + --do_infer True \ + --test_path data/label_data/$TASK/test.ids \ + --init_model model_files/${TASK}_finetuned
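An aside on the `--do_train True` / `--do_val True` style used by these run scripts: the config.py files in this toolkit declare those switches with `type=bool`, and argparse converts with `bool(str)`, so any non-empty string — including "False" — parses as True. The scripts above only ever pass `True`, which behaves as intended. If a switch that can be turned off on the command line is needed, a stricter converter along these lines could be used (the `str2bool` helper is illustrative, not part of this patch):

```python
import argparse

def str2bool(v):
    """Parse 'True'/'False' strings strictly; note bool('False') would be True."""
    if v.lower() in ('true', 't', '1'):
        return True
    if v.lower() in ('false', 'f', '0'):
        return False
    raise argparse.ArgumentTypeError('boolean value expected, got %r' % v)

parser = argparse.ArgumentParser()
parser.add_argument('--do_train', type=str2bool, default=False)
print(parser.parse_args(['--do_train', 'False']).do_train)  # False
```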
diff --git a/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/README.md b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/README.md new file mode 100755 index 00000000..477a0c3f --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/README.md @@ -0,0 +1,90 @@ +# __Deep Attention Matching Network__ +## Introduction +### Task overview +The Deep Attention Matching Network (DAM) is an open-domain multi-turn dialogue matching model: given the multi-turn dialogue history and a set of candidate responses, it ranks the candidates to select the most appropriate response. +The network structure is shown below; for details please refer to the paper: [http://aclweb.org/anthology/P18-1103](http://aclweb.org/anthology/P18-1103). + +<p align="center"> +<img src="images/Figure1.png"/> <br /> +Overview of Deep Attention Matching Network +</p>
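The attentive modules shown in the figure are built from scaled dot-product attention, softmax(QK^T / sqrt(d)) V (see the paper above; `util.py` in this patch keeps a placeholder of the same name). A minimal NumPy sketch of that primitive, for illustration only:

```python
import numpy as np

def scaled_dot_product_attention(q, k, v):
    """softmax(Q K^T / sqrt(d)) V -- the primitive the attentive modules stack."""
    d = q.shape[-1]
    scores = np.dot(q, k.T) / np.sqrt(d)
    weights = np.exp(scores - scores.max(axis=-1, keepdims=True))  # stable softmax
    weights = weights / weights.sum(axis=-1, keepdims=True)
    return np.dot(weights, v)

q = np.random.rand(4, 8)   # 4 query positions, dimension 8
k = np.random.rand(6, 8)   # 6 key/value positions
v = np.random.rand(6, 8)
print(scaled_dot_product_attention(q, k, v).shape)  # (4, 8)
```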
+ +### Results +The model achieves the following results on two public datasets: + +<p align="center"> +<img src="images/Figure2.png"/> +</p>
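The Douban numbers use the extended metrics enabled by `--ext_eval` (MAP, MRR and R@k per group of candidates), as implemented in this patch's evaluation.py. A small worked example of MAP and MRR for a single session, with toy scores:

```python
# (score, label) pairs for one session, already sorted by score (descending);
# the positives sit at ranks 2 and 4.
sort_data = [(0.9, 0), (0.8, 1), (0.6, 0), (0.4, 1), (0.2, 0)]

# MAP: mean of precision at each positive's rank: (1/2 + 2/4) / 2 = 0.5
count_1, sum_precision = 0, 0.0
for index, (_, label) in enumerate(sort_data):
    if label == 1:
        count_1 += 1
        sum_precision += 1.0 * count_1 / (index + 1)
print(sum_precision / count_1)            # 0.5

# MRR: reciprocal rank of the first positive: 1/2 = 0.5
labels = [label for _, label in sort_data]
print(1.0 / (1 + labels.index(1)))        # 0.5
```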
+ +## Quick start +### Installation +1. PaddlePaddle installation + + This project depends on PaddlePaddle Fluid 1.3.1; please refer to the installation guide. + +2. Code installation +3. Environment dependencies +### Running the model for the first time +1. Data preparation + + Download the preprocessed data; after the script finishes, the data directory will contain the ubuntu and douban folders. + ``` + cd data + sh download_data.sh + ``` +2. Model training + ``` + python -u main.py \ + --do_train True \ + --use_cuda \ + --data_path ./data/ubuntu/data_small.pkl \ + --save_path ./model_files/ubuntu \ + --use_pyreader \ + --vocab_size 434512 \ + --_EOS_ 28270 \ + --batch_size 32 + ``` +3. Model evaluation + ``` + python -u main.py \ + --do_test True \ + --use_cuda \ + --data_path ./data/ubuntu/data_small.pkl \ + --save_path ./model_files/ubuntu/step_372 \ + --model_path ./model_files/ubuntu/step_372 \ + --vocab_size 434512 \ + --_EOS_ 28270 \ + --batch_size 100 + ``` +## Advanced usage +### Task definition and modeling +The input of the multi-turn dialogue matching task is the multi-turn dialogue history and a candidate response; the output is a matching score for the response, and candidates are ranked by this score. +### Model overview +Please refer to the paper: [http://aclweb.org/anthology/P18-1103](http://aclweb.org/anthology/P18-1103). +### Data format +The data used for training, prediction and evaluation consists of three columns separated by tabs ('\t'): the first column is the space-separated token ids of the context, the second column is the space-separated token ids of the response, and the third column is the label, e.g. a line of the form "3 7 28270 9<TAB>5 18 2<TAB>1" (the ids here are illustrative). + +Note: this project also provides a word segmentation preprocessing script (under the preprocess directory); usage: +``` +python tokenizer.py \ + --test_data_dir ./test.txt.utf8 \ + --batch_size 1 > test.txt.utf8.seg +``` +### Code structure +main.py: the entry point of the project, wrapping training and prediction + +config.py: configuration of the model, including the model type and its hyper-parameters + +reader.py: data loading and vocabulary handling + +evaluation.py: evaluation functions + +run.sh: script for training and prediction + +## Others +How to contribute + +If you can fix an issue or add a new feature, feel free to submit a PR. If the PR is accepted, we will score the contribution by quality and difficulty (0-5 points, higher is better); once you accumulate 10 points, you can contact us for an interview opportunity or a recommendation letter. + diff --git a/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/config.py b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/config.py new file mode 100644 index 00000000..6b9c6649 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/config.py @@ -0,0 +1,123 @@ +""" +Deep Attention Matching Network +""" + +import argparse +import six + +def parse_args(): + """ + Deep Attention Matching Network Config + """ + parser = argparse.ArgumentParser("DAM Config") + + parser.add_argument( + '--do_train', + type=bool, + default=False, + help='Whether to perform training.') + parser.add_argument( + '--do_test', + type=bool, + default=False, + help='Whether to perform testing.') + + parser.add_argument( + '--batch_size', + type=int, + default=256, + help='Batch size for training. (default: %(default)d)') + parser.add_argument( + '--num_scan_data', + type=int, + default=2, + help='Number of passes over the training data. (default: %(default)d)') + parser.add_argument( + '--learning_rate', + type=float, + default=1e-3, + help='Learning rate used to train. (default: %(default)f)') + parser.add_argument( + '--data_path', + type=str, + default="data/data_small.pkl", + help='Path to training data. (default: %(default)s)') + parser.add_argument( + '--save_path', + type=str, + default="saved_models", + help='Path to save trained models. (default: %(default)s)') + parser.add_argument( + '--model_path', + type=str, + default=None, + help='Path to load well-trained models.
(default: %(default)s)') + parser.add_argument( + '--use_cuda', + action='store_true', + help='If set, use cuda for training.') + parser.add_argument( + '--use_pyreader', + action='store_true', + help='If set, use pyreader for reading data.') + parser.add_argument( + '--ext_eval', + action='store_true', + help='If set, use MAP, MRR etc. for evaluation.') + parser.add_argument( + '--max_turn_num', + type=int, + default=9, + help='Maximum number of utterances in context.') + parser.add_argument( + '--max_turn_len', + type=int, + default=50, + help='Maximum length of sentences in turns.') + parser.add_argument( + '--word_emb_init', + type=str, + default=None, + help='Path to the initial word embedding.') + parser.add_argument( + '--vocab_size', + type=int, + default=434512, + help='The size of vocabulary.') + parser.add_argument( + '--emb_size', + type=int, + default=200, + help='The dimension of word embedding.') + parser.add_argument( + '--_EOS_', + type=int, + default=28270, + help='The id for the end of sentence in vocabulary.') + parser.add_argument( + '--stack_num', + type=int, + default=5, + help='The number of stacked attentive modules in network.') + parser.add_argument( + '--channel1_num', + type=int, + default=32, + help="The channels' number of the 1st conv3d layer's output.") + parser.add_argument( + '--channel2_num', + type=int, + default=16, + help="The channels' number of the 2nd conv3d layer's output.") + args = parser.parse_args() + return args + + +def print_arguments(args): + """ + Print Config + """ + print('----------- Configuration Arguments -----------') + for arg, value in sorted(six.iteritems(vars(args))): + print('%s: %s' % (arg, value)) + print('------------------------------------------------') diff --git a/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/data/download_data.sh b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/data/download_data.sh new file mode 100755 index 00000000..cf7a182c --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/data/download_data.sh @@ -0,0 +1,40 @@ +ubuntu_url=http://dam-data.cdn.bcebos.com/ubuntu.tar.gz +ubuntu_md5=9d7db116a040530a16f68dc0ab44e4b6 + +if [ ! -e ubuntu.tar.gz ]; then + wget -c $ubuntu_url +fi + +echo "Checking md5 sum ..." +md5sum_tmp=`md5sum ubuntu.tar.gz | cut -d ' ' -f1` + +if [ $md5sum_tmp != $ubuntu_md5 ]; then + echo "Md5sum check failed, please remove and redownload ubuntu.tar.gz" + exit 1 +fi + +echo "Untar ubuntu.tar.gz ..." + +tar -xzvf ubuntu.tar.gz +mv data ubuntu + +douban_url=http://dam-data.cdn.bcebos.com/douban.tar.gz +douban_md5=e07ca68f21c20e09efb3e8b247194405 + +if [ ! -e douban.tar.gz ]; then + wget -c $douban_url +fi + +echo "Checking md5 sum ..." +md5sum_tmp=`md5sum douban.tar.gz | cut -d ' ' -f1` + +if [ $md5sum_tmp != $douban_md5 ]; then + echo "Md5sum check failed, please remove and redownload douban.tar.gz" + exit 1 +fi + +echo "Untar douban.tar.gz ..."
+ +tar -xzvf douban.tar.gz +mv data douban + diff --git a/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/evaluation.py b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/evaluation.py new file mode 100755 index 00000000..8edc8000 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/evaluation.py @@ -0,0 +1,153 @@ +""" +Evaluation +""" + +import sys +import six +import numpy as np +from sklearn.metrics import average_precision_score + +def evaluate_ubuntu(file_path): + """ + Evaluate on ubuntu data + """ + def get_p_at_n_in_m(data, n, m, ind): + """ + Recall n at m + """ + pos_score = data[ind][0] + curr = data[ind:ind + m] + curr = sorted(curr, key=lambda x: x[0], reverse=True) + + if curr[n - 1][0] <= pos_score: + return 1 + return 0 + + data = [] + with open(file_path, 'r') as file: + for line in file: + line = line.strip() + tokens = line.split("\t") + + if len(tokens) != 2: + continue + + data.append((float(tokens[0]), int(tokens[1]))) + + #assert len(data) % 10 == 0 + + p_at_1_in_2 = 0.0 + p_at_1_in_10 = 0.0 + p_at_2_in_10 = 0.0 + p_at_5_in_10 = 0.0 + + length = len(data) // 10 + + for i in six.moves.xrange(0, length): + ind = i * 10 + assert data[ind][1] == 1 + + p_at_1_in_2 += get_p_at_n_in_m(data, 1, 2, ind) + p_at_1_in_10 += get_p_at_n_in_m(data, 1, 10, ind) + p_at_2_in_10 += get_p_at_n_in_m(data, 2, 10, ind) + p_at_5_in_10 += get_p_at_n_in_m(data, 5, 10, ind) + + result_dict = { + "1_in_2": p_at_1_in_2 / length, + "1_in_10": p_at_1_in_10 / length, + "2_in_10": p_at_2_in_10 / length, + "5_in_10": p_at_5_in_10 / length} + + return result_dict + + +def evaluate_douban(file_path): + """ + Evaluate douban data + """ + def mean_average_precision(sort_data): + """ + Evaluate mean average precision + """ + count_1 = 0 + sum_precision = 0 + for index in six.moves.xrange(len(sort_data)): + if sort_data[index][1] == 1: + count_1 += 1 + sum_precision += 1.0 * count_1 / (index + 1) + return sum_precision / count_1 + + def mean_reciprocal_rank(sort_data): + """ + Evaluate MRR + """ + sort_lable = [s_d[1] for s_d in sort_data] + assert 1 in sort_lable + return 1.0 / (1 + sort_lable.index(1)) + + def precision_at_position_1(sort_data): + """ + Evaluate precision + """ + if sort_data[0][1] == 1: + return 1 + else: + return 0 + + def recall_at_position_k_in_10(sort_data, k): + """" + Evaluate recall + """ + sort_lable = [s_d[1] for s_d in sort_data] + select_lable = sort_lable[:k] + return 1.0 * select_lable.count(1) / sort_lable.count(1) + + def evaluation_one_session(data): + """ + Evaluate one session + """ + sort_data = sorted(data, key=lambda x: x[0], reverse=True) + m_a_p = mean_average_precision(sort_data) + m_r_r = mean_reciprocal_rank(sort_data) + p_1 = precision_at_position_1(sort_data) + r_1 = recall_at_position_k_in_10(sort_data, 1) + r_2 = recall_at_position_k_in_10(sort_data, 2) + r_5 = recall_at_position_k_in_10(sort_data, 5) + return m_a_p, m_r_r, p_1, r_1, r_2, r_5 + + sum_m_a_p = 0 + sum_m_r_r = 0 + sum_p_1 = 0 + sum_r_1 = 0 + sum_r_2 = 0 + sum_r_5 = 0 + i = 0 + total_num = 0 + with open(file_path, 'r') as infile: + for line in infile: + if i % 10 == 0: + data = [] + + tokens = line.strip().split('\t') + data.append((float(tokens[0]), int(tokens[1]))) + if i % 10 == 9: + total_num += 1 + m_a_p, m_r_r, p_1, r_1, r_2, r_5 = evaluation_one_session(data) + sum_m_a_p += m_a_p + sum_m_r_r += m_r_r + sum_p_1 += p_1 + sum_r_1 += r_1 + sum_r_2 += r_2 + sum_r_5 += r_5 + i += 1 + + result_dict = { + "MAP": 1.0 * sum_m_a_p / total_num, + "MRR": 1.0 * 
sum_m_r_r / total_num, + "P_1": 1.0 * sum_p_1 / total_num, + "1_in_10": 1.0 * sum_r_1 / total_num, + "2_in_10": 1.0 * sum_r_2 / total_num, + "5_in_10": 1.0 * sum_r_5 / total_num} + return result_dict + + diff --git a/PaddleNLP/deep_attention_matching_net/images/Figure1.png b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/images/Figure1.png old mode 100644 new mode 100755 similarity index 100% rename from PaddleNLP/deep_attention_matching_net/images/Figure1.png rename to PaddleNLP/dialogue_model_toolkit/deep_attention_matching/images/Figure1.png diff --git a/PaddleNLP/deep_attention_matching_net/images/Figure2.png b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/images/Figure2.png old mode 100644 new mode 100755 similarity index 100% rename from PaddleNLP/deep_attention_matching_net/images/Figure2.png rename to PaddleNLP/dialogue_model_toolkit/deep_attention_matching/images/Figure2.png diff --git a/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/main.py b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/main.py new file mode 100755 index 00000000..b7f05e54 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/main.py @@ -0,0 +1,465 @@ +""" +Deep Attention Matching Network +""" +import sys +import os +import six +import numpy as np +import time +import multiprocessing +import paddle +import paddle.fluid as fluid +import reader as reader +from util import mkdir +import evaluation as eva +import config + +try: + import cPickle as pickle #python 2 +except ImportError as e: + import pickle #python 3 + +sys.path.append('../../models/dialogue_model_toolkit/deep_attention_matching/') + +from net import Net + +def evaluate(score_path, result_file_path): + """ + Evaluate both douban and ubuntu dataset + """ + if args.ext_eval: + result = eva.evaluate_douban(score_path) + else: + result = eva.evaluate_ubuntu(score_path) + #write evaluation result + with open(result_file_path, 'w') as out_file: + for p_at in result: + out_file.write(p_at + '\t' + str(result[p_at]) + '\n') + print('finish evaluation') + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))) + + +def test_with_feed(exe, program, feed_names, fetch_list, score_path, batches, + batch_num, dev_count): + """ + Test with feed + """ + score_file = open(score_path, 'w') + for it in six.moves.xrange(batch_num // dev_count): + feed_list = [] + for dev in six.moves.xrange(dev_count): + val_index = it * dev_count + dev + batch_data = reader.make_one_batch_input(batches, val_index) + feed_dict = dict(zip(feed_names, batch_data)) + feed_list.append(feed_dict) + + predicts = exe.run(feed=feed_list, fetch_list=fetch_list) + + scores = np.array(predicts[0]) + for dev in six.moves.xrange(dev_count): + val_index = it * dev_count + dev + for i in six.moves.xrange(args.batch_size): + score_file.write( + str(scores[args.batch_size * dev + i][0]) + '\t' + str( + batches["label"][val_index][i]) + '\n') + score_file.close() + + +def test_with_pyreader(exe, program, pyreader, fetch_list, score_path, batches, + batch_num, dev_count): + """ + Test with pyreader + """ + def data_provider(): + """ + Data reader + """ + for index in six.moves.xrange(batch_num): + yield reader.make_one_batch_input(batches, index) + + score_file = open(score_path, 'w') + pyreader.decorate_tensor_provider(data_provider) + it = 0 + pyreader.start() + while True: + try: + predicts = exe.run(fetch_list=fetch_list) + + scores = np.array(predicts[0]) + for dev in six.moves.xrange(dev_count): + val_index = it * 
dev_count + dev + for i in six.moves.xrange(args.batch_size): + score_file.write( + str(scores[args.batch_size * dev + i][0]) + '\t' + str( + batches["label"][val_index][i]) + '\n') + it += 1 + except fluid.core.EOFException: + pyreader.reset() + break + score_file.close() + + +def train(args): + """ + Train Program + """ + if not os.path.exists(args.save_path): + os.makedirs(args.save_path) + + # data data_config + data_conf = { + "batch_size": args.batch_size, + "max_turn_num": args.max_turn_num, + "max_turn_len": args.max_turn_len, + "_EOS_": args._EOS_, + } + + dam = Net(args.max_turn_num, args.max_turn_len, args.vocab_size, + args.emb_size, args.stack_num, args.channel1_num, + args.channel2_num) + + train_program = fluid.Program() + train_startup = fluid.Program() + if "CE_MODE_X" in os.environ: + train_program.random_seed = 110 + train_startup.random_seed = 110 + with fluid.program_guard(train_program, train_startup): + with fluid.unique_name.guard(): + if args.use_pyreader: + train_pyreader = dam.create_py_reader( + capacity=10, name='train_reader') + else: + dam.create_data_layers() + loss, logits = dam.create_network() + loss.persistable = True + logits.persistable = True + # gradient clipping + fluid.clip.set_gradient_clip(clip=fluid.clip.GradientClipByValue( + max=1.0, min=-1.0)) + + optimizer = fluid.optimizer.Adam( + learning_rate=fluid.layers.exponential_decay( + learning_rate=args.learning_rate, + decay_steps=400, + decay_rate=0.9, + staircase=True)) + optimizer.minimize(loss) + print("begin memory optimization ...") + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))) + fluid.memory_optimize(train_program) + print("end memory optimization ...") + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))) + + test_program = fluid.Program() + test_startup = fluid.Program() + if "CE_MODE_X" in os.environ: + test_program.random_seed = 110 + test_startup.random_seed = 110 + with fluid.program_guard(test_program, test_startup): + with fluid.unique_name.guard(): + if args.use_pyreader: + test_pyreader = dam.create_py_reader( + capacity=10, name='test_reader') + else: + dam.create_data_layers() + + loss, logits = dam.create_network() + loss.persistable = True + logits.persistable = True + + test_program = test_program.clone(for_test=True) + + if args.use_cuda: + place = fluid.CUDAPlace(0) + dev_count = fluid.core.get_cuda_device_count() + else: + place = fluid.CPUPlace() + dev_count = int(os.environ.get('CPU_NUM', multiprocessing.cpu_count())) + + print("device count %d" % dev_count) + print("theoretical memory usage: ") + print( + fluid.contrib.memory_usage( + program=train_program, batch_size=args.batch_size)) + + exe = fluid.Executor(place) + exe.run(train_startup) + exe.run(test_startup) + + train_exe = fluid.ParallelExecutor( + use_cuda=args.use_cuda, loss_name=loss.name, main_program=train_program) + + test_exe = fluid.ParallelExecutor( + use_cuda=args.use_cuda, + main_program=test_program, + share_vars_from=train_exe) + + if args.word_emb_init is not None: + print("start loading word embedding init ...") + if six.PY2: + word_emb = np.array(pickle.load(open(args.word_emb_init, + 'rb'))).astype('float32') + else: + word_emb = np.array( + pickle.load( + open(args.word_emb_init, 'rb'), encoding="bytes")).astype( + 'float32') + dam.set_word_embedding(word_emb, place) + print("finish init word embedding ...") + + print("start loading data ...") + with open(args.data_path, 'rb') as f: + if six.PY2: + train_data, val_data, test_data = pickle.load(f) + 
else: + train_data, val_data, test_data = pickle.load(f, encoding="bytes") + print("finish loading data ...") + + val_batches = reader.build_batches(val_data, data_conf) + + batch_num = len(train_data[six.b('y')]) // args.batch_size + val_batch_num = len(val_batches["response"]) + + print_step = max(1, batch_num // (dev_count * 100)) + save_step = max(1, batch_num // (dev_count * 10)) + + print("begin model training ...") + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))) + + def train_with_feed(step): + """ + Train on one epoch data by feeding + """ + ave_cost = 0.0 + for it in six.moves.xrange(batch_num // dev_count): + feed_list = [] + for dev in six.moves.xrange(dev_count): + index = it * dev_count + dev + batch_data = reader.make_one_batch_input(train_batches, index) + feed_dict = dict(zip(dam.get_feed_names(), batch_data)) + feed_list.append(feed_dict) + + cost = train_exe.run(feed=feed_list, fetch_list=[loss.name]) + + ave_cost += np.array(cost[0]).mean() + step = step + 1 + if step % print_step == 0: + print("processed: [" + str(step * dev_count * 1.0 / batch_num) + + "] ave loss: [" + str(ave_cost / print_step) + "]") + ave_cost = 0.0 + + if (args.save_path is not None) and (step % save_step == 0): + save_path = os.path.join(args.save_path, "step_" + str(step)) + print("Save model at step %d ... " % step) + print( + time.strftime('%Y-%m-%d %H:%M:%S', + time.localtime(time.time()))) + fluid.io.save_persistables(exe, save_path, train_program) + + score_path = os.path.join(args.save_path, 'score.' + str(step)) + test_with_feed(test_exe, test_program, + dam.get_feed_names(), [logits.name], score_path, + val_batches, val_batch_num, dev_count) + + result_file_path = os.path.join(args.save_path, + 'result.' + str(step)) + evaluate(score_path, result_file_path) + return step, np.array(cost[0]).mean() + + def train_with_pyreader(step): + """ + Train on one epoch with pyreader + """ + def data_provider(): + """ + Data reader + """ + for index in six.moves.xrange(batch_num): + yield reader.make_one_batch_input(train_batches, index) + + train_pyreader.decorate_tensor_provider(data_provider) + + ave_cost = 0.0 + train_pyreader.start() + while True: + try: + cost = train_exe.run(fetch_list=[loss.name]) + + ave_cost += np.array(cost[0]).mean() + step = step + 1 + if step % print_step == 0: + print("processed: [" + str(step * dev_count * 1.0 / + batch_num) + "] ave loss: [" + + str(ave_cost / print_step) + "]") + ave_cost = 0.0 + + if (args.save_path is not None) and (step % save_step == 0): + save_path = os.path.join(args.save_path, + "step_" + str(step)) + print("Save model at step %d ... " % step) + print( + time.strftime('%Y-%m-%d %H:%M:%S', + time.localtime(time.time()))) + fluid.io.save_persistables(exe, save_path, train_program) + + score_path = os.path.join(args.save_path, + 'score.' + str(step)) + test_with_pyreader(test_exe, test_program, test_pyreader, + [logits.name], score_path, val_batches, + val_batch_num, dev_count) + + result_file_path = os.path.join(args.save_path, + 'result.' 
+ str(step)) + evaluate(score_path, result_file_path) + + except fluid.core.EOFException: + train_pyreader.reset() + break + return step, np.array(cost[0]).mean() + + # train over different epoches + global_step, train_time = 0, 0.0 + for epoch in six.moves.xrange(args.num_scan_data): + shuffle_train = reader.unison_shuffle( + train_data, seed=110 if ("CE_MODE_X" in os.environ) else None) + train_batches = reader.build_batches(shuffle_train, data_conf) + + begin_time = time.time() + if args.use_pyreader: + global_step, last_cost = train_with_pyreader(global_step) + else: + global_step, last_cost = train_with_feed(global_step) + + pass_time_cost = time.time() - begin_time + train_time += pass_time_cost + print("Pass {0}, pass_time_cost {1}" + .format(epoch, "%2.2f sec" % pass_time_cost)) + # For internal continuous evaluation + if "CE_MODE_X" in os.environ: + print("kpis train_cost %f" % last_cost) + print("kpis train_duration %f" % train_time) + + +def test(args): + """ + Test + """ + if not os.path.exists(args.save_path): + mkdir(args.save_path) + if not os.path.exists(args.model_path): + raise ValueError("Invalid model init path %s" % args.model_path) + # data data_config + data_conf = { + "batch_size": args.batch_size, + "max_turn_num": args.max_turn_num, + "max_turn_len": args.max_turn_len, + "_EOS_": args._EOS_, + } + + dam = Net(args.max_turn_num, args.max_turn_len, args.vocab_size, + args.emb_size, args.stack_num, args.channel1_num, + args.channel2_num) + dam.create_data_layers() + loss, logits = dam.create_network() + + loss.persistable = True + logits.persistable = True + + # gradient clipping + fluid.clip.set_gradient_clip(clip=fluid.clip.GradientClipByValue( + max=1.0, min=-1.0)) + + test_program = fluid.default_main_program().clone(for_test=True) + optimizer = fluid.optimizer.Adam( + learning_rate=fluid.layers.exponential_decay( + learning_rate=args.learning_rate, + decay_steps=400, + decay_rate=0.9, + staircase=True)) + optimizer.minimize(loss) + + print("begin memory optimization ...") + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))) + fluid.memory_optimize(fluid.default_main_program()) + print("end memory optimization ...") + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))) + + if args.use_cuda: + place = fluid.CUDAPlace(0) + dev_count = fluid.core.get_cuda_device_count() + else: + place = fluid.CPUPlace() + #dev_count = multiprocessing.cpu_count() + dev_count = 1 + + exe = fluid.Executor(place) + exe.run(fluid.default_startup_program()) + + fluid.io.load_persistables(exe, args.model_path) + + test_exe = fluid.ParallelExecutor( + use_cuda=args.use_cuda, main_program=test_program) + + print("start loading data ...") + with open(args.data_path, 'rb') as f: + if six.PY2: + train_data, val_data, test_data = pickle.load(f) + else: + train_data, val_data, test_data = pickle.load(f, encoding="bytes") + print("finish loading data ...") + + test_batches = reader.build_batches(test_data, data_conf) + + test_batch_num = len(test_batches["response"]) + + print("test batch num: %d" % test_batch_num) + + print("begin inference ...") + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))) + + score_path = os.path.join(args.save_path, 'score.txt') + score_file = open(score_path, 'w') + + for it in six.moves.xrange(test_batch_num // dev_count): + feed_list = [] + for dev in six.moves.xrange(dev_count): + index = it * dev_count + dev + batch_data = reader.make_one_batch_input(test_batches, index) + feed_dict = 
dict(zip(dam.get_feed_names(), batch_data)) + feed_list.append(feed_dict) + + predicts = test_exe.run(feed=feed_list, fetch_list=[logits.name]) + + scores = np.array(predicts[0]) + print("step = %d" % it) + + for dev in six.moves.xrange(dev_count): + index = it * dev_count + dev + for i in six.moves.xrange(args.batch_size): + score_file.write( + str(scores[args.batch_size * dev + i][0]) + '\t' + str( + test_batches["label"][index][i]) + '\n') + + score_file.close() + + #write evaluation result + if args.ext_eval: + result = eva.evaluate_douban(score_path) + else: + result = eva.evaluate_ubuntu(score_path) + result_file_path = os.path.join(args.save_path, 'result.txt') + with open(result_file_path, 'w') as out_file: + for metric in result: + out_file.write(metric + '\t' + str(result[metric]) + '\n') + print('finish test') + print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))) + + +if __name__ == '__main__': + args = config.parse_args() + config.print_arguments(args) + if args.do_train: + train(args) + + if args.do_test: + test(args) diff --git a/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/reader.py b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/reader.py new file mode 100755 index 00000000..fd46fc32 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/reader.py @@ -0,0 +1,267 @@ +""" +Reader for deep attention matching network +""" + +import six +import numpy as np + +try: + import cPickle as pickle #python 2 +except ImportError as e: + import pickle #python 3 + + +def unison_shuffle(data, seed=None): + """ + Shuffle the y/c/r arrays with one shared permutation + """ + if seed is not None: + np.random.seed(seed) + + y = np.array(data[six.b('y')]) + c = np.array(data[six.b('c')]) + r = np.array(data[six.b('r')]) + + assert len(y) == len(c) == len(r) + p = np.random.permutation(len(y)) + shuffle_data = {six.b('y'): y[p], six.b('c'): c[p], six.b('r'): r[p]} + return shuffle_data + + +def split_c(c, split_id): + """ + Split a context into turns + c is a list, an example context + split_id is an integer, conf[_EOS_] + return a nested list of turns + """ + turns = [[]] + for _id in c: + if _id != split_id: + turns[-1].append(_id) + else: + turns.append([]) + if turns[-1] == [] and len(turns) > 1: + turns.pop() + return turns + + +def normalize_length(_list, length, cut_type='tail'): + """_list is a list or nested list, e.g. turns/r/a single turn of c + cut_type is head or tail; it is applied when len(_list) > length + return a list of len=length and min(real_length, length) + """ + real_length = len(_list) + if real_length == 0: + return [0] * length, 0 + + if real_length <= length: + if not isinstance(_list[0], list): + _list.extend([0] * (length - real_length)) + else: + _list.extend([[]] * (length - real_length)) + return _list, real_length + + if cut_type == 'head': + return _list[:length], length + if cut_type == 'tail': + return _list[-length:], length + + +def produce_one_sample(data, + index, + split_id, + max_turn_num, + max_turn_len, + turn_cut_type='tail', + term_cut_type='tail'): + """max_turn_num=10 + max_turn_len=50 + return y, nor_turns_nor_c, nor_r, turn_len, term_len, r_len + """ + c = data[six.b('c')][index] + r = data[six.b('r')][index][:] + y = data[six.b('y')][index] + + turns = split_c(c, split_id) + #normalize turns_c length, nor_turns length is max_turn_num + nor_turns, turn_len = normalize_length(turns, max_turn_num, turn_cut_type) + + nor_turns_nor_c = [] + term_len = [] + #nor_turn_nor_c length is max_turn_num, each element is a list of length max_turn_len + for c in nor_turns: + #nor_c
length is max_turn_len + nor_c, nor_c_len = normalize_length(c, max_turn_len, term_cut_type) + nor_turns_nor_c.append(nor_c) + term_len.append(nor_c_len) + + nor_r, r_len = normalize_length(r, max_turn_len, term_cut_type) + + return y, nor_turns_nor_c, nor_r, turn_len, term_len, r_len + + +def build_one_batch(data, + batch_index, + conf, + turn_cut_type='tail', + term_cut_type='tail'): + """ + Build one batch + """ + _turns = [] + _tt_turns_len = [] + _every_turn_len = [] + + _response = [] + _response_len = [] + + _label = [] + + for i in six.moves.xrange(conf['batch_size']): + index = batch_index * conf['batch_size'] + i + y, nor_turns_nor_c, nor_r, turn_len, term_len, r_len = produce_one_sample( + data, index, conf['_EOS_'], conf['max_turn_num'], + conf['max_turn_len'], turn_cut_type, term_cut_type) + + _label.append(y) + _turns.append(nor_turns_nor_c) + _response.append(nor_r) + _every_turn_len.append(term_len) + _tt_turns_len.append(turn_len) + _response_len.append(r_len) + + return _turns, _tt_turns_len, _every_turn_len, _response, _response_len, _label + + +def build_one_batch_dict(data, + batch_index, + conf, + turn_cut_type='tail', + term_cut_type='tail'): + """ + Build one batch dict + """ + _turns, _tt_turns_len, _every_turn_len, _response, _response_len, _label = build_one_batch( + data, batch_index, conf, turn_cut_type, term_cut_type) + ans = { + 'turns': _turns, + 'tt_turns_len': _tt_turns_len, + 'every_turn_len': _every_turn_len, + 'response': _response, + 'response_len': _response_len, + 'label': _label + } + return ans + + +def build_batches(data, conf, turn_cut_type='tail', term_cut_type='tail'): + """ + Build batches + """ + _turns_batches = [] + _tt_turns_len_batches = [] + _every_turn_len_batches = [] + + _response_batches = [] + _response_len_batches = [] + + _label_batches = [] + + batch_len = len(data[six.b('y')]) // conf['batch_size'] + for batch_index in six.moves.range(batch_len): + _turns, _tt_turns_len, _every_turn_len, _response, _response_len, _label = build_one_batch( + data, batch_index, conf, turn_cut_type='tail', term_cut_type='tail') + + _turns_batches.append(_turns) + _tt_turns_len_batches.append(_tt_turns_len) + _every_turn_len_batches.append(_every_turn_len) + + _response_batches.append(_response) + _response_len_batches.append(_response_len) + + _label_batches.append(_label) + + ans = { + "turns": _turns_batches, + "tt_turns_len": _tt_turns_len_batches, + "every_turn_len": _every_turn_len_batches, + "response": _response_batches, + "response_len": _response_len_batches, + "label": _label_batches + } + + return ans + + +def make_one_batch_input(data_batches, index): + """Split turns and return feeding data. 
+ + Args: + data_batches: All data batches + index: The index for current batch + + Return: + feeding dictionary + """ + + turns = np.array(data_batches["turns"][index]) + tt_turns_len = np.array(data_batches["tt_turns_len"][index]) + every_turn_len = np.array(data_batches["every_turn_len"][index]) + response = np.array(data_batches["response"][index]) + response_len = np.array(data_batches["response_len"][index]) + + batch_size = turns.shape[0] + max_turn_num = turns.shape[1] + max_turn_len = turns.shape[2] + + turns_list = [turns[:, i, :] for i in six.moves.xrange(max_turn_num)] + every_turn_len_list = [ + every_turn_len[:, i] for i in six.moves.xrange(max_turn_num) + ] + + feed_list = [] + for i, turn in enumerate(turns_list): + turn = np.expand_dims(turn, axis=-1) + feed_list.append(turn) + + for i, turn_len in enumerate(every_turn_len_list): + turn_mask = np.ones((batch_size, max_turn_len, 1)).astype("float32") + for row in six.moves.xrange(batch_size): + turn_mask[row, turn_len[row]:, 0] = 0 + feed_list.append(turn_mask) + + response = np.expand_dims(response, axis=-1) + feed_list.append(response) + + response_mask = np.ones((batch_size, max_turn_len, 1)).astype("float32") + for row in six.moves.xrange(batch_size): + response_mask[row, response_len[row]:, 0] = 0 + feed_list.append(response_mask) + + label = np.array([data_batches["label"][index]]).reshape( + [-1, 1]).astype("float32") + feed_list.append(label) + + return feed_list + + +if __name__ == '__main__': + conf = { + "batch_size": 256, + "max_turn_num": 10, + "max_turn_len": 50, + "_EOS_": 28270, + } + with open('../ubuntu/data/data_small.pkl', 'rb') as f: + if six.PY2: + train, val, test = pickle.load(f) + else: + train, val, test = pickle.load(f, encoding="bytes") + print('load data success') + + train_batches = build_batches(train, conf) + val_batches = build_batches(val, conf) + test_batches = build_batches(test, conf) + print('build batches success') diff --git a/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/run.sh b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/run.sh new file mode 100755 index 00000000..47395cb7 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/run.sh @@ -0,0 +1,49 @@ +export CUDA_VISIBLE_DEVICES=3 +export FLAGS_eager_delete_tensor_gb=0.0 + +#train on ubuntu +python -u main.py \ + --do_train True \ + --use_cuda \ + --data_path ./data/ubuntu/data_small.pkl \ + --save_path ./model_files/ubuntu \ + --use_pyreader \ + --vocab_size 434512 \ + --_EOS_ 28270 \ + --batch_size 32 + +#test on ubuntu +python -u main.py \ + --do_test True \ + --use_cuda \ + --data_path ./data/ubuntu/data_small.pkl \ + --save_path ./model_files/ubuntu/step_31 \ + --model_path ./model_files/ubuntu/step_31 \ + --vocab_size 434512 \ + --_EOS_ 28270 \ + --batch_size 100 + +#train on douban +python -u main.py \ + --do_train True \ + --use_cuda \ + --data_path ./data/douban/data_small.pkl \ + --save_path ./model_files/douban \ + --use_pyreader \ + --vocab_size 172130 \ + --_EOS_ 1 \ + --channel1_num 16 \ + --batch_size 32 + +#test on douban +python -u main.py \ + --do_test True \ + --use_cuda \ + --ext_eval \ + --data_path ./data/douban/data_small.pkl \ + --save_path ./model_files/douban/step_31 \ + --model_path ./model_files/douban/step_31 \ + --vocab_size 172130 \ + --_EOS_ 1 \ + --channel1_num 16 \ + --batch_size 32 diff --git a/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/run_CPU.sh b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/run_CPU.sh new file mode 100755 
index 00000000..091eda1f --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/run_CPU.sh @@ -0,0 +1,49 @@ +export CPU_NUM=1 +export FLAGS_eager_delete_tensor_gb=0.0 + +#train on ubuntu +python -u main.py \ + --do_train True \ + --data_path ./data/ubuntu/data_small.pkl \ + --save_path ./model_files_cpu/ubuntu \ + --use_pyreader \ + --stack_num 2 \ + --vocab_size 434512 \ + --_EOS_ 28270 \ + --batch_size 32 + +#test on ubuntu +python -u main.py \ + --do_test True \ + --data_path ./data/ubuntu/data_small.pkl \ + --save_path ./model_files_cpu/ubuntu/step_31 \ + --model_path ./model_files_cpu/ubuntu/step_31 \ + --stack_num 2 \ + --vocab_size 434512 \ + --_EOS_ 28270 \ + --batch_size 40 + +#train on douban +python -u main.py \ + --do_train True \ + --data_path ./data/douban/data_small.pkl \ + --save_path ./model_files_cpu/douban \ + --use_pyreader \ + --stack_num 2 \ + --vocab_size 172130 \ + --_EOS_ 1 \ + --channel1_num 16 \ + --batch_size 32 + +#test on douban +python -u main.py \ + --do_test True \ + --ext_eval \ + --data_path ./data/douban/data_small.pkl \ + --save_path ./model_files_cpu/douban/step_31 \ + --model_path ./model_files_cpu/douban/step_31 \ + --stack_num 2 \ + --vocab_size 172130 \ + --_EOS_ 1 \ + --channel1_num 16 \ + --batch_size 40 diff --git a/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/util.py b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/util.py new file mode 100755 index 00000000..709d5899 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/deep_attention_matching/util.py @@ -0,0 +1,41 @@ +""" +Utils +""" + +import six +import os + + +def print_arguments(args): + """ + Print arguments + """ + print('----------- Configuration Arguments -----------') + for arg, value in sorted(six.iteritems(vars(args))): + print('%s: %s' % (arg, value)) + print('------------------------------------------------') + + +def mkdir(path): + """ + Recursively create path, creating missing parent directories first + """ + if not os.path.isdir(path): + mkdir(os.path.split(path)[0]) + else: + return + os.mkdir(path) + + +def pos_encoding_init(): + """ + Pos encoding init (placeholder) + """ + pass + + +def scaled_dot_product_attention(): + """ + Scaled dot product attention (placeholder) + """ + pass
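The `mkdir` in util.py above recursively creates missing parent directories and silently returns when the directory already exists. Since `os.makedirs` has no `exist_ok` parameter under the Python 2.7 these modules target, a shorter equivalent is a guarded `os.makedirs` — a sketch, not part of the patch:

```python
import os

def mkdir(path):
    """Create path and any missing parents; no-op if it already exists."""
    if not os.path.isdir(path):
        os.makedirs(path)
```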
+
+b. Datasets:
+
+```
+UDC: Ubuntu Corpus V1;
+ATIS: Airline Travel Information System, a public dataset provided by Microsoft;
+DSTC2: Dialog State Tracking Challenge 2;
+MRDA: Meeting Recorder Dialogue Act;
+SWDA: Switchboard Dialogue Act Corpus;
+```
+
+## 2. Quick Start
+
+### 1. Installation
+
+#### a. Installing PaddlePaddle
+
+This project depends on Paddle Fluid 1.3.1; please follow the [installation guide](http://www.paddlepaddle.org/#quick-start).
+
+#### b. Getting the code
+
+Clone the repository:
+
+```
+git clone https://github.com/PaddlePaddle/models.git
+cd models/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding
+```
+
+#### c. Environment
+
+Python 2.7 is required.
+
+### 2. Running a model for the first time
+
+#### a. Data preparation (download and preprocessing)
+
+i. Download the data:
+
+```
+sh download_data.sh
+```
+
+ii. (Optional) The downloaded datasets already contain training, test and dev splits. To regenerate the training data of a dataset, run:
+
+```
+cd dialogue_general_understanding/scripts && sh run_build_data.sh task_name
+parameters:
+task_name: udc, swda, mrda, atis, dstc2
+```
+
+#### b. Model download
+
+The dialogue models in this project are fine-tuned from BERT, so training depends on the BERT pre-trained model; several dialogue models already trained on the public datasets are also provided.
+
+i. Download the BERT pre-trained model:
+
+```
+sh download_pretrain_model.sh
+```
+
+ii. Download the dialogue models of this module:
+
+Option 1: via the PaddleHub command-line tool (for PaddleHub installation see https://github.com/PaddlePaddle/PaddleHub):
+
+```
+hub download dmtk_models --output_path ./
+tar -xvf dmtk_models_1.0.0.tar.gz
+```
+
+Option 2: direct download:
+
+```
+sh download_models.sh
+```
+
+#### c. CPU / GPU settings
+
+CPU training and prediction:
+
+```
+In run_train.sh and run_predict.sh, set the following two lines to:
+1. export CUDA_VISIBLE_DEVICES=
+2. --use_cuda false
+```
+
+GPU training and prediction:
+
+```
+In run_train.sh and run_predict.sh, set the following two lines to:
+1. export CUDA_VISIBLE_DEVICES=4 (any idle card may be specified)
+2. --use_cuda true
+```
+
+#### d. Training
+
+Option 1 (recommended):
+
+```
+sh run_train.sh task_name
+parameters:
+task_name: udc, swda, mrda, atis_intent, atis_slot, dstc2
+```
+
+Option 2:
+
+```
+python -u train.py --task_name mrda \  # name of the model to use. [udc|swda|mrda|atis_intent|atis_slot|dstc2]
+       --use_cuda true \  # If set, use GPU for training.
+       --do_train true \  # Whether to perform training.
+       --do_val true \  # Whether to perform evaluation on the dev data set.
+       --do_test true \  # Whether to perform evaluation on the test data set.
+       --epoch 10 \  # Number of epochs for fine-tuning.
+       --batch_size 4096 \  # Total number of examples in a training batch; see also --in_tokens.
+       --data_dir ./data/mrda \  # Path to training data.
+       --bert_config_path ./uncased_L-12_H-768_A-12/bert_config.json \  # Path to the json file for the BERT model config.
+       --vocab_path ./uncased_L-12_H-768_A-12/vocab.txt \  # Vocabulary path.
+       --init_pretraining_params ./uncased_L-12_H-768_A-12/params \  # Pre-training params that fine-tuning starts from.
+       --checkpoints ./output/mrda \  # Path to save checkpoints.
+       --save_steps 200 \  # The step interval to save checkpoints.
+       --learning_rate 2e-5 \  # Learning rate used to train with warmup.
+       --weight_decay 0.01 \  # Weight decay rate for the L2 regularizer.
+       --max_seq_len 128 \  # Number of words of the longest sequence.
+       --skip_steps 100 \  # The step interval to print loss.
+       --validation_steps 500 \  # The step interval to evaluate model performance.
+       --num_iteration_per_drop_scope 10 \  # The iteration interval to clean up temporary variables.
+       --use_fp16 false  # If set, use fp16 for training.
+```
+
+#### e. Prediction (for prediction plus evaluation, f. below is recommended)
+
+Option 1 (recommended):
+
+```
+sh run_predict.sh task_name
+parameters:
+task_name: udc, swda, mrda, atis_intent, atis_slot, dstc2
+```
+
+Option 2:
+
+```
+python -u predict.py --task_name mrda \  # name of the model to use. [udc|swda|mrda|atis_intent|atis_slot|dstc2]
+       --use_cuda true \  # If set, use GPU for prediction.
+       --batch_size 4096 \  # Total number of examples in a batch; see also --in_tokens.
+       --init_checkpoint ./output/mrda/step_6500 \  # Init model
+       --data_dir ./data/mrda \  # Path to the data.
+       --vocab_path ./uncased_L-12_H-768_A-12/vocab.txt \  # Vocabulary path.
+       --max_seq_len 128 \  # Number of words of the longest sequence.
+       --bert_config_path ./uncased_L-12_H-768_A-12/bert_config.json  # Path to the json file for the BERT model config.
+```
+
+#### f. Prediction + evaluation (recommended)
+
+This module ships trained dialogue models (downloadable via sh download_models.sh). Users who do not train their own model can run prediction and evaluation with the provided ones:
+
+```
+sh run_eval_metrics.sh task_name
+parameters:
+task_name: udc, swda, mrda, atis_intent, atis_slot, dstc2
+```
+
+## 3. Advanced Usage
+
+### 1. Task definition and modeling
+
+The dialogue_general_understanding module implements the training flow for the datasets above and supports classification, multi-label classification and sequence labeling tasks; users can customize models for their own datasets.
+
+### 2. Model overview
+
+For dialogue understanding problems, this project uses BERT as the underlying model and defines task paradigms (classification, multi-label classification, sequence labeling) on top of it; a series of models trained on public datasets is released for configurable use.
+
+### 3. Data format
+
+Training, prediction and evaluation data can be organized by the user according to the actual dialogue application. The input format fed to the network is uniform, for example:
+
+```
+[CLS] token11 token12 token13 [INNER_SEP] token11 token12 token13 [SEP] token21 token22 token23 [SEP] token31 token32 token33 [SEP]
+```
+
+The input starts with [CLS], and [SEP] separates up to three dialogue-related parts, such as the context, the current utterance and the following utterance; if a [SEP]-separated part itself consists of multiple turns, the turns are separated by [INNER_SEP]. The second and third parts may both be omitted.
+
+Data preparation is already integrated into the module, so users can assemble their own data following the format above; an illustrative helper is sketched below.
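+
+As an illustration only, the following minimal sketch (a hypothetical helper, not part of this module) assembles an input string in the format above from a list of context turns, the current utterance, and an optional following utterance:
+
+```
+def build_input(context_turns, current=None, following=None):
+    """Illustrative only: assemble a DGU-style input string."""
+    parts = [" [INNER_SEP] ".join(context_turns)]
+    if current:
+        parts.append(current)
+    if following:
+        parts.append(following)
+    return "[CLS] " + " [SEP] ".join(parts) + " [SEP]"
+
+# build_input(["token11 token12", "token13"], "token21 token22")
+# -> '[CLS] token11 token12 [INNER_SEP] token13 [SEP] token21 token22 [SEP]'
+```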
+
+### 4. Code structure
+
+```
+.
+├── run_train.sh                 # training script
+├── run_predict.sh               # prediction script
+├── run_eval_metrics.sh          # evaluation script
+├── download_data.sh             # data download script
+├── download_models.sh           # dialogue model download script
+├── download_pretrain_model.sh   # BERT pre-trained model download script
+├── train.py                     # training flow
+├── predict.py                   # prediction flow
+├── eval_metrics.py              # metric evaluation
+├── define_predict_pack.py       # packaging of prediction results
+├── finetune_args.py             # configuration parameters for model training
+├── batching.py                  # yields batched data
+├── optimization.py              # model optimizer
+├── tokenization.py              # tokenizer utilities
+├── reader/data_reader.py        # data processing and assembly; one class per dataset
+├── README.md                    # this document
+├── utils/*                      # other common helper functions
+└── scripts                      # data processing scripts
+    ├── run_build_data.sh        # data processing entry script
+    ├── build_atis_dataset.py    # builds atis_intent and atis_slot training data
+    ├── build_dstc2_dataset.py   # builds dstc2 training data
+    ├── build_mrda_dataset.py    # builds mrda training data
+    ├── build_swda_dataset.py    # builds swda training data
+    ├── commonlib.py             # common data processing helpers
+    └── conf                     # train/dev/test splits of the public datasets
+
+../../models/dialogue_model_toolkit/dialogue_general_understanding
+├── bert.py                      # underlying BERT model
+├── define_paradigm.py           # task paradigms on top of BERT
+└── create_model.py              # builds the BERT model plus the task paradigm network
+```
+
+### 5. Building your own model
+
+Users can assemble a custom model as follows:
+
+i. Custom data
+
+Given a dataset named **task_name**, create a **task_name** folder under **data** and place the dataset there; add a data processing class in **reader/data_reader.py** (e.g. the **udc** dataset maps to **UDCProcessor**); register the **task_name**-to-**processor** mapping in **train.py** (e.g. **processors = {'udc': reader.UDCProcessor}**), and state whether the dataset computes the batch size with **in_tokens** during training (e.g. **in_tokens = {'udc': True}**).
+
+ii. Custom task paradigm
+
+If the custom model is one of the three supported types (classification, multi-label classification, sequence labeling), it is enough to map **task_name** to the corresponding paradigm function in **paddle-nlp/models/dialogue_model_toolkit/dialogue_general_understanding/define_paradigm.py**; otherwise, define a new paradigm function and register its mapping to **task_name**.
+
+iii. Custom prediction packaging
+
+Map **task_name** to a custom prediction packaging method in define_predict_pack.py.
+
+### 6. How to train
+
+i. Organize your training, evaluation and prediction data in the format described above.
+
+ii. Run the training script:
+
+```
+sh run_train.sh task_name
+parameters:
+task_name: a user-defined name
+```
+
+## 4. Others
+
+### Contributing
+
+If you can fix an issue or add a new feature, feel free to submit a PR. If the PR is accepted, we will score it by quality and difficulty (0-5, higher is better); once you accumulate 10 points you may contact us for an interview opportunity or a recommendation letter.
diff --git
a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/batching.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/batching.py
new file mode 100644
index 00000000..2e63f6e0
--- /dev/null
+++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/batching.py
@@ -0,0 +1,204 @@
+# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Mask, padding and batching."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import numpy as np
+
+
+def mask(batch_tokens, total_token_num, vocab_size, CLS=1, SEP=2, MASK=3):
+    """
+    Add mask for batch_tokens; return out, mask_label, mask_pos.
+    Note: mask_pos indexes into batch_tokens after padding.
+    """
+    max_len = max([len(sent) for sent in batch_tokens])
+    mask_label = []
+    mask_pos = []
+    prob_mask = np.random.rand(total_token_num)
+    # Note: the first token is [CLS], so [low=1]
+    replace_ids = np.random.randint(1, high=vocab_size, size=total_token_num)
+    pre_sent_len = 0
+    prob_index = 0
+    for sent_index, sent in enumerate(batch_tokens):
+        mask_flag = False
+        prob_index += pre_sent_len
+        for token_index, token in enumerate(sent):
+            prob = prob_mask[prob_index + token_index]
+            # 15% of tokens are selected (prob <= 0.15); of those, 80% are
+            # replaced by [MASK], 10% by a random token, and 10% kept as-is.
+            if prob > 0.15:
+                continue
+            elif 0.03 < prob <= 0.15:
+                # mask
+                if token != SEP and token != CLS:
+                    mask_label.append(sent[token_index])
+                    sent[token_index] = MASK
+                    mask_flag = True
+                    mask_pos.append(sent_index * max_len + token_index)
+            elif 0.015 < prob <= 0.03:
+                # random replace
+                if token != SEP and token != CLS:
+                    mask_label.append(sent[token_index])
+                    sent[token_index] = replace_ids[prob_index + token_index]
+                    mask_flag = True
+                    mask_pos.append(sent_index * max_len + token_index)
+            else:
+                # keep the original token
+                if token != SEP and token != CLS:
+                    mask_label.append(sent[token_index])
+                    mask_pos.append(sent_index * max_len + token_index)
+        pre_sent_len = len(sent)
+
+        # ensure at least one word is masked in each sentence
+        while not mask_flag:
+            token_index = int(np.random.randint(1, high=len(sent) - 1, size=1))
+            if sent[token_index] != SEP and sent[token_index] != CLS:
+                mask_label.append(sent[token_index])
+                sent[token_index] = MASK
+                mask_flag = True
+                mask_pos.append(sent_index * max_len + token_index)
+    mask_label = np.array(mask_label).astype("int64").reshape([-1, 1])
+    mask_pos = np.array(mask_pos).astype("int64").reshape([-1, 1])
+    return batch_tokens, mask_label, mask_pos
+
+
+def prepare_batch_data(insts,
+                       max_len,
+                       total_token_num,
+                       voc_size=0,
+                       pad_id=None,
+                       cls_id=None,
+                       sep_id=None,
+                       mask_id=None,
+                       return_input_mask=True,
+                       return_max_len=True,
+                       return_num_token=False):
+    """
+    1. generate Tensor of data
+    2. generate Tensor of position
+    3.
generate self attention mask, [shape: batch_size * max_len * max_len] + """ + + batch_src_ids = [inst[0] for inst in insts] + batch_sent_ids = [inst[1] for inst in insts] + batch_pos_ids = [inst[2] for inst in insts] + labels_list = [] + # compatible with squad, whose example includes start/end positions, + # or unique id + + if isinstance(insts[0][3], list): + if max_len != -1: + labels_list = [inst[3] + [0] * (max_len - len(inst[3])) for inst in insts] + labels_list = [np.array(labels_list).astype("int64").reshape([-1, max_len])] + else: + labels_list = [inst[3] for inst in insts] + labels_list = [np.array(labels_list).astype("int64")] + else: + for i in range(3, len(insts[0]), 1): + labels = [inst[i] for inst in insts] + labels = np.array(labels).astype("int64").reshape([-1, 1]) + labels_list.append(labels) + + # First step: do mask without padding + if mask_id >= 0: + out, mask_label, mask_pos = mask( + batch_src_ids, + total_token_num, + vocab_size=voc_size, + CLS=cls_id, + SEP=sep_id, + MASK=mask_id) + else: + out = batch_src_ids + # Second step: padding + src_id, self_input_mask = pad_batch_data( + out, + max_len, + pad_idx=pad_id, + return_input_mask=True) + pos_id = pad_batch_data( + batch_pos_ids, + max_len, + pad_idx=pad_id, + return_pos=False, + return_input_mask=False) + sent_id = pad_batch_data( + batch_sent_ids, + max_len, + pad_idx=pad_id, + return_pos=False, + return_input_mask=False) + + if mask_id >= 0: + return_list = [ + src_id, pos_id, sent_id, self_input_mask, mask_label, mask_pos + ] + labels_list + else: + return_list = [src_id, pos_id, sent_id, self_input_mask] + labels_list + + return return_list if len(return_list) > 1 else return_list[0] + + +def pad_batch_data(insts, + max_len_in, + pad_idx=0, + return_pos=False, + return_input_mask=False, + return_max_len=False, + return_num_token=False): + """ + Pad the instances to the max sequence length in batch, and generate the + corresponding position data and attention bias. + """ + return_list = [] + max_len = max_len_in if max_len_in != -1 else max(len(inst) for inst in insts) + # Any token included in dict can be used to pad, since the paddings' loss + # will be masked out by weights and make no effect on parameter gradients. + + inst_data = np.array( + [inst + list([pad_idx] * (max_len - len(inst))) for inst in insts + ]) + return_list += [inst_data.astype("int64").reshape([-1, max_len, 1])] + + # position data + if return_pos: + inst_pos = np.array([ + list(range(0, len(inst))) + [pad_idx] * (max_len - len(inst)) + for inst in insts + ]) + + return_list += [inst_pos.astype("int64").reshape([-1, max_len, 1])] + + if return_input_mask: + # This is used to avoid attention on paddings. 
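+        # (added note) mask value 1.0 marks real tokens and 0.0 marks padding;
+        # after the expand_dims below, its shape is [batch_size, max_len, 1].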
+ input_mask_data = np.array([[1] * len(inst) + [0] * + (max_len - len(inst)) for inst in insts]) + input_mask_data = np.expand_dims(input_mask_data, axis=-1) + return_list += [input_mask_data.astype("float32")] + + if return_max_len: + return_list += [max_len] + + if return_num_token: + num_token = 0 + for inst in insts: + num_token += len(inst) + return_list += [num_token] + + return return_list if len(return_list) > 1 else return_list[0] + + +if __name__ == "__main__": + pass diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/define_predict_pack.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/define_predict_pack.py new file mode 100644 index 00000000..ada84449 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/define_predict_pack.py @@ -0,0 +1,85 @@ +# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""define prediction results""" + +import re +import sys +import numpy as np + +import paddle +import paddle.fluid as fluid + + +class DefinePredict(object): + """ + Packaging Prediction Results + """ + def __init__(self): + """ + init + """ + self.task_map = {'udc': 'get_matching_res', + 'swda': 'get_cls_res', + 'mrda': 'get_cls_res', + 'atis_intent': 'get_cls_res', + 'atis_slot': 'get_sequence_tagging', + 'dstc2': 'get_multi_cls_res', + 'dstc2_asr': 'get_multi_cls_res', + 'multi-woz': 'get_multi_cls_res'} + + def get_matching_res(self, probs, params=None): + """ + get matching score + """ + probs = list(probs) + return probs[1] + + def get_cls_res(self, probs, params=None): + """ + get da classify tag + """ + probs = list(probs) + max_prob = max(probs) + tag = probs.index(max_prob) + return tag + + def get_sequence_tagging(self, probs, params=None): + """ + get sequence tagging tag + """ + labels = [] + batch_labels = np.array(probs).reshape(-1, params) + labels = [" ".join([str(l) for l in list(l_l)]) for l_l in batch_labels] + return labels + + def get_multi_cls_res(self, probs, params=None): + """ + get dst classify tag + """ + labels = [] + probs = list(probs) + for i in range(len(probs)): + if probs[i] >= 0.5: + labels.append(i) + if not labels: + max_prob = max(probs) + label_str = str(probs.index(max_prob)) + else: + label_str = " ".join([str(l) for l in sorted(labels)]) + + return label_str + + + + diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/download_data.sh b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/download_data.sh new file mode 100644 index 00000000..c7fe4a99 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/download_data.sh @@ -0,0 +1,3 @@ +wget --no-check-certificate https://baidu-nlp.bj.bcebos.com/dmtk_data_1.0.0.tar.gz +tar -xvf dmtk_data_1.0.0.tar.gz +rm dmtk_data_1.0.0.tar.gz diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/download_models.sh 
b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/download_models.sh new file mode 100644 index 00000000..fdb146d4 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/download_models.sh @@ -0,0 +1,3 @@ +wget --no-check-certificate https://baidu-nlp.bj.bcebos.com/dmtk_models_1.0.0.tar.gz +tar -xvf dmtk_models_1.0.0.tar.gz +rm dmtk_models_1.0.0.tar.gz diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/download_pretrain_model.sh b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/download_pretrain_model.sh new file mode 100644 index 00000000..5591926c --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/download_pretrain_model.sh @@ -0,0 +1,3 @@ +wget --no-check-certificate https://bert-models.bj.bcebos.com/uncased_L-12_H-768_A-12.tar.gz +tar -xvf uncased_L-12_H-768_A-12.tar.gz +rm uncased_L-12_H-768_A-12.tar.gz diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/eval_metrics.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/eval_metrics.py new file mode 100644 index 00000000..d8967b7c --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/eval_metrics.py @@ -0,0 +1,369 @@ +# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
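+# Usage sketch (the prediction file path is illustrative):
+#     python eval_metrics.py task_name predict_file
+# e.g. python eval_metrics.py udc ./output/udc/predictions.txt, where task_name
+# is one of [udc|swda|mrda|atis_intent|atis_slot|dstc2|dstc2_asr|multi-woz].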
+""" +evaluate task metrics +""" + +import sys + + +class EvalDA(object): + """ + evaluate da testset, swda|mrda + """ + def __init__(self, task_name, pred): + """ + predict file + """ + self.pred_file = pred + if task_name == 'swda': + self.refer_file = "./data/swda/test.txt" + elif task_name == "mrda": + self.refer_file = "./data/mrda/test.txt" + + def load_data(self): + """ + load reference label and predict label + """ + pred_label = [] + refer_label = [] + with open(self.refer_file, 'r') as fr: + for line in fr: + label = line.rstrip('\n').split('\t')[1] + refer_label.append(int(label)) + idx = 0 + with open(self.pred_file, 'r') as fr: + for line in fr: + elems = line.rstrip('\n').split('\t') + if len(elems) != 2 or not elems[0].isdigit(): + continue + tag_id = int(elems[1]) + pred_label.append(tag_id) + return pred_label, refer_label + + def evaluate(self): + """ + calculate acc metrics + """ + pred_label, refer_label = self.load_data() + common_num = 0 + total_num = len(pred_label) + for i in range(total_num): + if pred_label[i] == refer_label[i]: + common_num += 1 + acc = float(common_num) / total_num + return acc + + +class EvalATISIntent(object): + """ + evaluate da testset, swda|mrda + """ + def __init__(self, pred): + """ + predict file + """ + self.pred_file = pred + self.refer_file = "./data/atis/atis_intent/test.txt" + + def load_data(self): + """ + load reference label and predict label + """ + pred_label = [] + refer_label = [] + with open(self.refer_file, 'r') as fr: + for line in fr: + label = line.rstrip('\n').split('\t')[0] + refer_label.append(int(label)) + idx = 0 + with open(self.pred_file, 'r') as fr: + for line in fr: + elems = line.rstrip('\n').split('\t') + if len(elems) != 2 or not elems[0].isdigit(): + continue + tag_id = int(elems[1]) + pred_label.append(tag_id) + return pred_label, refer_label + + def evaluate(self): + """ + calculate acc metrics + """ + pred_label, refer_label = self.load_data() + common_num = 0 + total_num = len(pred_label) + for i in range(total_num): + if pred_label[i] == refer_label[i]: + common_num += 1 + acc = float(common_num) / total_num + return acc + + +class EvalATISSlot(object): + """ + evaluate atis slot + """ + def __init__(self, pred): + """ + pred file + """ + self.pred_file = pred + self.refer_file = "./data/atis/atis_slot/test.txt" + + def load_data(self): + """ + load reference label and predict label + """ + pred_label = [] + refer_label = [] + with open(self.refer_file, 'r') as fr: + for line in fr: + labels = line.rstrip('\n').split('\t')[1].split() + labels = [int(l) for l in labels] + refer_label.append(labels) + with open(self.pred_file, 'r') as fr: + for line in fr: + if len(line.split('\t')) != 2 or not line[0].isdigit(): + continue + labels = line.rstrip('\n').split('\t')[1].split()[1:] + labels = [int(l) for l in labels] + pred_label.append(labels) + pred_label_equal = [] + refer_label_equal = [] + assert len(refer_label) == len(pred_label) + for i in range(len(refer_label)): + num = len(refer_label[i]) + refer_label_equal.extend(refer_label[i]) + pred_label[i] = pred_label[i][: num] + pred_label_equal.extend(pred_label[i]) + + return pred_label_equal, refer_label_equal + + def evaluate(self): + """ + evaluate f1_micro score + """ + pred_label, refer_label = self.load_data() + tp = dict() + fn = dict() + fp = dict() + for i in range(len(refer_label)): + if refer_label[i] == pred_label[i]: + if refer_label[i] not in tp: + tp[refer_label[i]] = 0 + tp[refer_label[i]] += 1 + else: + if pred_label[i] not in fp: + 
+                    fp[pred_label[i]] = 0
+                fp[pred_label[i]] += 1
+                if refer_label[i] not in fn:
+                    fn[refer_label[i]] = 0
+                fn[refer_label[i]] += 1
+
+        results = ["label precision recall"]
+        for i in range(0, 130):
+            if i not in tp:
+                results.append(" %s: 0.0 0.0" % i)
+                continue
+            if i in fp:
+                precision = float(tp[i]) / (tp[i] + fp[i])
+            else:
+                precision = 1.0
+            if i in fn:
+                recall = float(tp[i]) / (tp[i] + fn[i])
+            else:
+                recall = 1.0
+            results.append(" %s: %.4f %.4f" % (i, precision, recall))
+        tp_total = sum(tp.values())
+        fn_total = sum(fn.values())
+        fp_total = sum(fp.values())
+        p_total = float(tp_total) / (tp_total + fp_total)
+        r_total = float(tp_total) / (tp_total + fn_total)
+        f_micro = 2 * p_total * r_total / (p_total + r_total)
+        results.append("f1_micro: %.4f" % (f_micro))
+        return "\n".join(results)
+
+
+class EvalUDC(object):
+    """
+    evaluate udc
+    """
+    def __init__(self, pred):
+        """
+        predict file
+        """
+        self.pred_file = pred
+        self.refer_file = "./data/udc/test.txt"
+
+    def load_data(self):
+        """
+        load reference label and predict label
+        """
+        data = []
+        refer_label = []
+        with open(self.refer_file, 'r') as fr:
+            for line in fr:
+                label = line.rstrip('\n').split('\t')[0]
+                refer_label.append(label)
+        idx = 0
+        with open(self.pred_file, 'r') as fr:
+            for line in fr:
+                elems = line.rstrip('\n').split('\t')
+                if len(elems) != 2 or not elems[0].isdigit():
+                    continue
+                match_prob = elems[1]
+                data.append((float(match_prob), int(refer_label[idx])))
+                idx += 1
+        return data
+
+    def get_p_at_n_in_m(self, data, n, m, ind):
+        """
+        calculate precision at n in m candidates
+        """
+        pos_score = data[ind][0]
+        curr = data[ind: ind + m]
+        curr = sorted(curr, key=lambda x: x[0], reverse=True)
+
+        if curr[n - 1][0] <= pos_score:
+            return 1
+        return 0
+
+    def evaluate(self):
+        """
+        calculate udc metrics
+        """
+        data = self.load_data()
+        assert len(data) % 10 == 0
+
+        p_at_1_in_2 = 0.0
+        p_at_1_in_10 = 0.0
+        p_at_2_in_10 = 0.0
+        p_at_5_in_10 = 0.0
+
+        length = len(data) // 10
+
+        for i in range(0, length):
+            ind = i * 10
+            assert data[ind][1] == 1
+
+            p_at_1_in_2 += self.get_p_at_n_in_m(data, 1, 2, ind)
+            p_at_1_in_10 += self.get_p_at_n_in_m(data, 1, 10, ind)
+            p_at_2_in_10 += self.get_p_at_n_in_m(data, 2, 10, ind)
+            p_at_5_in_10 += self.get_p_at_n_in_m(data, 5, 10, ind)
+
+        metrics_out = [p_at_1_in_2 / length, p_at_1_in_10 / length, \
+                       p_at_2_in_10 / length, p_at_5_in_10 / length]
+        return metrics_out
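+
+
+# Note on EvalUDC: the test file is expected to hold groups of 10 candidate
+# responses per context, with the positive response first in each group (see
+# the asserts above); R_n@m is the rate at which the positive response ranks
+# in the top n of m candidates.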
+
+
+class EvalDSTC2(object):
+    """
+    evaluate dst testset, dstc2
+    """
+    def __init__(self, task_name, pred):
+        """
+        predict file
+        """
+        self.task_name = task_name
+        self.pred_file = pred
+        self.refer_file = "./data/dstc2/%s/test.txt" % self.task_name
+
+    def load_data(self):
+        """
+        load reference label and predict label
+        """
+        pred_label = []
+        refer_label = []
+        with open(self.refer_file, 'r') as fr:
+            for line in fr:
+                line = line.strip('\n')
+                labels = [int(l) for l in line.split('\t')[-1].split()]
+                labels = sorted(list(set(labels)))
+                refer_label.append(" ".join([str(l) for l in labels]))
+        all_pred = []
+        with open(self.pred_file, 'r') as fr:
+            for line in fr:
+                line = line.strip('\n')
+                all_pred.append(line)
+        all_pred = all_pred[len(all_pred) - len(refer_label):]
+        for line in all_pred:
+            labels = [int(l) for l in line.split('\t')[-1].split()]
+            labels = sorted(list(set(labels)))
+            pred_label.append(" ".join([str(l) for l in labels]))
+        return pred_label, refer_label
+
+    def evaluate(self):
+        """
+        calculate joint acc && overall acc
+        """
+        overall_all = 0.0
+        correct_joint = 0
+        pred_label, refer_label = self.load_data()
+        for i in range(len(refer_label)):
+            if refer_label[i] != pred_label[i]:
+                continue
+            correct_joint += 1
+        joint_all = float(correct_joint) / len(refer_label)
+        metrics_out = [joint_all, overall_all]
+        return metrics_out
+
+
+if __name__ == "__main__":
+    if len(sys.argv[1:]) < 2:
+        print("please input task_name predict_file")
+        sys.exit(1)
+
+    task_name = sys.argv[1]
+    pred_file = sys.argv[2]
+
+    if task_name.lower() == 'udc':
+        eval_inst = EvalUDC(pred_file)
+        eval_metrics = eval_inst.evaluate()
+        print("MATCHING TASK: %s metrics in testset: " % task_name)
+        print("R1@2: %s" % eval_metrics[0])
+        print("R1@10: %s" % eval_metrics[1])
+        print("R2@10: %s" % eval_metrics[2])
+        print("R5@10: %s" % eval_metrics[3])
+
+    elif task_name.lower() in ['swda', 'mrda']:
+        eval_inst = EvalDA(task_name.lower(), pred_file)
+        eval_metrics = eval_inst.evaluate()
+        print("DA TASK: %s metrics in testset: " % task_name)
+        print("ACC: %s" % eval_metrics)
+
+    elif task_name.lower() == 'atis_intent':
+        eval_inst = EvalATISIntent(pred_file)
+        eval_metrics = eval_inst.evaluate()
+        print("INTENTION TASK: %s metrics in testset: " % task_name)
+        print("ACC: %s" % eval_metrics)
+
+    elif task_name.lower() == 'atis_slot':
+        eval_inst = EvalATISSlot(pred_file)
+        eval_metrics = eval_inst.evaluate()
+        print("SLOT FILLING TASK: %s metrics in testset: " % task_name)
+        print(eval_metrics)
+
+    elif task_name.lower() in ['dstc2', 'dstc2_asr']:
+        eval_inst = EvalDSTC2(task_name.lower(), pred_file)
+        eval_metrics = eval_inst.evaluate()
+        print("DST TASK: %s metrics in testset: " % task_name)
+        print("JOINT ACC: %s" % eval_metrics[0])
+
+    elif task_name.lower() == "multi-woz":
+        # NOTE: EvalMultiWoz is not defined in this file; this branch assumes
+        # such a class is provided elsewhere before it can run.
+        eval_inst = EvalMultiWoz(pred_file)
+        eval_metrics = eval_inst.evaluate()
+        print("DST TASK: %s metrics in testset: " % task_name)
+        print("JOINT ACC: %s" % eval_metrics[0])
+        print("OVERALL ACC: %s" % eval_metrics[1])
+
+    else:
+        print("task name not in [udc|swda|mrda|atis_intent|atis_slot|dstc2|dstc2_asr|multi-woz]")
diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/finetune_args.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/finetune_args.py
new file mode 100644
index 00000000..b88307ee
--- /dev/null
+++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/finetune_args.py
@@ -0,0 +1,74 @@
+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Finetuning on classification tasks."""
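+
+# Typical use (mirrors train.py and predict.py): import the shared parser and
+# parse the flags defined below, e.g.
+#     from finetune_args import parser
+#     args = parser.parse_args()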
+"""Finetuning on classification tasks.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import time +import argparse + +from utils.args import ArgumentGroup + +parser = argparse.ArgumentParser(__doc__) +model_g = ArgumentGroup(parser, "model", "model configuration and paths.") +model_g.add_arg("bert_config_path", str, None, "Path to the json file for bert model config.") +model_g.add_arg("init_checkpoint", str, None, "Init checkpoint to resume training from.") +model_g.add_arg("init_pretraining_params", str, None, + "Init pre-training params which preforms fine-tuning from. If the " + "arg 'init_checkpoint' has been set, this argument wouldn't be valid.") +model_g.add_arg("checkpoints", str, "checkpoints", "Path to save checkpoints.") +model_g.add_arg("save_inference_model_path", str, None, "Path to save model.") + +train_g = ArgumentGroup(parser, "training", "training options.") +train_g.add_arg("epoch", int, 3, "Number of epoches for fine-tuning.") +train_g.add_arg("learning_rate", float, 5e-5, "Learning rate used to train with warmup.") +train_g.add_arg("lr_scheduler", str, "linear_warmup_decay", + "scheduler of learning rate.", choices=['linear_warmup_decay', 'noam_decay']) +train_g.add_arg("weight_decay", float, 0.01, "Weight decay rate for L2 regularizer.") +train_g.add_arg("warmup_proportion", float, 0.1, + "Proportion of training steps to perform linear learning rate warmup for.") +train_g.add_arg("save_steps", int, 10000, "The steps interval to save checkpoints.") +train_g.add_arg("validation_steps", int, 1000, "The steps interval to evaluate model performance.") +train_g.add_arg("use_fp16", bool, False, "Whether to use fp16 mixed precision training.") +train_g.add_arg("loss_scaling", float, 1.0, + "Loss scaling factor for mixed precision training, only valid when use_fp16 is enabled.") + +log_g = ArgumentGroup(parser, "logging", "logging related.") +log_g.add_arg("skip_steps", int, 10, "The steps interval to print loss.") +log_g.add_arg("verbose", bool, False, "Whether to output verbose log.") + +data_g = ArgumentGroup(parser, "data", "Data paths, vocab paths and data processing options") +data_g.add_arg("data_dir", str, None, "Path to training data.") +data_g.add_arg("vocab_path", str, None, "Vocabulary path.") +data_g.add_arg("max_seq_len", int, 512, "Number of words of the longest seqence.") +data_g.add_arg("batch_size", int, 32, "Total examples' number in batch for training. see also --in_tokens.") +data_g.add_arg("do_lower_case", bool, True, + "Whether to lower case the input text. 
Should be True for uncased models and False for cased models.") +data_g.add_arg("random_seed", int, 0, "Random seed.") + +run_type_g = ArgumentGroup(parser, "run_type", "running type options.") +run_type_g.add_arg("use_cuda", bool, True, "If set, use GPU for training.") +run_type_g.add_arg("use_fast_executor", bool, False, "If set, use fast parallel executor (in experiment).") +run_type_g.add_arg("num_iteration_per_drop_scope", int, 1, "Ihe iteration intervals to clean up temporary variables.") +run_type_g.add_arg("task_name", str, None, + "The name of task to perform fine-tuning, " + "should be in {'udc', 'swda', 'mrda', 'atis_slot', 'atis_intent', 'dstc2'}.") +run_type_g.add_arg("do_train", bool, True, "Whether to perform training.") +run_type_g.add_arg("do_val", bool, True, "Whether to perform evaluation on dev data set.") +run_type_g.add_arg("do_test", bool, True, "Whether to perform evaluation on test data set.") + + diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/optimization.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/optimization.py new file mode 100644 index 00000000..e010bca9 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/optimization.py @@ -0,0 +1,139 @@ +# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
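+
+# Schedule sketch (mirrors linear_warmup_decay below):
+#     step < warmup_steps:  lr = learning_rate * step / warmup_steps
+#     step >= warmup_steps: lr decays linearly (polynomial_decay, power=1.0)
+#                           from learning_rate down to 0 over num_train_steps.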
+"""Optimization and learning rate scheduling.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import numpy as np +import paddle.fluid as fluid +from utils.fp16 import create_master_params_grads, master_param_to_train_param + + +def linear_warmup_decay(learning_rate, warmup_steps, num_train_steps): + """ Applies linear warmup of learning rate from 0 and decay to 0.""" + with fluid.default_main_program()._lr_schedule_guard(): + lr = fluid.layers.tensor.create_global_var( + shape=[1], + value=0.0, + dtype='float32', + persistable=True, + name="scheduled_learning_rate") + + global_step = fluid.layers.learning_rate_scheduler._decay_step_counter() + + with fluid.layers.control_flow.Switch() as switch: + with switch.case(global_step < warmup_steps): + warmup_lr = learning_rate * (global_step / warmup_steps) + fluid.layers.tensor.assign(warmup_lr, lr) + with switch.default(): + decayed_lr = fluid.layers.learning_rate_scheduler.polynomial_decay( + learning_rate=learning_rate, + decay_steps=num_train_steps, + end_learning_rate=0.0, + power=1.0, + cycle=False) + fluid.layers.tensor.assign(decayed_lr, lr) + + return lr + + +def optimization(loss, + warmup_steps, + num_train_steps, + learning_rate, + train_program, + startup_prog, + weight_decay, + scheduler='linear_warmup_decay', + use_fp16=False, + loss_scaling=1.0): + if warmup_steps > 0: + if scheduler == 'noam_decay': + scheduled_lr = fluid.layers.learning_rate_scheduler\ + .noam_decay(1/(warmup_steps *(learning_rate ** 2)), + warmup_steps) + elif scheduler == 'linear_warmup_decay': + scheduled_lr = linear_warmup_decay(learning_rate, warmup_steps, + num_train_steps) + else: + raise ValueError("Unkown learning rate scheduler, should be " + "'noam_decay' or 'linear_warmup_decay'") + optimizer = fluid.optimizer.Adam(learning_rate=scheduled_lr) + else: + optimizer = fluid.optimizer.Adam(learning_rate=learning_rate) + scheduled_lr = learning_rate + + clip_norm_thres = 1.0 + # When using mixed precision training, scale the gradient clip threshold + # by loss_scaling + if use_fp16 and loss_scaling > 1.0: + clip_norm_thres *= loss_scaling + fluid.clip.set_gradient_clip( + clip=fluid.clip.GradientClipByGlobalNorm(clip_norm=clip_norm_thres)) + + def exclude_from_weight_decay(name): + if name.find("layer_norm") > -1: + return True + bias_suffix = ["_bias", "_b", ".b_0"] + for suffix in bias_suffix: + if name.endswith(suffix): + return True + return False + + param_list = dict() + + if use_fp16: + param_grads = optimizer.backward(loss) + master_param_grads = create_master_params_grads( + param_grads, train_program, startup_prog, loss_scaling) + + for param, _ in master_param_grads: + param_list[param.name] = param * 1.0 + param_list[param.name].stop_gradient = True + + optimizer.apply_gradients(master_param_grads) + + if weight_decay > 0: + for param, grad in master_param_grads: + if exclude_from_weight_decay(param.name.rstrip(".master")): + continue + with param.block.program._optimized_guard( + [param, grad]), fluid.framework.name_scope("weight_decay"): + updated_param = param - param_list[ + param.name] * weight_decay * scheduled_lr + fluid.layers.assign(output=param, input=updated_param) + + master_param_to_train_param(master_param_grads, param_grads, + train_program) + + else: + for param in train_program.global_block().all_parameters(): + param_list[param.name] = param * 1.0 + param_list[param.name].stop_gradient = True + + _, param_grads = optimizer.minimize(loss) + + if 
weight_decay > 0: + for param, grad in param_grads: + if exclude_from_weight_decay(param.name): + continue + with param.block.program._optimized_guard( + [param, grad]), fluid.framework.name_scope("weight_decay"): + updated_param = param - param_list[ + param.name] * weight_decay * scheduled_lr + fluid.layers.assign(output=param, input=updated_param) + + return scheduled_lr diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/predict.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/predict.py new file mode 100644 index 00000000..5faf6e4b --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/predict.py @@ -0,0 +1,164 @@ +# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Load checkpoint of running classifier to do prediction and save inference model.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import sys +import time +import numpy as np +import multiprocessing + +import paddle +import paddle.fluid as fluid + +from finetune_args import parser +from utils.args import print_arguments +from utils.init import init_pretraining_params, init_checkpoint + +import define_predict_pack +import reader.data_reader as reader + +_WORK_DIR = os.path.split(os.path.realpath(__file__))[0] +sys.path.append('../../models/dialogue_model_toolkit/dialogue_general_understanding') + +from bert import BertConfig, BertModel +from create_model import create_model +import define_paradigm + + +def main(args): + """main function""" + bert_config = BertConfig(args.bert_config_path) + bert_config.print_config() + + task_name = args.task_name.lower() + paradigm_inst = define_paradigm.Paradigm(task_name) + pred_inst = define_predict_pack.DefinePredict() + pred_func = getattr(pred_inst, pred_inst.task_map[task_name]) + + processors = { + 'udc': reader.UDCProcessor, + 'swda': reader.SWDAProcessor, + 'mrda': reader.MRDAProcessor, + 'atis_slot': reader.ATISSlotProcessor, + 'atis_intent': reader.ATISIntentProcessor, + 'dstc2': reader.DSTC2Processor, + 'dstc2_asr': reader.DSTC2Processor, + } + + in_tokens = { + 'udc': True, + 'swda': True, + 'mrda': True, + 'atis_slot': False, + 'atis_intent': True, + 'dstc2': True, + 'dstc2_asr': True + } + + processor = processors[task_name](data_dir=args.data_dir, + vocab_path=args.vocab_path, + max_seq_len=args.max_seq_len, + do_lower_case=args.do_lower_case, + in_tokens=in_tokens[task_name], + task_name=task_name, + random_seed=args.random_seed) + num_labels = len(processor.get_labels()) + + predict_prog = fluid.Program() + predict_startup = fluid.Program() + with fluid.program_guard(predict_prog, predict_startup): + with fluid.unique_name.guard(): + pred_results = create_model( + args, + pyreader_name='predict_reader', + bert_config=bert_config, + num_labels=num_labels, + paradigm_inst=paradigm_inst, + is_prediction=True) + predict_pyreader = pred_results.get('pyreader', 
None) + probs = pred_results.get('probs', None) + feed_target_names = pred_results.get('feed_target_names', None) + + predict_prog = predict_prog.clone(for_test=True) + + if args.use_cuda: + place = fluid.CUDAPlace(0) + dev_count = fluid.core.get_cuda_device_count() + else: + place = fluid.CPUPlace() + dev_count = int(os.environ.get('CPU_NUM', multiprocessing.cpu_count())) + + place = fluid.CUDAPlace(0) if args.use_cuda == True else fluid.CPUPlace() + exe = fluid.Executor(place) + exe.run(predict_startup) + + if args.init_checkpoint: + init_pretraining_params(exe, args.init_checkpoint, predict_prog) + else: + raise ValueError("args 'init_checkpoint' should be set for prediction!") + + predict_exe = fluid.ParallelExecutor( + use_cuda=args.use_cuda, main_program=predict_prog) + + test_data_generator = processor.data_generator( + batch_size=args.batch_size, + phase='test', + epoch=1, + shuffle=False) + predict_pyreader.decorate_tensor_provider(test_data_generator) + + predict_pyreader.start() + all_results = [] + time_begin = time.time() + while True: + try: + results = predict_exe.run(fetch_list=[probs.name]) + all_results.extend(results[0]) + except fluid.core.EOFException: + predict_pyreader.reset() + break + time_end = time.time() + + np.set_printoptions(precision=4, suppress=True) + print("-------------- prediction results --------------") + print("example_id\t" + ' '.join(processor.get_labels())) + if in_tokens[task_name]: + for index, result in enumerate(all_results): + tags = pred_func(result) + print("%s\t%s" % (index, tags)) + else: + tags = pred_func(all_results, args.max_seq_len) + for index, tag in enumerate(tags): + print("%s\t%s" % (index, tag)) + + if args.save_inference_model_path: + _, ckpt_dir = os.path.split(args.init_checkpoint) + dir_name = ckpt_dir + '_inference_model' + model_path = os.path.join(args.save_inference_model_path, dir_name) + fluid.io.save_inference_model( + model_path, + feed_target_names, [probs], + exe, + main_program=predict_prog) + + +if __name__ == '__main__': + args = parser.parse_args() + print_arguments(args) + main(args) diff --git a/PaddleNLP/chinese_ner/__init__.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/reader/__init__.py similarity index 100% rename from PaddleNLP/chinese_ner/__init__.py rename to PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/reader/__init__.py diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/reader/data_reader.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/reader/data_reader.py new file mode 100644 index 00000000..29e2d171 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/reader/data_reader.py @@ -0,0 +1,794 @@ +# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
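+# Usage sketch (arguments are illustrative; see the README for real settings):
+#     processor = UDCProcessor(data_dir="./data/udc",
+#                              vocab_path="./uncased_L-12_H-768_A-12/vocab.txt",
+#                              max_seq_len=128, do_lower_case=True,
+#                              in_tokens=True, task_name="udc")
+#     train_generator = processor.data_generator(batch_size=4096, phase="train",
+#                                                epoch=1, shuffle=True)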
+"""data reader""" +import os +import types +import csv +import numpy as np +import tokenization +from batching import prepare_batch_data + + +class DataProcessor(object): + """Base class for data converters for sequence classification data sets.""" + + def __init__(self, + data_dir, + vocab_path, + max_seq_len, + do_lower_case, + in_tokens, + task_name, + random_seed=None): + self.data_dir = data_dir + self.max_seq_len = max_seq_len + self.tokenizer = tokenization.FullTokenizer( + vocab_file=vocab_path, do_lower_case=do_lower_case) + self.vocab = self.tokenizer.vocab + self.in_tokens = in_tokens + + np.random.seed(random_seed) + + self.current_train_example = -1 + self.num_examples = {'train': -1, 'dev': -1, 'test': -1} + self.current_train_epoch = -1 + self.task_name = task_name + + def get_train_examples(self, data_dir): + """Gets a collection of `InputExample`s for the train set.""" + raise NotImplementedError() + + def get_dev_examples(self, data_dir): + """Gets a collection of `InputExample`s for the dev set.""" + raise NotImplementedError() + + def get_test_examples(self, data_dir): + """Gets a collection of `InputExample`s for prediction.""" + raise NotImplementedError() + + def get_labels(self): + """Gets the list of labels for this data set.""" + raise NotImplementedError() + + def convert_example(self, index, example, labels, max_seq_len, tokenizer): + """Converts a single `InputExample` into a single `InputFeatures`.""" + feature = convert_single_example(index, example, labels, max_seq_len, + tokenizer, self.task_name) + return feature + + def generate_instance(self, feature): + """ + generate instance with given feature + + Args: + feature: InputFeatures(object). A single set of features of data. + """ + input_pos = list(range(len(feature.input_ids))) + return [ + feature.input_ids, feature.segment_ids, input_pos, feature.label_id + ] + + def generate_batch_data(self, + batch_data, + max_len, + total_token_num, + voc_size=-1, + mask_id=-1, + return_input_mask=True, + return_max_len=False, + return_num_token=False): + """generate batch data""" + return prepare_batch_data( + batch_data, + max_len, + total_token_num, + voc_size=-1, + pad_id=self.vocab["[PAD]"], + cls_id=self.vocab["[CLS]"], + sep_id=self.vocab["[SEP]"], + mask_id=-1, + return_input_mask=True, + return_max_len=False, + return_num_token=False) + + @classmethod + def _read_tsv(cls, input_file, quotechar=None): + """Reads a tab separated value file.""" + with open(input_file, "r") as f: + reader = csv.reader(f, delimiter="\t", quotechar=quotechar) + lines = [] + for line in reader: + lines.append(line) + return lines + + def get_num_examples(self, phase): + """Get number of examples for train, dev or test.""" + if phase not in ['train', 'dev', 'test']: + raise ValueError( + "Unknown phase, which should be in ['train', 'dev', 'test'].") + return self.num_examples[phase] + + def get_train_progress(self): + """Gets progress for training phase.""" + return self.current_train_example, self.current_train_epoch + + def data_generator(self, batch_size, phase='train', epoch=1, shuffle=False): + """ + Generate data for train, dev or test. + + Args: + batch_size: int. The batch size of generated data. + phase: string. The phase for which to generate data. + epoch: int. Total epoches to generate data. + shuffle: bool. Whether to shuffle examples. 
+ """ + if phase == 'train': + examples = self.get_train_examples(self.data_dir) + self.num_examples['train'] = len(examples) + elif phase == 'dev': + examples = self.get_dev_examples(self.data_dir) + self.num_examples['dev'] = len(examples) + elif phase == 'test': + examples = self.get_test_examples(self.data_dir) + self.num_examples['test'] = len(examples) + else: + raise ValueError( + "Unknown phase, which should be in ['train', 'dev', 'test'].") + + def instance_reader(): + """generate instance data""" + for epoch_index in range(epoch): + if shuffle: + np.random.shuffle(examples) + if phase == 'train': + self.current_train_epoch = epoch_index + for (index, example) in enumerate(examples): + if phase == 'train': + self.current_train_example = index + 1 + feature = self.convert_example( + index, example, + self.get_labels(), self.max_seq_len, self.tokenizer) + + instance = self.generate_instance(feature) + yield instance + + def batch_reader(reader, batch_size, in_tokens): + """read batch data""" + batch, total_token_num, max_len = [], 0, 0 + for instance in reader(): + token_ids, sent_ids, pos_ids, label = instance[:4] + max_len = max(max_len, len(token_ids)) + if in_tokens: + to_append = (len(batch) + 1) * max_len <= batch_size + else: + to_append = len(batch) < batch_size + if to_append: + batch.append(instance) + total_token_num += len(token_ids) + else: + yield batch, total_token_num + batch, total_token_num, max_len = [instance], len( + token_ids), len(token_ids) + + if len(batch) > 0: + yield batch, total_token_num + + def wrapper(): + """yield batch data to network""" + for batch_data, total_token_num in batch_reader( + instance_reader, batch_size, self.in_tokens): + if self.in_tokens: + max_seq = -1 + else: + max_seq = self.max_seq_len + batch_data = self.generate_batch_data( + batch_data, + max_seq, + total_token_num, + voc_size=-1, + mask_id=-1, + return_input_mask=True, + return_max_len=False, + return_num_token=False) + yield batch_data + + return wrapper + + +class InputExample(object): + """A single training/test example for simple sequence classification.""" + + def __init__(self, guid, text_a, text_b=None, text_c=None, label=None): + """Constructs a InputExample. + + Args: + guid: Unique id for the example. + text_a: string. The untokenized text of the first sequence. For single + sequence tasks, only this sequence must be specified. + text_b: (Optional) string. The untokenized text of the second sequence. + Only must be specified for sequence pair tasks. + label: (Optional) string. The label of the example. This should be + specified for train and dev examples, but not for test examples. + """ + self.guid = guid + self.text_a = text_a + self.text_b = text_b + self.text_c = text_c + self.label = label + + +def _truncate_seq_pair(tokens_a, tokens_b, max_length): + """Truncates a sequence pair in place to the maximum length.""" + + # This is a simple heuristic which will always truncate the longer sequence + # one token at a time. This makes more sense than truncating an equal percent + # of tokens from each, since if one sequence is very short then each token + # that's truncated likely contains more information than a longer sequence. 
+ while True: + total_length = len(tokens_a) + len(tokens_b) + if total_length <= max_length: + break + if len(tokens_a) > len(tokens_b): + tokens_a.pop() + else: + tokens_b.pop() + + +class InputFeatures(object): + """A single set of features of data.""" + + def __init__(self, input_ids, input_mask, segment_ids, label_id): + self.input_ids = input_ids + self.input_mask = input_mask + self.segment_ids = segment_ids + self.label_id = label_id + + +class UDCProcessor(DataProcessor): + """Processor for the UDC data set.""" + def _create_examples(self, lines, set_type): + """Creates examples for the training and dev sets.""" + examples = [] + for (i, line) in enumerate(lines): + guid = "%s-%d" % (set_type, i) + text_a = "\t".join(line[1: -1]) + text_a = tokenization.convert_to_unicode(text_a) + text_a = text_a.split('\t') + text_b = line[-1] + text_b = tokenization.convert_to_unicode(text_b) + label = tokenization.convert_to_unicode(line[0]) + examples.append( + InputExample( + guid=guid, text_a=text_a, text_b=text_b, label=label)) + return examples + + def get_train_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "train.txt")) + examples = self._create_examples(lines, "train") + return examples + + def get_dev_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "dev.txt")) + examples = self._create_examples(lines, "dev") + return examples + + def get_test_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "test.txt")) + examples = self._create_examples(lines, "test") + return examples + + def get_labels(self): + """See base class.""" + return ["0", "1"] + + +class SWDAProcessor(DataProcessor): + """Processor for the SWDA data set.""" + def _create_examples(self, lines, set_type): + """Creates examples for the training and dev sets.""" + examples = create_multi_turn_examples(lines, set_type) + return examples + + def get_train_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "train.txt")) + examples = self._create_examples(lines, "train") + return examples + + def get_dev_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "dev.txt")) + examples = self._create_examples(lines, "dev") + return examples + + def get_test_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "test.txt")) + examples = self._create_examples(lines, "test") + return examples + + def get_labels(self): + """See base class.""" + labels = range(42) + labels = [str(label) for label in labels] + return labels + + +class MRDAProcessor(DataProcessor): + """Processor for the MRDA data set.""" + def _create_examples(self, lines, set_type): + """Creates examples for the training and dev sets.""" + examples = create_multi_turn_examples(lines, set_type) + return examples + + def get_train_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "train.txt")) + examples = self._create_examples(lines, "train") + return examples + + def get_dev_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "dev.txt")) + examples = self._create_examples(lines, "dev") + return examples + + def get_test_examples(self, data_dir): + """See base class.""" + 
examples = [] + lines = self._read_tsv(os.path.join(data_dir, "test.txt")) + examples = self._create_examples(lines, "test") + return examples + + def get_labels(self): + """See base class.""" + labels = range(42) + labels = [str(label) for label in labels] + return labels + + +class ATISSlotProcessor(DataProcessor): + """Processor for the ATIS Slot data set.""" + def _create_examples(self, lines, set_type): + """Creates examples for the training and dev sets.""" + examples = [] + for (i, line) in enumerate(lines): + guid = "%s-%d" % (set_type, i) + text_a = line[0] + label = line[1] + text_a = tokenization.convert_to_unicode(text_a) + label_list = label.split() + examples.append( + InputExample( + guid=guid, text_a=text_a, label=label_list)) + return examples + + def get_train_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "train.txt")) + examples = self._create_examples(lines, "train") + return examples + + def get_dev_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "dev.txt")) + examples = self._create_examples(lines, "dev") + return examples + + def get_test_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "test.txt")) + examples = self._create_examples(lines, "test") + return examples + + def get_labels(self): + """See base class.""" + labels = range(130) + labels = [str(label) for label in labels] + return labels + + +class ATISIntentProcessor(DataProcessor): + """Processor for the ATIS intent data set.""" + def _create_examples(self, lines, set_type): + """Creates examples for the training and dev sets.""" + examples = [] + for (i, line) in enumerate(lines): + guid = "%s-%d" % (set_type, i) + text_a = line[1] + text_a = tokenization.convert_to_unicode(text_a) + label = tokenization.convert_to_unicode(line[0]) + examples.append( + InputExample( + guid=guid, text_a=text_a, label=label)) + return examples + + def get_train_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "train.txt")) + examples = self._create_examples(lines, "train") + return examples + + def get_dev_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "dev.txt")) + examples = self._create_examples(lines, "dev") + return examples + + def get_test_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "test.txt")) + examples = self._create_examples(lines, "test") + return examples + + def get_labels(self): + """See base class.""" + labels = range(26) + labels = [str(label) for label in labels] + return labels + + +class DSTC2Processor(DataProcessor): + """Processor for the DSTC2 data set.""" + def _create_turns(self, conv_example): + """create multi turn dataset""" + samples = [] + max_turns = 20 + for i in range(len(conv_example)): + conv_turns = conv_example[max(i - max_turns, 0): i + 1] + conv_info = "\1".join([sample[0] for sample in conv_turns]) + samples.append((conv_info.split('\1'), conv_example[i][1])) + return samples + + def _create_examples(self, lines, set_type): + """Creates examples for multi-turn dialogue sets.""" + examples = [] + conv_id = -1 + index = 0 + conv_example = [] + for (i, line) in enumerate(lines): + conv_no = line[0] + text_a = line[1] + label_list = line[2].split() + if conv_no != conv_id and i != 0: + samples = 
self._create_turns(conv_example) + for sample in samples: + guid = "%s-%s" % (set_type, index) + index += 1 + history = sample[0] + dst_label = sample[1] + examples.append(InputExample(guid=guid, text_a=history, label=dst_label)) + conv_example = [] + conv_id = conv_no + if i == 0: + conv_id = conv_no + conv_example.append((text_a, label_list)) + if conv_example: + samples = self._create_turns(conv_example) + for sample in samples: + guid = "%s-%s" % (set_type, index) + index += 1 + history = sample[0] + dst_label = sample[1] + examples.append(InputExample(guid=guid, text_a=history, label=dst_label)) + return examples + + def get_train_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "train.txt")) + examples = self._create_examples(lines, "train") + return examples + + def get_dev_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "dev.txt")) + examples = self._create_examples(lines, "dev") + return examples + + def get_test_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "test.txt")) + examples = self._create_examples(lines, "test") + return examples + + def get_labels(self): + """See base class.""" + labels = range(217) + labels = [str(label) for label in labels] + return labels + + +class MULTIWOZProcessor(DataProcessor): + """Processor for the MULTIWOZ data set.""" + def _create_turns(self, conv_example): + """create multi turn dataset""" + samples = [] + max_turns = 2 + for i in range(len(conv_example)): + prefix_turns = conv_example[max(i - max_turns, 0): i] + conv_info = "\1".join([turn[0] for turn in prefix_turns]) + current_turns = conv_example[i][0] + samples.append((conv_info.split('\1'), current_turns.split('\1'), conv_example[i][1])) + return samples + + def _create_examples(self, lines, set_type): + """Creates examples for multi-turn dialogue sets.""" + examples = [] + conv_id = -1 + index = 0 + conv_example = [] + for (i, line) in enumerate(lines): + conv_no = line[0] + text_a = line[2] + label_list = line[1].split() + if conv_no != conv_id and i != 0: + samples = self._create_turns(conv_example) + for sample in samples: + guid = "%s-%s" % (set_type, index) + index += 1 + history = sample[0] + current = sample[1] + dst_label = sample[2] + examples.append(InputExample(guid=guid, text_a=history, text_b=current, label=dst_label)) + conv_example = [] + conv_id = conv_no + if i == 0: + conv_id = conv_no + conv_example.append((text_a, label_list)) + if conv_example: + samples = self._create_turns(conv_example) + for sample in samples: + guid = "%s-%s" % (set_type, index) + index += 1 + history = sample[0] + current = sample[1] + dst_label = sample[2] + examples.append(InputExample(guid=guid, text_a=history, text_b=current, label=dst_label)) + return examples + + def get_train_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "train.txt")) + examples = self._create_examples(lines, "train") + return examples + + def get_dev_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "dev.txt")) + examples = self._create_examples(lines, "dev") + return examples + + def get_test_examples(self, data_dir): + """See base class.""" + examples = [] + lines = self._read_tsv(os.path.join(data_dir, "test.txt")) + examples = self._create_examples(lines, "test") + return examples + + def 
get_labels(self): + """See base class.""" + labels = range(722) + labels = [str(label) for label in labels] + return labels + + +def create_dialogue_examples(conv): + """Creates dialogue sample""" + samples = [] + for i in range(len(conv)): + cur_txt = "%s : %s" % (conv[i][2], conv[i][3]) + pre_txt = ["%s : %s" % (c[2], c[3]) for c in conv[max(0, i - 5): i]] + suf_txt = ["%s : %s" % (c[2], c[3]) for c in conv[i + 1: min(len(conv), i + 3)]] + sample = [conv[i][1], pre_txt, cur_txt, suf_txt] + samples.append(sample) + return samples + + +def create_multi_turn_examples(lines, set_type): + """Creates examples for multi-turn dialogue sets.""" + conv_id = -1 + examples = [] + conv_example = [] + index = 0 + for (i, line) in enumerate(lines): + tokens = line + conv_no = tokens[0] + if conv_no != conv_id and i != 0: + samples = create_dialogue_examples(conv_example) + for sample in samples: + guid = "%s-%s" % (set_type, index) + index += 1 + label = sample[0] + text_a = sample[1] + text_b = sample[2] + text_c = sample[3] + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, text_c=text_c, label=label)) + conv_example = [] + conv_id = conv_no + if i == 0: + conv_id = conv_no + conv_example.append(tokens) + if conv_example: + samples = create_dialogue_examples(conv_example) + for sample in samples: + guid = "%s-%s" % (set_type, index) + index += 1 + label = sample[0] + text_a = sample[1] + text_b = sample[2] + text_c = sample[3] + examples.append( + InputExample(guid=guid, text_a=text_a, text_b=text_b, text_c=text_c, label=label)) + return examples + + +def convert_tokens(tokens, sep_id, tokenizer): + """Converts tokens to ids""" + tokens_ids = [] + if not tokens: + return tokens_ids + if isinstance(tokens, list): + for text in tokens: + tok_text = tokenizer.tokenize(text) + ids = tokenizer.convert_tokens_to_ids(tok_text) + tokens_ids.extend(ids) + if text != tokens[-1]: + tokens_ids.append(sep_id) + else: + tok_text = tokenizer.tokenize(tokens) + tokens_ids = tokenizer.convert_tokens_to_ids(tok_text) + return tokens_ids + + +def convert_single_example(ex_index, example, label_list, max_seq_length, + tokenizer, task_name): + """Converts a single DA `InputExample` into a single `InputFeatures`.""" + label_map = {} + SEP = 102 + CLS = 101 + + if task_name == 'udc': + INNER_SEP = 1 + limit_length = 60 + elif task_name == 'swda': + INNER_SEP = 1 + limit_length = 50 + elif task_name == 'mrda': + INNER_SEP = 1 + limit_length = 50 + elif task_name == 'atis_intent': + INNER_SEP = -1 + limit_length = -1 + elif task_name == 'atis_slot': + INNER_SEP = -1 + limit_length = -1 + elif task_name == 'dstc2': + INNER_SEP = 1 + limit_length = -1 + elif task_name == 'dstc2_asr': + INNER_SEP = 1 + limit_length = -1 + elif task_name == 'multi-woz': + INNER_SEP = 1 + limit_length = 200 + for (i, label) in enumerate(label_list): + label_map[label] = i + + tokens_a = example.text_a + tokens_b = example.text_b + tokens_c = example.text_c + + tokens_a_ids = convert_tokens(tokens_a, INNER_SEP, tokenizer) + tokens_b_ids = convert_tokens(tokens_b, INNER_SEP, tokenizer) + tokens_c_ids = convert_tokens(tokens_c, INNER_SEP, tokenizer) + + if tokens_b_ids: + tokens_b_ids = tokens_b_ids[:min(limit_length, len(tokens_b_ids))] + else: + tokens_a_ids = tokens_a_ids[len(tokens_a_ids) - max_seq_length + 2:] + if not tokens_c_ids: + if len(tokens_a_ids) > max_seq_length - len(tokens_b_ids) - 3: + tokens_a_ids = tokens_a_ids[len(tokens_a_ids) - max_seq_length + len(tokens_b_ids) + 3:] + else: + if len(tokens_a_ids) + 
len(tokens_b_ids) + len(tokens_c_ids) > max_seq_length - 4: + left_num = max_seq_length - len(tokens_b_ids) - 4 + if len(tokens_a_ids) > len(tokens_c_ids): + if not tokens_c_ids: + tokens_a_ids = tokens_a_ids[max(0, len(tokens_a_ids) - left_num):] + else: + suffix_num = int(left_num / 2) + tokens_c_ids = tokens_c_ids[: min(len(tokens_c_ids), suffix_num)] + prefix_num = left_num - len(tokens_c_ids) + tokens_a_ids = tokens_a_ids[max(0, len(tokens_a_ids) - prefix_num):] + else: + if not tokens_a_ids: + tokens_c_ids = tokens_c_ids[max(0, len(tokens_c_ids) - left_num):] + else: + prefix_num = int(left_num / 2) + tokens_a_ids = tokens_a_ids[max(0, len(tokens_a_ids) - prefix_num):] + suffix_num = left_num - len(tokens_a_ids) + tokens_c_ids = tokens_c_ids[: min(len(tokens_c_ids), suffix_num)] + + input_ids = [] + segment_ids = [] + input_ids.append(CLS) + segment_ids.append(0) + input_ids.extend(tokens_a_ids) + segment_ids.extend([0] * len(tokens_a_ids)) + input_ids.append(SEP) + segment_ids.append(0) + if tokens_b_ids: + input_ids.extend(tokens_b_ids) + segment_ids.extend([1] * len(tokens_b_ids)) + input_ids.append(SEP) + segment_ids.append(1) + if tokens_c_ids: + input_ids.extend(tokens_c_ids) + segment_ids.extend([0] * len(tokens_c_ids)) + input_ids.append(SEP) + segment_ids.append(0) + + input_mask = [1] * len(input_ids) + if task_name == 'atis_slot': + label_id = [0] + [label_map[l] for l in example.label] + [0] + elif task_name in ['dstc2', 'dstc2_asr', 'multi-woz']: + label_id_enty = [label_map[l] for l in example.label] + label_id = [] + for i in range(len(label_map)): + if i in label_id_enty: + label_id.append(1) + else: + label_id.append(0) + else: + label_id = label_map[example.label] + + if ex_index < 5: + print("*** Example ***") + print("guid: %s" % (example.guid)) + print("input_ids: %s" % " ".join([str(x) for x in input_ids])) + print("input_mask: %s" % " ".join([str(x) for x in input_mask])) + print("segment_ids: %s" % " ".join([str(x) for x in segment_ids])) + print("label: %s (id = %s)" % (example.label, label_id)) + feature = InputFeatures( + input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids, + label_id=label_id) + + return feature + + diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/run_eval_metrics.sh b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/run_eval_metrics.sh new file mode 100644 index 00000000..52ab3d18 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/run_eval_metrics.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +TASK_NAME=$1 +PRED_FILE="./pred_"${TASK_NAME} +PYTHON_PATH="python" + +echo "run predict............................" +sh run_predict.sh ${TASK_NAME} > ${PRED_FILE} + +echo "eval_metrics..........................." 
+${PYTHON_PATH} eval_metrics.py ${TASK_NAME} ${PRED_FILE} + diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/run_predict.sh b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/run_predict.sh new file mode 100644 index 00000000..977e9ad5 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/run_predict.sh @@ -0,0 +1,57 @@ +#!/bin/bash +export CUDA_VISIBLE_DEVICES=4 +export CPU_NUM=1 + +TASK_NAME=$1 +BERT_BASE_PATH="./uncased_L-12_H-768_A-12" +INPUT_PATH="./data/${TASK_NAME}" +OUTPUT_PATH="./output/${TASK_NAME}" +PYTHON_PATH="python" + +if [ "$TASK_NAME" = "udc" ] +then + best_model="step_62500" + max_seq_len=210 + batch_size=6720 +elif [ "$TASK_NAME" = "swda" ] +then + best_model="step_12500" + max_seq_len=128 + batch_size=6720 +elif [ "$TASK_NAME" = "mrda" ] +then + best_model="step_6500" + max_seq_len=128 + batch_size=6720 +elif [ "$TASK_NAME" = "atis_intent" ] +then + best_model="step_600" + max_seq_len=128 + batch_size=4096 + INPUT_PATH="./data/atis/${TASK_NAME}" +elif [ "$TASK_NAME" = "atis_slot" ] +then + best_model="step_7500" + max_seq_len=128 + batch_size=32 + INPUT_PATH="./data/atis/${TASK_NAME}" +elif [ "$TASK_NAME" = "dstc2" ] +then + best_model="step_12000" + max_seq_len=700 + batch_size=6000 + INPUT_PATH="./data/dstc2/${TASK_NAME}" +else + echo "not support ${TASK_NAME} dataset.." + exit 255 +fi + +$PYTHON_PATH -u predict.py --task_name ${TASK_NAME} \ + --use_cuda true\ + --batch_size ${batch_size} \ + --init_checkpoint ${OUTPUT_PATH}/${best_model} \ + --data_dir ${INPUT_PATH} \ + --vocab_path ${BERT_BASE_PATH}/vocab.txt \ + --max_seq_len ${max_seq_len} \ + --bert_config_path ${BERT_BASE_PATH}/bert_config.json + diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/run_train.sh b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/run_train.sh new file mode 100644 index 00000000..781c2000 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/run_train.sh @@ -0,0 +1,100 @@ +#!/bin/bash +export CUDA_VISIBLE_DEVICES=3 +export CPU_NUM=1 + +TASK_NAME=$1 +typeset -l TASK_NAME + +BERT_BASE_PATH="./uncased_L-12_H-768_A-12" +INPUT_PATH="./data/${TASK_NAME}" +OUTPUT_PATH="./output/${TASK_NAME}" +PYTHON_PATH="python" + +DO_TRAIN=true +DO_VAL=true +DO_TEST=true + +#parameter configuration +if [ "${TASK_NAME}" = "udc" ] +then + save_steps=1000 + max_seq_len=210 + skip_steps=1000 + batch_size=6720 + epoch=2 + learning_rate=2e-5 + DO_VAL=false + DO_TEST=false +elif [ "${TASK_NAME}" = "swda" ] +then + save_steps=500 + max_seq_len=128 + skip_steps=200 + batch_size=6720 + epoch=10 + learning_rate=2e-5 +elif [ "${TASK_NAME}" = "mrda" ] +then + save_steps=500 + max_seq_len=128 + skip_steps=200 + batch_size=4096 + epoch=4 + learning_rate=2e-5 +elif [ "${TASK_NAME}" = "atis_intent" ] +then + save_steps=100 + max_seq_len=128 + skip_steps=10 + batch_size=4096 + epoch=20 + learning_rate=2e-5 + INPUT_PATH="./data/atis/${TASK_NAME}" +elif [ "${TASK_NAME}" = "atis_slot" ] +then + save_steps=100 + max_seq_len=128 + skip_steps=10 + batch_size=32 + epoch=50 + learning_rate=2e-5 + INPUT_PATH="./data/atis/${TASK_NAME}" +elif [ "${TASK_NAME}" = "dstc2" ] +then + save_steps=400 + max_seq_len=256 + skip_steps=20 + batch_size=8192 + epoch=40 + learning_rate=5e-5 + INPUT_PATH="./data/dstc2/${TASK_NAME}" +else + echo "not support ${TASK_NAME} dataset.." + exit 255 +fi + +# build train, dev, test dataset +cd scripts && sh run_build_data.sh ${TASK_NAME} && cd .. 
+ +#training +$PYTHON_PATH -u train.py --task_name ${TASK_NAME} \ + --use_cuda true\ + --do_train ${DO_TRAIN} \ + --do_val ${DO_VAL} \ + --do_test ${DO_TEST} \ + --epoch ${epoch} \ + --batch_size ${batch_size} \ + --data_dir ${INPUT_PATH} \ + --bert_config_path ${BERT_BASE_PATH}/bert_config.json \ + --vocab_path ${BERT_BASE_PATH}/vocab.txt \ + --init_pretraining_params ${BERT_BASE_PATH}/params \ + --checkpoints ${OUTPUT_PATH} \ + --save_steps ${save_steps} \ + --learning_rate ${learning_rate} \ + --weight_decay 0.01 \ + --max_seq_len ${max_seq_len} \ + --skip_steps ${skip_steps} \ + --validation_steps 1000000 \ + --num_iteration_per_drop_scope 10 \ + --use_fp16 false + diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/README.md b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/README.md new file mode 100644 index 00000000..dd99e0a0 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/README.md @@ -0,0 +1,20 @@ +scripts:运行数据处理脚本目录 +运行命令: + sh run_build_data.sh [udc|swda|mrda|atis] + +生成DA任务所需要的训练集、开发集、测试集时: + sh run_build_data.sh swda + sh run_build_data.sh mrda + 生成数据分别在open-dialog/data/swda和open-dialog/data/mrda + +生成DST任务所需的训练集、开发集、测试集时: + sh run_build_data.sh dstc2 + 生成数据分别在open-dialog/data/dstc2 + +生成意图解析, 槽位识别任务所需训练集、开发集、测试集时: + sh run_build_data.sh atis + 生成槽位识别数据在open-dialog/data/atis/atis_slot + 生成意图识别数据在open-dialog/data/atis/atis_intent + + + diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_atis_dataset.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_atis_dataset.py new file mode 100755 index 00000000..2dafb1bc --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_atis_dataset.py @@ -0,0 +1,155 @@ +# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""build swda train dev test dataset""" + +import json +import sys +import csv +import os +import re + + +class ATIS(object): + """ + nlu dataset atis data process + """ + def __init__(self): + """ + init instance + """ + self.slot_id = 2 + self.slot_dict = {"PAD": 0, "O": 1} + self.intent_id = 0 + self.intent_dict = dict() + self.src_dir = "../data/atis/source_data" + self.out_slot_dir = "../data/atis/atis_slot" + self.out_intent_dir = "../data/atis/atis_intent" + self.map_tag_slot = "../data/atis/atis_slot/map_tag_slot_id.txt" + self.map_tag_intent = "../data/atis/atis_intent/map_tag_intent_id.txt" + + def _load_file(self, data_type): + """ + load dataset filename + """ + slot_stat = os.path.exists(self.out_slot_dir) + if not slot_stat: + os.makedirs(self.out_slot_dir) + intent_stat = os.path.exists(self.out_intent_dir) + if not intent_stat: + os.makedirs(self.out_intent_dir) + src_examples = [] + json_file = os.path.join(self.src_dir, "%s.json" % data_type) + with open(json_file, 'r') as load_f: + json_dict = json.load(load_f) + examples = json_dict['rasa_nlu_data']['common_examples'] + for example in examples: + text = example.get('text') + intent = example.get('intent') + entities = example.get('entities') + src_examples.append((text, intent, entities)) + return src_examples + + def _parser_intent_data(self, examples, data_type): + """ + parser intent dataset + """ + out_filename = "%s/%s.txt" % (self.out_intent_dir, data_type) + with open(out_filename, 'w') as fw: + for example in examples: + if example[1] not in self.intent_dict: + self.intent_dict[example[1]] = self.intent_id + self.intent_id += 1 + fw.write("%s\t%s\n" % (self.intent_dict[example[1]], example[0].lower())) + + with open(self.map_tag_intent, 'w') as fw: + for tag in self.intent_dict: + fw.write("%s\t%s\n" % (tag, self.intent_dict[tag])) + + def _parser_slot_data(self, examples, data_type): + """ + parser slot dataset + """ + out_filename = "%s/%s.txt" % (self.out_slot_dir, data_type) + with open(out_filename, 'w') as fw: + for example in examples: + tags = [] + text = example[0] + entities = example[2] + if not entities: + tags = [str(self.slot_dict['O'])] * len(text.strip().split()) + continue + for i in range(len(entities)): + enty = entities[i] + start = enty['start'] + value_num = len(enty['value'].split()) + tags_slot = [] + for j in range(value_num): + if j == 0: + bround_tag = "B" + else: + bround_tag = "I" + tag = "%s-%s" % (bround_tag, enty['entity']) + if tag not in self.slot_dict: + self.slot_dict[tag] = self.slot_id + self.slot_id += 1 + tags_slot.append(str(self.slot_dict[tag])) + if i == 0: + if start not in [0, 1]: + prefix_num = len(text[: start].strip().split()) + tags.extend([str(self.slot_dict['O'])] * prefix_num) + tags.extend(tags_slot) + else: + prefix_num = len(text[entities[i - 1]['end']: start].strip().split()) + tags.extend([str(self.slot_dict['O'])] * prefix_num) + tags.extend(tags_slot) + if entities[-1]['end'] < len(text): + suffix_num = len(text[entities[-1]['end']:].strip().split()) + tags.extend([str(self.slot_dict['O'])] * suffix_num) + fw.write("%s\t%s\n" % (text.encode('utf8'), " ".join(tags).encode('utf8'))) + + with open(self.map_tag_slot, 'w') as fw: + for slot in self.slot_dict: + fw.write("%s\t%s\n" % (slot, self.slot_dict[slot])) + + def get_train_dataset(self): + """ + parser train dataset and print train.txt + """ + train_examples = self._load_file("train") + self._parser_intent_data(train_examples, "train") + self._parser_slot_data(train_examples, "train") + + def 
get_test_dataset(self): + """ + parser test dataset and print test.txt + """ + test_examples = self._load_file("test") + self._parser_intent_data(test_examples, "test") + self._parser_slot_data(test_examples, "test") + + def main(self): + """ + run data process + """ + self.get_train_dataset() + self.get_test_dataset() + + +if __name__ == "__main__": + atis_inst = ATIS() + atis_inst.main() + + + + diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_dstc2_dataset.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_dstc2_dataset.py new file mode 100755 index 00000000..b38de16b --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_dstc2_dataset.py @@ -0,0 +1,149 @@ +# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""build mrda train dev test dataset""" + +import json +import sys +import csv +import os +import re + +import commonlib + + +class DSTC2(object): + """ + dialogue state tracking dstc2 data process + """ + def __init__(self): + """ + init instance + """ + self.map_tag_dict = {} + self.out_dir = "../data/dstc2/dstc2" + self.out_asr_dir = "../data/dstc2/dstc2_asr" + self.data_list = "./conf/dstc2.conf" + self.map_tag = "../data/dstc2/dstc2/map_tag_id.txt" + self.src_dir = "../data/dstc2/source_data" + self.onto_json = "../data/dstc2/source_data/ontology_dstc2.json" + self._load_file() + self._load_ontology() + + def _load_file(self): + """ + load dataset filename + """ + self.data_dict = commonlib.load_dict(self.data_list) + for data_type in self.data_dict: + for i in range(len(self.data_dict[data_type])): + self.data_dict[data_type][i] = os.path.join(self.src_dir, self.data_dict[data_type][i]) + + def _load_ontology(self): + """ + load ontology tag + """ + tag_id = 1 + self.map_tag_dict['none'] = 0 + with open(self.onto_json, 'r') as fr: + ontology = json.load(fr) + slots_values = ontology['informable'] + for slot in slots_values: + for value in slots_values[slot]: + key = "%s_%s" % (slot, value) + self.map_tag_dict[key] = tag_id + tag_id += 1 + key = "%s_none" % (slot) + self.map_tag_dict[key] = tag_id + tag_id += 1 + + def _parser_dataset(self, data_type): + """ + parser train dev test dataset + """ + stat = os.path.exists(self.out_dir) + if not stat: + os.makedirs(self.out_dir) + asr_stat = os.path.exists(self.out_asr_dir) + if not asr_stat: + os.makedirs(self.out_asr_dir) + out_file = os.path.join(self.out_dir, "%s.txt" % data_type) + out_asr_file = os.path.join(self.out_asr_dir, "%s.txt" % data_type) + with open(out_file, 'w') as fw, open(out_asr_file, 'w') as fw_asr: + data_list = self.data_dict.get(data_type) + for fn in data_list: + log_file = os.path.join(fn, "log.json") + label_file = os.path.join(fn, "label.json") + with open(log_file, 'r') as f_log, open(label_file, 'r') as f_label: + log_json = json.load(f_log) + label_json = json.load(f_label) + session_id = log_json['session-id'] + assert 
len(label_json["turns"]) == len(log_json["turns"]) + for i in range(len(label_json["turns"])): + log_turn = log_json["turns"][i] + label_turn = label_json["turns"][i] + assert log_turn["turn-index"] == label_turn["turn-index"] + labels = ["%s_%s" % (slot, label_turn["goal-labels"][slot]) for slot in label_turn["goal-labels"]] + labels_ids = " ".join([str(self.map_tag_dict.get(label, self.map_tag_dict["%s_none" % label.split('_')[0]])) for label in labels]) + mach = log_turn['output']['transcript'] + user = label_turn['transcription'] + if not labels_ids.strip(): + labels_ids = self.map_tag_dict['none'] + out = "%s\t%s\1%s\t%s" % (session_id, mach, user, labels_ids) + user_asr = log_turn['input']['live']['asr-hyps'][0]['asr-hyp'].strip() + out_asr = "%s\t%s\1%s\t%s" % (session_id, mach, user_asr, labels_ids) + fw.write("%s\n" % out.encode('utf8')) + fw_asr.write("%s\n" % out_asr.encode('utf8')) + + def get_train_dataset(self): + """ + parser train dataset and print train.txt + """ + self._parser_dataset("train") + + def get_dev_dataset(self): + """ + parser dev dataset and print dev.txt + """ + self._parser_dataset("dev") + + def get_test_dataset(self): + """ + parser test dataset and print test.txt + """ + self._parser_dataset("test") + + def get_labels(self): + """ + get tag and map ids file + """ + with open(self.map_tag, 'w') as fw: + for elem in self.map_tag_dict: + fw.write("%s\t%s\n" % (elem, self.map_tag_dict[elem])) + + def main(self): + """ + run data process + """ + self.get_train_dataset() + self.get_dev_dataset() + self.get_test_dataset() + self.get_labels() + +if __name__ == "__main__": + dstc_inst = DSTC2() + dstc_inst.main() + + + + diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_mrda_dataset.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_mrda_dataset.py new file mode 100755 index 00000000..ee03d937 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_mrda_dataset.py @@ -0,0 +1,165 @@ +# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""build mrda train dev test dataset""" + +import sys +import csv +import os +import re + +import commonlib + + +class MRDA(object): + """ + dialogue act dataset mrda data process + """ + def __init__(self): + """ + init instance + """ + self.tag_id = 0 + self.map_tag_dict = dict() + self.out_dir = "../data/mrda" + self.data_list = "./conf/mrda.conf" + self.map_tag = "../data/mrda/map_tag_id.txt" + self.voc_map_tag = "../data/mrda/source_data/icsi_mrda+hs_corpus_050512/classmaps/map_01b_expanded_w_split" + self.src_dir = "../data/mrda/source_data/icsi_mrda+hs_corpus_050512/data" + self._load_file() + self.tag_dict = commonlib.load_voc(self.voc_map_tag) + + def _load_file(self): + """ + load dataset filename + """ + self.dadb_dict = {} + self.trans_dict = {} + self.data_dict = commonlib.load_dict(self.data_list) + file_list, file_path = commonlib.get_file_list(self.src_dir) + for i in range(len(file_list)): + name = file_list[i] + keyword = name.split('.')[0] + if 'dadb' in name: + self.dadb_dict[keyword] = file_path[i] + if 'trans' in name: + self.trans_dict[keyword] = file_path[i] + + def load_dadb(self, data_type): + """ + load dadb dataset + """ + dadb_dict = {} + conv_id_list = [] + dadb_list = self.data_dict[data_type] + for dadb_key in dadb_list: + dadb_file = self.dadb_dict[dadb_key] + with open(dadb_file, 'r') as fr: + row = csv.reader(fr, delimiter = ',') + for line in row: + elems = line + conv_id = elems[2] + conv_id_list.append(conv_id) + if len(elems) != 14: + continue + error_code = elems[3] + da_tag = elems[-9] + da_ori_tag = elems[-6] + dadb_dict[conv_id] = (error_code, da_ori_tag, da_tag) + return dadb_dict, conv_id_list + + def load_trans(self, data_type): + """load trans data""" + trans_dict = {} + trans_list = self.data_dict[data_type] + for trans_key in trans_list: + trans_file = self.trans_dict[trans_key] + with open(trans_file, 'r') as fr: + row = csv.reader(fr, delimiter = ',') + for line in row: + elems = line + if len(elems) != 3: + continue + conv_id = elems[0] + text = elems[1] + text_process = elems[2] + trans_dict[conv_id] = (text, text_process) + return trans_dict + + def _parser_dataset(self, data_type): + """ + parser train dev test dataset + """ + out_filename = "%s/%s.txt" % (self.out_dir, data_type) + dadb_dict, conv_id_list = self.load_dadb(data_type) + trans_dict = self.load_trans(data_type) + with open(out_filename, 'w') as fw: + for elem in conv_id_list: + v_dadb = dadb_dict[elem] + v_trans = trans_dict[elem] + da_tag = v_dadb[2] + if da_tag not in self.tag_dict: + continue + tag = self.tag_dict[da_tag] + if tag == "Z": + continue + if tag not in self.map_tag_dict: + self.map_tag_dict[tag] = self.tag_id + self.tag_id += 1 + caller = elem.split('_')[0].split('-')[-1] + conv_no = elem.split('_')[0].split('-')[0] + out = "%s\t%s\t%s\t%s" % (conv_no, self.map_tag_dict[tag], caller, v_trans[0]) + fw.write("%s\n" % out) + + def get_train_dataset(self): + """ + parser train dataset and print train.txt + """ + self._parser_dataset("train") + + def get_dev_dataset(self): + """ + parser dev dataset and print dev.txt + """ + self._parser_dataset("dev") + + def get_test_dataset(self): + """ + parser test dataset and print test.txt + """ + self._parser_dataset("test") + + def get_labels(self): + """ + get tag and map ids file + """ + with open(self.map_tag, 'w') as fw: + for elem in self.map_tag_dict: + fw.write("%s\t%s\n" % (elem, self.map_tag_dict[elem])) + + def main(self): + """ + run data process + """ + self.get_train_dataset() + self.get_dev_dataset() + 
self.get_test_dataset() + self.get_labels() + +if __name__ == "__main__": + mrda_inst = MRDA() + mrda_inst.main() + + + + diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_swda_dataset.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_swda_dataset.py new file mode 100755 index 00000000..ba25e370 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/build_swda_dataset.py @@ -0,0 +1,231 @@ +# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""build swda train dev test dataset""" + +import sys +import csv +import os +import re + +import commonlib + + +class SWDA(object): + """ + dialogue act dataset swda data process + """ + def __init__(self): + """ + init instance + """ + self.tag_id = 0 + self.map_tag_dict = dict() + self.out_dir = "../data/swda" + self.data_list = "./conf/swda.conf" + self.map_tag = "../data/swda/map_tag_id.txt" + self.src_dir = "../data/swda/source_data/swda" + self._load_file() + + def _load_file(self): + """ + load dataset filename + """ + self.data_dict = commonlib.load_dict(self.data_list) + self.file_dict = {} + child_dir = commonlib.get_dir_list(self.src_dir) + for chd in child_dir: + file_list, file_path = commonlib.get_file_list(chd) + for i in range(len(file_list)): + name = file_list[i] + keyword = "sw%s" % name.split('.')[0].split('_')[-1] + self.file_dict[keyword] = file_path[i] + + def _parser_dataset(self, data_type): + """ + parser train dev test dataset + """ + out_filename = "%s/%s.txt" % (self.out_dir, data_type) + with open(out_filename, 'w') as fw: + for name in self.data_dict[data_type]: + file_path = self.file_dict[name] + with open(file_path, 'r') as fr: + idx = 0 + row = csv.reader(fr, delimiter = ',') + for r in row: + if idx == 0: + idx += 1 + continue + out = self._parser_utterence(r) + fw.write("%s\n" % out) + + def _clean_text(self, text): + """ + text cleaning for dialogue act dataset + """ + if text.startswith('<') and text.endswith('>.'): + return text + if "[" in text or "]" in text: + stat = True + else: + stat = False + group = re.findall("\[.*?\+.*?\]", text) + while group and stat: + for elem in group: + elem_src = elem + elem = re.sub('\+', '', elem.lstrip('[').rstrip(']')) + text = text.replace(elem_src, elem) + if "[" in text or "]" in text: + stat = True + else: + stat = False + group = re.findall("\[.*?\+.*?\]", text) + if "{" in text or "}" in text: + stat = True + else: + stat = False + group = re.findall("{[A-Z].*?}", text) + while group and stat: + child_group = re.findall("{[A-Z]*(.*?)}", text) + for i in range(len(group)): + text = text.replace(group[i], child_group[i]) + if "{" in text or "}" in text: + stat = True + else: + stat = False + group = re.findall("{[A-Z].*?}", text) + if "(" in text or ")" in text: + stat = True + else: + stat = False + group = re.findall("\(\(.*?\)\)", text) + while group and stat: + for elem in group: + if elem: + elem_clean 
= re.sub("\(|\)", "", elem) + text = text.replace(elem, elem_clean) + else: + text = text.replace(elem, "mumblex") + if "(" in text or ")" in text: + stat = True + else: + stat = False + group = re.findall("\(\((.*?)\)\)", text) + + group = re.findall("\<.*?\>", text) + if group: + for elem in group: + text = text.replace(elem, "") + + text = re.sub(r" \'s", "\'s", text) + text = re.sub(r" n\'t", "n\'t", text) + text = re.sub(r" \'t", "\'t", text) + text = re.sub(" +", " ", text) + text = text.rstrip('\/').strip().strip('-') + text = re.sub("\[|\]|\+|\>|\<|\{|\}", "", text) + return text.strip().lower() + + def _map_tag(self, da_tag): + """ + map tag to 42 classes + """ + curr_da_tags = [] + curr_das = re.split(r"\s*[,;]\s*", da_tag) + for curr_da in curr_das: + if curr_da == "qy_d" or curr_da == "qw^d" or curr_da == "b^m": + pass + elif curr_da == "nn^e": + curr_da = "ng" + elif curr_da == "ny^e": + curr_da = "na" + else: + curr_da = re.sub(r'(.)\^.*', r'\1', curr_da) + curr_da = re.sub(r'[\(\)@*]', '', curr_da) + tag = curr_da + if tag in ('qr', 'qy'): + tag = 'qy' + elif tag in ('fe', 'ba'): + tag = 'ba' + elif tag in ('oo', 'co', 'cc'): + tag = 'oo_co_cc' + elif tag in ('fx', 'sv'): + tag = 'sv' + elif tag in ('aap', 'am'): + tag = 'aap_am' + elif tag in ('arp', 'nd'): + tag = 'arp_nd' + elif tag in ('fo', 'o', 'fw', '"', 'by', 'bc'): + tag = 'fo_o_fw_"_by_bc' + curr_da = tag + curr_da_tags.append(curr_da) + if curr_da_tags[0] not in self.map_tag_dict: + self.map_tag_dict[curr_da_tags[0]] = self.tag_id + self.tag_id += 1 + return self.map_tag_dict[curr_da_tags[0]] + + def _parser_utterence(self, line): + """ + parser one turn dialogue + """ + conversation_no = line[2] + act_tag = line[4] + caller = line[5] + text = line[8] + text = self._clean_text(text) + act_tag = self._map_tag(act_tag) + + out = "%s\t%s\t%s\t%s" % (conversation_no, act_tag, caller, text) + return out + + def get_train_dataset(self): + """ + parser train dataset and print train.txt + """ + self._parser_dataset("train") + + def get_dev_dataset(self): + """ + parser dev dataset and print dev.txt + """ + self._parser_dataset("dev") + + def get_test_dataset(self): + """ + parser test dataset and print test.txt + """ + self._parser_dataset("test") + + def get_labels(self): + """ + get tag and map ids file + """ + with open(self.map_tag, 'w') as fw: + for elem in self.map_tag_dict: + fw.write("%s\t%s\n" % (elem, self.map_tag_dict[elem])) + + def main(self): + """ + run data process + """ + self.get_train_dataset() + self.get_dev_dataset() + self.get_test_dataset() + self.get_labels() + +if __name__ == "__main__": + swda_inst = SWDA() + swda_inst.main() + + + + diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/commonlib.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/commonlib.py new file mode 100755 index 00000000..9706af30 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/commonlib.py @@ -0,0 +1,74 @@ +# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +"""common function""" +import sys +import os + + +def get_file_list(dir_name): + """ + get file list in directory + """ + file_list = list() + file_path = list() + for root, dirs, files in os.walk(dir_name): + for file in files: + file_list.append(file) + file_path.append(os.path.join(root, file)) + return file_list, file_path + + +def get_dir_list(dir_name): + """ + get directory names + """ + child_dir = [] + dir_list = os.listdir(dir_name) + for cur_file in dir_list: + path = os.path.join(dir_name, cur_file) + if not os.path.isdir(path): + continue + child_dir.append(path) + return child_dir + + +def load_dict(conf): + """ + load swda dataset config + """ + conf_dict = dict() + with open(conf, 'r') as fr: + for line in fr: + line = line.strip() + elems = line.split('\t') + if elems[0] not in conf_dict: + conf_dict[elems[0]] = [] + conf_dict[elems[0]].append(elems[1]) + return conf_dict + + +def load_voc(conf): + """ + load map dict + """ + map_dict = {} + with open(conf, 'r') as fr: + for line in fr: + line = line.strip() + elems = line.split('\t') + map_dict[elems[0]] = elems[1] + return map_dict + + + diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/dstc2.conf b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/dstc2.conf new file mode 100755 index 00000000..c40b94ee --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/dstc2.conf @@ -0,0 +1,3235 @@ +train Mar13_S0A1/voip-b772dbf437-20130402_143019 +train Mar13_S1A0/voip-b772dbf437-20130402_142118 +train Mar13_S1A0/voip-b772dbf437-20130402_141805 +train Mar13_S0A1/voip-b772dbf437-20130402_141105 +train Mar13_S1A1/voip-b772dbf437-20130402_142550 +train Mar13_S1A1/voip-b772dbf437-20130402_141548 +train Mar13_S0A0/voip-ec87351630-20130328_162752 +train Mar13_S1A0/voip-ec87351630-20130328_162916 +train Mar13_S0A1/voip-14cb91bc48-20130327_202138 +train Mar13_S1A1/voip-14cb91bc48-20130327_190136 +train Mar13_S1A1/voip-14cb91bc48-20130328_162839 +train Mar13_S1A0/voip-14cb91bc48-20130328_163311 +train Mar13_S1A0/voip-14cb91bc48-20130328_194336 +train Mar13_S0A0/voip-14cb91bc48-20130327_190340 +train Mar13_S1A0/voip-14cb91bc48-20130328_163437 +train Mar13_S0A0/voip-14cb91bc48-20130328_161626 +train Mar13_S1A1/voip-14cb91bc48-20130328_163139 +train Mar13_S0A0/voip-14cb91bc48-20130328_165532 +train Mar13_S1A0/voip-14cb91bc48-20130327_182446 +train Mar13_S1A1/voip-14cb91bc48-20130327_204058 +train Mar13_S0A1/voip-4c0d36762a-20130328_213330 +train Mar13_S1A1/voip-4c0d36762a-20130328_204534 +train Mar13_S0A1/voip-4c0d36762a-20130328_210737 +train Mar13_S0A0/voip-4c0d36762a-20130328_202045 +train Mar13_S1A0/voip-4c0d36762a-20130328_212300 +train Mar13_S0A1/voip-4c0d36762a-20130328_204720 +train Mar13_S1A0/voip-4c0d36762a-20130328_205236 +train Mar13_S0A1/voip-4c0d36762a-20130328_212449 +train Mar13_S0A1/voip-4c0d36762a-20130328_201131 +train Mar13_S1A1/voip-4c0d36762a-20130328_204956 +train Mar13_S1A0/voip-4c0d36762a-20130328_203937 +train Mar13_S0A1/voip-e2a895cfe5-20130327_020622 +train Mar13_S0A1/voip-e2a895cfe5-20130325_233852 +train Mar13_S1A0/voip-e2a895cfe5-20130325_231121 +train Mar13_S1A1/voip-e2a895cfe5-20130327_021102 +train Mar13_S1A0/voip-e2a895cfe5-20130326_232941 +train Mar13_S0A1/voip-e2a895cfe5-20130326_233604 +train Mar13_S0A0/voip-e2a895cfe5-20130325_234115 +train Mar13_S1A1/voip-e2a895cfe5-20130326_233655 +train 
Mar13_S0A0/voip-e2a895cfe5-20130326_233829 +train Mar13_S0A0/voip-e2a895cfe5-20130325_230900 +train Mar13_S1A0/voip-e2a895cfe5-20130325_230741 +train Mar13_S0A1/voip-e2a895cfe5-20130327_021619 +train Mar13_S0A0/voip-e2a895cfe5-20130326_232742 +train Mar13_S1A1/voip-e2a895cfe5-20130325_232606 +train Mar13_S0A0/voip-e2a895cfe5-20130325_233234 +train Mar13_S1A0/voip-fce37b0ccb-20130328_151459 +train Mar13_S0A0/voip-fce37b0ccb-20130328_145245 +train Mar13_S1A0/voip-fce37b0ccb-20130328_145831 +train Mar13_S1A0/voip-fce37b0ccb-20130328_143934 +train Mar13_S1A1/voip-fce37b0ccb-20130328_143529 +train Mar13_S0A1/voip-fce37b0ccb-20130328_145418 +train Mar13_S1A1/voip-fce37b0ccb-20130328_145014 +train Mar13_S0A1/voip-fce37b0ccb-20130328_144400 +train Mar13_S0A0/voip-fce37b0ccb-20130328_144745 +train Mar13_S0A1/voip-8991b7bff6-20130401_180409 +train Mar13_S1A0/voip-8991b7bff6-20130401_174504 +train Mar13_S0A0/voip-8991b7bff6-20130327_192101 +train Mar13_S1A0/voip-8991b7bff6-20130401_175934 +train Mar13_S0A1/voip-8991b7bff6-20130401_165927 +train Mar13_S0A1/voip-8991b7bff6-20130401_174957 +train Mar13_S1A0/voip-8991b7bff6-20130326_232407 +train Mar13_S1A0/voip-8991b7bff6-20130326_230855 +train Mar13_S1A1/voip-8991b7bff6-20130326_231508 +train Mar13_S1A1/voip-8991b7bff6-20130401_180108 +train Mar13_S0A1/voip-8991b7bff6-20130401_161851 +train Mar13_S0A0/voip-8991b7bff6-20130326_231411 +train Mar13_S1A0/voip-8991b7bff6-20130401_173851 +train Mar13_S0A0/voip-8991b7bff6-20130326_230316 +train Mar13_S1A1/voip-8991b7bff6-20130401_174811 +train Mar13_S1A1/voip-8991b7bff6-20130401_160325 +train Mar13_S1A1/voip-8991b7bff6-20130401_174015 +train Mar13_S1A1/voip-8991b7bff6-20130326_230200 +train Mar13_S1A0/voip-8991b7bff6-20130401_163110 +train Mar13_S0A1/voip-8991b7bff6-20130326_231255 +train Mar13_S1A0/voip-ef9aa63b85-20130328_190838 +train Mar13_S1A0/voip-ef9aa63b85-20130329_131927 +train Mar13_S1A0/voip-ef9aa63b85-20130328_223357 +train Mar13_S1A0/voip-ef9aa63b85-20130329_132421 +train Mar13_S1A1/voip-ef9aa63b85-20130329_125741 +train Mar13_S1A1/voip-ef9aa63b85-20130328_190444 +train Mar13_S0A1/voip-ef9aa63b85-20130329_130049 +train Mar13_S0A1/voip-ef9aa63b85-20130328_210319 +train Mar13_S1A1/voip-ef9aa63b85-20130329_131544 +train Mar13_S0A1/voip-ef9aa63b85-20130328_184027 +train Mar13_S1A1/voip-ef9aa63b85-20130328_211048 +train Mar13_S0A1/voip-ef9aa63b85-20130329_131415 +train Mar13_S1A0/voip-22a181cad5-20130326_023529 +train Mar13_S0A0/voip-22a181cad5-20130326_022324 +train Mar13_S0A0/voip-3cf7bd870d-20130327_184430 +train Mar13_S1A0/voip-3cf7bd870d-20130327_180414 +train Mar13_S1A1/voip-3cf7bd870d-20130328_212502 +train Mar13_S0A0/voip-3cf7bd870d-20130327_174017 +train Mar13_S1A1/voip-3cf7bd870d-20130328_211947 +train Mar13_S1A0/voip-3cf7bd870d-20130328_212749 +train Mar13_S1A0/voip-3cf7bd870d-20130328_150815 +train Mar13_S0A0/voip-3cf7bd870d-20130327_183049 +train Mar13_S0A0/voip-3cf7bd870d-20130327_175817 +train Mar13_S0A1/voip-3cf7bd870d-20130327_175213 +train Mar13_S1A0/voip-3cf7bd870d-20130327_174155 +train Mar13_S1A1/voip-3cf7bd870d-20130327_180852 +train Mar13_S1A1/voip-3cf7bd870d-20130327_181243 +train Mar13_S0A1/voip-3cf7bd870d-20130328_212136 +train Mar13_S1A1/voip-3cf7bd870d-20130327_184051 +train Mar13_S0A1/voip-3cf7bd870d-20130327_174526 +train Mar13_S0A1/voip-4f069a4136-20130402_032014 +train Mar13_S0A0/voip-4f069a4136-20130327_205937 +train Mar13_S1A1/voip-4f069a4136-20130402_031309 +train Mar13_S0A0/voip-4f069a4136-20130327_205819 +train Mar13_S1A0/voip-4f069a4136-20130327_204405 +train 
Mar13_S1A1/voip-4f069a4136-20130327_211252 +train Mar13_S0A1/voip-4f069a4136-20130402_030948 +train Mar13_S0A1/voip-4f069a4136-20130327_210318 +train Mar13_S1A1/voip-4f069a4136-20130402_032750 +train Mar13_S1A0/voip-4f069a4136-20130402_031636 +train Mar13_S0A1/voip-4f069a4136-20130327_204947 +train Mar13_S1A0/voip-4f069a4136-20130402_030550 +train Mar13_S1A1/voip-4f069a4136-20130327_205654 +train Mar13_S1A1/voip-4f069a4136-20130402_033307 +train Mar13_S0A0/voip-4f069a4136-20130327_204639 +train Mar13_S1A0/voip-4f069a4136-20130402_034737 +train Mar13_S1A0/voip-4f069a4136-20130402_032149 +train Mar13_S1A0/voip-4f069a4136-20130327_205501 +train Mar13_S0A1/voip-fdf8b50918-20130326_051637 +train Mar13_S1A0/voip-fdf8b50918-20130329_014245 +train Mar13_S1A1/voip-fdf8b50918-20130327_024718 +train Mar13_S1A0/voip-fdf8b50918-20130329_013736 +train Mar13_S0A0/voip-fdf8b50918-20130327_024110 +train Mar13_S0A1/voip-fdf8b50918-20130329_042606 +train Mar13_S1A1/voip-fdf8b50918-20130329_035324 +train Mar13_S1A1/voip-fdf8b50918-20130326_050512 +train Mar13_S0A1/voip-fdf8b50918-20130329_014041 +train Mar13_S1A0/voip-fdf8b50918-20130326_051438 +train Mar13_S0A1/voip-fdf8b50918-20130327_024909 +train Mar13_S0A0/voip-fdf8b50918-20130327_023811 +train Mar13_S1A1/voip-fdf8b50918-20130329_013925 +train Mar13_S1A1/voip-fdf8b50918-20130326_051921 +train Mar13_S1A0/voip-fdf8b50918-20130326_052504 +train Mar13_S0A1/voip-fdf8b50918-20130326_052355 +train Mar13_S1A0/voip-fdf8b50918-20130329_042348 +train Mar13_S1A0/voip-fdf8b50918-20130327_024453 +train Mar13_S1A1/voip-249d0f617b-20130328_161219 +train Mar13_S0A0/voip-249d0f617b-20130328_162005 +train Mar13_S1A1/voip-249d0f617b-20130326_005503 +train Mar13_S0A0/voip-249d0f617b-20130326_010500 +train Mar13_S1A0/voip-249d0f617b-20130328_161812 +train Mar13_S1A1/voip-9819537952-20130328_232353 +train Mar13_S1A0/voip-9819537952-20130327_023510 +train Mar13_S1A0/voip-9819537952-20130327_022757 +train Mar13_S0A1/voip-9819537952-20130328_230928 +train Mar13_S0A1/voip-9819537952-20130328_231209 +train Mar13_S0A0/voip-9819537952-20130327_023104 +train Mar13_S1A1/voip-9819537952-20130328_235404 +train Mar13_S1A0/voip-9819537952-20130328_232122 +train Mar13_S0A0/voip-9819537952-20130327_024815 +train Mar13_S1A1/voip-9819537952-20130327_023856 +train Mar13_S1A1/voip-9819537952-20130327_021901 +train Mar13_S0A1/voip-9819537952-20130328_235607 +train Mar13_S1A0/voip-9819537952-20130328_234847 +train Mar13_S1A1/voip-9f447ce48e-20130328_115123 +train Mar13_S0A1/voip-9f447ce48e-20130328_114744 +train Mar13_S1A1/voip-05e7a5440b-20130328_214746 +train Mar13_S0A1/voip-05e7a5440b-20130328_211013 +train Mar13_S1A1/voip-05e7a5440b-20130328_220724 +train Mar13_S0A1/voip-05e7a5440b-20130328_215502 +train Mar13_S1A0/voip-05e7a5440b-20130328_213943 +train Mar13_S1A0/voip-05e7a5440b-20130328_220901 +train Mar13_S0A1/voip-05e7a5440b-20130328_213636 +train Mar13_S1A0/voip-05e7a5440b-20130328_211620 +train Mar13_S1A1/voip-05e7a5440b-20130328_211413 +train Mar13_S1A1/voip-05e7a5440b-20130328_215138 +train Mar13_S1A0/voip-05e7a5440b-20130328_222114 +train Mar13_S0A1/voip-05e7a5440b-20130328_211839 +train Mar13_S1A1/voip-05e7a5440b-20130328_212037 +train Mar13_S1A0/voip-583e7cede5-20130323_060529 +train Mar13_S1A0/voip-583e7cede5-20130324_060002 +train Mar13_S1A1/voip-583e7cede5-20130324_063315 +train Mar13_S1A1/voip-583e7cede5-20130324_055306 +train Mar13_S1A1/voip-583e7cede5-20130323_052525 +train Mar13_S0A0/voip-583e7cede5-20130323_143756 +train Mar13_S1A1/voip-583e7cede5-20130324_060901 +train 
Mar13_S1A0/voip-583e7cede5-20130323_061037 +train Mar13_S1A1/voip-583e7cede5-20130324_062107 +train Mar13_S0A1/voip-583e7cede5-20130323_062756 +train Mar13_S0A0/voip-583e7cede5-20130324_060405 +train Mar13_S1A1/voip-583e7cede5-20130323_055421 +train Mar13_S0A0/voip-583e7cede5-20130324_063452 +train Mar13_S0A1/voip-583e7cede5-20130323_060100 +train Mar13_S1A0/voip-583e7cede5-20130323_051824 +train Mar13_S0A0/voip-583e7cede5-20130323_054735 +train Mar13_S0A0/voip-583e7cede5-20130324_061500 +train Mar13_S0A1/voip-583e7cede5-20130324_062248 +train Mar13_S0A1/voip-583e7cede5-20130324_063717 +train Mar13_S0A1/voip-869dd52548-20130401_183901 +train Mar13_S1A0/voip-869dd52548-20130401_175459 +train Mar13_S1A0/voip-869dd52548-20130401_184505 +train Mar13_S0A1/voip-869dd52548-20130401_175746 +train Mar13_S1A0/voip-869dd52548-20130326_000755 +train Mar13_S1A0/voip-869dd52548-20130401_180217 +train Mar13_S0A1/voip-869dd52548-20130401_184747 +train Mar13_S1A1/voip-869dd52548-20130401_180331 +train Mar13_S1A1/voip-869dd52548-20130401_184008 +train Mar13_S1A1/voip-869dd52548-20130326_000655 +train Mar13_S1A0/voip-869dd52548-20130401_184240 +train Mar13_S0A1/voip-869dd52548-20130326_001137 +train Mar13_S1A1/voip-869dd52548-20130401_175624 +train Mar13_S0A0/voip-869dd52548-20130326_000910 +train Mar13_S1A1/voip-869dd52548-20130401_184559 +train Mar13_S0A1/voip-869dd52548-20130401_175253 +train Mar13_S0A0/voip-a7ddefaeb3-20130327_174327 +train Mar13_S0A1/voip-a7ddefaeb3-20130327_174908 +train Mar13_S1A1/voip-a7ddefaeb3-20130328_172838 +train Mar13_S0A1/voip-a7ddefaeb3-20130328_172708 +train Mar13_S1A0/voip-a7ddefaeb3-20130328_173142 +train Mar13_S1A1/voip-8d5173f3a6-20130324_183442 +train Mar13_S1A1/voip-8d5173f3a6-20130324_185406 +train Mar13_S1A0/voip-8d5173f3a6-20130323_012203 +train Mar13_S1A1/voip-8d5173f3a6-20130323_011736 +train Mar13_S0A1/voip-8d5173f3a6-20130324_183623 +train Mar13_S0A0/voip-8d5173f3a6-20130323_012048 +train Mar13_S1A1/voip-8d5173f3a6-20130323_015026 +train Mar13_S0A0/voip-8d5173f3a6-20130324_190545 +train Mar13_S0A1/voip-8d5173f3a6-20130324_190350 +train Mar13_S1A0/voip-8d5173f3a6-20130324_184915 +train Mar13_S0A1/voip-8d5173f3a6-20130323_011549 +train Mar13_S0A1/voip-8d5173f3a6-20130323_013323 +train Mar13_S1A1/voip-8d5173f3a6-20130324_184603 +train Mar13_S1A0/voip-8d5173f3a6-20130323_015902 +train Mar13_S0A0/voip-8d5173f3a6-20130324_185203 +train Mar13_S0A1/voip-8d5173f3a6-20130323_013205 +train Mar13_S0A0/voip-8d5173f3a6-20130323_015159 +train Mar13_S1A0/voip-8d5173f3a6-20130324_190231 +train Mar13_S0A0/voip-8d5173f3a6-20130324_183949 +train Mar13_S0A1/voip-8d5173f3a6-20130324_184438 +train Mar13_S0A1/voip-58047f5227-20130327_032952 +train Mar13_S1A1/voip-58047f5227-20130327_034311 +train Mar13_S1A1/voip-58047f5227-20130326_032713 +train Mar13_S1A0/voip-58047f5227-20130326_031756 +train Mar13_S1A1/voip-58047f5227-20130326_030552 +train Mar13_S1A0/voip-58047f5227-20130326_031502 +train Mar13_S0A0/voip-58047f5227-20130327_032049 +train Mar13_S0A1/voip-58047f5227-20130327_033611 +train Mar13_S1A0/voip-58047f5227-20130326_032849 +train Mar13_S0A0/voip-58047f5227-20130327_032257 +train Mar13_S0A1/voip-58047f5227-20130326_032210 +train Mar13_S0A0/voip-58047f5227-20130327_033352 +train Mar13_S0A1/voip-58047f5227-20130327_034209 +train Mar13_S1A0/voip-58047f5227-20130327_032835 +train Mar13_S0A1/voip-58047f5227-20130326_030421 +train Mar13_S0A1/voip-58047f5227-20130327_004644 +train Mar13_S1A0/voip-58047f5227-20130327_033220 +train Mar13_S0A0/voip-58047f5227-20130326_030155 +train 
Mar13_S1A1/voip-58047f5227-20130327_032739 +train Mar13_S1A0/voip-da4a08ad84-20130328_154258 +train Mar13_S0A1/voip-da4a08ad84-20130328_160004 +train Mar13_S0A0/voip-da4a08ad84-20130328_154747 +train Mar13_S1A1/voip-da4a08ad84-20130328_154358 +train Mar13_S1A1/voip-da4a08ad84-20130328_155250 +train Mar13_S0A1/voip-da4a08ad84-20130328_155023 +train Mar13_S1A1/voip-da4a08ad84-20130328_155120 +train Mar13_S0A0/voip-da4a08ad84-20130328_160315 +train Mar13_S1A0/voip-14f776f781-20130328_140514 +train Mar13_S0A1/voip-14f776f781-20130328_141638 +train Mar13_S0A1/voip-14f776f781-20130328_115724 +train Mar13_S1A1/voip-14f776f781-20130329_033249 +train Mar13_S0A1/voip-14f776f781-20130328_123358 +train Mar13_S0A0/voip-14f776f781-20130328_151904 +train Mar13_S0A0/voip-14f776f781-20130328_121622 +train Mar13_S1A1/voip-14f776f781-20130328_142356 +train Mar13_S0A0/voip-14f776f781-20130328_123813 +train Mar13_S1A1/voip-1b51204ef5-20130401_173145 +train Mar13_S0A1/voip-1b51204ef5-20130401_172934 +train Mar13_S1A0/voip-1b51204ef5-20130401_145241 +train Mar13_S1A0/voip-1b51204ef5-20130401_150524 +train Mar13_S1A1/voip-1b51204ef5-20130401_145742 +train Mar13_S0A1/voip-1b51204ef5-20130401_145115 +train Mar13_S1A0/voip-fd0c0fb514-20130328_114840 +train Mar13_S1A1/voip-fd0c0fb514-20130328_114152 +train Mar13_S0A1/voip-fd0c0fb514-20130328_114024 +train Mar13_S0A1/voip-e3b4879e0d-20130327_030628 +train Mar13_S0A0/voip-e3b4879e0d-20130327_181026 +train Mar13_S1A1/voip-e3b4879e0d-20130326_215705 +train Mar13_S1A0/voip-e3b4879e0d-20130327_031211 +train Mar13_S1A1/voip-e3b4879e0d-20130327_182147 +train Mar13_S1A1/voip-e3b4879e0d-20130326_024455 +train Mar13_S0A1/voip-e3b4879e0d-20130327_033913 +train Mar13_S1A1/voip-e3b4879e0d-20130326_024044 +train Mar13_S1A1/voip-e3b4879e0d-20130326_021022 +train Mar13_S0A1/voip-e3b4879e0d-20130326_024336 +train Mar13_S0A0/voip-e3b4879e0d-20130326_023841 +train Mar13_S0A0/voip-e3b4879e0d-20130326_215254 +train Mar13_S0A0/voip-e3b4879e0d-20130326_020631 +train Mar13_S1A1/voip-e3b4879e0d-20130327_030955 +train Mar13_S1A0/voip-e3b4879e0d-20130327_181509 +train Mar13_S1A0/voip-e3b4879e0d-20130326_215515 +train Mar13_S1A0/voip-e3b4879e0d-20130326_022732 +train Mar13_S0A0/voip-e3b4879e0d-20130327_030803 +train Mar13_S1A1/voip-e3b4879e0d-20130327_181715 +train Mar13_S0A1/voip-e3b4879e0d-20130326_023327 +train Mar13_S1A1/voip-560cbd32a5-20130401_144308 +train Mar13_S1A1/voip-560cbd32a5-20130401_143827 +train Mar13_S1A0/voip-62cc0cc55d-20130323_155832 +train Mar13_S0A0/voip-62cc0cc55d-20130328_143213 +train Mar13_S0A1/voip-62cc0cc55d-20130323_155313 +train Mar13_S0A0/voip-62cc0cc55d-20130328_141332 +train Mar13_S0A0/voip-62cc0cc55d-20130323_155046 +train Mar13_S1A0/voip-62cc0cc55d-20130328_140957 +train Mar13_S0A1/voip-62cc0cc55d-20130323_160732 +train Mar13_S1A1/voip-62cc0cc55d-20130323_160541 +train Mar13_S1A1/voip-62cc0cc55d-20130323_161217 +train Mar13_S0A1/voip-62cc0cc55d-20130328_143025 +train Mar13_S1A1/voip-ab4f1dbb59-20130325_214245 +train Mar13_S1A1/voip-ab4f1dbb59-20130328_180542 +train Mar13_S0A1/voip-ab4f1dbb59-20130325_214609 +train Mar13_S1A0/voip-ab4f1dbb59-20130325_214838 +train Mar13_S0A0/voip-ab4f1dbb59-20130325_214441 +train Mar13_S0A0/voip-158a881c88-20130328_152500 +train Mar13_S1A1/voip-158a881c88-20130328_153956 +train Mar13_S0A0/voip-158a881c88-20130328_151638 +train Mar13_S1A1/voip-158a881c88-20130328_150912 +train Mar13_S0A1/voip-158a881c88-20130328_151847 +train Mar13_S1A0/voip-158a881c88-20130328_151021 +train Mar13_S1A0/voip-39a25ab2f8-20130326_122045 +train 
Mar13_S0A1/voip-39a25ab2f8-20130328_150729 +train Mar13_S1A1/voip-39a25ab2f8-20130329_010257 +train Mar13_S0A1/voip-39a25ab2f8-20130326_120526 +train Mar13_S1A0/voip-39a25ab2f8-20130328_131901 +train Mar13_S0A0/voip-39a25ab2f8-20130326_120841 +train Mar13_S1A1/voip-39a25ab2f8-20130326_131509 +train Mar13_S1A1/voip-30772678da-20130328_193916 +train Mar13_S1A0/voip-30772678da-20130328_192354 +train Mar13_S0A1/voip-30772678da-20130328_192819 +train Mar13_S0A0/voip-30772678da-20130328_200455 +train Mar13_S1A0/voip-30772678da-20130328_200202 +train Mar13_S0A1/voip-30772678da-20130328_195730 +train Mar13_S0A0/voip-30772678da-20130328_193458 +train Mar13_S0A0/voip-d76f6e4f82-20130327_195041 +train Mar13_S1A1/voip-d76f6e4f82-20130327_193430 +train Mar13_S1A1/voip-d76f6e4f82-20130327_201839 +train Mar13_S1A1/voip-d76f6e4f82-20130327_184807 +train Mar13_S1A0/voip-d76f6e4f82-20130327_184557 +train Mar13_S0A1/voip-d76f6e4f82-20130327_185043 +train Mar13_S0A0/voip-d76f6e4f82-20130327_185328 +train Mar13_S0A0/voip-d76f6e4f82-20130327_194817 +train Mar13_S0A1/voip-d76f6e4f82-20130327_190022 +train Mar13_S1A0/voip-d76f6e4f82-20130327_193247 +train Mar13_S1A1/voip-21ec2b7850-20130327_043931 +train Mar13_S1A1/voip-21ec2b7850-20130327_033947 +train Mar13_S0A1/voip-21ec2b7850-20130327_034759 +train Mar13_S0A1/voip-21ec2b7850-20130326_021426 +train Mar13_S0A0/voip-21ec2b7850-20130327_044255 +train Mar13_S1A0/voip-21ec2b7850-20130327_040209 +train Mar13_S0A0/voip-21ec2b7850-20130327_040416 +train Mar13_S1A1/voip-21ec2b7850-20130326_114409 +train Mar13_S1A1/voip-21ec2b7850-20130327_042044 +train Mar13_S0A0/voip-21ec2b7850-20130325_143118 +train Mar13_S0A0/voip-21ec2b7850-20130326_021614 +train Mar13_S0A1/voip-21ec2b7850-20130327_040626 +train Mar13_S1A0/voip-21ec2b7850-20130325_132551 +train Mar13_S0A0/voip-21ec2b7850-20130326_024646 +train Mar13_S0A1/voip-21ec2b7850-20130325_145025 +train Mar13_S0A1/voip-21ec2b7850-20130326_025529 +train Mar13_S1A0/voip-21ec2b7850-20130325_154906 +train Mar13_S1A0/voip-21ec2b7850-20130326_025736 +train Mar13_S1A0/voip-21ec2b7850-20130326_022849 +train Mar13_S1A0/voip-21ec2b7850-20130327_045035 +train Mar13_S0A1/voip-21ec2b7850-20130327_044903 +train Mar13_S1A1/voip-21ec2b7850-20130325_162417 +train Mar13_S1A1/voip-b27a230d2e-20130323_050439 +train Mar13_S0A0/voip-b27a230d2e-20130323_043834 +train Mar13_S1A0/voip-b27a230d2e-20130329_032627 +train Mar13_S1A1/voip-b27a230d2e-20130323_042544 +train Mar13_S1A1/voip-b27a230d2e-20130323_053403 +train Mar13_S0A1/voip-b27a230d2e-20130329_035828 +train Mar13_S0A1/voip-b27a230d2e-20130329_031831 +train Mar13_S0A1/voip-b27a230d2e-20130323_053029 +train Mar13_S0A1/voip-b27a230d2e-20130329_034847 +train Mar13_S0A1/voip-b27a230d2e-20130323_043046 +train Mar13_S1A0/voip-b27a230d2e-20130323_050944 +train Mar13_S1A0/voip-b27a230d2e-20130329_031627 +train Mar13_S1A0/voip-b27a230d2e-20130323_041953 +train Mar13_S1A1/voip-b27a230d2e-20130329_040014 +train Mar13_S1A0/voip-b27a230d2e-20130329_043420 +train Mar13_S1A0/voip-b27a230d2e-20130329_035413 +train Mar13_S0A1/voip-b27a230d2e-20130329_031256 +train Mar13_S1A0/voip-b27a230d2e-20130323_050830 +train Mar13_S1A1/voip-b27a230d2e-20130329_033804 +train Mar13_S0A1/voip-b27a230d2e-20130323_045538 +train Mar13_S1A0/voip-b27a230d2e-20130329_030940 +train Mar13_S1A1/voip-b27a230d2e-20130329_031422 +train Mar13_S1A1/voip-b27a230d2e-20130329_030324 +train Mar13_S0A0/voip-b27a230d2e-20130323_050551 +train Mar13_S1A0/voip-2d2d103292-20130326_041300 +train Mar13_S1A1/voip-2d2d103292-20130329_041636 +train 
Mar13_S1A1/voip-2d2d103292-20130326_041008 +train Mar13_S0A1/voip-2d2d103292-20130326_041541 +train Mar13_S0A1/voip-2d2d103292-20130329_041521 +train Mar13_S0A0/voip-2d2d103292-20130326_042921 +train Mar13_S0A1/voip-2d2d103292-20130326_041959 +train Mar13_S1A0/voip-2d2d103292-20130326_041815 +train Mar13_S1A0/voip-2d2d103292-20130326_043748 +train Mar13_S0A0/voip-2d2d103292-20130326_044020 +train Mar13_S1A1/voip-2d2d103292-20130329_041255 +train Mar13_S1A0/voip-2d2d103292-20130329_035727 +train Mar13_S1A0/voip-2d2d103292-20130329_040958 +train Mar13_S1A1/voip-2d2d103292-20130326_043352 +train Mar13_S1A1/voip-2d2d103292-20130329_040656 +train Mar13_S1A0/voip-2d2d103292-20130329_041424 +train Mar13_S1A1/voip-2d2d103292-20130329_035835 +train Mar13_S0A1/voip-2d2d103292-20130329_042056 +train Mar13_S1A0/voip-2d2d103292-20130328_195634 +train Mar13_S0A1/voip-2d2d103292-20130329_041951 +train Mar13_S1A1/voip-2d2d103292-20130328_195103 +train Mar13_S0A1/voip-2d2d103292-20130329_035601 +train Mar13_S0A0/voip-2d2d103292-20130326_040723 +train Mar13_S1A1/voip-908884f5fd-20130326_224812 +train Mar13_S0A0/voip-908884f5fd-20130326_235015 +train Mar13_S1A0/voip-908884f5fd-20130326_215916 +train Mar13_S1A1/voip-908884f5fd-20130327_000655 +train Mar13_S1A0/voip-8586129f35-20130328_160121 +train Mar13_S0A1/voip-03c59ba692-20130325_183305 +train Mar13_S1A0/voip-03c59ba692-20130324_034930 +train Mar13_S0A1/voip-03c59ba692-20130324_034507 +train Mar13_S1A1/voip-03c59ba692-20130402_054245 +train Mar13_S1A1/voip-03c59ba692-20130324_034058 +train Mar13_S1A0/voip-03c59ba692-20130324_033629 +train Mar13_S1A1/voip-03c59ba692-20130324_180022 +train Mar13_S0A1/voip-03c59ba692-20130402_060307 +train Mar13_S1A0/voip-03c59ba692-20130402_053545 +train Mar13_S0A1/voip-03c59ba692-20130324_180433 +train Mar13_S1A0/voip-03c59ba692-20130402_060050 +train Mar13_S0A0/voip-03c59ba692-20130324_034712 +train Mar13_S1A1/voip-03c59ba692-20130325_180735 +train Mar13_S0A0/voip-03c59ba692-20130324_033821 +train Mar13_S0A0/voip-aaa44b4121-20130326_060013 +train Mar13_S0A0/voip-aaa44b4121-20130326_055316 +train Mar13_S1A0/voip-aaa44b4121-20130326_055608 +train Mar13_S1A1/voip-aaa44b4121-20130326_051152 +train Mar13_S0A1/voip-aaa44b4121-20130326_055421 +train Mar13_S1A1/voip-aaa44b4121-20130327_170849 +train Mar13_S1A0/voip-aaa44b4121-20130327_171453 +train Mar13_S1A0/voip-aaa44b4121-20130327_170337 +train Mar13_S0A0/voip-aaa44b4121-20130326_051458 +train Mar13_S0A0/voip-aaa44b4121-20130327_171321 +train Mar13_S1A1/voip-aaa44b4121-20130327_002455 +train Mar13_S0A0/voip-aaa44b4121-20130327_165758 +train Mar13_S0A1/voip-aaa44b4121-20130326_050817 +train Mar13_S0A1/voip-aaa44b4121-20130327_172530 +train Mar13_S1A1/voip-aaa44b4121-20130327_170541 +train Mar13_S0A0/voip-aaa44b4121-20130327_172225 +train Mar13_S0A1/voip-aaa44b4121-20130327_170209 +train Mar13_S1A1/voip-aaa44b4121-20130327_171617 +train Mar13_S0A1/voip-aaa44b4121-20130327_170719 +train Mar13_S1A0/voip-aaa44b4121-20130326_060838 +train Mar13_S1A1/voip-aaa44b4121-20130326_054647 +train Mar13_S1A0/voip-aaa44b4121-20130327_171958 +train Mar13_S1A1/voip-affbf578cf-20130401_162946 +train Mar13_S0A1/voip-affbf578cf-20130401_163655 +train Mar13_S1A1/voip-e61fa89add-20130327_074357 +train Mar13_S0A1/voip-e61fa89add-20130326_013340 +train Mar13_S1A0/voip-e61fa89add-20130327_075222 +train Mar13_S1A0/voip-e61fa89add-20130326_100409 +train Mar13_S0A0/voip-e61fa89add-20130327_072313 +train Mar13_S0A0/voip-e61fa89add-20130326_004919 +train Mar13_S0A0/voip-e61fa89add-20130327_075612 +train 
Mar13_S1A0/voip-e61fa89add-20130327_074017 +train Mar13_S1A1/voip-e61fa89add-20130326_013931 +train Mar13_S0A1/voip-e61fa89add-20130327_080334 +train Mar13_S1A0/voip-e61fa89add-20130326_011204 +train Mar13_S0A1/voip-e61fa89add-20130326_100750 +train Mar13_S0A1/voip-e61fa89add-20130327_074526 +train Mar13_S0A0/voip-e61fa89add-20130326_013737 +train Mar13_S0A0/voip-e61fa89add-20130326_100617 +train Mar13_S1A1/voip-e61fa89add-20130327_071630 +train Mar13_S0A1/voip-c8ec8c76dd-20130328_205311 +train Mar13_S0A1/voip-c8ec8c76dd-20130328_205953 +train Mar13_S1A0/voip-c8ec8c76dd-20130328_180054 +train Mar13_S1A1/voip-c8ec8c76dd-20130328_175715 +train Mar13_S0A0/voip-c8ec8c76dd-20130328_174742 +train Mar13_S0A1/voip-c8ec8c76dd-20130328_175504 +train Mar13_S1A0/voip-c8ec8c76dd-20130328_210403 +train Mar13_S1A0/voip-c8ec8c76dd-20130328_181909 +train Mar13_S1A1/voip-c8ec8c76dd-20130328_205558 +train Mar13_S0A0/voip-c8ec8c76dd-20130328_205041 +train Mar13_S1A1/voip-5cf59cc660-20130327_144955 +train Mar13_S1A0/voip-5cf59cc660-20130328_161517 +train Mar13_S1A1/voip-5cf59cc660-20130328_163609 +train Mar13_S1A0/voip-5cf59cc660-20130327_145314 +train Mar13_S0A0/voip-5cf59cc660-20130328_143501 +train Mar13_S0A1/voip-5cf59cc660-20130328_164112 +train Mar13_S1A0/voip-5cf59cc660-20130328_161200 +train Mar13_S0A0/voip-5cf59cc660-20130328_161054 +train Mar13_S1A0/voip-5cf59cc660-20130328_143758 +train Mar13_S0A0/voip-5cf59cc660-20130327_144735 +train Mar13_S0A1/voip-5cf59cc660-20130328_172318 +train Mar13_S0A1/voip-5cf59cc660-20130327_145222 +train Mar13_S1A0/voip-5cf59cc660-20130327_144604 +train Mar13_S0A1/voip-5cf59cc660-20130328_151124 +train Mar13_S0A1/voip-5cf59cc660-20130327_143126 +train Mar13_S0A0/voip-5cf59cc660-20130328_171914 +train Mar13_S0A0/voip-5cf59cc660-20130327_145740 +train Mar13_S1A1/voip-5cf59cc660-20130328_145134 +train Mar13_S1A1/voip-5cf59cc660-20130328_160837 +train Mar13_S1A1/voip-5cf59cc660-20130327_144338 +train Mar13_S0A1/voip-5cf59cc660-20130328_160946 +train Mar13_S1A1/voip-5cf59cc660-20130327_143308 +train Mar13_S1A1/voip-5cf59cc660-20130328_172556 +train Mar13_S0A0/voip-5749b16764-20130328_145900 +train Mar13_S1A0/voip-5749b16764-20130328_150143 +train Mar13_S0A0/voip-5749b16764-20130328_151730 +train Mar13_S1A1/voip-5749b16764-20130328_150400 +train Mar13_S0A1/voip-5749b16764-20130328_145608 +train Mar13_S1A1/voip-5749b16764-20130328_151234 +train Mar13_S0A1/voip-3b59a0391b-20130401_135225 +train Mar13_S1A1/voip-3b59a0391b-20130401_133524 +train Mar13_S1A1/voip-3b59a0391b-20130401_133038 +train Mar13_S1A0/voip-3b59a0391b-20130401_133238 +train Mar13_S1A1/voip-3b59a0391b-20130401_135704 +train Mar13_S0A1/voip-3b59a0391b-20130401_132324 +train Mar13_S1A1/voip-3b59a0391b-20130401_134901 +train Mar13_S0A1/voip-3b59a0391b-20130401_134053 +train Mar13_S1A0/voip-b20b6e847a-20130326_222313 +train Mar13_S1A1/voip-b20b6e847a-20130326_222030 +train Mar13_S0A1/voip-b20b6e847a-20130326_222936 +train Mar13_S0A0/voip-b20b6e847a-20130326_224116 +train Mar13_S1A0/voip-b20b6e847a-20130326_222829 +train Mar13_S0A1/voip-64150aca03-20130401_194320 +train Mar13_S1A1/voip-199d62165b-20130402_123401 +train Mar13_S1A0/voip-199d62165b-20130402_122050 +train Mar13_S1A1/voip-199d62165b-20130402_122711 +train Mar13_S0A1/voip-199d62165b-20130402_123507 +train Mar13_S1A0/voip-199d62165b-20130402_124137 +train Mar13_S1A1/voip-199d62165b-20130402_115733 +train Mar13_S1A0/voip-199d62165b-20130402_120922 +train Mar13_S0A1/voip-199d62165b-20130402_121352 +train Mar13_S1A0/voip-199d62165b-20130402_120456 +train 
Mar13_S0A1/voip-199d62165b-20130402_122354 +train Mar13_S1A0/voip-199d62165b-20130402_122214 +train Mar13_S1A1/voip-199d62165b-20130402_121601 +train Mar13_S0A1/voip-199d62165b-20130402_115850 +train Mar13_S0A0/voip-3860c915c2-20130328_164046 +train Mar13_S1A0/voip-3860c915c2-20130328_164431 +train Mar13_S0A1/voip-22c938c8ba-20130325_134402 +train Mar13_S1A0/voip-22c938c8ba-20130325_130740 +train Mar13_S0A0/voip-22c938c8ba-20130325_123407 +train Mar13_S0A0/voip-22c938c8ba-20130325_130445 +train Mar13_S1A0/voip-22c938c8ba-20130325_134206 +train Mar13_S0A0/voip-22c938c8ba-20130325_142552 +train Mar13_S1A0/voip-22c938c8ba-20130325_143621 +train Mar13_S0A1/voip-22c938c8ba-20130325_125039 +train Mar13_S1A1/voip-22c938c8ba-20130325_124542 +train Mar13_S0A1/voip-22c938c8ba-20130325_130208 +train Mar13_S1A0/voip-52d599db9c-20130326_213541 +train Mar13_S1A1/voip-52d599db9c-20130327_175215 +train Mar13_S1A1/voip-52d599db9c-20130402_001953 +train Mar13_S0A0/voip-52d599db9c-20130323_054632 +train Mar13_S0A1/voip-52d599db9c-20130326_214515 +train Mar13_S0A1/voip-52d599db9c-20130326_213117 +train Mar13_S1A0/voip-52d599db9c-20130326_214326 +train Mar13_S1A1/voip-52d599db9c-20130326_212814 +train Mar13_S1A1/voip-52d599db9c-20130326_214615 +train Mar13_S0A1/voip-52d599db9c-20130402_001838 +train Mar13_S0A1/voip-52d599db9c-20130323_075058 +train Mar13_S1A0/voip-52d599db9c-20130402_002245 +train Mar13_S0A1/voip-52d599db9c-20130325_135450 +train Mar13_S0A1/voip-52d599db9c-20130328_154633 +train Mar13_S1A1/voip-52d599db9c-20130326_010952 +train Mar13_S1A0/voip-52d599db9c-20130323_054736 +train Mar13_S1A0/voip-52d599db9c-20130402_002522 +train Mar13_S0A0/voip-52d599db9c-20130323_074800 +train Mar13_S0A0/voip-52d599db9c-20130326_212959 +train Mar13_S1A1/voip-52d599db9c-20130402_000814 +train Mar13_S0A0/voip-52d599db9c-20130326_011106 +train Mar13_S0A1/voip-0fa32b1e78-20130402_141848 +train Mar13_S0A1/voip-0fa32b1e78-20130328_234145 +train Mar13_S1A1/voip-0fa32b1e78-20130328_151950 +train Mar13_S0A1/voip-0fa32b1e78-20130402_140213 +train Mar13_S0A1/voip-0fa32b1e78-20130401_151332 +train Mar13_S1A0/voip-0fa32b1e78-20130328_234804 +train Mar13_S0A0/voip-0fa32b1e78-20130328_151140 +train Mar13_S1A1/voip-0fa32b1e78-20130328_233916 +train Mar13_S1A1/voip-0fa32b1e78-20130402_132031 +train Mar13_S1A0/voip-0fa32b1e78-20130328_233625 +train Mar13_S1A0/voip-0fa32b1e78-20130402_142351 +train Mar13_S1A1/voip-0fa32b1e78-20130328_233747 +train Mar13_S0A1/voip-0fa32b1e78-20130402_142234 +train Mar13_S1A1/voip-0fa32b1e78-20130402_142022 +train Mar13_S1A1/voip-0fa32b1e78-20130328_151336 +train Mar13_S0A1/voip-0fa32b1e78-20130328_152422 +train Mar13_S1A0/voip-0fa32b1e78-20130328_235008 +train Mar13_S1A1/voip-0fa32b1e78-20130328_153725 +train Mar13_S1A0/voip-0fa32b1e78-20130402_140846 +train Mar13_S1A0/voip-0fa32b1e78-20130328_152808 +train Mar13_S0A1/voip-0fa32b1e78-20130402_140739 +train Mar13_S1A0/voip-0fa32b1e78-20130402_140356 +train Mar13_S1A1/voip-0fa32b1e78-20130402_141117 +train Mar13_S0A1/voip-09733d9e4c-20130327_123942 +train Mar13_S1A1/voip-7e4cdce06a-20130323_130200 +train Mar13_S1A0/voip-7e4cdce06a-20130323_131109 +train Mar13_S1A1/voip-7e4cdce06a-20130323_130824 +train Mar13_S0A1/voip-7e4cdce06a-20130323_130520 +train Mar13_S1A1/voip-edb8609855-20130327_183456 +train Mar13_S1A1/voip-edb8609855-20130327_182512 +train Mar13_S0A1/voip-edb8609855-20130327_182309 +train Mar13_S0A1/voip-edb8609855-20130327_183200 +train Mar13_S1A0/voip-edb8609855-20130327_183323 +train Mar13_S0A0/voip-edb8609855-20130327_183802 +train 
Mar13_S1A0/voip-edb8609855-20130327_182651 +train Mar13_S1A0/voip-b6618de447-20130328_152114 +train Mar13_S0A1/voip-b6618de447-20130328_160617 +train Mar13_S0A1/voip-b6618de447-20130329_131207 +train Mar13_S1A0/voip-b6618de447-20130328_160419 +train Mar13_S0A1/voip-b6618de447-20130326_211132 +train Mar13_S1A1/voip-b6618de447-20130328_160315 +train Mar13_S0A1/voip-b6618de447-20130326_211551 +train Mar13_S1A0/voip-b6618de447-20130328_154703 +train Mar13_S0A0/voip-b6618de447-20130328_160810 +train Mar13_S0A0/voip-b6618de447-20130328_151251 +train Mar13_S0A1/voip-b6618de447-20130328_155353 +train Mar13_S1A1/voip-b6618de447-20130328_151535 +train Mar13_S0A0/voip-b6618de447-20130328_154027 +train Mar13_S1A1/voip-b6618de447-20130325_145518 +train Mar13_S1A1/voip-b6618de447-20130328_152908 +train Mar13_S1A1/voip-b6618de447-20130328_155531 +train Mar13_S0A1/voip-b6618de447-20130328_153612 +train Mar13_S1A0/voip-b6618de447-20130328_153132 +train Mar13_S1A0/voip-2d3d74d091-20130328_134757 +train Mar13_S0A1/voip-2d3d74d091-20130401_235307 +train Mar13_S0A1/voip-2d3d74d091-20130328_143735 +train Mar13_S0A0/voip-2d3d74d091-20130328_191642 +train Mar13_S1A1/voip-2d3d74d091-20130328_135311 +train Mar13_S1A0/voip-2d3d74d091-20130328_192945 +train Mar13_S0A1/voip-2d3d74d091-20130328_135137 +train Mar13_S1A1/voip-2d3d74d091-20130325_221539 +train Mar13_S1A1/voip-2d3d74d091-20130328_140026 +train Mar13_S0A0/voip-2d3d74d091-20130325_230144 +train Mar13_S1A1/voip-2d3d74d091-20130401_234343 +train Mar13_S1A1/voip-2d3d74d091-20130328_153522 +train Mar13_S0A1/voip-2d3d74d091-20130401_234151 +train Mar13_S0A0/voip-2d3d74d091-20130328_144241 +train Mar13_S0A1/voip-2d3d74d091-20130325_224245 +train Mar13_S0A0/voip-2d3d74d091-20130328_182952 +train Mar13_S1A0/voip-fe60dae302-20130328_191353 +train Mar13_S0A0/voip-fe60dae302-20130328_192329 +train Mar13_S1A0/voip-4c25da9a27-20130327_141855 +train Mar13_S0A1/voip-4c25da9a27-20130327_135709 +train Mar13_S1A0/voip-4c25da9a27-20130325_184604 +train Mar13_S0A1/voip-4c25da9a27-20130325_182216 +train Mar13_S1A0/voip-4c25da9a27-20130327_140750 +train Mar13_S0A0/voip-4c25da9a27-20130325_183726 +train Mar13_S1A1/voip-4c25da9a27-20130325_183445 +train Mar13_S0A1/voip-4c25da9a27-20130327_141034 +train Mar13_S0A1/voip-4c25da9a27-20130327_140554 +train Mar13_S0A0/voip-4c25da9a27-20130327_140246 +train Mar13_S0A1/voip-4c25da9a27-20130325_184738 +train Mar13_S0A0/voip-4c25da9a27-20130327_141556 +train Mar13_S1A0/voip-4c25da9a27-20130325_181340 +train Mar13_S0A1/voip-4c25da9a27-20130325_181011 +train Mar13_S1A1/voip-4c25da9a27-20130327_140126 +train Mar13_S1A0/voip-4c25da9a27-20130325_183247 +train Mar13_S1A1/voip-4c25da9a27-20130327_140438 +train Mar13_S1A1/voip-4c25da9a27-20130325_181701 +train Mar13_S1A0/voip-317a1436fe-20130325_142027 +train Mar13_S0A1/voip-317a1436fe-20130325_171203 +train Mar13_S0A0/voip-317a1436fe-20130325_170800 +train Mar13_S1A1/voip-317a1436fe-20130325_172154 +train Mar13_S1A0/voip-317a1436fe-20130325_171718 +train Mar13_S1A1/voip-317a1436fe-20130325_171321 +train Mar13_S0A0/voip-317a1436fe-20130325_173537 +train Mar13_S0A1/voip-317a1436fe-20130325_174012 +train Mar13_S0A0/voip-317a1436fe-20130325_171950 +train Mar13_S0A1/voip-317a1436fe-20130325_173122 +train Mar13_S0A0/voip-88f198881b-20130326_032712 +train Mar13_S0A1/voip-88f198881b-20130326_014111 +train Mar13_S1A0/voip-88f198881b-20130326_014812 +train Mar13_S1A1/voip-88f198881b-20130326_032851 +train Mar13_S0A1/voip-0f41c16f2f-20130402_005145 +train Mar13_S1A0/voip-0f41c16f2f-20130402_005414 +train 
Mar13_S1A0/voip-0f41c16f2f-20130325_193723 +train Mar13_S1A0/voip-0f41c16f2f-20130325_213659 +train Mar13_S1A0/voip-0f41c16f2f-20130401_235338 +train Mar13_S1A1/voip-0f41c16f2f-20130402_005015 +train Mar13_S0A0/voip-0f41c16f2f-20130325_213819 +train Mar13_S0A1/voip-0f41c16f2f-20130402_004841 +train Mar13_S1A1/voip-0f41c16f2f-20130325_213951 +train Mar13_S1A1/voip-0f41c16f2f-20130401_235748 +train Mar13_S1A1/voip-0f41c16f2f-20130325_192310 +train Mar13_S1A1/voip-0f41c16f2f-20130401_235017 +train Mar13_S0A1/voip-0f41c16f2f-20130401_235542 +train Mar13_S0A1/voip-0f41c16f2f-20130401_235916 +train Mar13_S1A1/voip-0f41c16f2f-20130325_204340 +train Mar13_S1A1/voip-0f41c16f2f-20130402_005804 +train Mar13_S1A0/voip-0f41c16f2f-20130402_000226 +train Mar13_S0A1/voip-0f41c16f2f-20130325_193614 +train Mar13_S1A0/voip-0f41c16f2f-20130402_004415 +train Mar13_S0A0/voip-0f41c16f2f-20130325_193856 +train Mar13_S0A1/voip-87de4f7a80-20130324_154621 +train Mar13_S0A0/voip-87de4f7a80-20130324_154113 +train Mar13_S1A0/voip-87de4f7a80-20130324_153705 +train Mar13_S1A0/voip-87de4f7a80-20130324_155006 +train Mar13_S1A1/voip-87de4f7a80-20130324_154807 +train Mar13_S1A1/voip-c8821c664b-20130322_222908 +train Mar13_S0A0/voip-202b6a3cc4-20130327_185502 +train Mar13_S1A0/voip-202b6a3cc4-20130327_184449 +train Mar13_S1A1/voip-202b6a3cc4-20130327_185351 +train Mar13_S1A1/voip-202b6a3cc4-20130327_184215 +train Mar13_S0A1/voip-202b6a3cc4-20130327_184721 +train Mar13_S1A1/voip-ccf48b9a6a-20130329_042239 +train Mar13_S0A1/voip-ccf48b9a6a-20130329_041327 +train Mar13_S0A1/voip-ccf48b9a6a-20130329_053509 +train Mar13_S1A1/voip-ccf48b9a6a-20130329_052518 +train Mar13_S1A0/voip-ccf48b9a6a-20130329_053241 +train Mar13_S1A1/voip-ccf48b9a6a-20130329_053050 +train Mar13_S0A1/voip-ccf48b9a6a-20130329_042416 +train Mar13_S1A0/voip-ccf48b9a6a-20130329_041830 +train Mar13_S0A1/voip-ccf48b9a6a-20130329_043420 +train Mar13_S0A0/voip-15d8a89cec-20130327_021458 +train Mar13_S0A0/voip-15d8a89cec-20130327_015041 +train Mar13_S0A1/voip-15d8a89cec-20130327_020255 +train Mar13_S1A0/voip-15d8a89cec-20130327_021758 +train Mar13_S1A1/voip-15d8a89cec-20130327_020852 +train Mar13_S1A1/voip-15d8a89cec-20130327_014753 +train Mar13_S0A1/voip-15d8a89cec-20130327_022535 +train Mar13_S0A1/voip-15d8a89cec-20130327_015432 +train Mar13_S1A0/voip-15d8a89cec-20130327_020057 +train Mar13_S1A1/voip-f1e8236264-20130323_002501 +train Mar13_S1A1/voip-f1e8236264-20130323_003719 +train Mar13_S0A1/voip-f1e8236264-20130323_003057 +train Mar13_S0A1/voip-f1e8236264-20130323_002130 +train Mar13_S1A1/voip-f1e8236264-20130323_004330 +train Mar13_S1A0/voip-f1e8236264-20130323_002326 +train Mar13_S1A0/voip-f1e8236264-20130323_004442 +train Mar13_S0A1/voip-f1e8236264-20130323_005548 +train Mar13_S1A0/voip-f1e8236264-20130323_003523 +train Mar13_S1A1/voip-7c3a08072d-20130326_005842 +train Mar13_S0A0/voip-7c3a08072d-20130326_005512 +train Mar13_S0A1/voip-7c3a08072d-20130326_010113 +train Mar13_S1A0/voip-7c3a08072d-20130326_005642 +train Mar13_S1A0/voip-b08f15a787-20130402_072347 +train Mar13_S1A0/voip-b08f15a787-20130402_075759 +train Mar13_S1A0/voip-b08f15a787-20130326_021953 +train Mar13_S0A1/voip-b08f15a787-20130402_071415 +train Mar13_S0A0/voip-b08f15a787-20130326_023412 +train Mar13_S1A1/voip-b08f15a787-20130402_074900 +train Mar13_S1A0/voip-b08f15a787-20130326_023936 +train Mar13_S0A1/voip-b08f15a787-20130402_075211 +train Mar13_S1A1/voip-b08f15a787-20130326_021438 +train Mar13_S1A1/voip-b08f15a787-20130402_074726 +train Mar13_S1A1/voip-b08f15a787-20130402_072532 +train 
Mar13_S0A1/voip-b08f15a787-20130326_022327 +train Mar13_S0A1/voip-b08f15a787-20130402_072705 +train Mar13_S0A1/voip-b08f15a787-20130402_065804 +train Mar13_S1A0/voip-b08f15a787-20130402_071910 +train Mar13_S1A1/voip-b08f15a787-20130402_070704 +train Mar13_S1A1/voip-22756d9e8f-20130329_045435 +train Mar13_S1A0/voip-22756d9e8f-20130329_044955 +train Mar13_S1A1/voip-22756d9e8f-20130329_043954 +train Mar13_S0A1/voip-22756d9e8f-20130329_045117 +train Mar13_S1A0/voip-22756d9e8f-20130329_050412 +train Mar13_S1A0/voip-22756d9e8f-20130328_170312 +train Mar13_S1A1/voip-22756d9e8f-20130329_044234 +train Mar13_S0A1/voip-22756d9e8f-20130329_050743 +train Mar13_S1A1/voip-22756d9e8f-20130329_050114 +train Mar13_S1A0/voip-22756d9e8f-20130329_044622 +train Mar13_S0A1/voip-22756d9e8f-20130329_050612 +train Mar13_S0A1/voip-22756d9e8f-20130329_044511 +train Mar13_S0A1/voip-22756d9e8f-20130329_043814 +train Mar13_S1A0/voip-0999e9bb30-20130325_124910 +train Mar13_S0A1/voip-0999e9bb30-20130325_124715 +train Mar13_S1A1/voip-dda7c88c6e-20130323_052950 +train Mar13_S0A1/voip-dda7c88c6e-20130323_055048 +train Mar13_S1A0/voip-dda7c88c6e-20130323_053612 +train Mar13_S1A0/voip-dda7c88c6e-20130323_053057 +train Mar13_S0A1/voip-dda7c88c6e-20130323_053928 +train Mar13_S0A1/voip-9735278861-20130401_153620 +train Mar13_S0A1/voip-9735278861-20130401_160849 +train Mar13_S1A1/voip-9735278861-20130401_154740 +train Mar13_S1A1/voip-9735278861-20130401_161036 +train Mar13_S0A1/voip-9735278861-20130401_161717 +train Mar13_S1A0/voip-9735278861-20130401_151648 +train Mar13_S1A0/voip-9735278861-20130401_161215 +train Mar13_S0A1/voip-9735278861-20130401_154605 +train Mar13_S1A1/voip-9735278861-20130402_140608 +train Mar13_S1A0/voip-9735278861-20130401_155528 +train Mar13_S1A0/voip-9735278861-20130401_154251 +train Mar13_S1A1/voip-9735278861-20130401_152826 +train Mar13_S1A1/voip-9735278861-20130401_160600 +train Mar13_S1A0/voip-7e22911804-20130327_200922 +train Mar13_S0A1/voip-7e22911804-20130324_184015 +train Mar13_S1A1/voip-7e22911804-20130324_193050 +train Mar13_S1A1/voip-7e22911804-20130326_142538 +train Mar13_S0A0/voip-7e22911804-20130324_173542 +train Mar13_S1A1/voip-7e22911804-20130328_164203 +train Mar13_S1A0/voip-7e22911804-20130325_202948 +train Mar13_S0A0/voip-7e22911804-20130324_184843 +train Mar13_S0A0/voip-7e22911804-20130325_210616 +train Mar13_S0A0/voip-7e22911804-20130328_162243 +train Mar13_S1A0/voip-7e22911804-20130328_162140 +train Mar13_S0A0/voip-7e22911804-20130327_202516 +train Mar13_S0A0/voip-7e22911804-20130328_204851 +train Mar13_S0A1/voip-7e22911804-20130328_161325 +train Mar13_S0A0/voip-7e22911804-20130325_202635 +train Mar13_S1A0/voip-7e22911804-20130328_204150 +train Mar13_S0A1/voip-7e22911804-20130328_162643 +train Mar13_S1A1/voip-7e22911804-20130324_190421 +train Mar13_S0A1/voip-7e22911804-20130325_200320 +train Mar13_S0A1/voip-7e22911804-20130328_203706 +train Mar13_S0A1/voip-7e22911804-20130328_204351 +train Mar13_S0A1/voip-876ef67873-20130402_032159 +train Mar13_S0A1/voip-876ef67873-20130327_035756 +train Mar13_S0A1/voip-876ef67873-20130402_030352 +train Mar13_S0A0/voip-876ef67873-20130327_040247 +train Mar13_S1A0/voip-876ef67873-20130327_035558 +train Mar13_S1A1/voip-876ef67873-20130327_035101 +train Mar13_S1A1/voip-876ef67873-20130402_031718 +train Mar13_S0A1/voip-876ef67873-20130402_033525 +train Mar13_S1A0/voip-876ef67873-20130402_033954 +train Mar13_S1A1/voip-876ef67873-20130402_030744 +train Mar13_S1A1/voip-d225fad9df-20130328_205151 +train Mar13_S0A1/voip-d225fad9df-20130328_201024 +train 
Mar13_S0A0/voip-d225fad9df-20130328_183716 +train Mar13_S1A0/voip-d225fad9df-20130328_200758 +train Mar13_S0A1/voip-d225fad9df-20130328_205512 +train Mar13_S0A0/voip-d225fad9df-20130328_205817 +train Mar13_S0A0/voip-d225fad9df-20130328_175010 +train Mar13_S0A0/voip-d225fad9df-20130328_201303 +train Mar13_S1A1/voip-d225fad9df-20130328_183352 +train Mar13_S1A0/voip-d225fad9df-20130328_175805 +train Mar13_S1A0/voip-d225fad9df-20130328_204846 +train Mar13_S0A1/voip-d225fad9df-20130328_203254 +train Mar13_S0A1/voip-d225fad9df-20130328_175224 +train Mar13_S1A0/voip-31d9d1a567-20130402_042843 +train Mar13_S1A1/voip-31d9d1a567-20130402_035757 +train Mar13_S1A1/voip-31d9d1a567-20130402_042614 +train Mar13_S1A0/voip-31d9d1a567-20130402_041025 +train Mar13_S1A0/voip-31d9d1a567-20130402_035524 +train Mar13_S0A1/voip-31d9d1a567-20130402_041209 +train Mar13_S1A1/voip-31d9d1a567-20130402_040402 +train Mar13_S0A1/voip-31d9d1a567-20130402_035950 +train Mar13_S1A1/voip-31d9d1a567-20130402_034546 +train Mar13_S0A1/voip-31d9d1a567-20130402_042138 +train Mar13_S1A0/voip-31d9d1a567-20130402_034915 +train Mar13_S0A1/voip-31d9d1a567-20130402_034022 +train Mar13_S1A1/voip-a31ca3e355-20130324_190654 +train Mar13_S0A1/voip-a31ca3e355-20130323_223152 +train Mar13_S0A0/voip-a31ca3e355-20130323_222700 +train Mar13_S1A1/voip-a31ca3e355-20130323_223338 +train Mar13_S1A0/voip-a31ca3e355-20130323_222931 +train Mar13_S1A0/voip-a31ca3e355-20130324_190831 +train Mar13_S0A1/voip-a31ca3e355-20130323_223643 +train Mar13_S0A0/voip-a31ca3e355-20130323_234926 +train Mar13_S0A0/voip-3b3edac94d-20130326_004101 +train Mar13_S1A0/voip-3b3edac94d-20130326_003522 +train Mar13_S0A1/voip-3b3edac94d-20130326_004002 +train Mar13_S1A1/voip-3b3edac94d-20130324_211737 +train Mar13_S1A1/voip-3b3edac94d-20130324_201732 +train Mar13_S0A1/voip-3b3edac94d-20130323_203121 +train Mar13_S1A0/voip-3b3edac94d-20130324_202932 +train Mar13_S1A0/voip-3b3edac94d-20130323_201535 +train Mar13_S1A1/voip-3b3edac94d-20130323_202936 +train Mar13_S0A0/voip-3b3edac94d-20130324_210908 +train Mar13_S1A1/voip-3b3edac94d-20130324_203209 +train Mar13_S0A1/voip-3b3edac94d-20130324_211848 +train Mar13_S0A0/voip-3b3edac94d-20130326_004204 +train Mar13_S0A0/voip-3b3edac94d-20130323_202743 +train Mar13_S1A0/voip-3b3edac94d-20130324_202415 +train Mar13_S0A0/voip-3b3edac94d-20130324_202802 +train Mar13_S1A0/voip-3b3edac94d-20130326_004303 +train Mar13_S0A0/voip-3b3edac94d-20130323_203738 +train Mar13_S0A1/voip-3b3edac94d-20130324_201858 +train Mar13_S0A1/voip-a352cb5ca5-20130401_234557 +train Mar13_S1A1/voip-a352cb5ca5-20130401_232817 +train Mar13_S0A1/voip-a352cb5ca5-20130401_234753 +train Mar13_S0A1/voip-a352cb5ca5-20130401_231740 +train Mar13_S1A0/voip-a352cb5ca5-20130401_234216 +train Mar13_S1A0/voip-a352cb5ca5-20130401_234944 +train Mar13_S1A1/voip-a352cb5ca5-20130401_231236 +train Mar13_S0A0/voip-8f9fb7a86b-20130328_183857 +train Mar13_S0A0/voip-8f9fb7a86b-20130328_185140 +train Mar13_S0A1/voip-8f9fb7a86b-20130328_182235 +train Mar13_S0A1/voip-8f9fb7a86b-20130328_184241 +train Mar13_S1A0/voip-8f9fb7a86b-20130328_184820 +train Mar13_S1A0/voip-8f9fb7a86b-20130328_182710 +train Mar13_S1A1/voip-8f9fb7a86b-20130328_184504 +train Mar13_S1A1/voip-8f9fb7a86b-20130328_181534 +train Mar13_S1A0/voip-155e939ebc-20130327_203952 +train Mar13_S0A1/voip-155e939ebc-20130327_204114 +train Mar13_S0A1/voip-155e939ebc-20130327_202425 +train Mar13_S1A1/voip-155e939ebc-20130327_203006 +train Mar13_S0A0/voip-155e939ebc-20130327_203543 +train Mar13_S1A1/voip-155e939ebc-20130327_211511 +train 
Mar13_S1A0/voip-155e939ebc-20130327_203128 +train Mar13_S0A1/voip-00d76b791d-20130327_011740 +train Mar13_S1A1/voip-00d76b791d-20130327_005023 +train Mar13_S1A0/voip-00d76b791d-20130327_004513 +train Mar13_S1A1/voip-00d76b791d-20130327_012807 +train Mar13_S1A0/voip-00d76b791d-20130327_012331 +train Mar13_S0A0/voip-00d76b791d-20130327_010416 +train Mar13_S1A1/voip-00d76b791d-20130327_010906 +train Mar13_S1A0/voip-00d76b791d-20130327_011609 +train Mar13_S0A1/voip-00d76b791d-20130327_012711 +train Mar13_S0A0/voip-00d76b791d-20130327_011116 +train Mar13_S0A1/voip-00d76b791d-20130327_003853 +train Mar13_S0A0/voip-0abf414c0c-20130327_000359 +train Mar13_S1A1/voip-0abf414c0c-20130326_235444 +train Mar13_S0A1/voip-0abf414c0c-20130326_235059 +train Mar13_S1A1/voip-4b7e22cc07-20130401_185110 +train Mar13_S1A0/voip-4b7e22cc07-20130401_185640 +train Mar13_S1A0/voip-f32f2cfdae-20130327_013132 +train Mar13_S0A0/voip-f32f2cfdae-20130328_195839 +train Mar13_S0A0/voip-f32f2cfdae-20130327_013402 +train Mar13_S0A1/voip-f32f2cfdae-20130327_015652 +train Mar13_S0A0/voip-f32f2cfdae-20130328_195220 +train Mar13_S0A0/voip-f32f2cfdae-20130327_020400 +train Mar13_S1A0/voip-f32f2cfdae-20130328_192506 +train Mar13_S0A1/voip-f32f2cfdae-20130328_193321 +train Mar13_S0A1/voip-f32f2cfdae-20130327_012823 +train Mar13_S0A1/voip-f32f2cfdae-20130328_195409 +train Mar13_S1A1/voip-f32f2cfdae-20130328_192703 +train Mar13_S0A0/voip-f32f2cfdae-20130327_015332 +train Mar13_S1A1/voip-f32f2cfdae-20130328_194348 +train Mar13_S1A1/voip-f32f2cfdae-20130328_200054 +train Mar13_S1A0/voip-f32f2cfdae-20130328_200431 +train Mar13_S0A1/voip-f32f2cfdae-20130328_194814 +train Mar13_S1A0/voip-f32f2cfdae-20130328_194522 +train Mar13_S1A1/voip-f32f2cfdae-20130327_015213 +train Mar13_S1A0/voip-f32f2cfdae-20130327_014628 +train Mar13_S0A0/voip-f32f2cfdae-20130328_193841 +train Mar13_S0A1/voip-f32f2cfdae-20130327_014457 +train Mar13_S1A0/voip-f32f2cfdae-20130328_193606 +train Mar13_S1A1/voip-f32f2cfdae-20130327_013713 +train Mar13_S0A0/voip-381a50592b-20130326_035922 +train Mar13_S1A0/voip-381a50592b-20130326_040146 +train Mar13_S1A0/voip-381a50592b-20130323_234808 +train Mar13_S0A0/voip-381a50592b-20130324_000124 +train Mar13_S0A1/voip-381a50592b-20130324_000330 +train Mar13_S1A0/voip-381a50592b-20130323_233220 +train Mar13_S0A0/voip-381a50592b-20130326_043646 +train Mar13_S1A1/voip-381a50592b-20130323_233439 +train Mar13_S0A0/voip-381a50592b-20130326_040928 +train Mar13_S1A1/voip-381a50592b-20130326_040539 +train Mar13_S0A1/voip-381a50592b-20130326_043059 +train Mar13_S0A1/voip-381a50592b-20130326_042242 +train Mar13_S1A1/voip-381a50592b-20130326_044413 +train Mar13_S1A0/voip-381a50592b-20130326_044146 +train Mar13_S1A1/voip-381a50592b-20130326_043457 +train Mar13_S0A0/voip-381a50592b-20130323_234249 +train Mar13_S1A1/voip-381a50592b-20130323_235612 +train Mar13_S0A1/voip-381a50592b-20130326_040051 +train Mar13_S0A1/voip-381a50592b-20130323_234506 +train Mar13_S1A0/voip-381a50592b-20130326_041040 +train Mar13_S1A1/voip-381a50592b-20130326_041932 +train Mar13_S0A1/voip-922209b777-20130325_160523 +train Mar13_S1A1/voip-922209b777-20130325_162603 +train Mar13_S0A1/voip-922209b777-20130327_004402 +train Mar13_S1A0/voip-922209b777-20130325_163924 +train Mar13_S1A0/voip-922209b777-20130325_162222 +train Mar13_S0A0/voip-922209b777-20130325_160004 +train Mar13_S0A1/voip-922209b777-20130327_012955 +train Mar13_S1A0/voip-922209b777-20130327_013531 +train Mar13_S1A1/voip-922209b777-20130327_004226 +train Mar13_S1A0/voip-922209b777-20130327_011149 +train 
Mar13_S1A1/voip-922209b777-20130325_160141 +train Mar13_S0A0/voip-922209b777-20130325_155209 +train Mar13_S0A0/voip-922209b777-20130327_011354 +train Mar13_S1A0/voip-922209b777-20130327_005342 +train Mar13_S1A0/voip-922209b777-20130325_161023 +train Mar13_S1A1/voip-922209b777-20130327_011455 +train Mar13_S0A1/voip-922209b777-20130325_162946 +train Mar13_S0A0/voip-922209b777-20130325_162423 +train Mar13_S0A0/voip-922209b777-20130327_012258 +train Mar13_S0A0/voip-922209b777-20130326_140616 +train Mar13_S0A1/voip-922209b777-20130325_163118 +train Mar13_S0A0/voip-ccc459b689-20130327_002730 +train Mar13_S0A1/voip-ccc459b689-20130327_004851 +train Mar13_S0A0/voip-ccc459b689-20130327_004727 +train Mar13_S1A1/voip-ccc459b689-20130327_004458 +train Mar13_S1A0/voip-ccc459b689-20130327_004041 +train Mar13_S1A0/voip-bde2721237-20130326_200257 +train Mar13_S1A0/voip-bde2721237-20130325_162942 +train Mar13_S0A1/voip-bde2721237-20130325_154428 +train Mar13_S1A0/voip-bde2721237-20130326_194753 +train Mar13_S0A1/voip-bde2721237-20130326_194022 +train Mar13_S1A0/voip-bde2721237-20130326_193127 +train Mar13_S1A1/voip-bde2721237-20130325_164032 +train Mar13_S1A1/voip-bde2721237-20130326_193025 +train Mar13_S0A0/voip-bde2721237-20130326_192751 +train Mar13_S0A0/voip-bde2721237-20130325_154829 +train Mar13_S0A1/voip-bde2721237-20130326_200505 +train Mar13_S0A1/voip-bde2721237-20130325_161826 +train Mar13_S1A1/voip-bde2721237-20130325_162740 +train Mar13_S0A0/voip-bde2721237-20130325_162622 +train Mar13_S1A0/voip-bde2721237-20130325_155806 +train Mar13_S1A1/voip-bde2721237-20130326_195337 +train Mar13_S0A1/voip-bde2721237-20130326_193743 +train Mar13_S0A0/voip-bde2721237-20130326_195733 +train Mar13_S1A1/voip-bde2721237-20130325_155143 +train Mar13_S0A0/voip-78f497f314-20130323_184557 +train Mar13_S0A0/voip-78f497f314-20130324_202309 +train Mar13_S0A0/voip-78f497f314-20130324_201923 +train Mar13_S0A1/voip-78f497f314-20130323_184435 +train Mar13_S1A0/voip-78f497f314-20130324_201554 +train Mar13_S1A1/voip-78f497f314-20130324_201211 +train Mar13_S0A1/voip-78f497f314-20130324_200748 +train Mar13_S1A1/voip-78f497f314-20130323_144347 +train Mar13_S1A1/voip-78f497f314-20130324_140601 +train Mar13_S1A1/voip-78f497f314-20130324_203101 +train Mar13_S0A0/voip-78f497f314-20130323_143533 +train Mar13_S1A0/voip-78f497f314-20130323_184250 +train Mar13_S0A0/voip-78f497f314-20130323_183806 +train Mar13_S0A1/voip-78f497f314-20130324_201025 +train Mar13_S1A0/voip-78f497f314-20130324_203349 +train Mar13_S1A0/voip-78f497f314-20130324_200619 +train Mar13_S1A1/voip-78f497f314-20130323_184110 +train Mar13_S0A1/voip-78f497f314-20130323_144139 +train Mar13_S1A1/voip-78f497f314-20130323_142856 +train Mar13_S1A0/voip-88b68a9a41-20130322_221424 +train Mar13_S1A1/voip-88b68a9a41-20130323_114000 +train Mar13_S0A0/voip-88b68a9a41-20130322_223613 +train Mar13_S0A0/voip-88b68a9a41-20130324_002417 +train Mar13_S1A0/voip-88b68a9a41-20130324_004148 +train Mar13_S0A0/voip-88b68a9a41-20130324_004748 +train Mar13_S1A1/voip-88b68a9a41-20130324_004031 +train Mar13_S0A1/voip-88b68a9a41-20130322_221603 +train Mar13_S0A1/voip-88b68a9a41-20130322_221256 +train Mar13_S0A0/voip-88b68a9a41-20130322_221731 +train Mar13_S1A0/voip-88b68a9a41-20130322_222845 +train Mar13_S1A1/voip-88b68a9a41-20130324_002917 +train Mar13_S0A0/voip-88b68a9a41-20130322_224608 +train Mar13_S0A1/voip-88b68a9a41-20130324_003144 +train Mar13_S0A1/voip-88b68a9a41-20130324_002132 +train Mar13_S1A1/voip-88b68a9a41-20130322_222725 +train Mar13_S0A0/voip-88b68a9a41-20130324_004639 +train 
Mar13_S1A0/voip-88b68a9a41-20130324_003412 +train Mar13_S1A0/voip-88b68a9a41-20130324_002251 +train Mar13_S0A1/voip-88b68a9a41-20130322_224115 +train Mar13_S0A1/voip-88b68a9a41-20130324_003525 +train Mar13_S1A0/voip-88b68a9a41-20130322_222344 +train Mar13_S0A0/voip-bb1fd497eb-20130325_165023 +train Mar13_S0A1/voip-bb1fd497eb-20130325_131052 +train Mar13_S0A1/voip-bb1fd497eb-20130326_232136 +train Mar13_S1A1/voip-bb1fd497eb-20130326_230444 +train Mar13_S1A0/voip-bb1fd497eb-20130325_132300 +train Mar13_S0A0/voip-bb1fd497eb-20130326_232439 +train Mar13_S1A1/voip-bb1fd497eb-20130325_164823 +train Mar13_S1A0/voip-bb1fd497eb-20130326_120755 +train Mar13_S0A0/voip-bb1fd497eb-20130325_123757 +train Mar13_S0A1/voip-bb1fd497eb-20130326_233411 +train Mar13_S1A1/voip-bb1fd497eb-20130325_163241 +train Mar13_S1A0/voip-bb1fd497eb-20130325_164128 +train Mar13_S1A1/voip-bb1fd497eb-20130325_163635 +train Mar13_S0A1/voip-bb1fd497eb-20130326_223048 +train Mar13_S0A1/voip-bb1fd497eb-20130326_230746 +train Mar13_S0A0/voip-bb1fd497eb-20130326_231219 +train Mar13_S0A0/voip-bb1fd497eb-20130325_164421 +train Mar13_S1A0/voip-bb1fd497eb-20130326_233115 +train Mar13_S1A1/voip-bb1fd497eb-20130326_231928 +train Mar13_S1A0/voip-bb1fd497eb-20130326_230547 +train Mar13_S0A1/voip-bb1fd497eb-20130325_163956 +train Mar13_S0A1/voip-03c2655d43-20130327_194746 +train Mar13_S0A1/voip-03c2655d43-20130327_195421 +train Mar13_S1A1/voip-03c2655d43-20130327_195546 +train Mar13_S0A0/voip-03c2655d43-20130327_193309 +train Mar13_S0A0/voip-03c2655d43-20130327_201323 +train Mar13_S1A0/voip-03c2655d43-20130327_193528 +train Mar13_S0A0/voip-03c2655d43-20130327_194221 +train Mar13_S0A1/voip-03c2655d43-20130327_191742 +train Mar13_S1A0/voip-03c2655d43-20130327_200522 +train Mar13_S1A1/voip-03c2655d43-20130327_194616 +train Mar13_S1A1/voip-03c2655d43-20130327_192312 +train Mar13_S0A0/voip-03c2655d43-20130327_200023 +train Mar13_S1A0/voip-03c2655d43-20130327_195308 +train Mar13_S1A1/voip-d0341706f2-20130329_022056 +train Mar13_S1A0/voip-d0341706f2-20130329_053021 +train Mar13_S1A1/voip-d0341706f2-20130329_012808 +train Mar13_S1A0/voip-d0341706f2-20130329_052240 +train Mar13_S1A1/voip-d0341706f2-20130329_052811 +train Mar13_S0A1/voip-d0341706f2-20130329_021540 +train Mar13_S1A0/voip-d0341706f2-20130329_021425 +train Mar13_S0A1/voip-d0341706f2-20130329_052605 +train Mar13_S0A1/voip-d0341706f2-20130329_051653 +train Mar13_S0A1/voip-d0341706f2-20130329_052031 +train Mar13_S1A0/voip-d0341706f2-20130329_021901 +train Mar13_S1A1/voip-3b81cbb287-20130324_215534 +train Mar13_S1A0/voip-3b81cbb287-20130324_015653 +train Mar13_S0A0/voip-3b81cbb287-20130326_030605 +train Mar13_S1A0/voip-3b81cbb287-20130326_030827 +train Mar13_S1A1/voip-3b81cbb287-20130326_031714 +train Mar13_S0A0/voip-3b81cbb287-20130324_020817 +train Mar13_S0A1/voip-3b81cbb287-20130324_014336 +train Mar13_S0A1/voip-3b81cbb287-20130324_020125 +train Mar13_S0A0/voip-3b81cbb287-20130326_024829 +train Mar13_S0A0/voip-3b81cbb287-20130324_014713 +train Mar13_S1A0/voip-3b81cbb287-20130326_025919 +train Mar13_S1A1/voip-3b81cbb287-20130324_020944 +train Mar13_S0A1/voip-3b81cbb287-20130326_024219 +train Mar13_S1A1/voip-3b81cbb287-20130326_025552 +train Mar13_S0A1/voip-3b81cbb287-20130326_031529 +train Mar13_S0A0/voip-3b81cbb287-20130324_022110 +train Mar13_S1A0/voip-3b81cbb287-20130324_021600 +train Mar13_S1A0/voip-3b81cbb287-20130324_014857 +train Mar13_S1A1/voip-3b81cbb287-20130324_015234 +train Mar13_S0A1/voip-3be3bda933-20130326_132152 +train Mar13_S1A0/voip-3be3bda933-20130326_125503 +train 
Mar13_S1A1/voip-3be3bda933-20130326_131833 +train Mar13_S0A0/voip-3be3bda933-20130326_124519 +train Mar13_S1A1/voip-3be3bda933-20130327_014120 +train Mar13_S0A0/voip-3be3bda933-20130327_015627 +train Mar13_S1A0/voip-3be3bda933-20130327_014928 +train Mar13_S0A0/voip-3be3bda933-20130326_131651 +train Mar13_S0A1/voip-3be3bda933-20130326_125353 +train Mar13_S1A0/voip-90732b027d-20130401_222446 +train Mar13_S1A0/voip-90732b027d-20130401_221503 +train Mar13_S0A1/voip-90732b027d-20130401_215559 +train Mar13_S1A0/voip-90732b027d-20130401_194905 +train Mar13_S1A0/voip-90732b027d-20130401_195256 +train Mar13_S0A0/voip-90732b027d-20130327_183400 +train Mar13_S0A1/voip-90732b027d-20130401_215804 +train Mar13_S0A1/voip-90732b027d-20130327_190536 +train Mar13_S0A0/voip-90732b027d-20130327_174703 +train Mar13_S0A1/voip-90732b027d-20130401_222250 +train Mar13_S1A1/voip-90732b027d-20130327_180737 +train Mar13_S1A1/voip-90732b027d-20130327_181557 +train Mar13_S0A1/voip-90732b027d-20130327_181126 +train Mar13_S1A1/voip-90732b027d-20130401_215925 +train Mar13_S1A0/voip-90732b027d-20130327_170938 +train Mar13_S1A0/voip-90732b027d-20130401_220530 +train Mar13_S1A0/voip-90732b027d-20130327_184229 +train Mar13_S1A0/voip-90732b027d-20130328_164236 +train Mar13_S1A1/voip-90732b027d-20130401_222034 +train Mar13_S0A0/voip-90732b027d-20130327_190004 +train Mar13_S1A1/voip-90732b027d-20130401_195651 +train Mar13_S1A1/voip-90732b027d-20130327_190315 +train Mar13_S0A1/voip-90732b027d-20130401_221049 +train Mar13_S0A1/voip-90732b027d-20130327_183925 +train Mar13_S1A1/voip-90732b027d-20130327_173620 +train Mar13_S1A1/voip-f091d0e461-20130327_210252 +train Mar13_S1A0/voip-f091d0e461-20130327_211411 +train Mar13_S1A0/voip-f091d0e461-20130327_205009 +train Mar13_S0A1/voip-f091d0e461-20130327_210653 +train Mar13_S0A0/voip-f091d0e461-20130327_211146 +train Mar13_S0A0/voip-f091d0e461-20130327_205249 +train Mar13_S0A1/voip-f091d0e461-20130327_210851 +train Mar13_S0A0/voip-340dbb333e-20130327_011501 +train Mar13_S0A1/voip-340dbb333e-20130325_230536 +train Mar13_S0A0/voip-340dbb333e-20130325_231720 +train Mar13_S1A0/voip-340dbb333e-20130325_231020 +train Mar13_S0A0/voip-340dbb333e-20130327_004847 +train Mar13_S0A0/voip-340dbb333e-20130325_233510 +train Mar13_S0A1/voip-340dbb333e-20130327_011550 +train Mar13_S0A1/voip-340dbb333e-20130325_231846 +train Mar13_S1A0/voip-340dbb333e-20130325_232333 +train Mar13_S0A1/voip-340dbb333e-20130327_011403 +train Mar13_S1A1/voip-340dbb333e-20130325_231614 +train Mar13_S0A0/voip-340dbb333e-20130327_012037 +train Mar13_S0A1/voip-340dbb333e-20130327_004741 +train Mar13_S0A0/voip-340dbb333e-20130325_230054 +train Mar13_S1A0/voip-340dbb333e-20130327_005751 +train Mar13_S1A0/voip-340dbb333e-20130327_010913 +train Mar13_S1A1/voip-340dbb333e-20130327_011703 +train Mar13_S1A1/voip-340dbb333e-20130325_233348 +train Mar13_S1A0/voip-340dbb333e-20130325_233821 +train Mar13_S1A1/voip-318851c80b-20130328_224338 +train Mar13_S1A0/voip-318851c80b-20130328_220608 +train Mar13_S0A1/voip-318851c80b-20130328_214947 +train Mar13_S1A1/voip-318851c80b-20130328_214609 +train Mar13_S0A1/voip-318851c80b-20130328_224511 +train Mar13_S0A1/voip-318851c80b-20130328_215616 +train Mar13_S1A0/voip-318851c80b-20130328_224701 +train Mar13_S0A1/voip-318851c80b-20130328_214057 +train Mar13_S1A0/voip-318851c80b-20130328_220743 +train Mar13_S1A1/voip-318851c80b-20130328_220341 +train Mar13_S1A0/voip-36440f7305-20130326_201757 +train Mar13_S0A0/voip-36440f7305-20130327_201555 +train Mar13_S0A1/voip-36440f7305-20130326_140907 +train 
Mar13_S0A0/voip-36440f7305-20130327_200335 +train Mar13_S1A0/voip-36440f7305-20130327_200608 +train Mar13_S0A1/voip-36440f7305-20130327_201745 +train Mar13_S1A1/voip-36440f7305-20130327_195221 +train Mar13_S0A0/voip-36440f7305-20130326_141231 +train Mar13_S0A0/voip-36440f7305-20130327_194928 +train Mar13_S0A1/voip-36440f7305-20130327_200102 +train Mar13_S0A0/voip-36440f7305-20130326_200035 +train Mar13_S0A1/voip-36440f7305-20130326_142415 +train Mar13_S1A0/voip-36440f7305-20130326_140559 +train Mar13_S0A0/voip-a8649977cf-20130323_161909 +train Mar13_S1A0/voip-a8649977cf-20130323_161448 +train Mar13_S0A1/voip-a8649977cf-20130323_161257 +train Mar13_S0A1/voip-a8649977cf-20130323_161633 +train Mar13_S1A0/voip-a8649977cf-20130323_155614 +train Mar13_S1A1/voip-a8649977cf-20130323_160436 +train Mar13_S0A0/voip-a8649977cf-20130323_155424 +train Mar13_S1A0/voip-a8649977cf-20130323_160845 +train Mar13_S0A1/voip-a8649977cf-20130323_160201 +train Mar13_S1A1/voip-a8649977cf-20130323_160311 +train Mar13_S1A1/voip-2f209793f4-20130326_005104 +train Mar13_S1A0/voip-2f209793f4-20130326_005217 +train Mar13_S1A0/voip-2f209793f4-20130326_004451 +train Mar13_S0A1/voip-2f209793f4-20130326_004858 +train Mar13_S1A0/voip-2f209793f4-20130326_012033 +train Mar13_S0A0/voip-2f209793f4-20130326_003256 +train Mar13_S0A1/voip-f17e3b578c-20130328_174844 +train Mar13_S0A0/voip-f17e3b578c-20130328_173556 +train Mar13_S1A0/voip-f17e3b578c-20130328_171738 +train Mar13_S0A1/voip-f17e3b578c-20130328_180105 +train Mar13_S1A1/voip-f17e3b578c-20130328_174548 +train Mar13_S1A0/voip-f17e3b578c-20130328_174111 +train Mar13_S0A1/voip-f17e3b578c-20130328_173326 +train Mar13_S0A1/voip-4a6ecc1f1c-20130329_151857 +train Mar13_S1A0/voip-4a6ecc1f1c-20130328_125036 +train Mar13_S1A0/voip-4a6ecc1f1c-20130329_151950 +train Mar13_S1A1/voip-4a6ecc1f1c-20130328_124608 +train Mar13_S1A1/voip-4a6ecc1f1c-20130328_125150 +train Mar13_S1A0/voip-4a6ecc1f1c-20130328_121012 +train Mar13_S1A0/voip-4a6ecc1f1c-20130329_153643 +train Mar13_S0A1/voip-4a6ecc1f1c-20130329_151331 +train Mar13_S1A0/voip-4a6ecc1f1c-20130328_124458 +train Mar13_S1A1/voip-4a6ecc1f1c-20130329_153749 +train Mar13_S0A1/voip-4a6ecc1f1c-20130328_121706 +train Mar13_S0A1/voip-4a6ecc1f1c-20130328_120337 +train Mar13_S0A1/voip-4a6ecc1f1c-20130328_124342 +train Mar13_S0A0/voip-4a6ecc1f1c-20130328_124921 +train Mar13_S0A1/voip-4a6ecc1f1c-20130329_154425 +train Mar13_S0A0/voip-4a6ecc1f1c-20130328_123700 +train Mar13_S0A1/voip-4a6ecc1f1c-20130329_153858 +train Mar13_S1A1/voip-4a6ecc1f1c-20130329_152505 +train Mar13_S1A0/voip-4a6ecc1f1c-20130329_152840 +train Mar13_S0A0/voip-4a6ecc1f1c-20130328_120628 +train Mar13_S0A1/voip-4a6ecc1f1c-20130329_153244 +train Mar13_S1A1/voip-4a6ecc1f1c-20130328_121528 +train Mar13_S0A0/voip-4a6ecc1f1c-20130328_120903 +train Mar13_S1A0/voip-4a6ecc1f1c-20130329_154219 +train Mar13_S1A0/voip-0241bbae39-20130327_201053 +train Mar13_S0A1/voip-0241bbae39-20130327_193939 +train Mar13_S1A0/voip-0241bbae39-20130327_190942 +train Mar13_S1A1/voip-0241bbae39-20130327_204651 +train Mar13_S0A0/voip-0241bbae39-20130327_191625 +train Mar13_S0A0/voip-0241bbae39-20130327_204233 +train Mar13_S0A1/voip-0241bbae39-20130327_195830 +train Mar13_S1A1/voip-0241bbae39-20130327_194703 +train Mar13_S1A1/voip-afd3aa91f0-20130325_225729 +train Mar13_S0A0/voip-afd3aa91f0-20130327_190135 +train Mar13_S0A1/voip-afd3aa91f0-20130327_185906 +train Mar13_S0A0/voip-afd3aa91f0-20130325_224112 +train Mar13_S0A0/voip-afd3aa91f0-20130325_230434 +train Mar13_S1A1/voip-afd3aa91f0-20130325_231946 +train 
Mar13_S1A1/voip-afd3aa91f0-20130325_224706 +train Mar13_S0A1/voip-afd3aa91f0-20130325_224842 +train Mar13_S0A1/voip-afd3aa91f0-20130325_223728 +train Mar13_S0A0/voip-afd3aa91f0-20130325_225303 +train Mar13_S1A0/voip-afd3aa91f0-20130327_190234 +train Mar13_S0A1/voip-afd3aa91f0-20130326_015648 +train Mar13_S1A0/voip-afd3aa91f0-20130325_224235 +train Mar13_S1A0/voip-afd3aa91f0-20130326_015124 +train Mar13_S1A0/voip-afd3aa91f0-20130327_211102 +train Mar13_S0A1/voip-cfd5fa34d9-20130402_073524 +train Mar13_S1A1/voip-cfd5fa34d9-20130402_073021 +train Mar13_S1A0/voip-cfd5fa34d9-20130402_073351 +train Mar13_S1A1/voip-cfd5fa34d9-20130402_072808 +train Mar13_S1A1/voip-cfd5fa34d9-20130402_074254 +train Mar13_S0A1/voip-d6f8c4271e-20130326_221007 +train Mar13_S0A0/voip-d6f8c4271e-20130326_220359 +train Mar13_S1A0/voip-50af5438f1-20130402_080841 +train Mar13_S0A1/voip-50af5438f1-20130402_085506 +train Mar13_S0A1/voip-50af5438f1-20130402_084641 +train Mar13_S1A0/voip-50af5438f1-20130327_034500 +train Mar13_S1A0/voip-50af5438f1-20130402_085256 +train Mar13_S1A1/voip-50af5438f1-20130402_085647 +train Mar13_S0A1/voip-50af5438f1-20130402_090250 +train Mar13_S1A0/voip-50af5438f1-20130402_081930 +train Mar13_S1A0/voip-50af5438f1-20130402_082750 +train Mar13_S1A1/voip-50af5438f1-20130402_084400 +train Mar13_S1A1/voip-50af5438f1-20130402_081345 +train Mar13_S0A1/voip-50af5438f1-20130327_032137 +train Mar13_S1A0/voip-50af5438f1-20130327_043337 +train Mar13_S1A1/voip-50af5438f1-20130402_081208 +train Mar13_S1A0/voip-50af5438f1-20130327_032335 +train Mar13_S1A0/voip-50af5438f1-20130402_085904 +train Mar13_S0A0/voip-50af5438f1-20130327_041542 +train Mar13_S1A1/voip-50af5438f1-20130327_042937 +train Mar13_S0A1/voip-50af5438f1-20130327_041921 +train Mar13_S1A1/voip-50af5438f1-20130327_031854 +train Mar13_S0A1/voip-eaef6f434c-20130323_025453 +train Mar13_S0A1/voip-eaef6f434c-20130323_030832 +train Mar13_S0A1/voip-eaef6f434c-20130323_025316 +train Mar13_S0A0/voip-eaef6f434c-20130323_031257 +train Mar13_S1A0/voip-eaef6f434c-20130323_031356 +train Mar13_S1A0/voip-eaef6f434c-20130323_025154 +train Mar13_S0A1/voip-eaef6f434c-20130323_024502 +train Mar13_S0A1/voip-eaef6f434c-20130323_025614 +train Mar13_S1A1/voip-eaef6f434c-20130323_025908 +train Mar13_S1A1/voip-e8997b10da-20130329_011058 +train Mar13_S0A0/voip-e8997b10da-20130327_193112 +train Mar13_S1A0/voip-e8997b10da-20130329_001748 +train Mar13_S0A1/voip-e8997b10da-20130401_152342 +train Mar13_S1A0/voip-e8997b10da-20130401_152530 +train Mar13_S1A0/voip-e8997b10da-20130329_011626 +train Mar13_S0A1/voip-e8997b10da-20130401_151850 +train Mar13_S1A1/voip-e8997b10da-20130327_194306 +train Mar13_S1A1/voip-e8997b10da-20130329_010926 +train Mar13_S1A1/voip-e8997b10da-20130329_001948 +train Mar13_S1A0/voip-e8997b10da-20130329_000534 +train Mar13_S1A1/voip-e8997b10da-20130327_200112 +train Mar13_S0A1/voip-e8997b10da-20130329_012511 +train Mar13_S0A1/voip-e8997b10da-20130329_010205 +train Mar13_S0A1/voip-e8997b10da-20130329_000658 +train Mar13_S1A0/voip-e8997b10da-20130327_194901 +train Mar13_S0A0/voip-e8997b10da-20130327_195713 +train Mar13_S0A1/voip-e8997b10da-20130327_193641 +train Mar13_S0A1/voip-e8997b10da-20130329_012334 +train Mar13_S1A0/voip-e8997b10da-20130401_152228 +train Mar13_S1A0/voip-e8997b10da-20130329_012706 +train Mar13_S0A1/voip-e8997b10da-20130329_000819 +train Mar13_S1A1/voip-e8997b10da-20130401_152019 +train Mar13_S0A1/voip-31de0daa7b-20130402_133530 +train Mar13_S1A0/voip-31de0daa7b-20130402_133216 +train Mar13_S1A0/voip-31de0daa7b-20130401_205355 +train 
Mar13_S1A0/voip-31de0daa7b-20130401_204415 +train Mar13_S0A1/voip-31de0daa7b-20130401_220541 +train Mar13_S1A1/voip-31de0daa7b-20130402_132633 +train Mar13_S1A0/voip-31de0daa7b-20130401_220728 +train Mar13_S1A1/voip-31de0daa7b-20130401_205633 +train Mar13_S1A0/voip-31de0daa7b-20130401_221101 +train Mar13_S1A0/voip-31de0daa7b-20130401_220217 +train Mar13_S0A1/voip-31de0daa7b-20130401_204621 +train Mar13_S1A1/voip-31de0daa7b-20130402_133047 +train Mar13_S1A1/voip-31de0daa7b-20130401_203534 +train Mar13_S0A0/voip-52eb280e7b-20130325_130820 +train Mar13_S0A1/voip-52eb280e7b-20130326_212826 +train Mar13_S1A0/voip-52eb280e7b-20130326_214113 +train Mar13_S1A0/voip-52eb280e7b-20130326_212546 +train Mar13_S0A0/voip-52eb280e7b-20130325_124240 +train Mar13_S0A1/voip-52eb280e7b-20130326_215158 +train Mar13_S1A1/voip-52eb280e7b-20130326_211923 +train Mar13_S0A0/voip-52eb280e7b-20130326_212733 +train Mar13_S0A1/voip-52eb280e7b-20130325_131334 +train Mar13_S1A1/voip-52eb280e7b-20130325_125522 +train Mar13_S0A1/voip-52eb280e7b-20130326_213528 +train Mar13_S0A0/voip-52eb280e7b-20130326_213227 +train Mar13_S0A1/voip-52eb280e7b-20130325_130645 +train Mar13_S1A0/voip-52eb280e7b-20130325_125355 +train Mar13_S1A1/voip-52eb280e7b-20130325_132224 +train Mar13_S0A1/voip-52eb280e7b-20130325_123856 +train Mar13_S0A0/voip-52eb280e7b-20130326_214342 +train Mar13_S0A0/voip-52eb280e7b-20130325_130950 +train Mar13_S1A1/voip-52eb280e7b-20130326_215044 +train Mar13_S1A0/voip-52eb280e7b-20130325_125157 +train Mar13_S0A1/voip-2c217000af-20130328_224808 +train Mar13_S1A1/voip-2c217000af-20130328_223711 +train Mar13_S0A1/voip-2c217000af-20130328_224934 +train Mar13_S0A0/voip-2c217000af-20130325_221252 +train Mar13_S0A1/voip-2c217000af-20130325_222647 +train Mar13_S1A0/voip-2c217000af-20130328_224449 +train Mar13_S0A1/voip-2c217000af-20130325_221701 +train Mar13_S1A1/voip-2c217000af-20130325_222430 +train Mar13_S1A0/voip-2c217000af-20130325_221525 +train Mar13_S1A0/voip-2c217000af-20130328_223847 +train Mar13_S1A1/voip-2c217000af-20130328_224206 +train Mar13_S0A1/voip-e9b53d6ace-20130324_223617 +train Mar13_S1A0/voip-e9b53d6ace-20130401_190230 +train Mar13_S0A0/voip-e9b53d6ace-20130324_221046 +train Mar13_S1A1/voip-e9b53d6ace-20130324_222525 +train Mar13_S0A1/voip-e9b53d6ace-20130401_190004 +train Mar13_S1A1/voip-e9b53d6ace-20130324_222855 +train Mar13_S0A1/voip-e9b53d6ace-20130401_210057 +train Mar13_S1A0/voip-e9b53d6ace-20130401_202424 +train Mar13_S1A1/voip-e9b53d6ace-20130401_202659 +train Mar13_S1A0/voip-e9b53d6ace-20130401_184931 +train Mar13_S0A0/voip-e9b53d6ace-20130324_221924 +train Mar13_S1A0/voip-e9b53d6ace-20130401_212753 +train Mar13_S1A0/voip-e9b53d6ace-20130401_204254 +train Mar13_S0A1/voip-e9b53d6ace-20130401_201306 +train Mar13_S1A1/voip-e9b53d6ace-20130401_200405 +train Mar13_S0A1/voip-e9b53d6ace-20130324_220844 +train Mar13_S1A0/voip-e9b53d6ace-20130324_222746 +train Mar13_S1A0/voip-e9b53d6ace-20130401_194326 +train Mar13_S0A0/voip-e9b53d6ace-20130324_223300 +train Mar13_S1A0/voip-e9b53d6ace-20130324_222642 +train Mar13_S1A1/voip-e9b53d6ace-20130324_220734 +train Mar13_S1A1/voip-e9b53d6ace-20130401_190135 +train Mar13_S0A1/voip-e9b53d6ace-20130324_221520 +train Mar13_S1A1/voip-e9b53d6ace-20130401_205843 +train Mar13_S0A1/voip-f4026333dc-20130327_175843 +train Mar13_S1A1/voip-f4026333dc-20130327_180235 +train Mar13_S0A1/voip-2b66f60368-20130326_203353 +train Mar13_S0A0/voip-2b66f60368-20130326_202529 +train Mar13_S0A0/voip-2b66f60368-20130326_203610 +train Mar13_S1A0/voip-2b66f60368-20130326_202221 +train 
Mar13_S1A0/voip-2b66f60368-20130326_203705 +train Mar13_S0A1/voip-2b66f60368-20130326_202811 +train Mar13_S1A1/voip-2b66f60368-20130326_203018 +train Mar13_S0A0/voip-e72dba1759-20130325_211057 +train Mar13_S1A0/voip-e72dba1759-20130326_221205 +train Mar13_S0A1/voip-e72dba1759-20130326_223506 +train Mar13_S0A1/voip-e72dba1759-20130326_215630 +train Mar13_S1A0/voip-e72dba1759-20130325_204847 +train Mar13_S0A0/voip-e72dba1759-20130326_222657 +train Mar13_S0A1/voip-e72dba1759-20130325_204725 +train Mar13_S1A1/voip-e72dba1759-20130326_220053 +train Mar13_S0A0/voip-e72dba1759-20130326_220605 +train Mar13_S1A1/voip-e72dba1759-20130325_204601 +train Mar13_S1A1/voip-e72dba1759-20130326_222550 +train Mar13_S0A1/voip-e72dba1759-20130325_210429 +train Mar13_S1A1/voip-e72dba1759-20130325_210629 +train Mar13_S1A0/voip-e72dba1759-20130325_205739 +train Mar13_S1A0/voip-e72dba1759-20130326_223157 +train Mar13_S1A0/voip-e72dba1759-20130325_210917 +train Mar13_S0A0/voip-e72dba1759-20130325_205315 +train Mar13_S0A0/voip-e72dba1759-20130325_210022 +train Mar13_S1A1/voip-e72dba1759-20130326_220956 +train Mar13_S1A1/voip-e72dba1759-20130325_215313 +train Mar13_S1A1/voip-fe2783c40a-20130401_150540 +train Mar13_S1A0/voip-fe2783c40a-20130401_145640 +train Mar13_S1A0/voip-fe2783c40a-20130401_143504 +train Mar13_S0A1/voip-fe2783c40a-20130401_144421 +train Mar13_S0A1/voip-fe2783c40a-20130401_144037 +train Mar13_S1A0/voip-fe2783c40a-20130401_144320 +train Mar13_S1A0/voip-fe2783c40a-20130401_150659 +train Mar13_S0A1/voip-fe2783c40a-20130401_151115 +train Mar13_S1A1/voip-fe2783c40a-20130401_145010 +train Mar13_S0A1/voip-fe2783c40a-20130401_145931 +train Mar13_S1A1/voip-fe2783c40a-20130401_143116 +train Mar13_S1A1/voip-fe2783c40a-20130401_145146 +train Mar13_S0A1/voip-fe2783c40a-20130401_150347 +train Mar13_S1A0/voip-e30cb521fb-20130328_131711 +train Mar13_S1A1/voip-e30cb521fb-20130328_144608 +train Mar13_S0A0/voip-e30cb521fb-20130328_122635 +train Mar13_S0A1/voip-e30cb521fb-20130328_134803 +train Mar13_S0A0/voip-e30cb521fb-20130328_135828 +train Mar13_S1A1/voip-e30cb521fb-20130328_135454 +train Mar13_S1A1/voip-e0035cc31b-20130323_210244 +train Mar13_S0A0/voip-e0035cc31b-20130323_211513 +train Mar13_S1A1/voip-e0035cc31b-20130323_211354 +train Mar13_S1A0/voip-e0035cc31b-20130326_204829 +train Mar13_S0A1/voip-e0035cc31b-20130323_212516 +train Mar13_S0A1/voip-e0035cc31b-20130326_205511 +train Mar13_S1A1/voip-e0035cc31b-20130326_205626 +train Mar13_S0A0/voip-e0035cc31b-20130323_212959 +train Mar13_S1A0/voip-e0035cc31b-20130326_203550 +train Mar13_S0A0/voip-e0035cc31b-20130326_205950 +train Mar13_S0A1/voip-e0035cc31b-20130323_211206 +train Mar13_S1A0/voip-e0035cc31b-20130323_210352 +train Mar13_S0A0/voip-e0035cc31b-20130326_205004 +train Mar13_S0A0/voip-e0035cc31b-20130323_211112 +train Mar13_S1A1/voip-e0035cc31b-20130326_203939 +train Mar13_S1A1/voip-e0035cc31b-20130323_212626 +train Mar13_S1A0/voip-e0035cc31b-20130323_212221 +train Mar13_S1A1/voip-e0035cc31b-20130326_210405 +train Mar13_S1A0/voip-e0035cc31b-20130326_205255 +train Mar13_S0A0/voip-e0035cc31b-20130326_203710 +train Mar13_S0A1/voip-e0035cc31b-20130326_204429 +train Mar13_S1A0/voip-0a45bc863d-20130325_201403 +train Mar13_S1A1/voip-0a45bc863d-20130325_201117 +train Mar13_S1A0/voip-0a45bc863d-20130325_195848 +train Mar13_S0A0/voip-0a45bc863d-20130325_202319 +train Mar13_S0A0/voip-0a45bc863d-20130325_201240 +train Mar13_S1A0/voip-0a45bc863d-20130326_205127 +train Mar13_S0A1/voip-0a45bc863d-20130326_205629 +train Mar13_S0A1/voip-0a45bc863d-20130325_200201 +train 
[added data file: dialogue-session split listing (DSTC2-style "Mar13" VoIP call logs). Roughly 1,500 added diff lines of the form "+<split> Mar13_S<s>A<a>/voip-<caller-id>-<YYYYMMDD_HHMMSS>", one session per line; the original per-line diff breaks were lost in extraction. Sessions from the Mar13_S0A0/S0A1/S1A0/S1A1 conditions are assigned to the train and dev splits, and all Mar13_S2A0/S2A1 sessions to the test split.]
Mar13_S2A1/voip-aaa44b4121-20130326_060540 +test Mar13_S2A1/voip-31de0daa7b-20130402_132852 +test Mar13_S2A1/voip-fdf8b50918-20130327_022752 +test Mar13_S2A1/voip-e0035cc31b-20130323_210142 +test Mar13_S2A1/voip-7e07d8f0f5-20130327_181018 +test Mar13_S2A1/voip-7f9c1c8411-20130401_161605 +test Mar13_S2A1/voip-d225fad9df-20130328_174737 +test Mar13_S2A1/voip-3cf7bd870d-20130327_182909 +test Mar13_S2A1/voip-bde2721237-20130326_192622 +test Mar13_S2A1/voip-381a50592b-20130323_235324 +test Mar13_S2A1/voip-7f9c1c8411-20130328_173656 +test Mar13_S2A1/voip-b27a230d2e-20130323_042340 +test Mar13_S2A1/voip-aaa44b4121-20130327_165952 +test Mar13_S2A1/voip-e61fa89add-20130327_071905 +test Mar13_S2A1/voip-e72dba1759-20130326_223325 +test Mar13_S2A1/voip-31d9d1a567-20130402_041615 +test Mar13_S2A1/voip-187c1708f2-20130327_125741 +test Mar13_S2A1/voip-bb1fd497eb-20130325_163404 +test Mar13_S2A1/voip-e99e4f4538-20130327_155041 +test Mar13_S2A1/voip-f22c2bf9c7-20130328_120313 +test Mar13_S2A1/voip-b20b6e847a-20130326_222516 +test Mar13_S2A1/voip-3cf7bd870d-20130328_212334 +test Mar13_S2A1/voip-908884f5fd-20130327_001907 +test Mar13_S2A1/voip-f22c2bf9c7-20130328_115321 +test Mar13_S2A1/voip-da10d74c3e-20130326_000905 +test Mar13_S2A1/voip-4c25da9a27-20130325_184849 +test Mar13_S2A1/voip-78f497f314-20130324_203218 +test Mar13_S2A1/voip-fe2783c40a-20130401_145319 +test Mar13_S2A1/voip-d645d56d23-20130323_235959 +test Mar13_S2A1/voip-50af5438f1-20130402_084822 +test Mar13_S2A1/voip-87de4f7a80-20130324_154432 +test Mar13_S2A1/voip-3860c915c2-20130328_164936 +test Mar13_S2A1/voip-90732b027d-20130401_220312 +test Mar13_S2A1/voip-4c25da9a27-20130325_182051 +test Mar13_S2A1/voip-fbd422ad18-20130328_183328 +test Mar13_S2A1/voip-7e22911804-20130324_185318 +test Mar13_S2A1/voip-318851c80b-20130328_215115 +test Mar13_S2A1/voip-da10d74c3e-20130328_144418 +test Mar13_S2A1/voip-1b51204ef5-20130401_145608 +test Mar13_S2A1/voip-6dbc3573bc-20130328_191134 +test Mar13_S2A1/voip-e61fa89add-20130326_013026 +test Mar13_S2A1/voip-13ff413553-20130328_171126 +test Mar13_S2A1/voip-dda7c88c6e-20130323_053821 +test Mar13_S2A1/voip-36440f7305-20130326_201603 +test Mar13_S2A1/voip-90732b027d-20130327_175055 +test Mar13_S2A1/voip-3b81cbb287-20130326_031145 +test Mar13_S2A1/voip-7f9c1c8411-20130401_164805 +test Mar13_S2A1/voip-0f41c16f2f-20130402_000017 +test Mar13_S2A1/voip-dda7c88c6e-20130323_052547 +test Mar13_S2A1/voip-72e50baa85-20130326_044852 +test Mar13_S2A1/voip-4a6ecc1f1c-20130328_124224 +test Mar13_S2A1/voip-4f069a4136-20130327_205100 +test Mar13_S2A1/voip-b20968d1ea-20130323_110448 +test Mar13_S2A1/voip-2b66f60368-20130326_202640 +test Mar13_S2A1/voip-4a6ecc1f1c-20130328_120737 +test Mar13_S2A1/voip-2d3d74d091-20130328_134957 +test Mar13_S2A1/voip-199d62165b-20130402_121930 +test Mar13_S2A1/voip-59bc8a2167-20130328_131751 +test Mar13_S2A1/voip-d76f6e4f82-20130327_233840 +test Mar13_S2A1/voip-ccc459b689-20130327_002947 +test Mar13_S2A1/voip-15d8a89cec-20130327_015153 +test Mar13_S2A1/voip-0fa32b1e78-20130328_234027 +test Mar13_S2A1/voip-fce37b0ccb-20130328_142727 +test Mar13_S2A1/voip-d7853a398f-20130401_161843 +test Mar13_S2A1/voip-3b59a0391b-20130401_133804 +test Mar13_S2A1/voip-3b3edac94d-20130326_003647 +test Mar13_S2A1/voip-3b81cbb287-20130324_020625 +test Mar13_S2A1/voip-4c0d36762a-20130328_211536 +test Mar13_S2A1/voip-0fa32b1e78-20130329_121012 +test Mar13_S2A1/voip-7e07d8f0f5-20130326_211014 +test Mar13_S2A1/voip-72e50baa85-20130327_063340 +test Mar13_S2A1/voip-dcaeb62b29-20130326_033543 +test 
Mar13_S2A1/voip-2d2d103292-20130329_040112 +test Mar13_S2A1/voip-be5b7bf9d9-20130402_202043 +test Mar13_S2A1/voip-90732b027d-20130401_222746 +test Mar13_S2A1/voip-202b6a3cc4-20130327_185647 +test Mar13_S2A1/voip-eaef6f434c-20130323_161453 +test Mar13_S2A1/voip-7e22911804-20130328_200848 +test Mar13_S2A1/voip-bde2721237-20130326_195205 +test Mar13_S2A1/voip-e3b4879e0d-20130326_024945 +test Mar13_S2A1/voip-2f4c700ae3-20130401_192527 +test Mar13_S2A1/voip-e8997b10da-20130327_195126 +test Mar13_S2A1/voip-e9b53d6ace-20130401_190756 +test Mar13_S2A1/voip-b57f8ee22b-20130325_184213 +test Mar13_S2A1/voip-59bc8a2167-20130325_140117 +test Mar13_S2A1/voip-03c2655d43-20130327_195040 +test Mar13_S2A1/voip-3cf7bd870d-20130327_174807 +test Mar13_S2A1/voip-e0035cc31b-20130323_211252 +test Mar13_S2A1/voip-b27a230d2e-20130323_045313 +test Mar13_S2A1/voip-b20968d1ea-20130323_113517 +test Mar13_S2A1/voip-583e7cede5-20130323_062041 +test Mar13_S2A1/voip-4660dd9eab-20130329_085305 +test Mar13_S2A1/voip-aaa44b4121-20130327_171002 +test Mar13_S2A1/voip-52d599db9c-20130401_230827 +test Mar13_S2A1/voip-b772dbf437-20130402_140732 +test Mar13_S2A1/voip-05e7a5440b-20130328_215302 +test Mar13_S2A1/voip-21ec2b7850-20130326_024118 +test Mar13_S2A1/voip-b58f1f9eb2-20130325_135605 +test Mar13_S2A1/voip-597cfafdee-20130402_011515 +test Mar13_S2A1/voip-2e134ee190-20130401_225343 +test Mar13_S2A1/voip-eaef6f434c-20130323_030255 +test Mar13_S2A1/voip-fe2783c40a-20130401_143641 +test Mar13_S2A1/voip-dcaeb62b29-20130326_041211 +test Mar13_S2A1/voip-14f776f781-20130328_150129 +test Mar13_S2A1/voip-10beae627f-20130401_164518 +test Mar13_S2A1/voip-be5b7bf9d9-20130401_154519 +test Mar13_S2A1/voip-10beae627f-20130328_133754 +test Mar13_S2A1/voip-fe2783c40a-20130401_144635 +test Mar13_S2A1/voip-fe4b6ef58f-20130325_234042 +test Mar13_S2A1/voip-cfd5fa34d9-20130402_074917 +test Mar13_S2A1/voip-90732b027d-20130327_171445 +test Mar13_S2A1/voip-340dbb333e-20130325_232704 +test Mar13_S2A1/voip-2c217000af-20130328_224036 +test Mar13_S2A1/voip-96f43326a4-20130323_065251 +test Mar13_S2A1/voip-14f776f781-20130328_140326 +test Mar13_S2A1/voip-52d599db9c-20130402_002405 +test Mar13_S2A1/voip-a8649977cf-20130323_160703 +test Mar13_S2A1/voip-d0341706f2-20130329_021314 +test Mar13_S2A1/voip-0f41c16f2f-20130325_210905 +test Mar13_S2A1/voip-e72dba1759-20130325_205417 +test Mar13_S2A1/voip-30772678da-20130328_192635 +test Mar13_S2A1/voip-dcaeb62b29-20130327_081431 +test Mar13_S2A1/voip-f1e8236264-20130323_002843 +test Mar13_S2A1/voip-10beae627f-20130328_124527 +test Mar13_S2A1/voip-5cf59cc660-20130327_142353 +test Mar13_S2A1/voip-22756d9e8f-20130329_044809 +test Mar13_S2A1/voip-7f9c1c8411-20130401_170713 +test Mar13_S2A1/voip-e8997b10da-20130401_151651 +test Mar13_S2A1/voip-317a1436fe-20130325_140754 +test Mar13_S2A1/voip-72e50baa85-20130326_052057 +test Mar13_S2A1/voip-e99e4f4538-20130328_202734 +test Mar13_S2A1/voip-3b3edac94d-20130324_211317 +test Mar13_S2A1/voip-7e07d8f0f5-20130328_190644 +test Mar13_S2A1/voip-8d5173f3a6-20130324_184257 +test Mar13_S2A1/voip-583e7cede5-20130324_060159 +test Mar13_S2A1/voip-9819537952-20130328_235233 +test Mar13_S2A1/voip-58047f5227-20130327_033832 +test Mar13_S2A1/voip-869dd52548-20130401_175008 +test Mar13_S2A1/voip-e99e4f4538-20130328_200738 +test Mar13_S2A1/voip-afd3aa91f0-20130326_214153 +test Mar13_S2A1/voip-be5b7bf9d9-20130401_153551 +test Mar13_S2A1/voip-4f069a4136-20130402_031518 +test Mar13_S2A1/voip-36440f7305-20130327_201033 +test Mar13_S2A1/voip-9f989824fd-20130324_075958 +test 
Mar13_S2A1/voip-fbd422ad18-20130328_181624 +test Mar13_S2A1/voip-ef9aa63b85-20130328_185737 +test Mar13_S2A1/voip-597cfafdee-20130328_232745 +test Mar13_S2A1/voip-31d9d1a567-20130402_040755 +test Mar13_S2A1/voip-e2a895cfe5-20130326_234439 +test Mar13_S2A1/voip-31de0daa7b-20130401_205821 +test Mar13_S2A1/voip-50af5438f1-20130327_043154 +test Mar13_S2A1/voip-fdf8b50918-20130327_023943 +test Mar13_S2A1/voip-ab4f1dbb59-20130325_213924 +test Mar13_S2A1/voip-0f41c16f2f-20130325_193433 +test Mar13_S2A1/voip-9f989824fd-20130325_202242 +test Mar13_S2A1/voip-ad40cf5489-20130325_182402 +test Mar13_S2A1/voip-e8997b10da-20130327_195907 +test Mar13_S2A1/voip-fce37b0ccb-20130328_144854 +test Mar13_S2A1/voip-52d599db9c-20130325_134330 +test Mar13_S2A1/voip-8991b7bff6-20130326_230001 +test Mar13_S2A1/voip-263ab0e49f-20130326_110634 +test Mar13_S2A1/voip-fe60dae302-20130328_191630 +test Mar13_S2A1/voip-b27a230d2e-20130329_031113 +test Mar13_S2A1/voip-8991b7bff6-20130401_170513 +test Mar13_S2A1/voip-d7853a398f-20130401_155744 +test Mar13_S2A1/voip-9f989824fd-20130325_203801 +test Mar13_S2A1/voip-876ef67873-20130402_033321 +test Mar13_S2A1/voip-ad40cf5489-20130325_175759 +test Mar13_S2A1/voip-935947e17b-20130402_193955 +test Mar13_S2A1/voip-f32f2cfdae-20130328_192215 +test Mar13_S2A1/voip-96f43326a4-20130323_073811 +test Mar13_S2A1/voip-4a6ecc1f1c-20130329_152337 +test Mar13_S2A1/voip-6dbc3573bc-20130329_035705 +test Mar13_S2A1/voip-d76f6e4f82-20130327_185747 +test Mar13_S2A1/voip-dd9f7810fd-20130322_225252 +test Mar13_S2A1/voip-f22c2bf9c7-20130328_122233 +test Mar13_S2A1/voip-4a6ecc1f1c-20130329_152220 +test Mar13_S2A1/voip-b20968d1ea-20130323_111733 +test Mar13_S2A1/voip-0f41c16f2f-20130402_004600 +test Mar13_S2A1/voip-e30cb521fb-20130328_140026 +test Mar13_S2A1/voip-2d2d103292-20130326_042207 +test Mar13_S2A1/voip-6dbc3573bc-20130328_193627 +test Mar13_S2A1/voip-fd0c0fb514-20130328_114336 +test Mar13_S2A1/voip-317a1436fe-20130325_173342 +test Mar13_S2A1/voip-0fa32b1e78-20130328_233501 +test Mar13_S2A1/voip-e99e4f4538-20130327_154727 +test Mar13_S2A1/voip-6d6587c57d-20130328_142901 +test Mar13_S2A1/voip-52eb280e7b-20130325_130314 +test Mar13_S2A1/voip-90732b027d-20130401_220750 +test Mar13_S2A1/voip-0abf414c0c-20130326_235756 +test Mar13_S2A1/voip-b57f8ee22b-20130325_190112 +test Mar13_S2A1/voip-0fa32b1e78-20130402_142127 +test Mar13_S2A1/voip-8991b7bff6-20130401_174337 +test Mar13_S2A1/voip-922209b777-20130327_005727 +test Mar13_S2A1/voip-d0341706f2-20130329_052402 +test Mar13_S2A1/voip-e8997b10da-20130327_195402 +test Mar13_S2A1/voip-5a464ca603-20130401_194446 +test Mar13_S2A1/voip-fe4b6ef58f-20130325_224421 +test Mar13_S2A1/voip-b08f15a787-20130402_071627 +test Mar13_S2A1/voip-597cfafdee-20130402_010720 +test Mar13_S2A1/voip-7e22911804-20130328_161735 +test Mar13_S2A1/voip-b772dbf437-20130402_142836 +test Mar13_S2A1/voip-e3b4879e0d-20130327_030049 +test Mar13_S2A1/voip-88b68a9a41-20130324_002023 +test Mar13_S2A1/voip-5cf59cc660-20130328_152711 +test Mar13_S2A1/voip-da10d74c3e-20130326_012915 +test Mar13_S2A1/voip-52d599db9c-20130326_215152 +test Mar13_S2A1/voip-381a50592b-20130324_001254 +test Mar13_S2A1/voip-48c12815b3-20130402_052605 +test Mar13_S2A1/voip-9819537952-20130327_023654 +test Mar13_S2A1/voip-36440f7305-20130326_142215 +test Mar13_S2A1/voip-e0035cc31b-20130326_205735 +test Mar13_S2A1/voip-2d3d74d091-20130401_234634 +test Mar13_S2A1/voip-e2a895cfe5-20130327_021723 +test Mar13_S2A1/voip-72e50baa85-20130327_062358 +test Mar13_S2A1/voip-869dd52548-20130326_000401 +test 
Mar13_S2A1/voip-52eb280e7b-20130325_124356 +test Mar13_S2A1/voip-c8821c664b-20130322_214749 +test Mar13_S2A1/voip-e9b53d6ace-20130324_220625 +test Mar13_S2A1/voip-d7aef99178-20130328_183821 +test Mar13_S2A1/voip-fdf8b50918-20130329_024411 +test Mar13_S2A1/voip-22a181cad5-20130325_175448 +test Mar13_S2A1/voip-4660dd9eab-20130329_080420 +test Mar13_S2A1/voip-d225fad9df-20130328_202900 +test Mar13_S2A1/voip-db80a9e6df-20130328_232300 +test Mar13_S2A1/voip-90732b027d-20130327_185216 +test Mar13_S2A1/voip-2d2d103292-20130329_040845 +test Mar13_S2A1/voip-58047f5227-20130326_030256 +test Mar13_S2A1/voip-5a464ca603-20130401_192921 +test Mar13_S2A1/voip-ad40cf5489-20130327_192019 +test Mar13_S2A1/voip-7e22911804-20130326_141301 +test Mar13_S2A1/voip-90732b027d-20130401_193450 +test Mar13_S2A1/voip-d645d56d23-20130323_222138 +test Mar13_S2A1/voip-e99e4f4538-20130327_151632 +test Mar13_S2A1/voip-96f43326a4-20130323_074518 +test Mar13_S2A1/voip-2d2d103292-20130326_041424 +test Mar13_S2A1/voip-e3b4879e0d-20130326_021312 +test Mar13_S2A1/voip-ccf48b9a6a-20130329_053708 +test Mar13_S2A1/voip-3b81cbb287-20130324_021405 +test Mar13_S2A1/voip-8d5173f3a6-20130324_185535 +test Mar13_S2A1/voip-03d6592b76-20130326_013841 +test Mar13_S2A1/voip-583e7cede5-20130323_050541 +test Mar13_S2A1/voip-2f4c700ae3-20130401_190219 +test Mar13_S2A1/voip-d645d56d23-20130401_203212 +test Mar13_S2A1/voip-d66e12b45c-20130327_173227 +test Mar13_S2A1/voip-f22c2bf9c7-20130326_194912 +test Mar13_S2A1/voip-5a464ca603-20130401_194651 +test Mar13_S2A1/voip-4660dd9eab-20130329_084741 +test Mar13_S2A1/voip-ccf48b9a6a-20130329_052647 +test Mar13_S2A1/voip-e72dba1759-20130325_210742 +test Mar13_S2A1/voip-7e22911804-20130324_192309 +test Mar13_S2A1/voip-0f41c16f2f-20130401_234805 +test Mar13_S2A1/voip-3cf7bd870d-20130327_183809 diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/mrda.conf b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/mrda.conf new file mode 100755 index 00000000..43760568 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/mrda.conf @@ -0,0 +1,73 @@ +train Bdb001 +train Bed002 +train Bed004 +train Bed005 +train Bed008 +train Bed009 +train Bed011 +train Bed013 +train Bed014 +train Bed015 +train Bed017 +train Bmr002 +train Bmr003 +train Bmr006 +train Bmr007 +train Bmr008 +train Bmr009 +train Bmr011 +train Bmr012 +train Bmr015 +train Bmr016 +train Bmr020 +train Bmr021 +train Bmr023 +train Bmr025 +train Bmr026 +train Bmr027 +train Bmr029 +train Bmr031 +train Bns001 +train Bns002 +train Bns003 +train Bro003 +train Bro005 +train Bro007 +train Bro010 +train Bro012 +train Bro013 +train Bro015 +train Bro016 +train Bro017 +train Bro019 +train Bro022 +train Bro023 +train Bro025 +train Bro026 +train Bro028 +train Bsr001 +train Btr001 +train Btr002 +train Buw001 +dev Bed003 +dev Bed010 +dev Bmr005 +dev Bmr014 +dev Bmr019 +dev Bmr024 +dev Bmr030 +dev Bro004 +dev Bro011 +dev Bro018 +dev Bro024 +test Bed006 +test Bed012 +test Bed016 +test Bmr001 +test Bmr010 +test Bmr022 +test Bmr028 +test Bro008 +test Bro014 +test Bro021 +test Bro027 diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/multi-woz.conf b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/multi-woz.conf new file mode 100755 index 00000000..783a2285 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/multi-woz.conf @@ -0,0 +1,2000 @@ +test MUL0484.json 
+test PMUL4462.json +test PMUL0320.json +test MUL2155.json +test PMUL0815.json +test PMUL3263.json +test PMUL3672.json +test SNG0423.json +test SNG0296.json +test PMUL0079.json +test PMUL1484.json +test SNG0840.json +test PMUL0089.json +test PMUL2859.json +test PMUL2009.json +test SNG0528.json +test SNG01367.json +test PMUL3858.json +test SNG1076.json +test PMUL2166.json +test PMUL2436.json +test MUL0225.json +test PMUL1966.json +test PMUL4239.json +test SNG01290.json +test SNG0888.json +test SNG1150.json +test MUL2646.json +test PMUL4247.json +test SNG0589.json +test MUL2089.json +test SNG01434.json +test MUL0237.json +test PMUL4125.json +test MUL1588.json +test MUL0432.json +test SNG0253.json +test PMUL4998.json +test PMUL1323.json +test SNG0580.json +test PMUL2719.json +test PMUL3224.json +test PMUL4440.json +test PMUL4840.json +test SNG0081.json +test SNG02172.json +test PMUL0550.json +test PMUL3558.json +test PMUL2275.json +test PMUL3600.json +test SNG0323.json +test MUL0323.json +test MUL1137.json +test MUL2525.json +test MUL0409.json +test SNG01359.json +test PMUL1259.json +test SNG0568.json +test PMUL1374.json +test SNG02315.json +test PMUL3437.json +test MUL0454.json +test MUL1926.json +test PMUL1180.json +test MUL0744.json +test SNG1004.json +test PMUL1412.json +test PMUL0998.json +test PMUL4660.json +test SNG0006.json +test PMUL1755.json +test PMUL0399.json +test PMUL3921.json +test MUL1848.json +test SNG0085.json +test SNG02205.json +test PMUL4880.json +test SNG01153.json +test MUL0537.json +test MUL1555.json +test PMUL0844.json +test PMUL4025.json +test SNG0691.json +test PMUL1002.json +test PMUL4949.json +test MUL1248.json +test MUL1008.json +test PMUL1455.json +test SNG0416.json +test MUL0978.json +test MUL0524.json +test PMUL2953.json +test SNG0832.json +test PMUL4643.json +test PMUL4716.json +test MUL0071.json +test SNG0317.json +test PMUL1477.json +test PMUL1424.json +test PMUL4693.json +test MUL2499.json +test MUL1575.json +test PMUL1173.json +test PMUL1385.json +test PMUL1463.json +test PMUL3309.json +test PMUL0578.json +test PMUL2477.json +test SNG0390.json +test MUL2120.json +test MUL0671.json +test PMUL1320.json +test MUL2410.json +test MUL0197.json +test MUL1811.json +test MUL2269.json +test MUL2053.json +test MUL2146.json +test PMUL2503.json +test MUL0842.json +test MUL1695.json +test PMUL2210.json +test MUL0869.json +test MUL2138.json +test MUL0264.json +test SNG01884.json +test SNG01432.json +test MUL0391.json +test MUL1285.json +test MUL2368.json +test SNG0451.json +test PMUL2636.json +test PMUL3027.json +test PMUL0509.json +test PMUL3044.json +test PMUL1920.json +test PMUL3141.json +test MUL2042.json +test MUL1050.json +test MUL1598.json +test PMUL2634.json +test PMUL1867.json +test MUL0677.json +test PMUL4134.json +test PMUL1266.json +test SNG02153.json +test PMUL2272.json +test MUL1489.json +test PMUL0129.json +test PMUL1978.json +test SNG0571.json +test SNG01608.json +test MUL0496.json +test PMUL1763.json +test MUL1202.json +test MUL0397.json +test PMUL1210.json +test PMUL0573.json +test SNG01353.json +test PMUL1931.json +test SNG0927.json +test PMUL1344.json +test PMUL1788.json +test PMUL1329.json +test SNG01503.json +test PMUL2917.json +test SNG01634.json +test PMUL0566.json +test PMUL3599.json +test MUL2206.json +test SNG0940.json +test PMUL2330.json +test MUL2106.json +test PMUL3424.json +test SNG0733.json +test MUL2365.json +test MUL1569.json +test PMUL1593.json +test PMUL4186.json +test PMUL0006.json +test MUL2442.json +test SNG01692.json +test 
MUL2130.json +test MUL2193.json +test MUL1661.json +test SNG0617.json +test PMUL1623.json +test PMUL2006.json +test PMUL4368.json +test PMUL1801.json +test SNG0098.json +test PMUL3279.json +test MUL2321.json +test MUL2305.json +test PMUL2882.json +test MUL0798.json +test MUL1627.json +test MUL0073.json +test PMUL4231.json +test PMUL4616.json +test MUL1697.json +test PMUL4842.json +test PMUL0090.json +test PMUL1812.json +test PMUL0713.json +test SNG0456.json +test MUL2569.json +test PMUL2563.json +test MUL1514.json +test SNG0722.json +test MUL2423.json +test MUL1491.json +test PMUL2746.json +test MUL2218.json +test SNG0613.json +test MUL1060.json +test MUL1527.json +test SNG0455.json +test MUL2270.json +test MUL1552.json +test PMUL0919.json +test MUL1376.json +test MUL2294.json +test MUL1649.json +test PMUL0367.json +test PMUL3647.json +test PMUL1462.json +test PMUL1316.json +test PMUL0410.json +test PMUL0864.json +test PMUL1772.json +test MUL0937.json +test PMUL0832.json +test PMUL4317.json +test MUL0814.json +test MUL0148.json +test MUL0260.json +test MUL2664.json +test SNG1042.json +test PMUL4964.json +test PMUL2194.json +test PMUL4048.json +test MUL1110.json +test MUL0785.json +test SNG0008.json +test PMUL1182.json +test MUL2116.json +test PMUL4567.json +test PMUL0204.json +test MUL0354.json +test MUL0018.json +test SNG0095.json +test MUL1045.json +test MUL1883.json +test MUL1958.json +test SNG0792.json +test MUL1515.json +test MUL1633.json +test MUL1935.json +test PMUL3933.json +test PMUL2491.json +test MUL0474.json +test PMUL3778.json +test SNG01403.json +test SNG0715.json +test PMUL4155.json +test PMUL4026.json +test MUL1028.json +test MUL0364.json +test PMUL3734.json +test MUL0089.json +test SNG01936.json +test PMUL3992.json +test MUL1899.json +test PMUL3012.json +test MUL2195.json +test MUL2072.json +test MUL1642.json +test SNG01542.json +test PMUL0692.json +test MUL2275.json +test PMUL0109.json +test PMUL2119.json +test MUL0370.json +test PMUL4077.json +test MUL0466.json +test MUL0212.json +test PMUL2627.json +test PMUL2215.json +test MUL2320.json +test MUL1675.json +test MUL0457.json +test SNG0519.json +test PMUL3304.json +test MUL0233.json +test PMUL1253.json +test MUL0810.json +test SNG0274.json +test PMUL2670.json +test SNG02096.json +test PMUL4255.json +test MUL2439.json +test MUL1612.json +test SNG01775.json +test MUL2281.json +test PMUL3310.json +test MUL0088.json +test SNG0308.json +test PMUL0548.json +test PMUL3803.json +test PMUL3759.json +test SNG01270.json +test PMUL4610.json +test PMUL4946.json +test MUL0690.json +test PMUL3336.json +test MUL2427.json +test PMUL1613.json +test PMUL1067.json +test SNG0354.json +test SNG0692.json +test MUL0844.json +test MUL1620.json +test MUL1351.json +test MUL1803.json +test SNG01937.json +test SNG0735.json +test MUL0035.json +test PMUL4059.json +test SNG0073.json +test MUL1806.json +test MUL1254.json +test MUL0789.json +test MUL1554.json +test PMUL1533.json +test PMUL4229.json +test SNG01262.json +test SNG0466.json +test PMUL4294.json +test MUL2119.json +test MUL0624.json +test PMUL3625.json +test MUL0498.json +test SNG0482.json +test PMUL4603.json +test PMUL1113.json +test PMUL2195.json +test PMUL1949.json +test PMUL3264.json +test MUL1289.json +test SNG02006.json +test MUL0208.json +test MUL0340.json +test MUL0072.json +test SNG0539.json +test SNG01835.json +test MUL2012.json +test MUL1015.json +test PMUL3737.json +test PMUL4325.json +test MUL2466.json +test MUL0371.json +test MUL0469.json +test PMUL0795.json +test MUL0003.json 
+test SNG0799.json +test MUL0881.json +test SNG01710.json +test PMUL3940.json +test MUL0510.json +test MUL0621.json +test PMUL0012.json +test SNG0659.json +test MUL0230.json +test SNG0767.json +test MUL0528.json +test PMUL1118.json +test MUL2284.json +test MUL0388.json +test SNG0016.json +test SNG1041.json +test SNG0305.json +test PMUL2513.json +test PMUL3834.json +test MUL0841.json +test MUL0912.json +test MUL0450.json +test PMUL1470.json +test PMUL4622.json +test MUL2001.json +test PMUL1183.json +test PMUL0685.json +test SNG1105.json +test PMUL4542.json +test MUL1189.json +test PMUL2146.json +test SNG0256.json +test PMUL1526.json +test PMUL2403.json +test MUL0555.json +test MUL0772.json +test PMUL1172.json +test PMUL4366.json +test SNG01534.json +test MUL0803.json +test PMUL0267.json +test SNG1075.json +test PMUL4001.json +test PMUL1869.json +test SNG0055.json +test PMUL2945.json +test MUL1064.json +test MUL0628.json +test MUL0116.json +test PMUL1537.json +test PMUL1046.json +test MUL0992.json +test MUL1422.json +test MUL0346.json +test PMUL1087.json +test SNG0933.json +test PMUL4644.json +test PMUL3913.json +test PMUL3919.json +test MUL1392.json +test PMUL1844.json +test MUL1596.json +test MUL1478.json +test PMUL2279.json +test SNG0644.json +test PMUL4648.json +test PMUL4122.json +test SNG0391.json +test MUL1836.json +test MUL0822.json +test MUL1211.json +test PMUL3145.json +test PMUL4626.json +test PMUL2437.json +test SNG0721.json +test PMUL1283.json +test PMUL3723.json +test PMUL2457.json +test MUL2491.json +test MUL0738.json +test MUL1076.json +test MUL2609.json +test SNG0964.json +test MUL0638.json +test SNG01492.json +test PMUL4259.json +test PMUL3066.json +test PMUL1809.json +test MUL2376.json +test MUL1753.json +test PMUL0265.json +test MUL1024.json +test SNG01733.json +test MUL1274.json +test PMUL4905.json +test MUL1159.json +test MUL0080.json +test PMUL4140.json +test SNG0078.json +test MUL0843.json +test MUL2567.json +test PMUL2703.json +test SNG01767.json +test PMUL1373.json +test PMUL3815.json +test PMUL0875.json +test PMUL4672.json +test PMUL2778.json +test PMUL3495.json +test MUL2139.json +test PMUL3156.json +test SNG0004.json +test PMUL1521.json +test MUL1638.json +test PMUL0982.json +test MUL2386.json +test PMUL2123.json +test MUL1350.json +test SNG0293.json +test PMUL1105.json +test SNG02018.json +test MUL1212.json +test SNG01332.json +test PMUL1148.json +test SNG0483.json +test PMUL3890.json +test PMUL4356.json +test PMUL3162.json +test SNG01679.json +test MUL2542.json +test PMUL3494.json +test MUL2151.json +test PMUL3239.json +test PMUL3742.json +test PMUL1895.json +test PMUL4362.json +test SNG0781.json +test SNG01538.json +test PMUL1486.json +test PMUL2174.json +test PMUL0095.json +test MUL0316.json +test PMUL2708.json +test SNG0897.json +test PMUL3247.json +test PMUL3523.json +test MUL2359.json +test PMUL3520.json +test MUL2060.json +test MUL2358.json +test PMUL0069.json +test PMUL2869.json +test SNG0661.json +test MUL1838.json +test MUL0831.json +test MUL2405.json +test PMUL0117.json +test PMUL0182.json +test MUL1493.json +test MUL0890.json +test SNG01165.json +test SNG0690.json +test PMUL3423.json +test PMUL4258.json +test MUL0369.json +test MUL0838.json +test SNG1066.json +test PMUL1435.json +test SNG1078.json +test PMUL3957.json +test MUL1240.json +test MUL0947.json +test MUL1606.json +test MUL0594.json +test PMUL1854.json +test SNG0830.json +test PMUL3301.json +test PMUL2286.json +test SNG0898.json +test MUL0682.json +test MUL0990.json +test PMUL4731.json +test 
SNG01323.json +test SNG0515.json +test SNG1086.json +test SNG01898.json +test PMUL4034.json +test PMUL4504.json +test SNG0678.json +test PMUL2983.json +test SNG0954.json +test MUL2051.json +test PMUL0615.json +test MUL0383.json +test MUL1455.json +test PMUL4054.json +test SNG0892.json +test PMUL3171.json +test PMUL4713.json +test SNG01272.json +test MUL0011.json +test SNG01530.json +test MUL1870.json +test PMUL3946.json +test MUL1828.json +test SNG01686.json +test PMUL1811.json +test MUL2665.json +test SNG0689.json +test PMUL0994.json +test MUL1624.json +test PMUL3328.json +test PMUL1987.json +test MUL1091.json +test MUL0570.json +test PMUL3731.json +test PMUL1944.json +test PMUL0558.json +test PMUL1332.json +test MUL1766.json +test MUL1066.json +test PMUL3668.json +test MUL2086.json +test MUL0760.json +test PMUL3785.json +test SNG0874.json +test PMUL1330.json +test PMUL2980.json +test PMUL0599.json +test MUL0379.json +test SNG01386.json +test PMUL2452.json +test PMUL0745.json +test SNG01943.json +test MUL0761.json +test PMUL2558.json +test MUL0222.json +test PMUL1247.json +test PMUL4515.json +test PMUL3376.json +test PMUL1241.json +test PMUL1136.json +test PMUL0899.json +test MUL1258.json +test SNG0994.json +test PMUL3918.json +test PMUL2483.json +test MUL0286.json +test PMUL3439.json +test PMUL3107.json +test SNG01673.json +test MUL1860.json +test PMUL2704.json +test PMUL1983.json +test SNG0586.json +test MUL1410.json +test SNG0867.json +test MUL1746.json +test MUL2290.json +test PMUL1853.json +test MUL2162.json +test PMUL4131.json +test SNG0412.json +test MUL1692.json +test SNG02240.json +test SNG0742.json +test PMUL1347.json +test SNG01873.json +test SNG0991.json +test PMUL3897.json +test MUL1855.json +test MUL2657.json +test PMUL4011.json +test MUL1342.json +test MUL1986.json +test MUL2137.json +test SNG02198.json +test MUL1739.json +test MUL0309.json +test PMUL1600.json +test MUL2630.json +test SNG0007.json +test SNG0908.json +test PMUL0630.json +test MUL2074.json +test SNG0610.json +test PMUL4343.json +test PMUL2205.json +test MUL1712.json +test PMUL1804.json +test MUL0014.json +test MUL0341.json +test MUL1657.json +test PMUL4641.json +test MUL1077.json +test PMUL4106.json +test MUL0473.json +test PMUL1981.json +test PMUL3283.json +test PMUL3085.json +test MUL1192.json +test PMUL2000.json +test SNG0855.json +test SNG1048.json +test MUL0099.json +test PMUL4306.json +test MUL0941.json +test MUL2317.json +test PMUL4318.json +test SNG01983.json +test PMUL2497.json +test MUL1787.json +test SNG0518.json +test PMUL3976.json +test MUL1466.json +test MUL0845.json +test PMUL4524.json +test PMUL0782.json +test SNG1126.json +test MUL0492.json +test PMUL4432.json +test SNG0338.json +test PMUL2848.json +test MUL1560.json +test MUL0739.json +test MUL1763.json +test PMUL4078.json +test PMUL1657.json +test PMUL1109.json +test SNG01755.json +test MUL0332.json +test MUL0113.json +test MUL1800.json +test PMUL4819.json +test PMUL1779.json +test MUL2432.json +test SNG02207.json +test MUL2658.json +test MUL2099.json +test PMUL0674.json +test MUL1983.json +test MUL1071.json +test PMUL3160.json +test PMUL4044.json +test SNG0280.json +test MUL1664.json +test PMUL3868.json +test SNG0429.json +test PMUL3886.json +test PMUL2756.json +test SNG0701.json +test SNG0755.json +test MUL2225.json +test SNG01907.json +test MUL1833.json +test PMUL3662.json +test PMUL0938.json +test MUL0669.json +test SNG1090.json +test MUL0533.json +test MUL1799.json +test PMUL4636.json +test PMUL2755.json +test SNG01919.json +test 
PMUL4326.json +test PMUL1273.json +test PMUL1311.json +test PMUL4780.json +test SNG01924.json +test PMUL3127.json +test PMUL3514.json +test SNG0572.json +test MUL2063.json +test PMUL3907.json +test MUL1417.json +test PMUL0441.json +test SNG01683.json +test MUL2122.json +test PMUL1998.json +test SNG0468.json +test MUL0787.json +test SNG1026.json +test MUL2347.json +test PMUL3685.json +test PMUL3275.json +test PMUL0522.json +test PMUL1834.json +test PMUL1232.json +test PMUL1980.json +test MUL2378.json +test PMUL2380.json +test MUL0654.json +test SNG0649.json +test PMUL4800.json +test PMUL3126.json +test PMUL2209.json +test SNG0322.json +test MUL1650.json +test MUL1088.json +test MUL0939.json +test SNG0772.json +test SNG0446.json +test PMUL3748.json +test SNG0611.json +test MUL2204.json +test SNG01155.json +test PMUL4756.json +test PMUL3576.json +test SNG0005.json +test MUL1759.json +test PMUL4958.json +test SNG0547.json +test PMUL4333.json +test SNG0805.json +test MUL2148.json +test PMUL0205.json +test MUL0575.json +test PMUL1256.json +test SNG0477.json +test MUL1901.json +test PMUL3557.json +test PMUL3506.json +test PMUL2898.json +test PMUL4941.json +test PMUL3875.json +test MUL0373.json +test PMUL3465.json +test PMUL0457.json +test MUL0828.json +test MUL0034.json +test MUL2415.json +test SNG01752.json +test PMUL3348.json +test MUL1268.json +test PMUL1284.json +test SNG01797.json +test MUL2457.json +test PMUL3014.json +test MUL1818.json +test MUL1055.json +test MUL1505.json +test MUL0374.json +test MUL2177.json +test PMUL1982.json +test PMUL1420.json +test PMUL4383.json +test PMUL0076.json +test MUL0389.json +test SNG01380.json +test MUL0353.json +test PMUL4388.json +test PMUL1739.json +test PMUL1200.json +test PMUL1276.json +test PMUL0506.json +test PMUL3364.json +test PMUL3425.json +test PMUL2311.json +test MUL0821.json +test PMUL3549.json +test SNG1036.json +test SNG01850.json +test SNG0529.json +test PMUL3663.json +test PMUL3415.json +test PMUL2729.json +test PMUL4911.json +test MUL2686.json +test MUL1228.json +test SNG0983.json +test PMUL1775.json +test SNG0099.json +test PMUL4050.json +test SNG0348.json +test MUL0172.json +test SNG02202.json +test SNG01551.json +test SNG0941.json +test MUL0021.json +test SNG1012.json +test PMUL3521.json +test MUL0901.json +test PMUL1762.json +test MUL2482.json +test MUL0228.json +test MUL0818.json +test SNG0779.json +test MUL0515.json +test PMUL3707.json +test SNG0590.json +test PMUL3596.json +test SNG0797.json +test PMUL4357.json +test PMUL4605.json +test PMUL4303.json +test SNG0069.json +test PMUL0518.json +test MUL2197.json +test PMUL2239.json +test MUL2675.json +test PMUL1883.json +test PMUL2578.json +test MUL1059.json +test PMUL1137.json +test SNG0284.json +test SNG0500.json +test MUL2637.json +test PMUL4234.json +test SNG0600.json +test MUL2009.json +test PMUL0873.json +test MUL0239.json +test SNG0962.json +test PMUL2351.json +test MUL1365.json +test MUL2316.json +test PMUL1036.json +test MUL1117.json +test SNG02319.json +test PMUL4884.json +test PMUL4316.json +test SNG0714.json +test PMUL0048.json +test PMUL3649.json +test SNG0979.json +test MUL1139.json +test MUL2077.json +test PMUL4894.json +test PMUL0286.json +test SNG0782.json +test SNG0448.json +test SNG0531.json +test MUL0896.json +test SNG0345.json +test SNG0061.json +test SNG01940.json +test MUL1276.json +test SNG0822.json +test PMUL4547.json +test SNG0768.json +test MUL1278.json +test MUL0540.json +test PMUL4569.json +test MUL0004.json +test MUL0694.json +test SNG0775.json +test 
PMUL3158.json +test SNG1016.json +test SNG01784.json +test MUL2523.json +test SNG0289.json +test MUL0536.json +test PMUL2080.json +test PMUL3935.json +test PMUL1370.json +test SNG0459.json +test MUL0144.json +test MUL2301.json +test PMUL0262.json +test PMUL4176.json +test MUL1273.json +test SNG1147.json +test PMUL1952.json +test MUL1475.json +test MUL0613.json +test SNG0866.json +test SNG01366.json +test PMUL3403.json +test PMUL3293.json +test PMUL2433.json +test PMUL2933.json +test SNG0360.json +test MUL0352.json +test PMUL4220.json +test SNG0616.json +test PMUL3708.json +test MUL1690.json +test PMUL0768.json +test PMUL2489.json +test PMUL4224.json +test PMUL2124.json +test MUL1508.json +test PMUL1106.json +test SNG0636.json +test MUL1756.json +test PMUL3688.json +test PMUL1147.json +test PMUL1091.json +test PMUL3217.json +test PMUL0276.json +test MUL1678.json +test SNG0263.json +test SNG01391.json +test PMUL4246.json +test SNG01819.json +test PMUL3931.json +test SNG0803.json +test SNG02342.json +test MUL0849.json +test PMUL3282.json +test PMUL4564.json +test PMUL0732.json +test SNG1091.json +test PMUL1359.json +test MUL1122.json +test MUL1546.json +test MUL0198.json +test PMUL4930.json +test PMUL2362.json +test PMUL1008.json +test PMUL4919.json +test MUL1718.json +test PMUL1342.json +test MUL1898.json +test PMUL4344.json +test MUL1418.json +test SNG0681.json +test SNG0433.json +test SNG0471.json +test MUL1844.json +test SNG1070.json +test SNG01957.json +test MUL1607.json +test MUL0199.json +test MUL0297.json +test PMUL2942.json +test SNG0601.json +test MUL1981.json +test SNG0527.json +test MUL2254.json +test MUL0306.json +test MUL2228.json +test MUL0641.json +test PMUL1194.json +test MUL0527.json +dev PMUL0698.json +dev PMUL3233.json +dev SNG01627.json +dev MUL1719.json +dev MUL0242.json +dev PMUL1072.json +dev PMUL3048.json +dev PMUL1100.json +dev PMUL3979.json +dev MUL1409.json +dev PMUL4828.json +dev SNG0329.json +dev PMUL3314.json +dev MUL1768.json +dev MUL0293.json +dev PMUL0420.json +dev PMUL0858.json +dev MUL1367.json +dev MUL1271.json +dev PMUL0928.json +dev MUL1589.json +dev PMUL3200.json +dev MUL0398.json +dev SNG01735.json +dev PMUL4290.json +dev SNG0551.json +dev MUL2384.json +dev SNG01993.json +dev PMUL2235.json +dev PMUL4075.json +dev PMUL0724.json +dev MUL2160.json +dev PMUL1402.json +dev PMUL1152.json +dev PMUL1121.json +dev SNG02071.json +dev PMUL3215.json +dev PMUL4833.json +dev MUL0344.json +dev MUL2418.json +dev PMUL1181.json +dev MUL1604.json +dev PMUL0287.json +dev MUL2064.json +dev PMUL4581.json +dev MUL1888.json +dev MUL1603.json +dev PMUL1591.json +dev MUL2393.json +dev MUL0300.json +dev PMUL3406.json +dev MUL2092.json +dev PMUL3428.json +dev SNG0899.json +dev MUL1503.json +dev MUL2470.json +dev MUL2361.json +dev SNG0759.json +dev PMUL0590.json +dev SNG01598.json +dev MUL0126.json +dev MUL1973.json +dev PMUL1951.json +dev PMUL4993.json +dev PMUL2307.json +dev MUL1920.json +dev SNG01184.json +dev MUL0960.json +dev MUL2366.json +dev PMUL1429.json +dev PMUL0564.json +dev SNG1049.json +dev PMUL4688.json +dev SNG02346.json +dev MUL0696.json +dev MUL0899.json +dev PMUL0601.json +dev MUL0452.json +dev MUL1923.json +dev MUL2387.json +dev PMUL1635.json +dev PMUL2412.json +dev PMUL3123.json +dev PMUL0508.json +dev PMUL1917.json +dev SNG0665.json +dev SNG0374.json +dev PMUL2804.json +dev PMUL4612.json +dev PMUL4846.json +dev MUL0985.json +dev PMUL2464.json +dev MUL0676.json +dev MUL0754.json +dev MUL0509.json +dev SNG0041.json +dev SNG0651.json +dev PMUL0529.json +dev 
PMUL3891.json +dev MUL2117.json +dev MUL2222.json +dev PMUL4426.json +dev PMUL2713.json +dev SNG02221.json +dev PMUL2179.json +dev MUL1022.json +dev PMUL2973.json +dev PMUL4359.json +dev PMUL1653.json +dev PMUL3779.json +dev MUL0602.json +dev PMUL4687.json +dev SNG1046.json +dev MUL0559.json +dev MUL2372.json +dev SNG0807.json +dev PMUL1990.json +dev PMUL3817.json +dev PMUL1620.json +dev MUL0142.json +dev SNG1143.json +dev MUL0425.json +dev PMUL1257.json +dev MUL0962.json +dev MUL1245.json +dev PMUL1953.json +dev SNG0038.json +dev MUL0631.json +dev MUL1472.json +dev MUL2258.json +dev PMUL0759.json +dev MUL0791.json +dev MUL0627.json +dev PMUL0569.json +dev MUL0476.json +dev MUL1977.json +dev MUL0129.json +dev PMUL2394.json +dev MUL2644.json +dev MUL1286.json +dev PMUL0032.json +dev PMUL1768.json +dev PMUL3574.json +dev SNG0535.json +dev MUL1881.json +dev PMUL4414.json +dev MUL1347.json +dev MUL0773.json +dev SNG0939.json +dev MUL1415.json +dev PMUL3899.json +dev MUL0751.json +dev SNG1007.json +dev MUL2324.json +dev PMUL4198.json +dev MUL0737.json +dev MUL1626.json +dev MUL1379.json +dev SNG0588.json +dev SNG0521.json +dev MUL1407.json +dev PMUL4826.json +dev PMUL3591.json +dev MUL2487.json +dev PMUL4354.json +dev MUL0362.json +dev MUL1370.json +dev PMUL1393.json +dev PMUL0959.json +dev SNG0922.json +dev PMUL4881.json +dev PMUL2617.json +dev MUL1652.json +dev PMUL4471.json +dev MUL2670.json +dev PMUL2478.json +dev PMUL0729.json +dev MUL1647.json +dev PMUL2532.json +dev MUL2495.json +dev PMUL1132.json +dev PMUL0623.json +dev PMUL3806.json +dev PMUL2282.json +dev SNG01229.json +dev PMUL1579.json +dev SNG02105.json +dev PMUL3073.json +dev PMUL2219.json +dev SNG0698.json +dev PMUL4719.json +dev SNG01903.json +dev MUL1837.json +dev MUL1426.json +dev MUL2291.json +dev SNG01586.json +dev PMUL3607.json +dev PMUL3744.json +dev PMUL4431.json +dev MUL1900.json +dev SNG01777.json +dev SNG01609.json +dev MUL1078.json +dev MUL1125.json +dev PMUL1310.json +dev PMUL0912.json +dev MUL0622.json +dev SNG1055.json +dev MUL2612.json +dev MUL2277.json +dev MUL2016.json +dev PMUL2894.json +dev SNG0900.json +dev PMUL2504.json +dev SNG01579.json +dev PMUL3110.json +dev PMUL3169.json +dev PMUL3410.json +dev MUL2351.json +dev MUL1131.json +dev SNG1047.json +dev PMUL4686.json +dev MUL0635.json +dev MUL2526.json +dev SNG0414.json +dev MUL1975.json +dev MUL1160.json +dev MUL0915.json +dev SNG01297.json +dev PMUL1408.json +dev SNG1011.json +dev MUL0625.json +dev MUL1995.json +dev MUL0663.json +dev PMUL0589.json +dev PMUL0653.json +dev PMUL3702.json +dev PMUL3735.json +dev PMUL3244.json +dev SNG0603.json +dev SNG0068.json +dev SNG0080.json +dev PMUL1968.json +dev SNG0299.json +dev SNG01595.json +dev MUL0675.json +dev MUL0681.json +dev MUL0673.json +dev PMUL1322.json +dev SNG0891.json +dev MUL1292.json +dev PMUL4289.json +dev PMUL0512.json +dev PMUL1389.json +dev PMUL3456.json +dev PMUL4939.json +dev MUL1341.json +dev MUL0426.json +dev PMUL0294.json +dev MUL2282.json +dev MUL2013.json +dev PMUL0716.json +dev MUL0637.json +dev PMUL4711.json +dev PMUL1453.json +dev PMUL0743.json +dev PMUL3362.json +dev PMUL0984.json +dev MUL2251.json +dev MUL1673.json +dev PMUL1108.json +dev PMUL3002.json +dev PMUL1029.json +dev PMUL2255.json +dev PMUL0605.json +dev MUL2041.json +dev PMUL0575.json +dev SNG0283.json +dev PMUL2863.json +dev PMUL0936.json +dev MUL1381.json +dev PMUL0025.json +dev MUL1963.json +dev MUL2111.json +dev PMUL3106.json +dev PMUL2711.json +dev PMUL2021.json +dev PMUL0356.json +dev SNG0494.json +dev PMUL3888.json +dev 
PMUL1018.json +dev MUL1463.json +dev MUL0065.json +dev PMUL3756.json +dev PMUL2204.json +dev MUL0878.json +dev MUL2389.json +dev MUL2000.json +dev PMUL1557.json +dev MUL1368.json +dev MUL1740.json +dev PMUL3434.json +dev PMUL3072.json +dev SNG0934.json +dev MUL2640.json +dev PMUL2320.json +dev PMUL4853.json +dev MUL0107.json +dev PMUL4287.json +dev PMUL4482.json +dev MUL0219.json +dev PMUL3466.json +dev MUL2173.json +dev MUL2082.json +dev MUL0493.json +dev MUL1843.json +dev PMUL0010.json +dev MUL0386.json +dev MUL2096.json +dev MUL1564.json +dev PMUL3384.json +dev PMUL0596.json +dev PMUL4588.json +dev PMUL0622.json +dev MUL0490.json +dev SNG0845.json +dev PMUL2421.json +dev SNG0437.json +dev MUL0959.json +dev SNG0883.json +dev SNG01235.json +dev PMUL1791.json +dev PMUL1289.json +dev PMUL3835.json +dev SNG1125.json +dev MUL0971.json +dev MUL2467.json +dev MUL1288.json +dev PMUL2386.json +dev SNG0344.json +dev SNG0896.json +dev PMUL3554.json +dev SNG0942.json +dev PMUL1684.json +dev PMUL1892.json +dev PMUL0289.json +dev MUL2065.json +dev SNG0996.json +dev MUL2083.json +dev PMUL2314.json +dev SNG1023.json +dev MUL2690.json +dev PMUL3332.json +dev SNG01934.json +dev PMUL4793.json +dev PMUL2428.json +dev PMUL3537.json +dev MUL1854.json +dev PMUL2762.json +dev SNG01201.json +dev MUL1221.json +dev MUL0719.json +dev PMUL0717.json +dev PMUL0400.json +dev PMUL0874.json +dev PMUL1683.json +dev MUL0713.json +dev MUL1808.json +dev PMUL1063.json +dev MUL1967.json +dev SNG0650.json +dev MUL1976.json +dev MUL1450.json +dev SNG0542.json +dev SNG0913.json +dev MUL0338.json +dev PMUL3645.json +dev SNG0937.json +dev MUL1908.json +dev SNG01206.json +dev SNG0751.json +dev SNG01504.json +dev PMUL2906.json +dev MUL2330.json +dev PMUL4120.json +dev SNG0018.json +dev SNG01172.json +dev MUL2263.json +dev SNG0314.json +dev MUL1256.json +dev PMUL4864.json +dev PMUL1879.json +dev SNG0490.json +dev MUL0657.json +dev PMUL2383.json +dev PMUL0142.json +dev PMUL2839.json +dev MUL0478.json +dev MUL0401.json +dev PMUL0543.json +dev PMUL1016.json +dev PMUL4451.json +dev SNG01626.json +dev MUL2249.json +dev PMUL1831.json +dev PMUL4439.json +dev PMUL0835.json +dev MUL1025.json +dev PMUL3665.json +dev SNG0565.json +dev PMUL2350.json +dev PMUL3405.json +dev MUL0287.json +dev PMUL1057.json +dev SNG01630.json +dev MUL0135.json +dev PMUL3485.json +dev PMUL0697.json +dev PMUL3396.json +dev PMUL3017.json +dev MUL1738.json +dev SNG0384.json +dev PMUL3397.json +dev PMUL1159.json +dev PMUL1518.json +dev PMUL4276.json +dev MUL1934.json +dev MUL0430.json +dev PMUL0348.json +dev PMUL4109.json +dev MUL0154.json +dev MUL1227.json +dev PMUL3363.json +dev SNG0829.json +dev PMUL2623.json +dev PMUL1795.json +dev PMUL3464.json +dev MUL1360.json +dev MUL0839.json +dev SNG0090.json +dev MUL0603.json +dev MUL1780.json +dev SNG01935.json +dev MUL0995.json +dev MUL1130.json +dev PMUL3634.json +dev SNG0893.json +dev MUL1651.json +dev SNG1122.json +dev PMUL3695.json +dev SNG01889.json +dev MUL0745.json +dev MUL1879.json +dev PMUL1771.json +dev PMUL4053.json +dev SNG01317.json +dev SNG0554.json +dev PMUL4620.json +dev MUL1492.json +dev MUL0549.json +dev PMUL1747.json +dev PMUL2792.json +dev MUL0187.json +dev PMUL3740.json +dev MUL1487.json +dev MUL1249.json +dev PMUL2841.json +dev PMUL4814.json +dev MUL1457.json +dev PMUL1712.json +dev SNG0331.json +dev MUL2259.json +dev SNG0804.json +dev PMUL4332.json +dev MUL1540.json +dev MUL2638.json +dev SNG0058.json +dev PMUL0555.json +dev SNG0030.json +dev MUL2341.json +dev PMUL3683.json +dev SNG0993.json +dev 
PMUL1594.json +dev PMUL4877.json +dev MUL0731.json +dev PMUL4702.json +dev MUL1166.json +dev PMUL3052.json +dev MUL1576.json +dev SNG0501.json +dev PMUL4976.json +dev MUL1762.json +dev PMUL4729.json +dev PMUL1345.json +dev PMUL4424.json +dev SNG1043.json +dev SNG01684.json +dev MUL0178.json +dev MUL1916.json +dev MUL1549.json +dev MUL0311.json +dev SNG1069.json +dev MUL2574.json +dev PMUL4446.json +dev PMUL1681.json +dev MUL0305.json +dev SNG0422.json +dev MUL2108.json +dev MUL0051.json +dev SNG02311.json +dev PMUL1211.json +dev MUL0598.json +dev SNG0771.json +dev MUL0707.json +dev PMUL0432.json +dev MUL0544.json +dev SNG02214.json +dev PMUL1599.json +dev PMUL4314.json +dev PMUL1235.json +dev MUL0272.json +dev SNG1018.json +dev SNG0668.json +dev PMUL4302.json +dev SNG0697.json +dev PMUL0036.json +dev PMUL2441.json +dev SNG1017.json +dev SNG01553.json +dev SNG01426.json +dev PMUL0187.json +dev PMUL2301.json +dev MUL2344.json +dev PMUL4436.json +dev MUL0055.json +dev MUL1722.json +dev MUL0674.json +dev PMUL2748.json +dev PMUL3630.json +dev SNG01804.json +dev MUL2289.json +dev PMUL2472.json +dev MUL2468.json +dev PMUL1744.json +dev PMUL2757.json +dev MUL1590.json +dev MUL1146.json +dev PMUL2494.json +dev SNG0442.json +dev SNG0769.json +dev SNG01664.json +dev MUL0046.json +dev SNG1035.json +dev PMUL4623.json +dev MUL0302.json +dev SNG02278.json +dev PMUL3392.json +dev PMUL0739.json +dev PMUL4033.json +dev MUL2448.json +dev MUL1338.json +dev PMUL0689.json +dev PMUL3883.json +dev PMUL1496.json +dev SNG0277.json +dev SNG01453.json +dev SNG02220.json +dev PMUL0083.json +dev PMUL3911.json +dev PMUL2648.json +dev MUL0032.json +dev MUL0337.json +dev PMUL4592.json +dev MUL0095.json +dev PMUL4264.json +dev PMUL3344.json +dev SNG0660.json +dev MUL1615.json +dev MUL1034.json +dev MUL1550.json +dev MUL0012.json +dev MUL0093.json +dev SNG0368.json +dev MUL1903.json +dev MUL2677.json +dev PMUL1881.json +dev PMUL3015.json +dev MUL2456.json +dev PMUL3426.json +dev SNG01218.json +dev MUL1517.json +dev MUL0650.json +dev PMUL1142.json +dev PMUL4955.json +dev PMUL2599.json +dev MUL1433.json +dev MUL0326.json +dev PMUL2807.json +dev PMUL0626.json +dev SNG01703.json +dev MUL0384.json +dev PMUL4467.json +dev SNG02109.json +dev SNG01953.json +dev SNG01928.json +dev MUL0702.json +dev PMUL0515.json +dev MUL0806.json +dev PMUL1052.json +dev MUL0202.json +dev PMUL4744.json +dev PMUL4126.json +dev PMUL1346.json +dev SNG01527.json +dev MUL1617.json +dev MUL1128.json +dev MUL1655.json +dev PMUL3109.json +dev PMUL3050.json +dev SNG0017.json +dev PMUL3884.json +dev MUL2199.json +dev SNG0381.json +dev SNG0463.json +dev MUL2268.json +dev PMUL2324.json +dev SNG02115.json +dev MUL0923.json +dev SNG02052.json +dev PMUL2075.json +dev PMUL4485.json +dev PMUL0134.json +dev MUL2152.json +dev MUL1968.json +dev MUL0013.json +dev MUL1462.json +dev MUL0513.json +dev PMUL2248.json +dev PMUL1661.json +dev PMUL0207.json +dev MUL2649.json +dev MUL0227.json +dev SNG0711.json +dev PMUL0690.json +dev PMUL4984.json +dev PMUL2594.json +dev MUL0539.json +dev PMUL4707.json +dev MUL0651.json +dev PMUL3453.json +dev PMUL3632.json +dev PMUL1451.json +dev PMUL1282.json +dev PMUL4076.json +dev PMUL0863.json +dev SNG01523.json +dev MUL0512.json +dev MUL1042.json +dev SNG02029.json +dev SNG02121.json +dev PMUL0607.json +dev PMUL2144.json +dev MUL1382.json +dev SNG02260.json +dev PMUL2763.json +dev MUL1044.json +dev MUL1531.json +dev PMUL0978.json +dev PMUL2304.json +dev PMUL2259.json +dev SNG1119.json +dev PMUL2105.json +dev PMUL3498.json +dev MUL0313.json 
+dev MUL2618.json +dev SNG0550.json +dev MUL0505.json +dev MUL0317.json +dev MUL2020.json +dev SNG0049.json +dev PMUL2585.json +dev PMUL4913.json +dev PMUL0687.json +dev MUL0376.json +dev MUL2360.json +dev MUL1158.json +dev PMUL3324.json +dev SNG0684.json +dev PMUL0958.json +dev PMUL4244.json +dev PMUL2718.json +dev MUL0418.json +dev PMUL1644.json +dev MUL0207.json +dev PMUL1464.json +dev SNG0949.json +dev PMUL2844.json +dev PMUL4386.json +dev PMUL1054.json +dev MUL0819.json +dev PMUL1517.json +dev SNG0846.json +dev PMUL4005.json +dev PMUL0124.json +dev PMUL1722.json +dev SNG1085.json +dev PMUL0705.json +dev MUL0047.json +dev SNG1121.json +dev SNG1050.json +dev MUL1831.json +dev PMUL4791.json +dev SNG02057.json +dev SNG1002.json +dev MUL1622.json +dev PMUL2724.json +dev PMUL4473.json +dev SNG0862.json +dev MUL0209.json +dev PMUL3862.json +dev SNG0609.json +dev PMUL0571.json +dev PMUL4583.json +dev PMUL4555.json +dev PMUL2385.json +dev SNG0019.json +dev PMUL2721.json +dev PMUL1071.json +dev PMUL1274.json +dev PMUL4399.json +dev SNG01839.json +dev MUL0703.json +dev PMUL2880.json +dev PMUL3174.json +dev PMUL2786.json +dev MUL0177.json +dev MUL0463.json +dev PMUL3644.json +dev SNG0271.json +dev PMUL4060.json +dev SNG0889.json +dev MUL0082.json +dev MUL0445.json +dev MUL0871.json +dev MUL2683.json +dev PMUL2389.json +dev MUL2541.json +dev SNG0060.json +dev PMUL2970.json +dev SNG0736.json +dev MUL1758.json +dev MUL1571.json +dev MUL2307.json +dev MUL0036.json +dev PMUL2661.json +dev MUL0706.json +dev PMUL2178.json +dev PMUL3352.json +dev SNG0703.json +dev MUL0848.json +dev SNG0566.json +dev MUL1635.json +dev SNG0910.json +dev PMUL0740.json +dev SNG01196.json +dev PMUL2518.json +dev SNG01411.json +dev MUL2587.json +dev PMUL0266.json +dev PMUL2338.json +dev SNG0386.json +dev PMUL2154.json +dev PMUL3105.json +dev SNG0457.json +dev MUL2309.json +dev PMUL2747.json +dev MUL1476.json +dev PMUL3966.json +dev PMUL0521.json +dev MUL2271.json +dev MUL1897.json +dev PMUL3923.json +dev MUL1167.json +dev PMUL2520.json +dev SNG1019.json +dev SNG0700.json +dev MUL0610.json +dev PMUL3262.json +dev MUL1318.json +dev MUL0312.json +dev PMUL4850.json +dev SNG0497.json +dev SNG0907.json +dev MUL0957.json +dev MUL1133.json +dev MUL1453.json +dev SNG02296.json +dev MUL1419.json +dev MUL0571.json +dev PMUL4022.json +dev PMUL1624.json +dev PMUL3820.json +dev MUL1904.json +dev PMUL3880.json +dev MUL1295.json +dev PMUL3797.json +dev SNG0567.json +dev PMUL3166.json +dev SNG01320.json +dev MUL1858.json +dev MUL1500.json +dev SNG1106.json +dev MUL1247.json +dev MUL1297.json +dev PMUL3983.json +dev PMUL2565.json +dev MUL0489.json +dev MUL1556.json +dev SNG0633.json +dev MUL1884.json +dev MUL1972.json +dev SNG01698.json +dev PMUL1318.json +dev MUL0182.json +dev PMUL4522.json +dev MUL2421.json +dev PMUL4492.json +dev PMUL2968.json +dev MUL1162.json +dev SNG0406.json +dev PMUL4107.json +dev PMUL4400.json +dev MUL0206.json +dev MUL0106.json +dev PMUL3497.json +dev SNG01227.json +dev PMUL3776.json +dev PMUL1352.json +dev PMUL3470.json +dev PMUL4691.json +dev PMUL3840.json +dev MUL1770.json +dev PMUL1615.json +dev MUL2688.json +dev MUL0229.json +dev PMUL3929.json +dev PMUL2576.json +dev PMUL2780.json +dev SNG01996.json +dev PMUL2306.json +dev SNG0752.json +dev MUL0321.json +dev MUL2486.json +dev MUL1594.json +dev SNG1142.json +dev MUL2489.json +dev MUL1676.json +dev PMUL1479.json +dev PMUL4438.json +dev SNG1071.json +dev SNG0084.json +dev MUL0348.json +dev MUL2223.json +dev PMUL2260.json +dev MUL2004.json +dev MUL0830.json +dev 
MUL1003.json +dev MUL0893.json +dev MUL0060.json +dev MUL2319.json +dev SNG01974.json +dev MUL0687.json +dev PMUL2821.json +dev PMUL0318.json +dev MUL2615.json +dev MUL0368.json +dev MUL2097.json +dev SNG02077.json +dev SNG02274.json +dev PMUL1104.json +dev PMUL1828.json +dev PMUL3974.json +dev SNG0843.json +dev MUL0580.json +dev MUL1663.json +dev PMUL4488.json +dev MUL2507.json +dev PMUL0389.json +dev PMUL0574.json +dev MUL1126.json +dev SNG01284.json +dev PMUL1233.json +dev PMUL3926.json +dev MUL0759.json +dev SNG02330.json +dev PMUL3928.json +dev SNG0968.json +dev MUL2699.json +dev MUL1636.json +dev PMUL2992.json +dev PMUL2986.json +dev MUL0276.json +dev PMUL1721.json +dev SNG0770.json +dev MUL1357.json +dev PMUL2363.json +dev SNG0597.json +dev PMUL1636.json +dev PMUL1186.json +dev MUL1821.json +dev MUL0049.json +dev PMUL3766.json +dev MUL0691.json +dev SNG02341.json +dev SNG0447.json +dev MUL0729.json +dev SNG0694.json +dev SNG01699.json +dev PMUL3117.json +dev PMUL1692.json +dev SNG0730.json +dev PMUL2088.json +dev MUL0244.json +dev PMUL0600.json +dev PMUL2368.json +dev SNG0783.json +dev MUL0933.json +dev PMUL1122.json +dev PMUL2190.json +dev PMUL2492.json +dev PMUL2382.json +dev SNG02089.json +dev SNG0340.json +dev PMUL1074.json +dev PMUL2274.json +dev PMUL2093.json +dev PMUL2902.json +dev SNG0495.json +dev PMUL4902.json +dev PMUL3133.json +dev MUL0888.json +dev MUL1174.json +dev PMUL0764.json +dev PMUL4554.json +dev SNG0475.json +dev PMUL3728.json +dev SNG02133.json +dev PMUL0987.json +dev MUL2100.json +dev SNG01962.json +dev PMUL2469.json +dev MUL0827.json +dev MUL0039.json +dev SNG0856.json +dev PMUL3320.json +dev MUL2039.json +dev PMUL4757.json +dev PMUL1133.json +dev MUL1971.json +dev PMUL4870.json +dev SNG0835.json +dev MUL0016.json +dev MUL2693.json +dev SNG01459.json +dev PMUL3850.json +dev SNG0670.json +dev SNG0327.json +dev MUL0517.json +dev SNG0369.json +dev PMUL3378.json +dev MUL2067.json +dev PMUL4634.json +dev PMUL0963.json +dev PMUL3379.json +dev MUL1988.json +dev MUL2184.json +dev MUL1873.json +dev PMUL3786.json +dev MUL2014.json +dev PMUL1650.json +dev MUL1326.json +dev SNG0921.json +dev MUL2058.json +dev MUL1183.json +dev PMUL0720.json +dev MUL0448.json +dev SNG02276.json +dev PMUL1184.json +dev MUL1482.json +dev PMUL1525.json +dev PMUL1140.json +dev PMUL0119.json +dev PMUL3544.json +dev MUL1669.json +dev PMUL4029.json +dev PMUL2975.json +dev SNG01801.json +dev PMUL1400.json +dev MUL0128.json +dev PMUL0077.json diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/swda.conf b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/swda.conf new file mode 100755 index 00000000..99d45354 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/conf/swda.conf @@ -0,0 +1,1134 @@ +train sw2005 +train sw2006 +train sw2008 +train sw2010 +train sw2012 +train sw2015 +train sw2018 +train sw2019 +train sw2020 +train sw2022 +train sw2024 +train sw2025 +train sw2027 +train sw2028 +train sw2032 +train sw2035 +train sw2038 +train sw2039 +train sw2040 +train sw2041 +train sw2051 +train sw2060 +train sw2061 +train sw2062 +train sw2064 +train sw2065 +train sw2073 +train sw2078 +train sw2079 +train sw2085 +train sw2086 +train sw2090 +train sw2092 +train sw2093 +train sw2094 +train sw2095 +train sw2101 +train sw2102 +train sw2104 +train sw2105 +train sw2107 +train sw2109 +train sw2110 +train sw2111 +train sw2113 +train sw2120 +train sw2122 +train sw2124 +train sw2125 +train sw2130 +train sw2137 
+train sw2139 +train sw2145 +train sw2149 +train sw2154 +train sw2155 +train sw2157 +train sw2168 +train sw2171 +train sw2177 +train sw2178 +train sw2180 +train sw2181 +train sw2184 +train sw2185 +train sw2187 +train sw2190 +train sw2191 +train sw2197 +train sw2205 +train sw2220 +train sw2221 +train sw2226 +train sw2227 +train sw2228 +train sw2231 +train sw2232 +train sw2234 +train sw2235 +train sw2237 +train sw2241 +train sw2244 +train sw2247 +train sw2248 +train sw2249 +train sw2252 +train sw2259 +train sw2260 +train sw2262 +train sw2263 +train sw2264 +train sw2265 +train sw2266 +train sw2268 +train sw2275 +train sw2278 +train sw2279 +train sw2283 +train sw2285 +train sw2287 +train sw2290 +train sw2292 +train sw2293 +train sw2295 +train sw2296 +train sw2300 +train sw2301 +train sw2302 +train sw2303 +train sw2304 +train sw2305 +train sw2308 +train sw2309 +train sw2313 +train sw2314 +train sw2316 +train sw2323 +train sw2324 +train sw2325 +train sw2330 +train sw2331 +train sw2334 +train sw2336 +train sw2339 +train sw2342 +train sw2344 +train sw2349 +train sw2353 +train sw2354 +train sw2355 +train sw2362 +train sw2365 +train sw2366 +train sw2368 +train sw2370 +train sw2372 +train sw2376 +train sw2379 +train sw2380 +train sw2382 +train sw2383 +train sw2386 +train sw2387 +train sw2389 +train sw2393 +train sw2397 +train sw2405 +train sw2406 +train sw2407 +train sw2413 +train sw2418 +train sw2421 +train sw2423 +train sw2424 +train sw2426 +train sw2427 +train sw2429 +train sw2431 +train sw2432 +train sw2433 +train sw2435 +train sw2436 +train sw2437 +train sw2439 +train sw2442 +train sw2445 +train sw2446 +train sw2448 +train sw2450 +train sw2451 +train sw2452 +train sw2457 +train sw2460 +train sw2465 +train sw2466 +train sw2467 +train sw2469 +train sw2471 +train sw2472 +train sw2476 +train sw2477 +train sw2478 +train sw2479 +train sw2482 +train sw2483 +train sw2485 +train sw2486 +train sw2488 +train sw2490 +train sw2492 +train sw2495 +train sw2499 +train sw2502 +train sw2504 +train sw2506 +train sw2510 +train sw2511 +train sw2514 +train sw2515 +train sw2519 +train sw2521 +train sw2524 +train sw2525 +train sw2526 +train sw2527 +train sw2528 +train sw2533 +train sw2537 +train sw2539 +train sw2540 +train sw2543 +train sw2545 +train sw2546 +train sw2547 +train sw2548 +train sw2549 +train sw2552 +train sw2554 +train sw2557 +train sw2559 +train sw2562 +train sw2565 +train sw2566 +train sw2568 +train sw2570 +train sw2571 +train sw2575 +train sw2576 +train sw2578 +train sw2579 +train sw2584 +train sw2585 +train sw2586 +train sw2587 +train sw2589 +train sw2597 +train sw2599 +train sw2602 +train sw2603 +train sw2604 +train sw2608 +train sw2609 +train sw2610 +train sw2611 +train sw2614 +train sw2615 +train sw2616 +train sw2617 +train sw2619 +train sw2622 +train sw2627 +train sw2628 +train sw2631 +train sw2634 +train sw2638 +train sw2640 +train sw2641 +train sw2642 +train sw2645 +train sw2647 +train sw2648 +train sw2650 +train sw2652 +train sw2657 +train sw2658 +train sw2661 +train sw2662 +train sw2663 +train sw2667 +train sw2669 +train sw2672 +train sw2675 +train sw2676 +train sw2678 +train sw2679 +train sw2684 +train sw2689 +train sw2690 +train sw2691 +train sw2692 +train sw2693 +train sw2703 +train sw2707 +train sw2708 +train sw2709 +train sw2710 +train sw2711 +train sw2716 +train sw2717 +train sw2719 +train sw2723 +train sw2726 +train sw2729 +train sw2734 +train sw2736 +train sw2741 +train sw2743 +train sw2744 +train sw2749 +train sw2751 +train sw2754 +train sw2756 +train sw2759 +train sw2761 +train 
sw2766 +train sw2767 +train sw2768 +train sw2770 +train sw2773 +train sw2774 +train sw2775 +train sw2780 +train sw2782 +train sw2784 +train sw2785 +train sw2788 +train sw2789 +train sw2792 +train sw2793 +train sw2794 +train sw2797 +train sw2800 +train sw2803 +train sw2806 +train sw2812 +train sw2818 +train sw2819 +train sw2820 +train sw2821 +train sw2826 +train sw2827 +train sw2828 +train sw2830 +train sw2834 +train sw2835 +train sw2837 +train sw2840 +train sw2844 +train sw2847 +train sw2849 +train sw2851 +train sw2858 +train sw2860 +train sw2862 +train sw2866 +train sw2868 +train sw2870 +train sw2871 +train sw2875 +train sw2876 +train sw2877 +train sw2879 +train sw2883 +train sw2884 +train sw2887 +train sw2893 +train sw2896 +train sw2897 +train sw2898 +train sw2900 +train sw2909 +train sw2910 +train sw2913 +train sw2915 +train sw2917 +train sw2921 +train sw2924 +train sw2926 +train sw2927 +train sw2929 +train sw2930 +train sw2932 +train sw2934 +train sw2935 +train sw2938 +train sw2942 +train sw2945 +train sw2950 +train sw2952 +train sw2953 +train sw2954 +train sw2955 +train sw2956 +train sw2957 +train sw2960 +train sw2962 +train sw2963 +train sw2965 +train sw2967 +train sw2968 +train sw2969 +train sw2970 +train sw2982 +train sw2983 +train sw2984 +train sw2991 +train sw2992 +train sw2993 +train sw2994 +train sw2995 +train sw2996 +train sw2998 +train sw2999 +train sw3000 +train sw3001 +train sw3002 +train sw3003 +train sw3004 +train sw3007 +train sw3009 +train sw3011 +train sw3012 +train sw3013 +train sw3014 +train sw3016 +train sw3018 +train sw3019 +train sw3020 +train sw3021 +train sw3023 +train sw3025 +train sw3028 +train sw3029 +train sw3030 +train sw3034 +train sw3036 +train sw3038 +train sw3039 +train sw3040 +train sw3041 +train sw3042 +train sw3045 +train sw3047 +train sw3049 +train sw3050 +train sw3051 +train sw3052 +train sw3054 +train sw3055 +train sw3056 +train sw3057 +train sw3059 +train sw3061 +train sw3062 +train sw3063 +train sw3064 +train sw3065 +train sw3067 +train sw3068 +train sw3069 +train sw3070 +train sw3071 +train sw3073 +train sw3074 +train sw3075 +train sw3076 +train sw3077 +train sw3080 +train sw3081 +train sw3082 +train sw3083 +train sw3085 +train sw3086 +train sw3087 +train sw3088 +train sw3090 +train sw3092 +train sw3093 +train sw3095 +train sw3097 +train sw3099 +train sw3102 +train sw3103 +train sw3104 +train sw3105 +train sw3107 +train sw3108 +train sw3111 +train sw3113 +train sw3115 +train sw3118 +train sw3120 +train sw3121 +train sw3124 +train sw3130 +train sw3131 +train sw3133 +train sw3134 +train sw3135 +train sw3136 +train sw3138 +train sw3140 +train sw3142 +train sw3143 +train sw3144 +train sw3146 +train sw3150 +train sw3151 +train sw3152 +train sw3154 +train sw3155 +train sw3158 +train sw3159 +train sw3161 +train sw3162 +train sw3166 +train sw3167 +train sw3168 +train sw3169 +train sw3170 +train sw3171 +train sw3173 +train sw3174 +train sw3175 +train sw3182 +train sw3185 +train sw3186 +train sw3187 +train sw3188 +train sw3189 +train sw3194 +train sw3195 +train sw3196 +train sw3198 +train sw3200 +train sw3201 +train sw3203 +train sw3204 +train sw3205 +train sw3206 +train sw3208 +train sw3214 +train sw3215 +train sw3216 +train sw3219 +train sw3221 +train sw3223 +train sw3225 +train sw3226 +train sw3227 +train sw3228 +train sw3229 +train sw3230 +train sw3231 +train sw3232 +train sw3233 +train sw3234 +train sw3235 +train sw3236 +train sw3237 +train sw3238 +train sw3242 +train sw3244 +train sw3245 +train sw3247 +train sw3252 +train sw3253 +train sw3254 
+train sw3256 +train sw3259 +train sw3260 +train sw3265 +train sw3266 +train sw3267 +train sw3268 +train sw3269 +train sw3270 +train sw3271 +train sw3272 +train sw3275 +train sw3276 +train sw3279 +train sw3280 +train sw3282 +train sw3283 +train sw3284 +train sw3286 +train sw3293 +train sw3294 +train sw3296 +train sw3300 +train sw3303 +train sw3304 +train sw3306 +train sw3309 +train sw3310 +train sw3311 +train sw3313 +train sw3315 +train sw3317 +train sw3319 +train sw3320 +train sw3324 +train sw3325 +train sw3326 +train sw3327 +train sw3328 +train sw3330 +train sw3331 +train sw3332 +train sw3333 +train sw3338 +train sw3340 +train sw3342 +train sw3343 +train sw3344 +train sw3345 +train sw3349 +train sw3351 +train sw3353 +train sw3355 +train sw3359 +train sw3360 +train sw3361 +train sw3362 +train sw3363 +train sw3364 +train sw3365 +train sw3367 +train sw3368 +train sw3369 +train sw3371 +train sw3372 +train sw3373 +train sw3375 +train sw3377 +train sw3379 +train sw3381 +train sw3383 +train sw3384 +train sw3386 +train sw3387 +train sw3389 +train sw3393 +train sw3397 +train sw3398 +train sw3399 +train sw3402 +train sw3403 +train sw3405 +train sw3406 +train sw3408 +train sw3409 +train sw3411 +train sw3414 +train sw3417 +train sw3419 +train sw3420 +train sw3421 +train sw3424 +train sw3425 +train sw3426 +train sw3427 +train sw3428 +train sw3429 +train sw3431 +train sw3435 +train sw3439 +train sw3441 +train sw3443 +train sw3447 +train sw3448 +train sw3449 +train sw3450 +train sw3451 +train sw3453 +train sw3454 +train sw3455 +train sw3457 +train sw3458 +train sw3460 +train sw3463 +train sw3464 +train sw3467 +train sw3473 +train sw3476 +train sw3487 +train sw3489 +train sw3495 +train sw3496 +train sw3503 +train sw3504 +train sw3508 +train sw3513 +train sw3514 +train sw3515 +train sw3517 +train sw3518 +train sw3521 +train sw3523 +train sw3524 +train sw3525 +train sw3526 +train sw3527 +train sw3530 +train sw3533 +train sw3535 +train sw3537 +train sw3539 +train sw3541 +train sw3543 +train sw3549 +train sw3550 +train sw3551 +train sw3556 +train sw3557 +train sw3561 +train sw3563 +train sw3565 +train sw3567 +train sw3569 +train sw3570 +train sw3573 +train sw3574 +train sw3580 +train sw3586 +train sw3591 +train sw3595 +train sw3596 +train sw3597 +train sw3606 +train sw3607 +train sw3615 +train sw3624 +train sw3626 +train sw3628 +train sw3633 +train sw3636 +train sw3638 +train sw3639 +train sw3642 +train sw3646 +train sw3647 +train sw3651 +train sw3655 +train sw3657 +train sw3660 +train sw3662 +train sw3663 +train sw3665 +train sw3676 +train sw3680 +train sw3681 +train sw3682 +train sw3688 +train sw3691 +train sw3692 +train sw3693 +train sw3694 +train sw3696 +train sw3699 +train sw3703 +train sw3707 +train sw3709 +train sw3716 +train sw3720 +train sw3723 +train sw3725 +train sw3727 +train sw3728 +train sw3734 +train sw3735 +train sw3736 +train sw3738 +train sw3743 +train sw3745 +train sw3746 +train sw3747 +train sw3750 +train sw3751 +train sw3754 +train sw3760 +train sw3763 +train sw3764 +train sw3768 +train sw3770 +train sw3773 +train sw3774 +train sw3776 +train sw3777 +train sw3781 +train sw3784 +train sw3788 +train sw3791 +train sw3796 +train sw3798 +train sw3801 +train sw3802 +train sw3803 +train sw3804 +train sw3805 +train sw3809 +train sw3813 +train sw3815 +train sw3821 +train sw3825 +train sw3828 +train sw3830 +train sw3838 +train sw3841 +train sw3845 +train sw3847 +train sw3850 +train sw3852 +train sw3855 +train sw3862 +train sw3870 +train sw3876 +train sw3883 +train sw3887 +train sw3898 +train 
sw3902 +train sw3903 +train sw3908 +train sw3911 +train sw3917 +train sw3925 +train sw3926 +train sw3946 +train sw3952 +train sw3956 +train sw3962 +train sw3965 +train sw3971 +train sw3979 +train sw3983 +train sw3985 +train sw3988 +train sw3993 +train sw4008 +train sw4013 +train sw4019 +train sw4022 +train sw4023 +train sw4028 +train sw4032 +train sw4033 +train sw4036 +train sw4038 +train sw4049 +train sw4050 +train sw4051 +train sw4055 +train sw4056 +train sw4060 +train sw4064 +train sw4071 +train sw4074 +train sw4077 +train sw4078 +train sw4079 +train sw4080 +train sw4082 +train sw4090 +train sw4092 +train sw4096 +train sw4099 +train sw4101 +train sw4103 +train sw4104 +train sw4108 +train sw4109 +train sw4113 +train sw4114 +train sw4123 +train sw4127 +train sw4129 +train sw4130 +train sw4133 +train sw4137 +train sw4138 +train sw4147 +train sw4148 +train sw4149 +train sw4150 +train sw4151 +train sw4152 +train sw4153 +train sw4154 +train sw4155 +train sw4158 +train sw4159 +train sw4165 +train sw4166 +train sw4168 +train sw4171 +train sw4174 +train sw4175 +train sw4177 +train sw4181 +train sw4184 +train sw4311 +train sw4312 +train sw4314 +train sw4316 +train sw4319 +train sw4320 +train sw4325 +train sw4327 +train sw4329 +train sw4330 +train sw4333 +train sw4334 +train sw4336 +train sw4339 +train sw4340 +train sw4341 +train sw4342 +train sw4345 +train sw4346 +train sw4349 +train sw4353 +train sw4358 +train sw4360 +train sw4362 +train sw4363 +train sw4364 +train sw4366 +train sw4370 +train sw4376 +train sw4378 +train sw4379 +train sw4380 +train sw4382 +train sw4443 +train sw4483 +train sw4519 +train sw4548 +train sw4565 +train sw4603 +train sw4605 +train sw4608 +train sw4611 +train sw4615 +train sw4617 +train sw4618 +train sw4619 +train sw4626 +train sw4628 +train sw4630 +train sw4642 +train sw4644 +train sw4646 +train sw4649 +train sw4655 +train sw4659 +train sw4666 +train sw4675 +train sw4679 +train sw4681 +train sw4682 +train sw4688 +train sw4691 +train sw4698 +train sw4703 +train sw4709 +train sw4720 +train sw4721 +train sw4723 +train sw4725 +train sw4726 +train sw4728 +train sw4733 +train sw4735 +train sw4745 +train sw4752 +train sw4758 +train sw4759 +train sw4765 +train sw4770 +train sw4774 +train sw4784 +train sw4785 +train sw4788 +train sw4792 +train sw4796 +train sw4799 +train sw4801 +train sw4812 +train sw4814 +train sw4821 +train sw4822 +train sw4826 +train sw4829 +train sw4830 +train sw4831 +train sw4834 +train sw4840 +train sw4856 +train sw4858 +train sw4859 +train sw4868 +train sw4876 +train sw4877 +train sw4880 +train sw4886 +train sw4902 +train sw4905 +train sw4908 +train sw4927 +train sw4928 +train sw4936 +train sw4940 +dev sw2053 +dev sw2067 +dev sw2071 +dev sw2072 +dev sw2160 +dev sw2163 +dev sw2175 +dev sw2253 +dev sw2289 +dev sw2299 +dev sw2340 +dev sw2373 +dev sw2395 +dev sw2399 +dev sw2455 +dev sw2501 +dev sw2534 +dev sw2558 +dev sw2593 +dev sw2594 +dev sw2598 +dev sw2620 +dev sw2621 +dev sw2623 +dev sw2630 +dev sw2653 +dev sw2713 +dev sw2755 +dev sw2772 +dev sw2776 +dev sw2790 +dev sw2832 +dev sw2839 +dev sw2842 +dev sw2854 +dev sw2874 +dev sw2888 +dev sw2889 +dev sw2944 +dev sw2959 +dev sw2981 +dev sw2989 +dev sw3015 +dev sw3046 +dev sw3072 +dev sw3096 +dev sw3148 +dev sw3156 +dev sw3181 +dev sw3184 +dev sw3190 +dev sw3191 +dev sw3202 +dev sw3207 +dev sw3239 +dev sw3246 +dev sw3250 +dev sw3251 +dev sw3255 +dev sw3257 +dev sw3281 +dev sw3288 +dev sw3290 +dev sw3291 +dev sw3334 +dev sw3346 +dev sw3352 +dev sw3354 +dev sw3382 +dev sw3433 +dev sw3445 +dev sw3491 +dev 
sw3497 +dev sw3500 +dev sw3506 +dev sw3509 +dev sw3554 +dev sw3576 +dev sw3584 +dev sw3587 +dev sw3658 +dev sw3659 +dev sw3666 +dev sw3675 +dev sw3686 +dev sw3697 +dev sw3711 +dev sw3769 +dev sw3797 +dev sw3810 +dev sw3811 +dev sw3921 +dev sw4004 +dev sw4026 +dev sw4037 +dev sw4048 +dev sw4072 +dev sw4318 +dev sw4321 +dev sw4347 +dev sw4356 +dev sw4372 +dev sw4572 +dev sw4633 +dev sw4660 +dev sw4697 +dev sw4707 +dev sw4716 +dev sw4736 +dev sw4802 +dev sw4890 +dev sw4917 +test sw2121 +test sw2131 +test sw2151 +test sw2229 +test sw2335 +test sw2434 +test sw2441 +test sw2461 +test sw2503 +test sw2632 +test sw2724 +test sw2752 +test sw2753 +test sw2836 +test sw2838 +test sw3528 +test sw3756 +test sw3942 +test sw3994 diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/run_build_data.sh b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/run_build_data.sh new file mode 100755 index 00000000..7a08bf98 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/scripts/run_build_data.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +TASK_DATA=$1 +typeset -l TASK_DATA + +if [ "${TASK_DATA}" = "udc" ] +then + exit 0 +elif [ "${TASK_DATA}" = "swda" ] +then + python build_swda_dataset.py +elif [ "${TASK_DATA}" = "mrda" ] +then + python build_mrda_dataset.py +elif [[ "${TASK_DATA}" =~ "atis" ]] +then + python build_atis_dataset.py + cat ../data/atis/atis_slot/test.txt > ../data/atis/atis_slot/dev.txt + cat ../data/atis/atis_intent/test.txt > ../data/atis/atis_intent/dev.txt +elif [ "${TASK_DATA}" = "dstc2" ] +then + python build_dstc2_dataset.py +else + echo "task ${TASK_DATA} is not supported, please choose one of [udc|swda|mrda|atis|dstc2|multi-woz]" +fi diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/tokenization.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/tokenization.py new file mode 100644 index 00000000..f906b537 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/tokenization.py @@ -0,0 +1,370 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+"""Tokenization classes.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import collections +import unicodedata +import six + + +def convert_to_unicode(text): + """Converts `text` to Unicode (if it's not already), assuming utf-8 input.""" + if six.PY3: + if isinstance(text, str): + return text + elif isinstance(text, bytes): + return text.decode("utf-8", "ignore") + else: + raise ValueError("Unsupported string type: %s" % (type(text))) + elif six.PY2: + if isinstance(text, str): + return text.decode("utf-8", "ignore") + elif isinstance(text, unicode): + return text + else: + raise ValueError("Unsupported string type: %s" % (type(text))) + else: + raise ValueError("Not running on Python2 or Python 3?") + + +def printable_text(text): + """Returns text encoded in a way suitable for print or `tf.logging`.""" + + # These functions want `str` for both Python2 and Python3, but in one case + # it's a Unicode string and in the other it's a byte string. + if six.PY3: + if isinstance(text, str): + return text + elif isinstance(text, bytes): + return text.decode("utf-8", "ignore") + else: + raise ValueError("Unsupported string type: %s" % (type(text))) + elif six.PY2: + if isinstance(text, str): + return text + elif isinstance(text, unicode): + return text.encode("utf-8") + else: + raise ValueError("Unsupported string type: %s" % (type(text))) + else: + raise ValueError("Not running on Python2 or Python 3?") + + +def load_vocab(vocab_file): + """Loads a vocabulary file into a dictionary.""" + vocab = collections.OrderedDict() + fin = open(vocab_file) + for num, line in enumerate(fin): + items = convert_to_unicode(line.strip()).split("\t") + if len(items) > 2: + break + token = items[0] + index = items[1] if len(items) == 2 else num + token = token.strip() + vocab[token] = int(index) + return vocab + + +def convert_by_vocab(vocab, items): + """Converts a sequence of [tokens|ids] using the vocab.""" + output = [] + for item in items: + output.append(vocab[item]) + return output + + +def convert_tokens_to_ids(vocab, tokens): + return convert_by_vocab(vocab, tokens) + + +def convert_ids_to_tokens(inv_vocab, ids): + return convert_by_vocab(inv_vocab, ids) + + +def whitespace_tokenize(text): + """Runs basic whitespace cleaning and splitting on a peice of text.""" + text = text.strip() + if not text: + return [] + tokens = text.split() + return tokens + + +class FullTokenizer(object): + """Runs end-to-end tokenziation.""" + + def __init__(self, vocab_file, do_lower_case=True): + self.vocab = load_vocab(vocab_file) + self.inv_vocab = {v: k for k, v in self.vocab.items()} + self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case) + self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab) + + def tokenize(self, text): + split_tokens = [] + for token in self.basic_tokenizer.tokenize(text): + for sub_token in self.wordpiece_tokenizer.tokenize(token): + split_tokens.append(sub_token) + + return split_tokens + + def convert_tokens_to_ids(self, tokens): + return convert_by_vocab(self.vocab, tokens) + + def convert_ids_to_tokens(self, ids): + return convert_by_vocab(self.inv_vocab, ids) + + +class CharTokenizer(object): + """Runs end-to-end tokenziation.""" + + def __init__(self, vocab_file, do_lower_case=True): + self.vocab = load_vocab(vocab_file) + self.inv_vocab = {v: k for k, v in self.vocab.items()} + self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab) + + def tokenize(self, text): + split_tokens = [] + for 
token in text.lower().split(" "): + for sub_token in self.wordpiece_tokenizer.tokenize(token): + split_tokens.append(sub_token) + + return split_tokens + + def convert_tokens_to_ids(self, tokens): + return convert_by_vocab(self.vocab, tokens) + + def convert_ids_to_tokens(self, ids): + return convert_by_vocab(self.inv_vocab, ids) + + +class BasicTokenizer(object): + """Runs basic tokenization (punctuation splitting, lower casing, etc.).""" + + def __init__(self, do_lower_case=True): + """Constructs a BasicTokenizer. + + Args: + do_lower_case: Whether to lower case the input. + """ + self.do_lower_case = do_lower_case + + def tokenize(self, text): + """Tokenizes a piece of text.""" + text = convert_to_unicode(text) + text = self._clean_text(text) + + # This was added on November 1st, 2018 for the multilingual and Chinese + # models. This is also applied to the English models now, but it doesn't + # matter since the English models were not trained on any Chinese data + # and generally don't have any Chinese data in them (there are Chinese + # characters in the vocabulary because Wikipedia does have some Chinese + # words in the English Wikipedia.). + text = self._tokenize_chinese_chars(text) + + orig_tokens = whitespace_tokenize(text) + split_tokens = [] + for token in orig_tokens: + if self.do_lower_case: + token = token.lower() + token = self._run_strip_accents(token) + split_tokens.extend(self._run_split_on_punc(token)) + + output_tokens = whitespace_tokenize(" ".join(split_tokens)) + return output_tokens + + def _run_strip_accents(self, text): + """Strips accents from a piece of text.""" + text = unicodedata.normalize("NFD", text) + output = [] + for char in text: + cat = unicodedata.category(char) + if cat == "Mn": + continue + output.append(char) + return "".join(output) + + def _run_split_on_punc(self, text): + """Splits punctuation on a piece of text.""" + chars = list(text) + i = 0 + start_new_word = True + output = [] + while i < len(chars): + char = chars[i] + if _is_punctuation(char): + output.append([char]) + start_new_word = True + else: + if start_new_word: + output.append([]) + start_new_word = False + output[-1].append(char) + i += 1 + + return ["".join(x) for x in output] + + def _tokenize_chinese_chars(self, text): + """Adds whitespace around any CJK character.""" + output = [] + for char in text: + cp = ord(char) + if self._is_chinese_char(cp): + output.append(" ") + output.append(char) + output.append(" ") + else: + output.append(char) + return "".join(output) + + def _is_chinese_char(self, cp): + """Checks whether CP is the codepoint of a CJK character.""" + # This defines a "chinese character" as anything in the CJK Unicode block: + # https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block) + # + # Note that the CJK Unicode block is NOT all Japanese and Korean characters, + # despite its name. The modern Korean Hangul alphabet is a different block, + # as is Japanese Hiragana and Katakana. Those alphabets are used to write + # space-separated words, so they are not treated specially and handled + # like all of the other languages.
+ if ((cp >= 0x4E00 and cp <= 0x9FFF) or  # + (cp >= 0x3400 and cp <= 0x4DBF) or  # + (cp >= 0x20000 and cp <= 0x2A6DF) or  # + (cp >= 0x2A700 and cp <= 0x2B73F) or  # + (cp >= 0x2B740 and cp <= 0x2B81F) or  # + (cp >= 0x2B820 and cp <= 0x2CEAF) or + (cp >= 0xF900 and cp <= 0xFAFF) or  # + (cp >= 0x2F800 and cp <= 0x2FA1F)):  # + return True + + return False + + def _clean_text(self, text): + """Performs invalid character removal and whitespace cleanup on text.""" + output = [] + for char in text: + cp = ord(char) + if cp == 0 or cp == 0xfffd or _is_control(char): + continue + if _is_whitespace(char): + output.append(" ") + else: + output.append(char) + return "".join(output) + + +class WordpieceTokenizer(object): + """Runs WordPiece tokenization.""" + + def __init__(self, vocab, unk_token="[UNK]", max_input_chars_per_word=100): + self.vocab = vocab + self.unk_token = unk_token + self.max_input_chars_per_word = max_input_chars_per_word + + def tokenize(self, text): + """Tokenizes a piece of text into its word pieces. + + This uses a greedy longest-match-first algorithm to perform tokenization + using the given vocabulary. + + For example: + input = "unaffable" + output = ["un", "##aff", "##able"] + + Args: + text: A single token or whitespace separated tokens. This should have + already been passed through `BasicTokenizer`. + + Returns: + A list of wordpiece tokens. + """ + + text = convert_to_unicode(text) + + output_tokens = [] + for token in whitespace_tokenize(text): + chars = list(token) + if len(chars) > self.max_input_chars_per_word: + output_tokens.append(self.unk_token) + continue + + is_bad = False + start = 0 + sub_tokens = [] + while start < len(chars): + end = len(chars) + cur_substr = None + while start < end: + substr = "".join(chars[start:end]) + if start > 0: + substr = "##" + substr + if substr in self.vocab: + cur_substr = substr + break + end -= 1 + if cur_substr is None: + is_bad = True + break + sub_tokens.append(cur_substr) + start = end + + if is_bad: + output_tokens.append(self.unk_token) + else: + output_tokens.extend(sub_tokens) + return output_tokens + + +def _is_whitespace(char): + """Checks whether `char` is a whitespace character.""" + # \t, \n, and \r are technically control characters but we treat them + # as whitespace since they are generally considered as such. + if char == " " or char == "\t" or char == "\n" or char == "\r": + return True + cat = unicodedata.category(char) + if cat == "Zs": + return True + return False + + +def _is_control(char): + """Checks whether `char` is a control character.""" + # These are technically control characters but we count them as whitespace + # characters. + if char == "\t" or char == "\n" or char == "\r": + return False + cat = unicodedata.category(char) + if cat.startswith("C"): + return True + return False + + +def _is_punctuation(char): + """Checks whether `char` is a punctuation character.""" + cp = ord(char) + # We treat all non-letter/number ASCII as punctuation. + # Characters such as "^", "$", and "`" are not in the Unicode + # Punctuation class but we treat them as punctuation anyway, for + # consistency.
+ if ((cp >= 33 and cp <= 47) or (cp >= 58 and cp <= 64) or + (cp >= 91 and cp <= 96) or (cp >= 123 and cp <= 126)): + return True + cat = unicodedata.category(char) + if cat.startswith("P"): + return True + return False diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/train.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/train.py new file mode 100644 index 00000000..6d2e3295 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/train.py @@ -0,0 +1,381 @@ +# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Finetuning on dialogue tasks.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import sys +import time +import numpy as np +import multiprocessing + +import paddle +import paddle.fluid as fluid + +from finetune_args import parser +import reader.data_reader as reader +from optimization import optimization +from utils.args import print_arguments +from utils.init import init_checkpoint, init_pretraining_params + +_WORK_DIR = os.path.split(os.path.realpath(__file__))[0] +sys.path.append('../../models/dialogue_model_toolkit/dialogue_general_understanding') + +from bert import BertConfig, BertModel +from create_model import create_model +import define_paradigm + + +def evaluate(test_exe, test_program, test_pyreader, fetch_list, eval_phase): + """evaluate validation or test data""" + test_pyreader.start() + total_cost, total_acc, total_num_seqs = [], [], [] + time_begin = time.time() + while True: + try: + if len(fetch_list) > 2: + np_loss, np_acc, np_num_seqs = test_exe.run(fetch_list=fetch_list) + total_acc.extend(np_acc * np_num_seqs) + else: + np_loss, np_num_seqs = test_exe.run(fetch_list=fetch_list) + total_cost.extend(np_loss * np_num_seqs) + total_num_seqs.extend(np_num_seqs) + except fluid.core.EOFException: + test_pyreader.reset() + break + time_end = time.time() + current_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())) + if len(fetch_list) > 2: + print("[%s evaluation] %s ave loss: %f, ave acc: %f, elapsed time: %f s" % + (eval_phase, current_time, np.sum(total_cost) / np.sum(total_num_seqs), + np.sum(total_acc) / np.sum(total_num_seqs), time_end - time_begin)) + else: + print("[%s evaluation] %s ave loss: %f, elapsed time: %f s" % + (eval_phase, current_time, np.sum(total_cost) / np.sum(total_num_seqs), + time_end - time_begin)) + + +def main(args): + """main function""" + bert_config = BertConfig(args.bert_config_path) + bert_config.print_config() + + if args.use_cuda: + place = fluid.CUDAPlace(int(os.getenv('FLAGS_selected_gpus', '0'))) + dev_count = fluid.core.get_cuda_device_count() + else: + place = fluid.CPUPlace() + dev_count = int(os.environ.get('CPU_NUM', multiprocessing.cpu_count())) + exe = fluid.Executor(place) + + task_name = args.task_name.lower() + paradigm_inst = define_paradigm.Paradigm(task_name) + + processors = { + 'udc': 
reader.UDCProcessor, + 'swda': reader.SWDAProcessor, + 'mrda': reader.MRDAProcessor, + 'atis_slot': reader.ATISSlotProcessor, + 'atis_intent': reader.ATISIntentProcessor, + 'dstc2': reader.DSTC2Processor, + } + in_tokens = { + 'udc': True, + 'swda': True, + 'mrda': True, + 'atis_slot': False, + 'atis_intent': True, + 'dstc2': True, + } + + processor = processors[task_name](data_dir=args.data_dir, + vocab_path=args.vocab_path, + max_seq_len=args.max_seq_len, + do_lower_case=args.do_lower_case, + in_tokens=in_tokens[task_name], + task_name=task_name, + random_seed=args.random_seed) + + num_labels = len(processor.get_labels()) + + if not (args.do_train or args.do_val or args.do_test): + raise ValueError("For args `do_train`, `do_val` and `do_test`, at " + "least one of them must be True.") + + startup_prog = fluid.Program() + if args.random_seed is not None: + startup_prog.random_seed = args.random_seed + + if args.do_train: + train_data_generator = processor.data_generator( + batch_size=args.batch_size, + phase='train', + epoch=args.epoch, + shuffle=True) + num_train_examples = processor.get_num_examples(phase='train') + + if in_tokens[task_name]: + max_train_steps = args.epoch * num_train_examples // ( + args.batch_size // args.max_seq_len) // dev_count + else: + max_train_steps = args.epoch * num_train_examples // args.batch_size // dev_count + + warmup_steps = int(max_train_steps * args.warmup_proportion) + print("Device count: %d" % dev_count) + print("Num train examples: %d" % num_train_examples) + print("Max train steps: %d" % max_train_steps) + print("Num warmup steps: %d" % warmup_steps) + + train_program = fluid.Program() + with fluid.program_guard(train_program, startup_prog): + with fluid.unique_name.guard(): + results = create_model( + args, + pyreader_name='train_reader', + bert_config=bert_config, + num_labels=num_labels, + paradigm_inst=paradigm_inst) + train_pyreader = results.get("pyreader", None) + loss = results.get("loss", None) + probs = results.get("probs", None) + accuracy = results.get("accuracy", None) + num_seqs = results.get("num_seqs", None) + scheduled_lr = optimization( + loss=loss, + warmup_steps=warmup_steps, + num_train_steps=max_train_steps, + learning_rate=args.learning_rate, + train_program=train_program, + startup_prog=startup_prog, + weight_decay=args.weight_decay, + scheduler=args.lr_scheduler, + use_fp16=args.use_fp16, + loss_scaling=args.loss_scaling) + + if accuracy is not None: + skip_opt_set = [loss.name, probs.name, accuracy.name, num_seqs.name] + else: + skip_opt_set = [loss.name, probs.name, num_seqs.name] + fluid.memory_optimize( + input_program=train_program, + skip_opt_set=skip_opt_set) + + if args.verbose: + if in_tokens[task_name]: + lower_mem, upper_mem, unit = fluid.contrib.memory_usage( + program=train_program, + batch_size=args.batch_size // args.max_seq_len) + else: + lower_mem, upper_mem, unit = fluid.contrib.memory_usage( + program=train_program, batch_size=args.batch_size) + print("Theoretical memory usage in training: %.3f - %.3f %s" % + (lower_mem, upper_mem, unit)) + + if args.do_val or args.do_test: + test_prog = fluid.Program() + with fluid.program_guard(test_prog, startup_prog): + with fluid.unique_name.guard(): + test_results = create_model( + args, + pyreader_name='test_reader', + bert_config=bert_config, + num_labels=num_labels, + paradigm_inst=paradigm_inst) + test_pyreader = test_results.get("pyreader", None) + loss = test_results.get("loss", None) + probs = test_results.get("probs", None) + accuracy = 
test_results.get("accuracy", None) + num_seqs = test_results.get("num_seqs", None) + test_prog = test_prog.clone(for_test=True) + + exe.run(startup_prog) + + if args.do_train: + if args.init_checkpoint and args.init_pretraining_params: + print( + "WARNING: args 'init_checkpoint' and 'init_pretraining_params' " + "are both set! Only arg 'init_checkpoint' will take effect.") + if args.init_checkpoint: + init_checkpoint( + exe, + args.init_checkpoint, + main_program=startup_prog, + use_fp16=args.use_fp16) + elif args.init_pretraining_params: + init_pretraining_params( + exe, + args.init_pretraining_params, + main_program=startup_prog, + use_fp16=args.use_fp16) + elif args.do_val or args.do_test: + if not args.init_checkpoint: + raise ValueError("args 'init_checkpoint' should be set if " + "only doing validation or testing!") + init_checkpoint( + exe, + args.init_checkpoint, + main_program=startup_prog, + use_fp16=args.use_fp16) + + if args.do_train: + exec_strategy = fluid.ExecutionStrategy() + exec_strategy.use_experimental_executor = args.use_fast_executor + exec_strategy.num_threads = dev_count + exec_strategy.num_iteration_per_drop_scope = args.num_iteration_per_drop_scope + + train_exe = fluid.ParallelExecutor( + use_cuda=args.use_cuda, + loss_name=loss.name, + exec_strategy=exec_strategy, + main_program=train_program) + train_pyreader.decorate_tensor_provider(train_data_generator) + else: + train_exe = None + + if args.do_val or args.do_test: + test_exe = fluid.ParallelExecutor( + use_cuda=args.use_cuda, + main_program=test_prog, + share_vars_from=train_exe) + + if args.do_train: + train_pyreader.start() + steps = 0 + total_cost, total_acc, total_num_seqs = [], [], [] + time_begin = time.time() + while True: + try: + steps += 1 + if steps % args.skip_steps == 0: + if warmup_steps <= 0: + if accuracy is not None: + fetch_list = [loss.name, accuracy.name, num_seqs.name] + else: + fetch_list = [loss.name, num_seqs.name] + else: + if accuracy is not None: + fetch_list = [ + loss.name, accuracy.name, scheduled_lr.name, + num_seqs.name + ] + else: + fetch_list = [loss.name, scheduled_lr.name, num_seqs.name] + else: + fetch_list = [] + if accuracy is not None: + fetch_test_list = [loss.name, accuracy.name, num_seqs.name] + else: + fetch_test_list = [loss.name, num_seqs.name] + + outputs = train_exe.run(fetch_list=fetch_list) + + if steps % args.skip_steps == 0: + if warmup_steps <= 0: + if accuracy is not None: + np_loss, np_acc, np_num_seqs = outputs + else: + np_loss, np_num_seqs = outputs + else: + if accuracy is not None: + np_loss, np_acc, np_lr, np_num_seqs = outputs + else: + np_loss, np_lr, np_num_seqs = outputs + + total_cost.extend(np_loss * np_num_seqs) + total_num_seqs.extend(np_num_seqs) + if accuracy is not None: + total_acc.extend(np_acc * np_num_seqs) + + if args.verbose: + verbose = "train pyreader queue size: %d, " % train_pyreader.queue.size() + verbose += "learning rate: %f" % ( + np_lr[0] + if warmup_steps > 0 else args.learning_rate) + print(verbose) + + current_example, current_epoch = processor.get_train_progress() + time_end = time.time() + used_time = time_end - time_begin + current_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())) + if accuracy is not None: + print("%s epoch: %d, progress: %d/%d, step: %d, ave loss: %f, " + "ave acc: %f, speed: %f steps/s" % + (current_time, current_epoch, current_example, num_train_examples, + steps, np.sum(total_cost) / np.sum(total_num_seqs), + np.sum(total_acc) / np.sum(total_num_seqs), + args.skip_steps /
used_time)) + else: + print("%s epoch: %d, progress: %d/%d, step: %d, ave loss: %f, " + "speed: %f steps/s" % + (current_time, current_epoch, current_example, num_train_examples, + steps, np.sum(total_cost) / np.sum(total_num_seqs), + args.skip_steps / used_time)) + total_cost, total_acc, total_num_seqs = [], [], [] + time_begin = time.time() + + if steps % args.save_steps == 0: + save_path = os.path.join(args.checkpoints, "step_" + str(steps)) + fluid.io.save_persistables(exe, save_path, train_program) + if steps % args.validation_steps == 0: + #evaluate dev set + if args.do_val: + test_pyreader.decorate_tensor_provider( + processor.data_generator( + batch_size=args.batch_size, + phase='dev', + epoch=1, + shuffle=False)) + evaluate(test_exe, test_prog, test_pyreader, fetch_test_list, "dev") + #evaluate test set + if args.do_test: + test_pyreader.decorate_tensor_provider( + processor.data_generator( + batch_size=args.batch_size, + phase='test', + epoch=1, + shuffle=False)) + evaluate(test_exe, test_prog, test_pyreader, fetch_test_list, "test") + except fluid.core.EOFException: + save_path = os.path.join(args.checkpoints, "step_" + str(steps)) + fluid.io.save_persistables(exe, save_path, train_program) + train_pyreader.reset() + break + #final eval on dev set + if args.do_val: + test_pyreader.decorate_tensor_provider( + processor.data_generator( + batch_size=args.batch_size, phase='dev', epoch=1, + shuffle=False)) + print("Final validation result:") + evaluate(test_exe, test_prog, test_pyreader, fetch_test_list, "dev") + + #final eval on test set + if args.do_test: + test_pyreader.decorate_tensor_provider( + processor.data_generator( + batch_size=args.batch_size, + phase='test', + epoch=1, + shuffle=False)) + print("Final test result:") + evaluate(test_exe, test_prog, test_pyreader, fetch_test_list, "test") + + +if __name__ == '__main__': + args = parser.parse_args() + print_arguments(args) + main(args) diff --git a/PaddleNLP/deep_attention_matching_net/utils/__init__.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/__init__.py similarity index 100% rename from PaddleNLP/deep_attention_matching_net/utils/__init__.py rename to PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/__init__.py diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/args.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/args.py new file mode 100644 index 00000000..b9be634f --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/args.py @@ -0,0 +1,48 @@ +# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Arguments for configuration.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import six +import argparse + + +def str2bool(v): + # because argparse does not support to parse "true, False" as python + # boolean directly + return v.lower() in ("true", "t", "1") + + +class ArgumentGroup(object): + def __init__(self, parser, title, des): + self._group = parser.add_argument_group(title=title, description=des) + + def add_arg(self, name, type, default, help, **kwargs): + type = str2bool if type == bool else type + self._group.add_argument( + "--" + name, + default=default, + type=type, + help=help + ' Default: %(default)s.', + **kwargs) + + +def print_arguments(args): + print('----------- Configuration Arguments -----------') + for arg, value in sorted(six.iteritems(vars(args))): + print('%s: %s' % (arg, value)) + print('------------------------------------------------') diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/fp16.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/fp16.py new file mode 100644 index 00000000..e153c2b9 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/fp16.py @@ -0,0 +1,97 @@ +# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function +import paddle +import paddle.fluid as fluid + + +def cast_fp16_to_fp32(i, o, prog): + prog.global_block().append_op( + type="cast", + inputs={"X": i}, + outputs={"Out": o}, + attrs={ + "in_dtype": fluid.core.VarDesc.VarType.FP16, + "out_dtype": fluid.core.VarDesc.VarType.FP32 + }) + + +def cast_fp32_to_fp16(i, o, prog): + prog.global_block().append_op( + type="cast", + inputs={"X": i}, + outputs={"Out": o}, + attrs={ + "in_dtype": fluid.core.VarDesc.VarType.FP32, + "out_dtype": fluid.core.VarDesc.VarType.FP16 + }) + + +def copy_to_master_param(p, block): + v = block.vars.get(p.name, None) + if v is None: + raise ValueError("no param name %s found!" 
% p.name) + new_p = fluid.framework.Parameter( + block=block, + shape=v.shape, + dtype=fluid.core.VarDesc.VarType.FP32, + type=v.type, + lod_level=v.lod_level, + stop_gradient=p.stop_gradient, + trainable=p.trainable, + optimize_attr=p.optimize_attr, + regularizer=p.regularizer, + gradient_clip_attr=p.gradient_clip_attr, + error_clip=p.error_clip, + name=v.name + ".master") + return new_p + + +def create_master_params_grads(params_grads, main_prog, startup_prog, + loss_scaling): + master_params_grads = [] + tmp_role = main_prog._current_role + OpRole = fluid.core.op_proto_and_checker_maker.OpRole + main_prog._current_role = OpRole.Backward + for p, g in params_grads: + # create master parameters + master_param = copy_to_master_param(p, main_prog.global_block()) + startup_master_param = startup_prog.global_block()._clone_variable( + master_param) + startup_p = startup_prog.global_block().var(p.name) + cast_fp16_to_fp32(startup_p, startup_master_param, startup_prog) + # cast fp16 gradients to fp32 before apply gradients + if g.name.find("layer_norm") > -1: + if loss_scaling > 1: + scaled_g = g / float(loss_scaling) + else: + scaled_g = g + master_params_grads.append([p, scaled_g]) + continue + master_grad = fluid.layers.cast(g, "float32") + if loss_scaling > 1: + master_grad = master_grad / float(loss_scaling) + master_params_grads.append([master_param, master_grad]) + main_prog._current_role = tmp_role + return master_params_grads + + +def master_param_to_train_param(master_params_grads, params_grads, main_prog): + for idx, m_p_g in enumerate(master_params_grads): + train_p, _ = params_grads[idx] + if train_p.name.find("layer_norm") > -1: + continue + with main_prog._optimized_guard([m_p_g[0], m_p_g[1]]): + cast_fp32_to_fp16(m_p_g[0], train_p, main_prog) diff --git a/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/init.py b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/init.py new file mode 100644 index 00000000..3844d012 --- /dev/null +++ b/PaddleNLP/dialogue_model_toolkit/dialogue_general_understanding/utils/init.py @@ -0,0 +1,81 @@ +# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
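+"""Initialization utilities: load model checkpoints or pretraining parameters, with optional float16 casting (descriptive module docstring; the file itself is unchanged)."""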
+ +from __future__ import print_function + +import os +import six +import ast +import copy + +import numpy as np +import paddle.fluid as fluid + + +def cast_fp32_to_fp16(exe, main_program): + print("Cast parameters to float16 data format.") + for param in main_program.global_block().all_parameters(): + if not param.name.endswith(".master"): + param_t = fluid.global_scope().find_var(param.name).get_tensor() + data = np.array(param_t) + if param.name.find("layer_norm") == -1: + param_t.set(np.float16(data).view(np.uint16), exe.place) + master_param_var = fluid.global_scope().find_var(param.name + + ".master") + if master_param_var is not None: + master_param_var.get_tensor().set(data, exe.place) + + +def init_checkpoint(exe, init_checkpoint_path, main_program, use_fp16=False): + assert os.path.exists( + init_checkpoint_path), "[%s] cannot be found." % init_checkpoint_path + + def existed_persistables(var): + if not fluid.io.is_persistable(var): + return False + return os.path.exists(os.path.join(init_checkpoint_path, var.name)) + + fluid.io.load_vars( + exe, + init_checkpoint_path, + main_program=main_program, + predicate=existed_persistables) + print("Load model from {}".format(init_checkpoint_path)) + + if use_fp16: + cast_fp32_to_fp16(exe, main_program) + + +def init_pretraining_params(exe, + pretraining_params_path, + main_program, + use_fp16=False): + assert os.path.exists(pretraining_params_path + ), "[%s] cannot be found." % pretraining_params_path + + def existed_params(var): + if not isinstance(var, fluid.framework.Parameter): + return False + return os.path.exists(os.path.join(pretraining_params_path, var.name)) + + fluid.io.load_vars( + exe, + pretraining_params_path, + main_program=main_program, + predicate=existed_params) + print("Load pretraining parameters from {}.".format( + pretraining_params_path)) + + if use_fp16: + cast_fp32_to_fp16(exe, main_program) diff --git a/PaddleNLP/emotion_detection/README.md b/PaddleNLP/emotion_detection/README.md new file mode 100644 index 00000000..955843f6 --- /dev/null +++ b/PaddleNLP/emotion_detection/README.md @@ -0,0 +1,172 @@ +## Introduction + +Emotion Detection (EmoTect) focuses on identifying user emotions in intelligent conversation scenarios. Given a user utterance, it automatically predicts the emotion category of the text together with a confidence score; the emotion types are positive, negative and neutral. + +Emotion detection applies to many scenarios such as chit-chat and customer service. It helps enterprises monitor dialogue quality and improve the user interaction experience of their products, and it can also analyze the quality of customer service and reduce the cost of manual quality inspection. An online demo is available on the [Baidu AI Open Platform - Emotion Detection](http://ai.baidu.com/tech/nlp_apply/emotion_detection). + +For evaluation, we report results on Baidu's in-house test sets (covering chit-chat and customer service) and on the NLPCC2014 Weibo emotion dataset, as shown in the table below. We also release a model trained by Baidu on massive data; after fine-tuning on conversational corpora, it can achieve even better results. + +| Model | Chit-chat | Customer service | Weibo | +| :------| :------ | :------ | :------ | +| BOW | 90.2% | 87.6% | 74.2% | +| LSTM | 91.4% | 90.1% | 73.8% | +| Bi-LSTM | 91.2% | 89.9% | 73.6% | +| CNN | 90.8% | 90.7% | 76.3% | +| TextCNN | 91.1% | 91.0% | 76.8% | +| BERT | 93.6% | 92.3% | 78.6% | +| ERNIE | 94.4% | 94.0% | 80.6% | + + +## Quick start + +This project depends on Python 2.7 and PaddlePaddle Fluid 1.3.2; please refer to the [installation guide](http://www.paddlepaddle.org/#quick-start) to set them up. + +#### Get the code + +Clone the repository: +```shell +git clone https://github.com/PaddlePaddle/models.git +cd models/PaddleNLP/emotion_detection +``` + +#### Data preparation + +Download the preprocessed data. Running the script creates a data directory containing the training set (train.tsv), the development set (dev.tsv), the test set (test.tsv), the texts to predict (infer.tsv) and the corresponding vocabulary (vocab.txt): +```shell +sh download_data.sh +```
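+ +After the script finishes, the data directory is expected to look roughly as follows (a sketch based on the file list above; the downloaded archive may contain additional files): +```text +data/ +├── train.tsv # training set +├── dev.tsv # development set +├── test.tsv # test set +├── infer.tsv # texts to predict +└── vocab.txt # vocabulary +```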
+ +#### Model download + +We release emotion detection models trained on massive data (based on TextCNN, ERNIE and other networks) that can be used directly; two download options are provided. + +**Option 1**: via the PaddleHub command line tool (see the PaddleHub [installation instructions](https://github.com/PaddlePaddle/PaddleHub)) +```shell +mkdir models && cd models +hub download emotion_detection_textcnn --output_path ./ +hub download emotion_detection_ernie_finetune --output_path ./ +tar xvf emotion_detection_textcnn-1.0.0.tar.gz +tar xvf emotion_detection_ernie_finetune-1.0.0.tar.gz +``` + +**Option 2**: via the download script +```shell +sh download_model.sh +``` + +#### Model evaluation + +With the pretrained models and data in place, run the commands below to evaluate the pretrained models on the test set (test.tsv): +```shell +# TextCNN model +sh run.sh eval +# ERNIE model +sh run_ernie.sh eval +``` + +#### Model training + +With the sample dataset, run the commands below to train a model on the training set (train.tsv) and validate it on the development set (dev.tsv): +```shell +# TextCNN model +sh run.sh train +# ERNIE model +sh run_ernie.sh train +``` +After training, you can modify the init_checkpoint argument in ```run.sh``` and ```run_ernie.sh``` to select the checkpoint of the best step for evaluation and prediction. + +#### Model prediction + +With a trained model, you can predict on the unlabeled dataset (infer.tsv) and obtain the predicted label and the probability of each label: +```shell +# TextCNN model +sh run.sh infer +# ERNIE model +sh run_ernie.sh infer +``` + +## Advanced usage + +#### Task definition + +The input of emotion detection is a piece of user text and the output is the detected emotion category: negative, positive or neutral. This is a classic three-class short-text classification task. + +#### Models + +For this task, the project open-sources a series of classification models that can be selected via configuration: + ++ BOW: bag-of-words, a non-sequential model with a simple fully connected structure; ++ CNN: a shallow CNN that handles variable-length sequence input and extracts features within a local region; ++ TextCNN: a CNN with multiple convolution kernels that better captures local correlations within a sentence; ++ LSTM: a single-layer LSTM that handles long-distance dependencies in sequential text well; ++ BI-LSTM: a single-layer bidirectional LSTM that better captures the semantic features of a sentence; ++ ERNIE: Baidu's general-purpose semantic representation model trained on massive data and prior knowledge, fine-tuned on the emotion classification dataset. + +#### Data format + +Sample data for training, prediction and evaluation is shown below. Each line consists of two tab-separated ('\t') columns: the first is the emotion label (0 for negative, 1 for neutral, 2 for positive) and the second is the Chinese text tokenized with spaces; files are UTF-8 encoded. + +```text +label text_a +0 谁 骂人 了 ? 我 从来 不 骂人 , 我 骂 的 都 不是 人 , 你 是 人 吗 ? +1 我 有事 等会儿 就 回来 和 你 聊 +2 我 见到 你 很高兴 谢谢 你 帮 我 +``` +Note: the project also provides a tokenization preprocessing script (in the preprocess directory) that can be used as follows: +```shell +python tokenizer.py --test_data_dir ./test.txt.utf8 --batch_size 1 > test.txt.utf8.seg +``` + +#### Code structure + +```text +. +├── config.json # model configuration file +├── config.py # model configuration for this project, including the concrete model type and its hyperparameters +├── reader.py # data reading and vocabulary loading +├── run_classifier.py # main entry of the project, covering training, prediction and evaluation +├── run_ernie_classifier.py # main entry for the ERNIE-based models +├── run_ernie.sh # training/prediction/evaluation script for ERNIE +├── run.sh # training/prediction/evaluation script +├── utils.py # common helper functions +``` + +#### How to build your own model + +You can build a custom model to fit your own needs as follows (a sketch of such a network is given after this list): + +1. Define your own network structure. Add a new function to ```models/classification/nets.py```; suppose the user-defined function is named ```user_net``` +2. Update the model configuration. In ```config.json```, change ```model_type``` to the user-defined ```user_net``` +3. Train the model. To run training, evaluation and prediction, adjust the model, data and vocabulary paths in ```run.sh``` and ```run_ernie.sh```
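+ +The snippet below is a minimal sketch of such a ```user_net``` (an illustration, not part of the released code). Its signature and return values mirror how ```run_classifier.py``` calls the built-in networks: the function receives the token ids and labels and returns ```(avg_cost, prediction)```, or only ```prediction``` when ```is_infer=True```; the layer sizes here are arbitrary assumptions. + +```python +import paddle.fluid as fluid + +def user_net(data, label, dict_dim, emb_dim=128, hid_dim=96, class_dim=3, is_infer=False): +    # look up word embeddings for the input token ids +    emb = fluid.layers.embedding(input=data, size=[dict_dim, emb_dim]) +    # pool the variable-length sequence into a single fixed-size vector +    pooled = fluid.layers.sequence_pool(input=emb, pool_type='average') +    fc = fluid.layers.fc(input=pooled, size=hid_dim, act='tanh') +    prediction = fluid.layers.fc(input=fc, size=class_dim, act='softmax') +    if is_infer: +        return prediction +    cost = fluid.layers.cross_entropy(input=prediction, label=label) +    avg_cost = fluid.layers.mean(x=cost) +    return avg_cost, prediction +```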
+ +#### How to fine-tune the released models + +You can fine-tune the released emotion detection models on your own data to obtain further gains; the concrete steps are as follows. + +To fine-tune the released TextCNN model, the ```run.sh``` and ```config.json``` files need to be modified. + +Changes to ```run.sh```: +```shell +# in the train() function, add the --init_checkpoint option and update --vocab_path +--init_checkpoint ./models/textcnn +--vocab_path ./data/vocab.txt +``` + +Changes to ```config.json```: +```shell +# vocab_size is the vocabulary size, matching ./data/vocab.txt above +"vocab_size": 240465 +``` + +To fine-tune the released ERNIE model, update the ```run_ernie.sh``` script as follows: +```shell +# in the train() function, update the --init_checkpoint option +--init_checkpoint ./models/ernie_finetune/params +``` + +## How to contribute + +If you can fix an issue or add a new feature, feel free to submit a PR. If the PR is accepted, we will score the contribution by quality and difficulty (0-5, the higher the better). Once you accumulate 10 points, you can contact us for an interview opportunity or a recommendation letter. + diff --git a/PaddleNLP/emotion_detection/config.json b/PaddleNLP/emotion_detection/config.json new file mode 100644 index 00000000..70657397 --- /dev/null +++ b/PaddleNLP/emotion_detection/config.json @@ -0,0 +1,4 @@ +{ + "model_type": "textcnn_net", + "vocab_size": 240465 +} diff --git a/PaddleNLP/emotion_detection/config.py b/PaddleNLP/emotion_detection/config.py new file mode 100644 index 00000000..1d8f4156 --- /dev/null +++ b/PaddleNLP/emotion_detection/config.py @@ -0,0 +1,37 @@ +""" +EmoTect config +""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import six +import json + +class EmoTectConfig(object): + """ + EmoTect Config + """ + def __init__(self, config_path): + self._config_dict = self._parse(config_path) + + def _parse(self, config_path): + try: + with open(config_path) as json_file: + config_dict = json.load(json_file) + except Exception: + raise IOError("Error in parsing emotect model config file '%s'" % config_path) + else: + return config_dict + + def __getitem__(self, key): + return self._config_dict[key] + + def print_config(self): + """ + Print Config + """ + for arg, value in sorted(six.iteritems(self._config_dict)): + print('%s: %s' % (arg, value)) + print('------------------------------------------------') diff --git a/PaddleNLP/emotion_detection/download_data.sh b/PaddleNLP/emotion_detection/download_data.sh new file mode 100644 index 00000000..e699d426 --- /dev/null +++ b/PaddleNLP/emotion_detection/download_data.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +# download dataset file to ./data/ +DATA_URL=https://baidu-nlp.bj.bcebos.com/emotion_detection-dataset-1.0.0.tar.gz +wget --no-check-certificate ${DATA_URL} + +tar xvf emotion_detection-dataset-1.0.0.tar.gz +/bin/rm emotion_detection-dataset-1.0.0.tar.gz diff --git a/PaddleNLP/emotion_detection/download_model.sh b/PaddleNLP/emotion_detection/download_model.sh new file mode 100644 index 00000000..2451aff6 --- /dev/null +++ b/PaddleNLP/emotion_detection/download_model.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +mkdir -p models +cd models + +# download pretrain model file to ./models/ +MODEL_CNN=https://baidu-nlp.bj.bcebos.com/emotion_detection_textcnn-1.0.0.tar.gz +MODEL_ERNIE=https://baidu-nlp.bj.bcebos.com/emotion_detection_ernie_finetune-1.0.0.tar.gz +wget --no-check-certificate ${MODEL_CNN} +wget --no-check-certificate ${MODEL_ERNIE} + +tar xvf emotion_detection_textcnn-1.0.0.tar.gz +tar xvf emotion_detection_ernie_finetune-1.0.0.tar.gz + +/bin/rm emotion_detection_textcnn-1.0.0.tar.gz +/bin/rm emotion_detection_ernie_finetune-1.0.0.tar.gz diff --git a/PaddleNLP/emotion_detection/reader.py b/PaddleNLP/emotion_detection/reader.py new file mode 100644 index
00000000..75a1be57 --- /dev/null +++ b/PaddleNLP/emotion_detection/reader.py @@ -0,0 +1,90 @@ +""" +EmoTect Reader, data converters for classification data. +""" +import os + +import paddle +import paddle.fluid as fluid +import numpy as np + +from utils import load_vocab +from utils import data_reader + +class EmoTectProcessor(object): + """ + Processor class for data converters for EmoTect. + """ + def __init__(self, + data_dir, + vocab_path, + random_seed=None): + self.data_dir = data_dir + self.vocab = load_vocab(vocab_path) + self.num_examples = {"train": -1, "dev": -1, "test": -1, "infer": -1} + np.random.seed(random_seed) + # progress counters returned by get_train_progress(); initialized here + # so that the attributes always exist + self.current_train_example = 0 + self.current_train_epoch = 0 + + def get_train_examples(self, data_dir, epoch=1): + """ + Load training examples + """ + return data_reader(os.path.join(self.data_dir, "train.tsv"), + self.vocab, self.num_examples, "train", epoch) + + def get_dev_examples(self, data_dir): + """ + Load dev examples + """ + return data_reader(os.path.join(self.data_dir, "dev.tsv"), + self.vocab, self.num_examples, "dev") + + def get_test_examples(self, data_dir): + """ + Load test examples + """ + return data_reader(os.path.join(self.data_dir, "test.tsv"), + self.vocab, self.num_examples, "test") + + def get_infer_examples(self, data_dir): + """ + Load inference queries + """ + return data_reader(os.path.join(self.data_dir, "infer.tsv"), + self.vocab, self.num_examples, "infer") + + def get_labels(self): + """ + Return Labels + """ + return ["0", "1", "2"] + + def get_num_examples(self, phase): + """ + Return num of examples in train, dev, test set + """ + if phase not in ['train', 'dev', 'test', 'infer']: + raise ValueError( + "Unknown phase, which should be in ['train', 'dev', 'test', 'infer'].") + return self.num_examples[phase] + + def get_train_progress(self): + """ + Get train progress + """ + return self.current_train_example, self.current_train_epoch + + def data_generator(self, batch_size, phase='train', epoch=1): + """ + Generate data for train, dev or test + """ + if phase == "train": + return paddle.batch(self.get_train_examples(self.data_dir, epoch), batch_size) + elif phase == "dev": + return paddle.batch(self.get_dev_examples(self.data_dir), batch_size) + elif phase == "test": + return paddle.batch(self.get_test_examples(self.data_dir), batch_size) + elif phase == "infer": + return paddle.batch(self.get_infer_examples(self.data_dir), batch_size) + else: + raise ValueError( + "Unknown phase, which should be in ['train', 'dev', 'test', 'infer'].") + diff --git a/PaddleNLP/emotion_detection/run.sh b/PaddleNLP/emotion_detection/run.sh new file mode 100644 index 00000000..3a6caae2 --- /dev/null +++ b/PaddleNLP/emotion_detection/run.sh @@ -0,0 +1,78 @@ +#!/bin/bash +export FLAGS_enable_parallel_graph=1 +export FLAGS_sync_nccl_allreduce=1 +export CUDA_VISIBLE_DEVICES=3 +export FLAGS_fraction_of_gpu_memory_to_use=0.95 +TASK_NAME='emotion_detection' +DATA_PATH=./data/ +VOCAB_PATH=./data/vocab.txt +CKPT_PATH=./save_models/textcnn +MODEL_PATH=./models/textcnn + +# run_train on train.tsv and do_val on dev.tsv +train() { + python run_classifier.py \ + --task_name ${TASK_NAME} \ + --use_cuda false \ + --do_train true \ + --do_val true \ + --batch_size 64 \ + --data_dir ${DATA_PATH} \ + --vocab_path ${VOCAB_PATH} \ + --output_dir ${CKPT_PATH} \ + --save_steps 200 \ + --validation_steps 200 \ + --epoch 5 \ + --lr 0.002 \ + --config_path ./config.json \ + --skip_steps 200 +}
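+# To fine-tune from a released checkpoint, the README suggests adding an --init_checkpoint option (e.g. ./models/textcnn) to train() above.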
--batch_size 128 \ + --data_dir ${DATA_PATH} \ + --vocab_path ${VOCAB_PATH} \ + --init_checkpoint ${MODEL_PATH} \ + --config_path ./config.json +} +# run_infer on infer.tsv +infer() { + python run_classifier.py \ + --task_name ${TASK_NAME} \ + --use_cuda false \ + --do_infer true \ + --batch_size 32 \ + --data_dir ${DATA_PATH} \ + --vocab_path ${VOCAB_PATH} \ + --init_checkpoint ${MODEL_PATH} \ + --config_path ./config.json +} + +main() { + local cmd=${1:-help} + case "${cmd}" in + train) + train "$@"; + ;; + eval) + evaluate "$@"; + ;; + infer) + infer "$@"; + ;; + help) + echo "Usage: ${BASH_SOURCE} {train|eval|infer}"; + return 0; + ;; + *) + echo "unsupport command [${cmd}]"; + echo "Usage: ${BASH_SOURCE} {train|eval|infer}"; + return 1; + ;; + esac +} +main "$@" diff --git a/PaddleNLP/emotion_detection/run_classifier.py b/PaddleNLP/emotion_detection/run_classifier.py new file mode 100644 index 00000000..2b792cf3 --- /dev/null +++ b/PaddleNLP/emotion_detection/run_classifier.py @@ -0,0 +1,344 @@ +""" +Emotion Detection Task +""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import time +import argparse +import multiprocessing +import sys +sys.path.append("../") + +import paddle +import paddle.fluid as fluid +import numpy as np + +from models.classification import nets +import reader +import config +import utils + +parser = argparse.ArgumentParser(__doc__) +model_g = utils.ArgumentGroup(parser, "model", "model configuration and paths.") +model_g.add_arg("config_path", str, None, "Path to the json file for EmoTect model config.") +model_g.add_arg("init_checkpoint", str, None, "Init checkpoint to resume training from.") +model_g.add_arg("output_dir", str, None, "Directory path to save checkpoints") + +train_g = utils.ArgumentGroup(parser, "training", "training options.") +train_g.add_arg("epoch", int, 10, "Number of epoches for training.") +train_g.add_arg("save_steps", int, 10000, "The steps interval to save checkpoints.") +train_g.add_arg("validation_steps", int, 1000, "The steps interval to evaluate model performance.") +train_g.add_arg("lr", float, 0.002, "The Learning rate value for training.") + +log_g = utils.ArgumentGroup(parser, "logging", "logging related") +log_g.add_arg("skip_steps", int, 10, "The steps interval to print loss.") +log_g.add_arg("verbose", bool, False, "Whether to output verbose log") + +data_g = utils.ArgumentGroup(parser, "data", "Data paths, vocab paths and data processing options") +data_g.add_arg("data_dir", str, None, "Directory path to training data.") +data_g.add_arg("vocab_path", str, None, "Vocabulary path.") +data_g.add_arg("batch_size", int, 256, "Total examples' number in batch for training.") +data_g.add_arg("random_seed", int, 0, "Random seed.") + +run_type_g = utils.ArgumentGroup(parser, "run_type", "running type options.") +run_type_g.add_arg("use_cuda", bool, False, "If set, use GPU for training.") +run_type_g.add_arg("task_name", str, None, "The name of task to perform sentiment classification.") +run_type_g.add_arg("do_train", bool, False, "Whether to perform training.") +run_type_g.add_arg("do_val", bool, False, "Whether to perform evaluation.") +run_type_g.add_arg("do_infer", bool, False, "Whether to perform inference.") + +args = parser.parse_args() + +def create_model(args, + pyreader_name, + emotect_config, + num_labels, + is_infer=False): + """ + Create Model for sentiment classification + """ + if is_infer: + pyreader = fluid.layers.py_reader( + 
capacity=16, + shapes=[[-1, 1]], + dtypes=['int64'], + lod_levels=[1], + name=pyreader_name, + use_double_buffer=False) + else: + pyreader = fluid.layers.py_reader( + capacity=16, + shapes=([-1, 1], [-1, 1]), + dtypes=('int64', 'int64'), + lod_levels=(1, 0), + name=pyreader_name, + use_double_buffer=False) + + if emotect_config['model_type'] == "cnn_net": + network = nets.cnn_net + elif emotect_config['model_type'] == "bow_net": + network = nets.bow_net + elif emotect_config['model_type'] == "lstm_net": + network = nets.lstm_net + elif emotect_config['model_type'] == "bilstm_net": + network = nets.bilstm_net + elif emotect_config['model_type'] == "gru_net": + network = nets.gru_net + elif emotect_config['model_type'] == "textcnn_net": + network = nets.textcnn_net + else: + raise ValueError("Unknown network type!") + + if is_infer: + data = fluid.layers.read_file(pyreader) + probs = network(data, None, emotect_config["vocab_size"], class_dim=num_labels, is_infer=True) + return pyreader, probs + + data, label = fluid.layers.read_file(pyreader) + avg_loss, probs = network(data, label, emotect_config["vocab_size"], class_dim=num_labels) + num_seqs = fluid.layers.create_tensor(dtype='int64') + accuracy = fluid.layers.accuracy(input=probs, label=label, total=num_seqs) + return pyreader, avg_loss, accuracy, num_seqs + + +def evaluate(exe, test_program, test_pyreader, fetch_list, eval_phase): + """ + Evaluation Function + """ + test_pyreader.start() + total_cost, total_acc, total_num_seqs = [], [], [] + time_begin = time.time() + while True: + try: + np_loss, np_acc, np_num_seqs = exe.run(program=test_program, + fetch_list=fetch_list, + return_numpy=False) + np_loss = np.array(np_loss) + np_acc = np.array(np_acc) + np_num_seqs = np.array(np_num_seqs) + total_cost.extend(np_loss * np_num_seqs) + total_acc.extend(np_acc * np_num_seqs) + total_num_seqs.extend(np_num_seqs) + except fluid.core.EOFException: + test_pyreader.reset() + break + time_end = time.time() + print("[%s evaluation] avg loss: %f, avg acc: %f, elapsed time: %f s" % + (eval_phase, np.sum(total_cost) / np.sum(total_num_seqs), + np.sum(total_acc) / np.sum(total_num_seqs), time_end - time_begin)) + + +def infer(exe, infer_program, infer_pyreader, fetch_list, infer_phase): + infer_pyreader.start() + time_begin = time.time() + while True: + try: + batch_probs = exe.run(program=infer_program, + fetch_list=fetch_list, + return_numpy=True) + for probs in batch_probs[0]: + print("%d\t%f\t%f\t%f" % (np.argmax(probs), probs[0], probs[1], probs[2])) + except fluid.core.EOFException as e: + infer_pyreader.reset() + break + time_end = time.time() + print("[%s] elapsed time: %f s" % (infer_phase, time_end - time_begin)) + + +def main(args): + """ + Main Function + """ + emotect_config = config.EmoTectConfig(args.config_path) + + if args.use_cuda: + place = fluid.CUDAPlace(int(os.getenv('FLAGS_selected_gpus', '0'))) + else: + place = fluid.CPUPlace() + exe = fluid.Executor(place) + + task_name = args.task_name.lower() + processor = reader.EmoTectProcessor(data_dir=args.data_dir, + vocab_path=args.vocab_path, + random_seed=args.random_seed) + num_labels = len(processor.get_labels()) + + if not (args.do_train or args.do_val or args.do_infer): + raise ValueError("For args `do_train`, `do_val` and `do_infer`, at " + "least one of them must be True.") + + startup_prog = fluid.Program() + if args.random_seed is not None: + startup_prog.random_seed = args.random_seed + + if args.do_train: + train_data_generator = processor.data_generator( + 
batch_size=args.batch_size, + phase='train', + epoch=args.epoch) + + num_train_examples = processor.get_num_examples(phase="train") + max_train_steps = args.epoch * num_train_examples // args.batch_size + 1 + + print("Num train examples: %d" % num_train_examples) + print("Max train steps: %d" % max_train_steps) + + train_program = fluid.Program() + + with fluid.program_guard(train_program, startup_prog): + with fluid.unique_name.guard(): + train_pyreader, loss, accuracy, num_seqs = create_model( + args, + pyreader_name='train_reader', + emotect_config=emotect_config, + num_labels=num_labels, + is_infer=False) + + sgd_optimizer = fluid.optimizer.Adagrad(learning_rate=args.lr) + sgd_optimizer.minimize(loss) + + if args.verbose: + lower_mem, upper_mem, unit = fluid.contrib.memory_usage( + program=train_program, batch_size=args.batch_size) + print("Theoretical memory usage in training: %.3f - %.3f %s" % + (lower_mem, upper_mem, unit)) + + if args.do_val: + test_prog = fluid.Program() + with fluid.program_guard(test_prog, startup_prog): + with fluid.unique_name.guard(): + test_pyreader, loss, accuracy, num_seqs = create_model( + args, + pyreader_name='test_reader', + emotect_config=emotect_config, + num_labels=num_labels, + is_infer=False) + test_prog = test_prog.clone(for_test=True) + + if args.do_infer: + test_prog = fluid.Program() + with fluid.program_guard(test_prog, startup_prog): + with fluid.unique_name.guard(): + infer_pyreader, probs = create_model( + args, + pyreader_name='infer_reader', + emotect_config=emotect_config, + num_labels=num_labels, + is_infer=True) + test_prog = test_prog.clone(for_test=True) + + exe.run(startup_prog) + + if args.do_train: + if args.init_checkpoint: + utils.init_checkpoint( + exe, + args.init_checkpoint, + main_program=startup_prog) + elif args.do_val or args.do_infer: + if not args.init_checkpoint: + raise ValueError("args 'init_checkpoint' should be set if" + "only doing validation or infer!") + utils.init_checkpoint( + exe, + args.init_checkpoint, + main_program=test_prog) + + if args.do_train: + train_exe = exe + train_pyreader.decorate_paddle_reader(train_data_generator) + else: + train_exe = None + if args.do_val or args.do_infer: + test_exe = exe + + if args.do_train: + train_pyreader.start() + steps = 0 + total_cost, total_acc, total_num_seqs = [], [], [] + time_begin = time.time() + while True: + try: + steps += 1 + if steps % args.skip_steps == 0: + fetch_list = [loss.name, accuracy.name, num_seqs.name] + else: + fetch_list = [] + + outputs = train_exe.run(program=train_program, + fetch_list=fetch_list, + return_numpy=False) + if steps % args.skip_steps == 0: + np_loss, np_acc, np_num_seqs = outputs + np_loss = np.array(np_loss) + np_acc = np.array(np_acc) + np_num_seqs = np.array(np_num_seqs) + total_cost.extend(np_loss * np_num_seqs) + total_acc.extend(np_acc * np_num_seqs) + total_num_seqs.extend(np_num_seqs) + + if args.verbose: + verbose = "train pyreader queue size: %d, " % train_pyreader.queue.size() + print(verbose) + + time_end = time.time() + used_time = time_end - time_begin + print("step: %d, avg loss: %f, " + "avg acc: %f, speed: %f steps/s" % + (steps, np.sum(total_cost) / np.sum(total_num_seqs), + np.sum(total_acc) / np.sum(total_num_seqs), + args.skip_steps / used_time)) + total_cost, total_acc, total_num_seqs = [], [], [] + time_begin = time.time() + + if steps % args.save_steps == 0: + save_path = os.path.join(args.output_dir, "step_" + str(steps)) + fluid.io.save_persistables(exe, save_path, train_program) + + if steps % 
args.validation_steps == 0: + # evaluate on dev set + if args.do_val: + test_pyreader.decorate_paddle_reader( + processor.data_generator( + batch_size=args.batch_size, + phase='dev', + epoch=1)) + evaluate(test_exe, test_prog, test_pyreader, + [loss.name, accuracy.name, num_seqs.name], + "dev") + + except fluid.core.EOFException: + save_path = os.path.join(args.output_dir, "step_" + str(steps)) + fluid.io.save_persistables(exe, save_path, train_program) + train_pyreader.reset() + break + + # evaluate on test set + if not args.do_train and args.do_val: + test_pyreader.decorate_paddle_reader( + processor.data_generator( + batch_size=args.batch_size, + phase='test', + epoch=1)) + print("Final test result:") + evaluate(test_exe, test_prog, test_pyreader, + [loss.name, accuracy.name, num_seqs.name], + "test") + + # infer + if args.do_infer: + infer_pyreader.decorate_paddle_reader( + processor.data_generator( + batch_size=args.batch_size, + phase='infer', + epoch=1)) + infer(test_exe, test_prog, infer_pyreader, + [probs.name], "infer") + +if __name__ == "__main__": + utils.print_arguments(args) + main(args) diff --git a/PaddleNLP/emotion_detection/run_ernie.sh b/PaddleNLP/emotion_detection/run_ernie.sh new file mode 100644 index 00000000..e90faf2f --- /dev/null +++ b/PaddleNLP/emotion_detection/run_ernie.sh @@ -0,0 +1,85 @@ +#!/bin/bash +export FLAGS_sync_nccl_allreduce=1 +export CUDA_VISIBLE_DEVICES=2 +MODEL_PATH=./models/ernie_finetune +TASK_DATA_PATH=./data +CKPT_PATH=./save_models/ernie + +# run_train +train() { + python run_ernie_classifier.py \ + --use_cuda true \ + --verbose true \ + --do_train true \ + --do_val true \ + --batch_size 32 \ + --init_checkpoint ${MODEL_PATH}/params \ + --train_set ${TASK_DATA_PATH}/train.tsv \ + --dev_set ${TASK_DATA_PATH}/dev.tsv \ + --vocab_path ${MODEL_PATH}/vocab.txt \ + --output_dir ${CKPT_PATH} \ + --save_steps 500 \ + --validation_steps 50 \ + --epoch 3 \ + --max_seq_len 64 \ + --ernie_config_path ${MODEL_PATH}/ernie_config.json \ + --lr 2e-5 \ + --skip_steps 50 \ + --num_labels 3 \ + --random_seed 1 +} + +# run_test +evaluate() { + python run_ernie_classifier.py \ + --use_cuda true \ + --verbose true \ + --do_val true \ + --batch_size 32 \ + --init_checkpoint ${MODEL_PATH}/params \ + --test_set ${TASK_DATA_PATH}/test.tsv \ + --vocab_path ${MODEL_PATH}/vocab.txt \ + --max_seq_len 64 \ + --ernie_config_path ${MODEL_PATH}/ernie_config.json \ + --num_labels 3 +} + +# run_infer +infer() { + python run_ernie_classifier.py \ + --use_cuda true \ + --verbose true \ + --do_infer true \ + --batch_size 32 \ + --init_checkpoint ${MODEL_PATH}/params \ + --infer_set ${TASK_DATA_PATH}/infer.tsv \ + --vocab_path ${MODEL_PATH}/vocab.txt \ + --max_seq_len 64 \ + --ernie_config_path ${MODEL_PATH}/ernie_config.json \ + --num_labels 3 +} + +main() { + local cmd=${1:-help} + case "${cmd}" in + train) + train "$@"; + ;; + eval) + evaluate "$@"; + ;; + infer) + infer "$@"; + ;; + help) + echo "Usage: ${BASH_SOURCE} {train|eval|infer}"; + return 0; + ;; + *) + echo "unsupport command [${cmd}]"; + echo "Usage: ${BASH_SOURCE} {train|eval|infer}"; + return 1; + ;; + esac +} +main "$@" diff --git a/PaddleNLP/emotion_detection/run_ernie_classifier.py b/PaddleNLP/emotion_detection/run_ernie_classifier.py new file mode 100644 index 00000000..a87caf1c --- /dev/null +++ b/PaddleNLP/emotion_detection/run_ernie_classifier.py @@ -0,0 +1,377 @@ +""" +Emotion Detection Task, based on ERNIE +""" + +from __future__ import absolute_import +from __future__ import division +from __future__ 
import print_function
+
+import os
+import time
+import argparse
+import multiprocessing
+import sys
+sys.path.append("../")
+
+import paddle
+import paddle.fluid as fluid
+import numpy as np
+
+from preprocess.ernie import task_reader
+from models.representation import ernie
+import utils
+
+# yapf: disable
+parser = argparse.ArgumentParser(__doc__)
+model_g = utils.ArgumentGroup(parser, "model", "model configuration and paths.")
+model_g.add_arg("ernie_config_path", str, None, "Path to the json file for ernie model config.")
+model_g.add_arg("senta_config_path", str, None, "Path to the json file for senta model config.")
+model_g.add_arg("init_checkpoint", str, None, "Init checkpoint to resume training from.")
+model_g.add_arg("output_dir", str, "checkpoints", "Path to save checkpoints.")
+
+train_g = utils.ArgumentGroup(parser, "training", "training options.")
+train_g.add_arg("epoch", int, 10, "Number of epochs for training.")
+train_g.add_arg("save_steps", int, 10000, "The steps interval to save checkpoints.")
+train_g.add_arg("validation_steps", int, 1000, "The steps interval to evaluate model performance.")
+train_g.add_arg("lr", float, 0.002, "The learning rate for training.")
+
+log_g = utils.ArgumentGroup(parser, "logging", "logging related.")
+log_g.add_arg("skip_steps", int, 10, "The steps interval to print loss.")
+log_g.add_arg("verbose", bool, False, "Whether to output verbose logs.")
+
+data_g = utils.ArgumentGroup(parser, "data", "Data paths, vocab paths and data processing options.")
+data_g.add_arg("data_dir", str, None, "Directory path to training data.")
+data_g.add_arg("vocab_path", str, None, "Vocabulary path.")
+data_g.add_arg("batch_size", int, 256, "Total number of examples in a training batch.")
+data_g.add_arg("random_seed", int, 0, "Random seed.")
+data_g.add_arg("num_labels", int, 2, "Number of labels.")
+data_g.add_arg("max_seq_len", int, 512, "Maximum number of words in a sequence.")
+data_g.add_arg("train_set", str, None, "Path to training data.")
+data_g.add_arg("test_set", str, None, "Path to test data.")
+data_g.add_arg("dev_set", str, None, "Path to validation data.")
+data_g.add_arg("infer_set", str, None, "Path to inference data.")
+data_g.add_arg("label_map_config", str, None, "label_map_path.")
+data_g.add_arg("do_lower_case", bool, True,
+    "Whether to lower case the input text. Should be True for uncased models and False for cased models.")
+
+run_type_g = utils.ArgumentGroup(parser, "run_type", "running type options.")
+run_type_g.add_arg("use_cuda", bool, False, "If set, use GPU for training.")
+run_type_g.add_arg("task_name", str, None, "The name of the task to perform sentiment classification.")
+run_type_g.add_arg("do_train", bool, False, "Whether to perform training.")
+run_type_g.add_arg("do_val", bool, False, "Whether to perform evaluation.")
+run_type_g.add_arg("do_infer", bool, False, "Whether to perform inference.")
+
+args = parser.parse_args()
+# yapf: enable.
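+# NOTE: the bool flags above are parsed through utils.str2bool, so they take
+# textual values on the command line, e.g.
+#     python run_ernie_classifier.py --use_cuda true --do_train true ...
+# Plain argparse `type=bool` would treat any non-empty string, including
+# "false", as True; utils.ArgumentGroup routes bools through str2bool instead.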
+ +def create_model(args, + embeddings, + labels, + is_prediction=False): + + """ + Create Model for sentiment classification based on ERNIE encoder + """ + sentence_embeddings = embeddings["sentence_embeddings"] + token_embeddings = embeddings["token_embeddings"] + + cls_feats = fluid.layers.dropout( + x=sentence_embeddings, + dropout_prob=0.1, + dropout_implementation="upscale_in_train") + logits = fluid.layers.fc( + input=cls_feats, + size=args.num_labels, + param_attr=fluid.ParamAttr( + name="cls_out_w", + initializer=fluid.initializer.TruncatedNormal(scale=0.02)), + bias_attr=fluid.ParamAttr( + name="cls_out_b", initializer=fluid.initializer.Constant(0.))) + + ce_loss, probs = fluid.layers.softmax_with_cross_entropy( + logits=logits, label=labels, return_softmax=True) + if is_prediction: + return probs + loss = fluid.layers.mean(x=ce_loss) + + num_seqs = fluid.layers.create_tensor(dtype='int64') + accuracy = fluid.layers.accuracy(input=probs, label=labels, total=num_seqs) + + return loss, accuracy, num_seqs + + +def evaluate(exe, test_program, test_pyreader, fetch_list, eval_phase): + """ + Evaluation Function + """ + test_pyreader.start() + total_cost, total_acc, total_num_seqs = [], [], [] + time_begin = time.time() + while True: + try: + np_loss, np_acc, np_num_seqs = exe.run(program=test_program, + fetch_list=fetch_list, + return_numpy=False) + np_loss = np.array(np_loss) + np_acc = np.array(np_acc) + np_num_seqs = np.array(np_num_seqs) + total_cost.extend(np_loss * np_num_seqs) + total_acc.extend(np_acc * np_num_seqs) + total_num_seqs.extend(np_num_seqs) + except fluid.core.EOFException: + test_pyreader.reset() + break + time_end = time.time() + print("[%s evaluation] avg loss: %f, ave acc: %f, elapsed time: %f s" % + (eval_phase, np.sum(total_cost) / np.sum(total_num_seqs), + np.sum(total_acc) / np.sum(total_num_seqs), time_end - time_begin)) + + +def infer(exe, infer_program, infer_pyreader, fetch_list, infer_phase): + """Infer""" + infer_pyreader.start() + time_begin = time.time() + while True: + try: + batch_probs = exe.run(program=infer_program, fetch_list=fetch_list, + return_numpy=True) + for probs in batch_probs[0]: + print("%d\t%f\t%f\t%f" % (np.argmax(probs), probs[0], probs[1], probs[2])) + except fluid.core.EOFException: + infer_pyreader.reset() + break + time_end = time.time() + print("[%s] elapsed time: %f s" % (infer_phase, time_end - time_begin)) + + +def main(args): + """ + Main Function + """ + args = parser.parse_args() + ernie_config = ernie.ErnieConfig(args.ernie_config_path) + ernie_config.print_config() + + if args.use_cuda: + place = fluid.CUDAPlace(int(os.getenv('FLAGS_selected_gpus', '0'))) + dev_count = fluid.core.get_cuda_device_count() + else: + place = fluid.CPUPlace() + dev_count = int(os.environ.get('CPU_NUM', multiprocessing.cpu_count())) + exe = fluid.Executor(place) + + reader = task_reader.ClassifyReader( + vocab_path=args.vocab_path, + label_map_config=args.label_map_config, + max_seq_len=args.max_seq_len, + do_lower_case=args.do_lower_case, + random_seed=args.random_seed) + + if not (args.do_train or args.do_val or args.do_infer): + raise ValueError("For args `do_train`, `do_val` and `do_infer`, at " + "least one of them must be True.") + + startup_prog = fluid.Program() + if args.random_seed is not None: + startup_prog.random_seed = args.random_seed + + if args.do_train: + train_data_generator = reader.data_generator( + input_file=args.train_set, + batch_size=args.batch_size, + epoch=args.epoch, + shuffle=True, + phase="train") + + 
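+        # Step budget: one pass over num_train_examples per epoch, sharded
+        # across dev_count devices; the trailing "+ 1" compensates for floor
+        # division dropping the final partial batch.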
num_train_examples = reader.get_num_examples(args.train_set)
+
+        max_train_steps = args.epoch * num_train_examples // args.batch_size // dev_count + 1
+
+        print("Device count: %d" % dev_count)
+        print("Num train examples: %d" % num_train_examples)
+        print("Max train steps: %d" % max_train_steps)
+
+        train_program = fluid.Program()
+
+        with fluid.program_guard(train_program, startup_prog):
+            with fluid.unique_name.guard():
+                # create ernie_pyreader
+                train_pyreader, ernie_inputs, labels = ernie.ernie_pyreader(
+                    args,
+                    pyreader_name='train_reader')
+
+                # get ernie_embeddings
+                embeddings = ernie.ernie_encoder(ernie_inputs, ernie_config=ernie_config)
+
+                # user defined model based on ernie embeddings
+                loss, accuracy, num_seqs = create_model(
+                    args,
+                    embeddings,
+                    labels=labels,
+                    is_prediction=False)
+
+                optimizer = fluid.optimizer.Adam(learning_rate=args.lr)
+                optimizer.minimize(loss)
+
+        if args.verbose:
+            lower_mem, upper_mem, unit = fluid.contrib.memory_usage(
+                program=train_program, batch_size=args.batch_size)
+            print("Theoretical memory usage in training: %.3f - %.3f %s" %
+                  (lower_mem, upper_mem, unit))
+
+    if args.do_val:
+        test_prog = fluid.Program()
+        with fluid.program_guard(test_prog, startup_prog):
+            with fluid.unique_name.guard():
+                # create ernie_pyreader
+                test_pyreader, ernie_inputs, labels = ernie.ernie_pyreader(
+                    args,
+                    pyreader_name='eval_reader')
+
+                # get ernie_embeddings
+                embeddings = ernie.ernie_encoder(ernie_inputs, ernie_config=ernie_config)
+
+                # user defined model based on ernie embeddings
+                loss, accuracy, num_seqs = create_model(
+                    args,
+                    embeddings,
+                    labels=labels,
+                    is_prediction=False)
+
+        test_prog = test_prog.clone(for_test=True)
+
+    if args.do_infer:
+        test_prog = fluid.Program()
+        with fluid.program_guard(test_prog, startup_prog):
+            with fluid.unique_name.guard():
+                infer_pyreader, ernie_inputs, labels = ernie.ernie_pyreader(
+                    args,
+                    pyreader_name='infer_reader')
+
+                # get ernie_embeddings
+                embeddings = ernie.ernie_encoder(ernie_inputs, ernie_config=ernie_config)
+
+                probs = create_model(args,
+                                     embeddings,
+                                     labels=labels,
+                                     is_prediction=True)
+        test_prog = test_prog.clone(for_test=True)
+
+    exe.run(startup_prog)
+
+    if args.do_train:
+        if args.init_checkpoint:
+            utils.init_checkpoint(
+                exe,
+                args.init_checkpoint,
+                main_program=startup_prog)
+    elif args.do_val or args.do_infer:
+        if not args.init_checkpoint:
+            raise ValueError("args 'init_checkpoint' should be set if "
+                             "only doing validation or infer!")
+        utils.init_checkpoint(
+            exe,
+            args.init_checkpoint,
+            main_program=test_prog)
+
+    if args.do_train:
+        train_exe = exe
+        train_pyreader.decorate_tensor_provider(train_data_generator)
+    else:
+        train_exe = None
+    if args.do_val or args.do_infer:
+        test_exe = exe
+
+    if args.do_train:
+        train_pyreader.start()
+        steps = 0
+        total_cost, total_acc, total_num_seqs = [], [], []
+        time_begin = time.time()
+        while True:
+            try:
+                steps += 1
+                if steps % args.skip_steps == 0:
+                    fetch_list = [loss.name, accuracy.name, num_seqs.name]
+                else:
+                    fetch_list = []
+
+                outputs = train_exe.run(program=train_program, fetch_list=fetch_list, return_numpy=False)
+                if steps % args.skip_steps == 0:
+                    np_loss, np_acc, np_num_seqs = outputs
+                    np_loss = np.array(np_loss)
+                    np_acc = np.array(np_acc)
+                    np_num_seqs = np.array(np_num_seqs)
+                    total_cost.extend(np_loss * np_num_seqs)
+                    total_acc.extend(np_acc * np_num_seqs)
+                    total_num_seqs.extend(np_num_seqs)
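+                    # The accumulators above are cleared after every report,
+                    # so each log line below covers only the last skip_steps
+                    # steps, weighted by the number of sequences per batch.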
+ if args.verbose: + verbose = "train pyreader queue size: %d, " % train_pyreader.queue.size() + print(verbose) + + time_end = time.time() + used_time = time_end - time_begin + print("step: %d, avg loss: %f, " + "avg acc: %f, speed: %f steps/s" % + (steps, np.sum(total_cost) / np.sum(total_num_seqs), + np.sum(total_acc) / np.sum(total_num_seqs), + args.skip_steps / used_time)) + total_cost, total_acc, total_num_seqs = [], [], [] + time_begin = time.time() + + if steps % args.save_steps == 0: + save_path = os.path.join(args.output_dir, "step_" + str(steps)) + fluid.io.save_persistables(exe, save_path, train_program) + + if steps % args.validation_steps == 0: + # evaluate dev set + if args.do_val: + test_pyreader.decorate_tensor_provider( + reader.data_generator( + input_file=args.dev_set, + batch_size=args.batch_size, + phase='dev', + epoch=1, + shuffle=False)) + + evaluate(exe, test_prog, test_pyreader, + [loss.name, accuracy.name, num_seqs.name], + "dev") + + except fluid.core.EOFException: + save_path = os.path.join(args.output_dir, "step_" + str(steps)) + fluid.io.save_persistables(exe, save_path, train_program) + train_pyreader.reset() + break + + # eval on test set + if not args.do_train and args.do_val: + test_pyreader.decorate_tensor_provider( + reader.data_generator( + input_file=args.test_set, + batch_size=args.batch_size, phase='test', epoch=1, + shuffle=False)) + print("Final validation result:") + evaluate(exe, test_prog, test_pyreader, + [loss.name, accuracy.name, num_seqs.name], "test") + + # infer on infer set + if args.do_infer: + infer_pyreader.decorate_tensor_provider( + reader.data_generator( + input_file=args.infer_set, + batch_size=args.batch_size, + phase='infer', + epoch=1, + shuffle=False)) + print("Final test result:") + infer(exe, test_prog, infer_pyreader, + [probs.name], "infer") + +if __name__ == "__main__": + utils.print_arguments(args) + main(args) diff --git a/PaddleNLP/emotion_detection/utils.py b/PaddleNLP/emotion_detection/utils.py new file mode 100644 index 00000000..ac916d25 --- /dev/null +++ b/PaddleNLP/emotion_detection/utils.py @@ -0,0 +1,142 @@ +""" +EmoTect utilities. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import io +import os +import six +import sys +import random +import argparse + +import paddle +import paddle.fluid as fluid +import numpy as np + +def str2bool(value): + """ + String to Boolean + """ + # because argparse does not support to parse "true, False" as python + # boolean directly + return value.lower() in ("true", "t", "1") + + +class ArgumentGroup(object): + """ + Argument Class + """ + def __init__(self, parser, title, des): + self._group = parser.add_argument_group(title=title, description=des) + + def add_arg(self, name, type, default, help, **kwargs): + """ + Add argument + """ + type = str2bool if type == bool else type + self._group.add_argument( + "--" + name, + default=default, + type=type, + help=help + ' Default: %(default)s.', + **kwargs) + + +def print_arguments(args): + """ + Print Arguments + """ + print('----------- Configuration Arguments -----------') + for arg, value in sorted(six.iteritems(vars(args))): + print('%s: %s' % (arg, value)) + print('------------------------------------------------') + + +def init_checkpoint(exe, init_checkpoint_path, main_program): + """ + Init CheckPoint + """ + assert os.path.exists( + init_checkpoint_path), "[%s] cann't be found." 
+
+    def existed_persistables(var):
+        """
+        Whether var is a persistable variable that exists in the checkpoint
+        """
+        if not fluid.io.is_persistable(var):
+            return False
+        return os.path.exists(os.path.join(init_checkpoint_path, var.name))
+
+    fluid.io.load_vars(
+        exe,
+        init_checkpoint_path,
+        main_program=main_program,
+        predicate=existed_persistables)
+    print("Load model from {}".format(init_checkpoint_path))
+
+
+def data_reader(file_path, word_dict, num_examples, phase, epoch=1):
+    """
+    Convert word sequences into id slots
+    """
+    unk_id = len(word_dict)
+    all_data = []
+    with io.open(file_path, "r", encoding='utf8') as fin:
+        for line in fin:
+            if line.startswith("label"):
+                continue
+            if phase == "infer":
+                cols = line.strip().split("\t")
+                query = cols[-1]
+                wids = [word_dict[x] if x in word_dict else unk_id
+                        for x in query.strip().split(" ")]
+                all_data.append((wids,))
+            else:
+                cols = line.strip().split("\t")
+                if len(cols) != 2:
+                    sys.stderr.write("[NOTICE] Error Format Line!")
+                    continue
+                label = int(cols[0])
+                wids = [word_dict[x] if x in word_dict else unk_id
+                        for x in cols[1].split(" ")]
+                all_data.append((wids, label))
+    num_examples[phase] = len(all_data)
+
+    if phase == "infer":
+        def reader():
+            """
+            Infer reader function
+            """
+            for wids in all_data:
+                yield wids
+        return reader
+
+    def reader():
+        """
+        Reader function
+        """
+        for idx in range(epoch):
+            if phase == "train":
+                random.shuffle(all_data)
+            for wids, label in all_data:
+                yield wids, label
+    return reader
+
+
+def load_vocab(file_path):
+    """
+    Load the given vocabulary
+    """
+    vocab = {}
+    with io.open(file_path, 'r', encoding='utf8') as fin:
+        wid = 0
+        for line in fin:
+            if line.strip() not in vocab:
+                vocab[line.strip()] = wid
+                wid += 1
+    vocab["<unk>"] = len(vocab)
+    return vocab
diff --git a/PaddleNLP/knowledge-driven-dialogue b/PaddleNLP/knowledge_driven_dialogue
similarity index 100%
rename from PaddleNLP/knowledge-driven-dialogue
rename to PaddleNLP/knowledge_driven_dialogue
diff --git a/PaddleNLP/language_model/README.md b/PaddleNLP/language_model/README.md
new file mode 100644
index 00000000..adb23188
--- /dev/null
+++ b/PaddleNLP/language_model/README.md
@@ -0,0 +1,125 @@
+# Language Model
+
+# Introduction
+
+## 1. Task description
+This project implements an LSTM-based language model: given an input word sequence (word-segmented for Chinese, tokenized for English), it computes the sequence's PPL (perplexity, used to measure how fluent a sentence is). For an introduction to RNN-based language models, see [this paper](https://arxiv.org/abs/1409.2329). Compared with traditional approaches, RNN-based methods handle rare words better.
+
+## 2. Results
+PPL comparison across the small, medium, and large configurations:
+
+| small config | train | valid | test |
+| :------------- | :---------: | :--------: | :----------: |
+| paddle | 40.962 | 118.111 | 112.617 |
+| tensorflow | 40.492 | 118.329 | 113.788 |
+
+| medium config | train | valid | test |
+| :------------- | :---------: | :--------: | :----------: |
+| paddle | 45.620 | 87.398 | 83.682 |
+| tensorflow | 45.594 | 87.363 | 84.015 |
+
+| large config | train | valid | test |
+| :------------- | :---------: | :--------: | :----------: |
+| paddle | 37.221 | 82.358 | 78.137 |
+| tensorflow | 38.342 | 82.311 | 78.121 |
+
+## 3. Dataset
+
+This task uses the PTB dataset, available at: http://www.fit.vutbr.cz/~imikolov/rnnlm/simple-examples.tgz
+
+
+# Quick start
+
+## 1. Installation
+
+### Installing Paddle
+This project depends on Paddle Fluid; for installation instructions, see the [PaddlePaddle website](http://paddlepaddle.org/documentation/docs/zh/1.3/beginners_guide/install/index_cn.html).
+### Installing the code
+### Environment dependencies
+
+## 2. Running the model for the first time
+
+### Data preparation
+To make testing easy, we provide a data download script; you can also download and extract the data yourself.
+
+```
+cd data; sh download_data.sh
+```
+
+### Training or fine-tuning
+Launch training with:
+```
+sh run.sh
+```
+You need to specify the data directory and the model size (small by default; medium and large are also available).
+
+# Advanced usage
+## 1. Task definition and modeling
+The goal of this task is: given an input word sequence, predict the probability of the next word.
+
+## 2. Model overview
+The task uses the RNN architecture common for sequence tasks: a two-layer LSTM network whose outputs are used to predict the probability of the next word.
+
+Because of how the data is organized, each batch's last hidden and last cell states are used as the next batch's initial hidden and cell states; this peculiarity of the data is described in the next section.
+
+
+## 3. Data format
+The data format for this task is simple: each line is one word sequence that has already been segmented (tokenized, for English).
+
+Example sentences:
+```
+aer banknote berlitz calloway centrust cluett fromstein gitano guterman hydro-quebec ipo kia memotec mlx nahb punts rake regatta rubens sim snack-food ssangyong swapo wachter
+pierre N years old will join the board as a nonexecutive director nov. N
+mr. is chairman of n.v. the dutch publishing group
+```
+
+Special note: the PTB data is unusual in that it comes from articles, so adjacent sentences may come from the same or adjacent paragraphs; therefore the PTB data must not be shuffled.
+
+
+
+## 4. Directory layout
+
+```text
+.
+├── README.md           # this document
+├── run.sh              # launch script
+├── train.py            # training code
+├── reader.py           # data reading
+├── args.py             # argument parsing
+└── data                # data download
+../
+└── models
+    └── language_model
+        └── lm_model.py # model definition file
+```
+
+## 5. Building your own model
++ **Custom data:** segment (or tokenize) your data first, place it under the data directory, and change the file names in reader.py. If there is no dependency between your sentences, you can comment out the state-update code in `train.py` shown below (a runnable sketch of this state-carrying pattern follows the license section at the end of this README):
+  ```
+  init_hidden = np.array(fetch_outs[1])
+  init_cell = np.array(fetch_outs[2])
+  ```
+
++ **Changing the network:** only an LSTM-based language model is implemented; you can replace it with a GRU, self-attention, or other architecture as your needs require. These are all defined in lm_model.py.
+
+
+# Others
+
+## Copyright and License
+Copyright 2017 Baidu.com, Inc. All Rights Reserved
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
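+
+To make the batch-to-batch state handling from sections 2 and 5 concrete, here is a minimal, self-contained sketch of the pattern; the `train_one_batch` stub and the state shapes are illustrative placeholders, not this repository's API (see `train.py` for the real loop):
+
+```python
+import numpy as np
+
+num_layers, batch_size, hidden_size = 2, 20, 200
+
+def train_one_batch(batch, init_hidden, init_cell):
+    """Placeholder for one training step; returns (loss, last_hidden, last_cell)."""
+    return 0.0, init_hidden, init_cell
+
+# PTB batches are contiguous text, so LSTM state flows across batch boundaries.
+init_hidden = np.zeros((num_layers, batch_size, hidden_size), dtype="float32")
+init_cell = np.zeros_like(init_hidden)
+
+for batch in range(3):  # stand-in for the real PTB batch iterator
+    fetch_outs = train_one_batch(batch, init_hidden, init_cell)
+    # Carry the final state into the next batch; comment these two lines
+    # out when sentences are independent, as section 5 suggests.
+    init_hidden = np.array(fetch_outs[1])
+    init_cell = np.array(fetch_outs[2])
+```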
+
+## How to contribute code
+
+If you can fix an issue or add a new feature, feel free to send us a PR. If the PR is accepted, we will score the contribution by quality and difficulty (0-5, the higher the better). Once you accumulate 10 points, you may contact us for an interview opportunity or a recommendation letter.
diff --git a/PaddleNLP/language_model/lstm/args.py b/PaddleNLP/language_model/args.py
similarity index 100%
rename from PaddleNLP/language_model/lstm/args.py
rename to PaddleNLP/language_model/args.py
diff --git a/PaddleNLP/language_model/lstm/data/download_data.sh b/PaddleNLP/language_model/data/download_data.sh
similarity index 100%
rename from PaddleNLP/language_model/lstm/data/download_data.sh
rename to PaddleNLP/language_model/data/download_data.sh
diff --git a/PaddleNLP/language_model/lstm/reader.py b/PaddleNLP/language_model/reader.py
similarity index 100%
rename from PaddleNLP/language_model/lstm/reader.py
rename to PaddleNLP/language_model/reader.py
diff --git a/PaddleNLP/language_model/run.sh b/PaddleNLP/language_model/run.sh
new file mode 100644
index 00000000..394f5f83
--- /dev/null
+++ b/PaddleNLP/language_model/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+export CUDA_VISIBLE_DEVICES=0
+
+function run_train() {
+    echo "training"
+    python train.py \
+        --data_path data/simple-examples/data/ \
+        --model_type small \
+        --use_gpu True
+}
+
+run_train
\ No newline at end of file
diff --git a/PaddleNLP/language_model/lstm/train.py b/PaddleNLP/language_model/train.py
similarity index 99%
rename from PaddleNLP/language_model/lstm/train.py
rename to PaddleNLP/language_model/train.py
index 2599a92c..c6e56467 100644
--- a/PaddleNLP/language_model/lstm/train.py
+++ b/PaddleNLP/language_model/train.py
@@ -40,7 +40,8 @@ import os
 os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
 
 from args import *
-import lm_model
+sys.path.append("../")
+from models.language_model import lm_model
 import logging
 import pickle
diff --git a/PaddleNLP/language_representations_kit b/PaddleNLP/language_representations_kit
new file mode 160000
index 00000000..b9dae026
--- /dev/null
+++ b/PaddleNLP/language_representations_kit
@@ -0,0 +1 @@
+Subproject commit b9dae026c25602b96adf7ee776ff9f894c912338
diff --git a/PaddleNLP/lexical_analysis/README.md b/PaddleNLP/lexical_analysis/README.md
new file mode 100644
index 00000000..8d56386c
--- /dev/null
+++ b/PaddleNLP/lexical_analysis/README.md
@@ -0,0 +1,149 @@
+# Chinese Lexical Analysis
+
+## 1. Introduction
+Lexical Analysis of Chinese (LAC) is a joint lexical analysis model that performs Chinese word segmentation, part-of-speech tagging, and named-entity recognition as a single task. Overall, it achieves 88.0% precision, 88.7% recall, and 88.4% F1 across the three subtasks. In addition, finetuning on Baidu's open-source [ERNIE](https://github.com/PaddlePaddle/LARK/tree/develop/ERNIE) model improves this to 92.0% precision, 92.0% recall, and 92.0% F1. You can try it online at [Baidu AI Open Platform - Lexical Analysis](http://ai.baidu.com/tech/nlp/lexical).
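+
+To make the three joint subtasks concrete, here is the kind of analysis the model produces for a short sentence (an illustrative rendering, not the tool's literal output format):
+
+```text
+input:  百度是一家高科技公司
+output: 百度/ORG  是/v  一家/m  高科技/n  公司/n
+```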
+## 2. Quick start
+This project depends on Paddle 1.3.2; see the official [quick install guide](http://www.paddlepaddle.org/paddle#quick-start) for installation.
+
+### Baseline model
+Run the script that downloads the data and models:
+```bash
+sh downloads.sh
+```
+This creates the following directories:
+```text
+./data/             # data files
+./model_baseline/   # lexical analysis model files
+./model_finetuned/  # lexical analysis model finetuned on ERNIE
+./pretrained/       # pretrained ERNIE model released by Baidu
+```
+
+The model files can also be fetched conveniently through [PaddleHub](https://github.com/PaddlePaddle/PaddleHub); see the command examples below:
+```bash
+# install paddlehub
+pip install paddlehub
+
+# download baseline model
+hub download lexical_analysis
+
+# download ERNIE finetuned model
+hub download lexical_analysis_finetuned
+```
+
+We trained a lexical analysis model on Baidu's massive data; you can evaluate it directly on the open test set ./data/test.tsv:
+```bash
+sh run.sh test
+```
+
+You can also use the model to predict on new data:
+```bash
+sh run.sh infer
+```
+
+Alternatively, you can train your own lexical analysis model on your own dataset; we provide the scripts and code for training:
+```
+sh run.sh train
+```
+
+### Finetuning with ERNIE
+For the original open-source ERNIE model, see [ERNIE](https://github.com/PaddlePaddle/LARK/tree/develop/ERNIE); for convenience, the download command is also included in `downloads.sh`. The downloaded pretrained ERNIE model files are placed under the `./pretrained/` directory.
+
+Finetuning the lexical analysis model on `ERNIE` yields a solid improvement, so we release this model as well, under `./model_finetuned`. Run the following script to evaluate it on the test set:
+```bash
+sh run_ernie.sh test
+```
+
+You can also use this model to predict on new data:
+```bash
+sh run_ernie.sh infer
+```
+
+Of course, you can also finetune on ERNIE with your own data:
+```bash
+sh run_ernie.sh train
+```
+
+
+## 3. Advanced usage
+
+### Task definition and modeling
+The input of lexical analysis is a string (referred to as a "sentence" below), and the output is the word boundaries in the sentence together with POS tags and entity types. Sequence labeling is the classic way to model this task. We use a GRU-based network to learn features and feed the learned features into a CRF decoding layer to complete the sequence labeling. The CRF decoding layer essentially replaces the linear model of a traditional CRF with a nonlinear neural network and works on sentence-level likelihood, which better addresses the label-bias problem. The key points of the model are listed below; for details, see the `run_sequence_labeling.py` code.
+1. The input uses a one-hot representation; each character is represented by an id.
+2. The one-hot sequence is mapped through the character table into a sequence of dense character embeddings.
+3. The character embedding sequence is fed into a bidirectional GRU to learn a feature representation of the input; we stack two bidirectional GRU layers to increase learning capacity.
+4. The CRF takes the GRU features as input and the tag sequence as supervision to perform the sequence labeling.
+
+The POS and entity label sets are shown in the table below: 24 POS tags (lowercase) and 4 entity tags (uppercase). Note that person, location, organization, and time each have two sets of tags in the table below (PER / LOC / ORG / TIME and nr / ns / nt / t); words labeled with the second set are those the model judged to be low-confidence persons, locations, organizations, or times. Based on these two tag sets, developers can make their own trade-off between precision and recall for these four classes.
+
+| Tag | Meaning | Tag | Meaning | Tag | Meaning | Tag | Meaning |
+| ---- | -------- | ---- | -------- | ---- | -------- | ---- | -------- |
+| n | common noun | f | directional noun | s | locative noun | t | time |
+| nr | person name | ns | place name | nt | organization name | nw | work title |
+| nz | other proper noun | v | common verb | vd | verb as adverb | vn | verb as noun |
+| a | adjective | ad | adjective as adverb | an | adjective as noun | d | adverb |
+| m | numeral | q | measure word | r | pronoun | p | preposition |
+| c | conjunction | u | particle | xc | other function word | w | punctuation |
+| PER | person name | LOC | place name | ORG | organization name | TIME | time |
+
+### Model overview
+The model described above is illustrated in the figure below:
+
+
+![GRU-CRF-MODEL](./gru-crf-model.png)
+
+### Data format
+Training data can be organized by users according to their own application scenario. Apart from the fixed first line `text_a\tlabel`, each following line consists of two columns separated by a tab: the first column is UTF-8 encoded Chinese text with characters separated by `\002`, and the second column is the label of each character, also separated by `\002`. We use the IOB2 tagging scheme: X-B marks the beginning of a word of type X, X-I marks the continuation of a word of type X, and O marks characters of no interest (in practice, O does not occur in joint POS/NER labeling). Example:
+
+```text
+除\002了\002他\002续\002任\002十\002二\002届\002政\002协\002委\002员\002,\002马\002化\002腾\002,\002雷\002军\002,\002李\002彦\002宏\002也\002被\002推\002选\002为\002新\002一\002届\002全\002国\002人\002大\002代\002表\002或\002全\002国\002政\002协\002委\002员 p-B\002p-I\002r-B\002v-B\002v-I\002m-B\002m-I\002m-I\002ORG-B\002ORG-I\002n-B\002n-I\002w-B\002PER-B\002PER-I\002PER-I\002w-B\002PER-B\002PER-I\002w-B\002PER-B\002PER-I\002PER-I\002d-B\002p-B\002v-B\002v-I\002v-B\002a-B\002m-B\002m-I\002ORG-B\002ORG-I\002ORG-I\002ORG-I\002n-B\002n-I\002c-B\002n-B\002n-I\002ORG-B\002ORG-I\002n-B\002n-I
+```
+
++ We release the full model and its dependency data together with the code. However, because the training data is extremely large, we do not release it; the `data` directory only contains a few samples to illustrate the input format.
+
++ The model's dependency data includes:
+  1. the vocabulary of the input text, `word.dic` under the `conf` directory;
+  2. the dictionary that normalizes special characters in the input text, `q2b.dic` under the `conf` directory;
+  3. the label dictionary, `tag.dic` under the `conf` directory.
+
++ Both the training and prediction stages preprocess the raw data as follows:
+
+  1. extract sentences and labels from the raw data files to build the sentence and label sequences;
+  2. normalize the special characters in the sentence sequence;
+  3. look up each character's integer index in the dictionary.
+
+### Code structure
+```text
+.
+├── README.md                        # this document
+├── conf/                            # dictionary directory
+├── data/                            # dataset directory
+├── downloads.sh                     # script to download data and models
+├── gru-crf-model.png                # model figure used in this README
+├── reader.py                        # data reading functions
+├── run_ernie_sequence_labeling.py   # code for finetuning ERNIE
+├── run_ernie.sh                     # launch script for the code above
+├── run_sequence_labeling.py         # lexical analysis task code
+├── run.sh                           # launch script for the code above
+└── utils.py                         # common utility functions
+```
+
+### Building your own model
+If you need to customize the lexical analysis model, you can add your own network structure in `../models/sequence_labeling/nets.py`; see the default `lex_net` function for the required interface.
+
+## 4. Others
+### Citing LAC in papers
+
+If LAC is used in your academic work, please add the citation below. We are delighted that LAC can help your research.
+
+```text
+@article{jiao2018LAC,
+  title={Chinese Lexical Analysis with Deep Bi-GRU-CRF Network},
+  author={Jiao, Zhenyu and Sun, Shuqi and Sun, Ke},
+  journal={arXiv preprint arXiv:1807.01882},
+  year={2018},
+  url={https://arxiv.org/abs/1807.01882}
+}
+```
+### How to contribute code
+If you can fix an issue or add a new feature, feel free to send us a PR. If the PR is accepted, we will score the contribution by quality and difficulty (0-5, the higher the better). Once you accumulate 10 points, you may contact us for an interview opportunity or a recommendation letter.
diff --git a/PaddleNLP/text_matching_on_quora/__init__.py b/PaddleNLP/lexical_analysis/conf/customization.dic
similarity index 100%
rename from PaddleNLP/text_matching_on_quora/__init__.py
rename to PaddleNLP/lexical_analysis/conf/customization.dic
diff --git a/PaddleNLP/lexical_analysis/conf/customization.dic.example b/PaddleNLP/lexical_analysis/conf/customization.dic.example
new file mode 100644
index 00000000..22ebf8a4
--- /dev/null
+++ b/PaddleNLP/lexical_analysis/conf/customization.dic.example
@@ -0,0 +1,3 @@
+[D:MONTH]
+月
+月份
diff --git a/PaddleNLP/lexical_analysis/conf/label_map.json b/PaddleNLP/lexical_analysis/conf/label_map.json
new file mode 100644
index 00000000..52011d81
--- /dev/null
+++ b/PaddleNLP/lexical_analysis/conf/label_map.json
@@ -0,0 +1 @@
+{"d-B": 8, "c-I": 7, "PER-I": 49, "nr-B": 16, "u-B": 36, "c-B": 6, "nr-I": 17, "an-I": 5, "ns-B": 18, "vn-I": 43, "w-B": 44, "an-B": 4, "PER-B": 48, "vn-B": 42, "ns-I": 19, "a-I": 1, "r-B": 30, "xc-B": 46, "LOC-B": 50, "ad-I": 3, "nz-B": 24, "u-I": 37, "a-B": 0, "ad-B": 2, "vd-I": 41, "nw-B": 22, "m-I": 13, "d-I": 9, "n-B": 14, "nz-I": 25, "vd-B": 40, "nw-I": 23, "n-I": 15, "nt-B": 20, "ORG-I": 53, "nt-I": 21, "ORG-B": 52, "LOC-I": 51, "t-B": 34, "TIME-I": 55, "O": 56, "s-I": 33, "f-I": 11, "TIME-B": 54, "t-I": 35, "f-B": 10, "s-B": 32, "r-I": 31, "q-B": 28, "v-I": 39, "v-B": 38, "w-I": 45, "q-I": 29, "p-B": 26,
"xc-I": 47, "m-B": 12, "p-I": 27} \ No newline at end of file diff --git a/PaddleNLP/lexical_analysis/conf/q2b.dic b/PaddleNLP/lexical_analysis/conf/q2b.dic new file mode 100644 index 00000000..d1f14691 --- /dev/null +++ b/PaddleNLP/lexical_analysis/conf/q2b.dic @@ -0,0 +1,172 @@ +  +、 , +。 . +— - +~ ~ +‖ | +… . +‘ ' +’ ' +“ " +” " +〔 ( +〕 ) +〈 < +〉 > +「 ' +」 ' +『 " +』 " +〖 [ +〗 ] +【 [ +】 ] +∶ : +$ $ +! ! +" " +# # +% % +& & +' ' +( ( +) ) +* * ++ + +, , +- - +. . +/ / +0 0 +1 1 +2 2 +3 3 +4 4 +5 5 +6 6 +7 7 +8 8 +9 9 +: : +; ; +< < += = +> > +? ? +@ @ +A a +B b +C c +D d +E e +F f +G g +H h +I i +J j +K k +L l +M m +N n +O o +P p +Q q +R r +S s +T t +U u +V v +W w +X x +Y y +Z z +[ [ +\ \ +] ] +^ ^ +_ _ +` ` +a a +b b +c c +d d +e e +f f +g g +h h +i i +j j +k k +l l +m m +n n +o o +p p +q q +r r +s s +t t +u u +v v +w w +x x +y y +z z +{ { +| | +} } + ̄ ~ +〝 " +〞 " +﹐ , +﹑ , +﹒ . +﹔ ; +﹕ : +﹖ ? +﹗ ! +﹙ ( +﹚ ) +﹛ { +﹜ { +﹝ [ +﹞ ] +﹟ # +﹠ & +﹡ * +﹢ + +﹣ - +﹤ < +﹥ > +﹦ = +﹨ \ +﹩ $ +﹪ % +﹫ @ + , +A a +B b +C c +D d +E e +F f +G g +H h +I i +J j +K k +L l +M m +N n +O o +P p +Q q +R r +S s +T t +U u +V v +W w +X x +Y y +Z z diff --git a/PaddleNLP/lexical_analysis/conf/strong_punc.dic b/PaddleNLP/lexical_analysis/conf/strong_punc.dic new file mode 100644 index 00000000..595e2f67 --- /dev/null +++ b/PaddleNLP/lexical_analysis/conf/strong_punc.dic @@ -0,0 +1,5 @@ +! +。 +! +; +; diff --git a/PaddleNLP/lexical_analysis/conf/tag.dic b/PaddleNLP/lexical_analysis/conf/tag.dic new file mode 100644 index 00000000..753fa967 --- /dev/null +++ b/PaddleNLP/lexical_analysis/conf/tag.dic @@ -0,0 +1,57 @@ +0 a-B +1 a-I +2 ad-B +3 ad-I +4 an-B +5 an-I +6 c-B +7 c-I +8 d-B +9 d-I +10 f-B +11 f-I +12 m-B +13 m-I +14 n-B +15 n-I +16 nr-B +17 nr-I +18 ns-B +19 ns-I +20 nt-B +21 nt-I +22 nw-B +23 nw-I +24 nz-B +25 nz-I +26 p-B +27 p-I +28 q-B +29 q-I +30 r-B +31 r-I +32 s-B +33 s-I +34 t-B +35 t-I +36 u-B +37 u-I +38 v-B +39 v-I +40 vd-B +41 vd-I +42 vn-B +43 vn-I +44 w-B +45 w-I +46 xc-B +47 xc-I +48 PER-B +49 PER-I +50 LOC-B +51 LOC-I +52 ORG-B +53 ORG-I +54 TIME-B +55 TIME-I +56 O diff --git a/PaddleNLP/lexical_analysis/conf/word.dic b/PaddleNLP/lexical_analysis/conf/word.dic new file mode 100644 index 00000000..d0ec3249 --- /dev/null +++ b/PaddleNLP/lexical_analysis/conf/word.dic @@ -0,0 +1,20940 @@ +0 a +1 e +2 i +3 n +4 o +5 s +6 r +7 t +8 l +9 0 +10 u +11 c +12 1 +13 d +14 m +15 h +16 g +17 2 +18 p +19 b +20 y +21 5 +22 3 +23 8 +24 6 +25 k +26 A +27 4 +28 9 +29 f +30 7 +31 S +32 v +33 E +34 w +35 z +36 C +37 x +38 T +39 I +40 j +41 M +42 R +43 O +44 D +45 L +46 N +47 B +48 P +49 H +50 G +51 李 +52 F +53 K +54 王 +55 张 +56 q +57 U +58 刘 +59 陈 +60 W +61 Y +62 V +63 斯 +64 文 +65 X +66 J +67 Z +68 华 +69 明 +70 尔 +71 林 +72 德 +73 晓 +74 杨 +75 金 +76 Q +77 克 +78 小 +79 志 +80 国 +81 海 +82 丽 +83 平 +84 玉 +85 黄 +86 吴 +87 建 +88 特 +89 拉 +90 子 +91 赵 +92 利 +93 马 +94 军 +95 周 +96 亚 +97 伟 +98 东 +99 红 +100 龙 +101 春 +102 云 +103 生 +104 朱 +105 孙 +106 徐 +107 永 +108 达 +109 美 +110 安 +111 杰 +112 卡 +113 天 +114 新 +115 罗 +116 里 +117 大 +118 光 +119 波 +120 家 +121 成 +122 福 +123 高 +124 胡 +125 荣 +126 英 +127 阿 +128 思 +129 立 +130 瑞 +131 峰 +132 宝 +133 郭 +134 清 +135 兰 +136 西 +137 山 +138 维 +139 爱 +140 宇 +141 佳 +142 辉 +143 俊 +144 雅 +145 庆 +146 尼 +147 梅 +148 格 +149 之 +150 一 +151 君 +152 忠 +153 强 +154 学 +155 世 +156 雪 +157 良 +158 民 +159 芳 +160 郑 +161 敏 +162 秀 +163 迪 +164 元 +165 洪 +166 祥 +167 泽 +168 中 +169 康 +170 科 +171 嘉 +172 正 +173 飞 +174 巴 +175 兴 +176 松 +177 恩 +178 江 +179 乐 +180 宏 +181 振 +182 斌 +183 路 +184 雨 +185 娜 +186 雷 +187 玲 +188 长 +189 多 +190 凯 +191 米 
+192 加 +193 奇 +194 吉 +195 青 +196 武 +197 水 +198 布 +199 力 +200 燕 +201 纳 +202 白 +203 慧 +204 宋 +205 万 +206 莱 +207 勇 +208 丹 +209 威 +210 宁 +211 南 +212 士 +213 堂 +214 何 +215 普 +216 洛 +217 秋 +218 胜 +219 仁 +220 韩 +221 奥 +222 富 +223 丁 +224 月 +225 石 +226 方 +227 博 +228 森 +229 艳 +230 鹏 +231 刚 +232 凤 +233 诺 +234 阳 +235 涛 +236 叶 +237 香 +238 比 +239 曹 +240 少 +241 昌 +242 泰 +243 伊 +244 亮 +245 沈 +246 霞 +247 梁 +248 菲 +249 谢 +250 唐 +251 智 +252 梦 +253 希 +254 曼 +255 贝 +256 杜 +257 木 +258 花 +259 苏 +260 星 +261 萍 +262 心 +263 景 +264 超 +265 欣 +266 树 +267 广 +268 许 +269 伯 +270 来 +271 夫 +272 塔 +273 卫 +274 义 +275 冯 +276 可 +277 田 +278 道 +279 圣 +280 汉 +281 三 +282 娟 +283 友 +284 夏 +285 基 +286 宗 +287 人 +288 贵 +289 婷 +290 鲁 +291 根 +292 艾 +293 静 +294 诗 +295 惠 +296 法 +297 蔡 +298 玛 +299 喜 +300 浩 +301 欧 +302 保 +303 潘 +304 风 +305 莉 +306 珍 +307 源 +308 桂 +309 远 +310 孟 +311 沙 +312 继 +313 顺 +314 锦 +315 邓 +316 贤 +317 书 +318 全 +319 得 +320 轩 +321 通 +322 吕 +323 才 +324 妮 +325 董 +326 曾 +327 彭 +328 雄 +329 琴 +330 旭 +331 袁 +332 城 +333 琳 +334 芬 +335 豪 +336 村 +337 卢 +338 剑 +339 蒋 +340 伦 +341 培 +342 魏 +343 瓦 +344 哈 +345 莫 +346 丝 +347 兵 +348 古 +349 银 +350 泉 +351 发 +352 传 +353 群 +354 若 +355 虎 +356 连 +357 如 +358 肖 +359 鑫 +360 盛 +361 先 +362 凡 +363 鸿 +364 图 +365 章 +366 姜 +367 琪 +368 启 +369 柏 +370 耀 +371 开 +372 依 +373 坤 +374 有 +375 萨 +376 怡 +377 崔 +378 川 +379 祖 +380 尚 +381 贾 +382 园 +383 素 +384 托 +385 淑 +386 健 +387 彦 +388 余 +389 双 +390 信 +391 麦 +392 范 +393 汪 +394 蒂 +395 程 +396 朝 +397 和 +398 然 +399 本 +400 塞 +401 灵 +402 秦 +403 铭 +404 河 +405 进 +406 姆 +407 百 +408 陆 +409 彬 +410 锋 +411 洁 +412 莲 +413 冰 +414 晨 +415 邦 +416 兆 +417 钟 +418 日 +419 绍 +420 铁 +421 怀 +422 赛 +423 善 +424 舒 +425 恒 +426 其 +427 行 +428 旺 +429 修 +430 易 +431 任 +432 莎 +433 . +434 顾 +435 艺 +436 丰 +437 皮 +438 帕 +439 延 +440 隆 +441 门 +442 太 +443 哲 +444 定 +445 蒙 +446 洋 +447 紫 +448 庄 +449 姚 +450 戴 +451 向 +452 顿 +453 礼 +454 权 +455 桥 +456 颖 +457 镇 +458 茂 +459 益 +460 露 +461 齐 +462 仙 +463 儿 +464 勒 +465 地 +466 真 +467 凌 +468 毛 +469 佩 +470 冬 +471 弗 +472 九 +473 润 +474 涵 +475 千 +476 史 +477 碧 +478 自 +479 承 +480 彩 +481 翔 +482 乔 +483 施 +484 治 +485 索 +486 会 +487 运 +488 卓 +489 毅 +490 年 +491 莹 +492 沃 +493 于 +494 孔 +495 薛 +496 业 +497 柳 +498 内 +499 钱 +500 廷 +501 登 +502 仕 +503 熙 +504 守 +505 敬 +506 孝 +507 雯 +508 增 +509 相 +510 时 +511 楠 +512 二 +513 竹 +514 谷 +515 不 +516 牛 +517 好 +518 京 +519 仲 +520 赫 +521 黑 +522 朗 +523 汤 +524 悦 +525 蓝 +526 公 +527 梓 +528 珠 +529 芝 +530 苑 +531 炳 +532 奎 +533 黎 +534 老 +535 佛 +536 谭 +537 鱼 +538 尹 +539 神 +540 温 +541 帝 +542 锡 +543 陶 +544 墨 +545 媛 +546 上 +547 乌 +548 常 +549 言 +550 熊 +551 化 +552 火 +553 升 +554 庭 +555 臣 +556 同 +557 头 +558 晶 +559 磊 +560 楚 +561 提 +562 优 +563 勤 +564 歌 +565 岩 +566 琦 +567 草 +568 韦 +569 库 +570 溪 +571 逸 +572 五 +573 政 +574 冠 +575 果 +576 跃 +577 辰 +578 柯 +579 戈 +580 廖 +581 薇 +582 琼 +583 申 +584 占 +585 湖 +586 辛 +587 代 +588 四 +589 严 +590 扎 +591 倩 +592 邹 +593 乃 +594 宜 +595 捷 +596 理 +597 洲 +598 鸣 +599 邱 +600 栋 +601 翠 +602 睿 +603 满 +604 容 +605 霖 +606 纪 +607 岳 +608 卿 +609 羽 +610 扬 +611 阁 +612 亦 +613 邵 +614 居 +615 久 +616 桑 +617 寿 +618 记 +619 北 +620 哥 +621 瑶 +622 埃 +623 彤 +624 贺 +625 菊 +626 湘 +627 诚 +628 宾 +629 郝 +630 非 +631 珊 +632 存 +633 无 +634 颜 +635 意 +636 盖 +637 é +638 霍 +639 初 +640 派 +641 野 +642 摩 +643 妍 +644 应 +645 口 +646 馨 +647 名 +648 坚 +649 品 +650 能 +651 寒 +652 纯 +653 蓉 +654 声 +655 葛 +656 航 +657 以 +658 坦 +659 童 +660 尤 +661 色 +662 晴 +663 令 +664 重 +665 聪 +666 芙 +667 亭 +668 柱 +669 合 +670 兹 +671 育 +672 音 +673 厚 +674 迈 +675 付 +676 奈 +677 语 +678 情 +679 宫 +680 列 +681 都 +682 钦 +683 炎 +684 必 +685 客 +686 蕾 +687 龚 +688 笑 +689 左 +690 作 +691 楼 +692 切 +693 娇 +694 宪 +695 韵 +696 农 +697 流 +698 密 +699 
关 +700 岭 +701 干 +702 为 +703 夜 +704 氏 +705 微 +706 男 +707 显 +708 腾 +709 甘 +710 娅 +711 晋 +712 昊 +713 仪 +714 查 +715 焕 +716 姬 +717 印 +718 台 +719 苗 +720 钰 +721 甲 +722 勋 +723 车 +724 班 +725 锐 +726 原 +727 虹 +728 六 +729 段 +730 曲 +731 崇 +732 七 +733 茹 +734 萌 +735 & +736 巧 +737 州 +738 那 +739 标 +740 俞 +741 堡 +742 劳 +743 联 +744 土 +745 血 +746 起 +747 乡 +748 瑜 +749 岛 +750 池 +751 战 +752 师 +753 茶 +754 鹤 +755 彪 +756 鼎 +757 婉 +758 裕 +759 季 +760 耶 +761 闫 +762 冷 +763 昆 +764 知 +765 绿 +766 麟 +767 朵 +768 默 +769 贞 +770 什 +771 赖 +772 倪 +773 尧 +774 灿 +775 因 +776 官 +777 昭 +778 奕 +779 穆 +780 佐 +781 影 +782 荷 +783 功 +784 撒 +785 照 +786 井 +787 宽 +788 桐 +789 萱 +790 坊 +791 聚 +792 萧 +793 球 +794 璐 +795 晖 +796 鬼 +797 面 +798 字 +799 慕 +800 费 +801 越 +802 约 +803 曦 +804 后 +805 欢 +806 枫 +807 玮 +808 殷 +809 包 +810 念 +811 八 +812 汝 +813 翰 +814 黃 +815 奴 +816 手 +817 望 +818 茜 +819 儒 +820 傅 +821 气 +822 玄 +823 黛 +824 汇 +825 肯 +826 龍 +827 耐 +828 佑 +829 湾 +830 单 +831 岚 +832 舍 +833 热 +834 昂 +835 步 +836 钢 +837 环 +838 御 +839 缘 +840 伍 +841 下 +842 机 +843 乾 +844 魔 +845 前 +846 震 +847 巨 +848 线 +849 皓 +850 盈 +851 庞 +852 谦 +853 宣 +854 女 +855 体 +856 靖 +857 均 +858 劲 +859 济 +860 硕 +861 营 +862 帆 +863 妙 +864 瑟 +865 财 +866 出 +867 在 +868 炜 +869 味 +870 斗 +871 留 +872 深 +873 芸 +874 耿 +875 沛 +876 经 +877 管 +878 菜 +879 献 +880 外 +881 殿 +882 房 +883 焦 +884 骨 +885 点 +886 禹 +887 禄 +888 毕 +889 桃 +890 空 +891 侯 +892 鹰 +893 岗 +894 津 +895 雁 +896 帅 +897 妃 +898 复 +899 衣 +900 骏 +901 聂 +902 绪 +903 娃 +904 眼 +905 舟 +906 打 +907 分 +908 油 +909 者 +910 度 +911 角 +912 朴 +913 藤 +914 枝 +915 落 +916 亨 +917 游 +918 潮 +919 皇 +920 華 +921 梵 +922 滨 +923 禾 +924 郎 +925 洞 +926 精 +927 烈 +928 翁 +929 允 +930 塘 +931 璇 +932 事 +933 祝 +934 翼 +935 粉 +936 板 +937 赤 +938 盘 +939 昕 +940 蕊 +941 姿 +942 侠 +943 回 +944 á +945 秉 +946 征 +947 圆 +948 考 +949 茨 +950 娘 +951 邢 +952 电 +953 瑾 +954 酒 +955 寺 +956 尊 +957 冉 +958 边 +959 别 +960 刀 +961 工 +962 筱 +963 馬 +964 坡 +965 弘 +966 樊 +967 裴 +968 柔 +969 甫 +970 妹 +971 浦 +972 锁 +973 渊 +974 映 +975 当 +976 鲍 +977 见 +978 麻 +979 婧 +980 选 +981 牙 +982 烟 +983 翟 +984 钧 +985 屋 +986 冲 +987 放 +988 芹 +989 煜 +990 再 +991 尘 +992 司 +993 创 +994 恋 +995 幼 +996 展 +997 镜 +998 实 +999 浪 +1000 珂 +1001 爽 +1002 驰 +1003 鹿 +1004 吾 +1005 简 +1006 虫 +1007 网 +1008 从 +1009 è +1010 紅 +1011 食 +1012 赞 +1013 à +1014 柴 +1015 沟 +1016 魂 +1017 張 +1018 叔 +1019 端 +1020 入 +1021 闻 +1022 耳 +1023 慈 +1024 汀 +1025 集 +1026 郁 +1027 娥 +1028 死 +1029 伏 +1030 观 +1031 鸟 +1032 港 +1033 仓 +1034 芭 +1035 羊 +1036 纽 +1037 詹 +1038 唯 +1039 主 +1040 亿 +1041 旗 +1042 朋 +1043 蔚 +1044 商 +1045 斐 +1046 拜 +1047 凝 +1048 十 +1049 酷 +1050 片 +1051 性 +1052 烨 +1053 長 +1054 寨 +1055 蓓 +1056 动 +1057 魁 +1058 猫 +1059 迎 +1060 魚 +1061 敦 +1062 浮 +1063 東 +1064 用 +1065 霜 +1066 咏 +1067 采 +1068 狼 +1069 解 +1070 衡 +1071 录 +1072 府 +1073 琛 +1074 舞 +1075 街 +1076 澜 +1077 致 +1078 则 +1079 努 +1080 愛 +1081 举 +1082 淼 +1083 ì +1084 晟 +1085 肉 +1086 身 +1087 巷 +1088 伽 +1089 畅 +1090 典 +1091 首 +1092 ê +1093 斋 +1094 拿 +1095 沐 +1096 骆 +1097 丙 +1098 狗 +1099 瓜 +1100 內 +1101 细 +1102 í +1103 视 +1104 屯 +1105 臻 +1106 酸 +1107 速 +1108 頭 +1109 养 +1110 傲 +1111 牧 +1112 添 +1113 直 +1114 鸡 +1115 泊 +1116 勃 +1117 昱 +1118 巍 +1119 宸 +1120 式 +1121 茵 +1122 豆 +1123 休 +1124 半 +1125 场 +1126 蛇 +1127 灯 +1128 临 +1129 玺 +1130 煌 +1131 顶 +1132 次 +1133 忆 +1134 壮 +1135 社 +1136 席 +1137 物 +1138 陵 +1139 醉 +1140 毒 +1141 媚 +1142 風 +1143 积 +1144 佰 +1145 車 +1146 庚 +1147 过 +1148 猛 +1149 菁 +1150 母 +1151 两 +1152 龄 +1153 破 +1154 买 +1155 效 +1156 祺 +1157 發 +1158 玥 +1159 我 +1160 藏 +1161 县 +1162 号 +1163 坎 +1164 训 +1165 嘎 +1166 众 +1167 懿 +1168 ò +1169 底 +1170 党 +1171 門 +1172 尾 +1173 予 +1174 達 +1175 转 +1176 变 +1177 盟 +1178 是 +1179 阮 +1180 药 
+1181 船 +1182 足 +1183 快 +1184 蘭 +1185 毓 +1186 乙 +1187 讯 +1188 杏 +1189 渡 +1190 陽 +1191 而 +1192 拓 +1193 象 +1194 喻 +1195 汗 +1196 眉 +1197 散 +1198 也 +1199 横 +1200 召 +1201 节 +1202 归 +1203 离 +1204 坪 +1205 位 +1206 制 +1207 暗 +1208 榕 +1209 今 +1210 量 +1211 器 +1212 仔 +1213 脱 +1214 所 +1215 交 +1216 结 +1217 轻 +1218 颂 +1219 现 +1220 又 +1221 界 +1222 病 +1223 封 +1224 祁 +1225 寅 +1226 岸 +1227 樱 +1228 阴 +1229 妖 +1230 澳 +1231 期 +1232 历 +1233 命 +1234 绮 +1235 彼 +1236 夕 +1237 丸 +1238 异 +1239 淳 +1240 苦 +1241 ó +1242 澄 +1243 求 +1244 開 +1245 杀 +1246 途 +1247 ü +1248 珀 +1249 调 +1250 沁 +1251 國 +1252 反 +1253 零 +1254 茗 +1255 0 +1256 族 +1257 蒲 +1258 泓 +1259 棠 +1260 引 +1261 弟 +1262 爾 +1263 牌 +1264 团 +1265 至 +1266 独 +1267 娴 +1268 迷 +1269 倒 +1270 瀚 +1271 铃 +1272 苍 +1273 淇 +1274 轮 +1275 狄 +1276 繁 +1277 樂 +1278 卜 +1279 氣 +1280 校 +1281 婚 +1282 断 +1283 霸 +1284 寶 +1285 固 +1286 豹 +1287 韬 +1288 隐 +1289 教 +1290 姓 +1291 1 +1292 极 +1293 带 +1294 走 +1295 羅 +1296 帮 +1297 亞 +1298 净 +1299 婕 +1300 难 +1301 挺 +1302 糖 +1303 招 +1304 凉 +1305 蜜 +1306 收 +1307 数 +1308 奧 +1309 雲 +1310 述 +1311 逊 +1312 杭 +1313 幽 +1314 脚 +1315 2 +1316 廉 +1317 桦 +1318 灰 +1319 医 +1320 与 +1321 陳 +1322 坝 +1323 芮 +1324 目 +1325 丘 +1326 舜 +1327 覃 +1328 潇 +1329 含 +1330 亲 +1331 铜 +1332 晚 +1333 支 +1334 猪 +1335 画 +1336 玖 +1337 ú +1338 店 +1339 项 +1340 渝 +1341 排 +1342 旋 +1343 笔 +1344 压 +1345 芷 +1346 报 +1347 強 +1348 乳 +1349 融 +1350 笛 +1351 冈 +1352 的 +1353 棋 +1354 领 +1355 瑛 +1356 屈 +1357 狂 +1358 院 +1359 峻 +1360 孤 +1361 谋 +1362 未 +1363 兔 +1364 鲜 +1365 衍 +1366 术 +1367 吟 +1368 间 +1369 计 +1370 觉 +1371 泥 +1372 乱 +1373 蝶 +1374 倍 +1375 卷 +1376 残 +1377 蓬 +1378 对 +1379 植 +1380 耕 +1381 盾 +1382 迦 +1383 缪 +1384 条 +1385 域 +1386 欲 +1387 杯 +1388 虚 +1389 习 +1390 爷 +1391 早 +1392 麗 +1393 郡 +1394 浅 +1395 退 +1396 纸 +1397 策 +1398 a +1399 活 +1400 窦 +1401 攀 +1402 屏 +1403 刺 +1404 泳 +1405 旦 +1406 补 +1407 防 +1408 姝 +1409 恺 +1410 晔 +1411 肤 +1412 軍 +1413 漫 +1414 失 +1415 滕 +1416 背 +1417 词 +1418 晗 +1419 表 +1420 來 +1421 涂 +1422 坑 +1423 誉 +1424 装 +1425 受 +1426 甜 +1427 機 +1428 邪 +1429 嘴 +1430 雍 +1431 棉 +1432 霄 +1433 针 +1434 荆 +1435 料 +1436 鼠 +1437 革 +1438 炫 +1439 将 +1440 绝 +1441 锅 +1442 取 +1443 電 +1444 宿 +1445 货 +1446 粤 +1447 葵 +1448 姐 +1449 介 +1450 爵 +1451 阔 +1452 涅 +1453 闪 +1454 听 +1455 央 +1456 掌 +1457 近 +1458 贡 +1459 沉 +1460 迟 +1461 改 +1462 配 +1463 庙 +1464 染 +1465 铮 +1466 阎 +1467 芯 +1468 汐 +1469 颐 +1470 蛋 +1471 护 +1472 部 +1473 孚 +1474 伤 +1475 狐 +1476 饭 +1477 鼓 +1478 娄 +1479 戚 +1480 略 +1481 啸 +1482 幸 +1483 滋 +1484 指 +1485 悠 +1486 妻 +1487 脉 +1488 丛 +1489 警 +1490 模 +1491 洗 +1492 奶 +1493 枪 +1494 恶 +1495 宴 +1496 靳 +1497 3 +1498 契 +1499 想 +1500 床 +1501 泪 +1502 随 +1503 市 +1504 探 +1505 焰 +1506 豫 +1507 點 +1508 住 +1509 淮 +1510 炮 +1511 圈 +1512 吐 +1513 楷 +1514 话 +1515 線 +1516 接 +1517 假 +1518 仇 +1519 射 +1520 蜀 +1521 婴 +1522 佟 +1523 追 +1524 奔 +1525 胶 +1526 晏 +1527 兽 +1528 跳 +1529 弦 +1530 质 +1531 體 +1532 扣 +1533 更 +1534 卉 +1535 梨 +1536 形 +1537 息 +1538 壁 +1539 共 +1540 菱 +1541 闵 +1542 渠 +1543 感 +1544 寻 +1545 盐 +1546 惊 +1547 珺 +1548 慎 +1549 去 +1550 狮 +1551 韶 +1552 雙 +1553 瞳 +1554 宅 +1555 座 +1556 总 +1557 趣 +1558 萬 +1559 短 +1560 奉 +1561 滩 +1562 飛 +1563 扶 +1564 折 +1565 筠 +1566 寇 +1567 ā +1568 尖 +1569 暖 +1570 弥 +1571 惜 +1572 涌 +1573 符 +1574 8 +1575 匠 +1576 嫣 +1577 璞 +1578 杉 +1579 让 +1580 雾 +1581 動 +1582 蕴 +1583 处 +1584 宠 +1585 楊 +1586 务 +1587 猴 +1588 翻 +1589 到 +1590 竞 +1591 参 +1592 某 +1593 闽 +1594 送 +1595 匡 +1596 钊 +1597 薄 +1598 磨 +1599 芒 +1600 婵 +1601 厄 +1602 渔 +1603 户 +1604 推 +1605 研 +1606 纲 +1607 恭 +1608 聖 +1609 茅 +1610 资 +1611 宛 +1612 魅 +1613 软 +1614 胎 +1615 鸭 +1616 愚 +1617 喆 +1618 鉴 +1619 荒 +1620 协 +1621 罪 +1622 铎 +1623 迅 +1624 笙 
+1625 語 +1626 葆 +1627 匹 +1628 区 +1629 绣 +1630 型 +1631 轶 +1632 额 +1633 消 +1634 靓 +1635 硬 +1636 着 +1637 姣 +1638 偏 +1639 票 +1640 碎 +1641 套 +1642 遥 +1643 冀 +1644 际 +1645 架 +1646 拳 +1647 巫 +1648 6 +1649 妇 +1650 赋 +1651 私 +1652 曙 +1653 站 +1654 载 +1655 抗 +1656 芦 +1657 膜 +1658 尸 +1659 适 +1660 错 +1661 潭 +1662 击 +1663 俭 +1664 巢 +1665 幻 +1666 婆 +1667 麒 +1668 值 +1669 止 +1670 种 +1671 維 +1672 c +1673 岐 +1674 後 +1675 伶 +1676 墙 +1677 刃 +1678 缇 +1679 琰 +1680 殇 +1681 烧 +1682 窝 +1683 砚 +1684 無 +1685 矿 +1686 遗 +1687 争 +1688 怪 +1689 b +1690 末 +1691 逆 +1692 码 +1693 释 +1694 屠 +1695 问 +1696 恬 +1697 腰 +1698 掉 +1699 時 +1700 具 +1701 脸 +1702 璋 +1703 隋 +1704 芽 +1705 控 +1706 壹 +1707 甄 +1708 會 +1709 价 +1710 劫 +1711 菌 +1712 熱 +1713 岁 +1714 痛 +1715 刻 +1716 單 +1717 咸 +1718 書 +1719 兮 +1720 服 +1721 敖 +1722 禁 +1723 差 +1724 沫 +1725 栗 +1726 暮 +1727 倾 +1728 戰 +1729 投 +1730 戏 +1731 币 +1732 要 +1733 造 +1734 冥 +1735 肌 +1736 降 +1737 龟 +1738 低 +1739 o +1740 痕 +1741 學 +1742 弹 +1743 淡 +1744 迹 +1745 箭 +1746 岑 +1747 读 +1748 灭 +1749 萝 +1750 潜 +1751 穗 +1752 俄 +1753 吊 +1754 虞 +1755 斑 +1756 炉 +1757 肥 +1758 说 +1759 稳 +1760 焱 +1761 隽 +1762 急 +1763 橙 +1764 卞 +1765 雀 +1766 停 +1767 槐 +1768 级 +1769 剧 +1770 姑 +1771 岱 +1772 e +1773 弄 +1774 脑 +1775 蔓 +1776 论 +1777 壳 +1778 鼻 +1779 圖 +1780 醒 +1781 犬 +1782 堤 +1783 闲 +1784 坐 +1785 专 +1786 蜂 +1787 饶 +1788 证 +1789 液 +1790 莺 +1791 导 +1792 跑 +1793 砂 +1794 谈 +1795 虾 +1796 湛 +1797 杂 +1798 看 +1799 父 +1800 埠 +1801 盲 +1802 敌 +1803 泛 +1804 摇 +1805 翎 +1806 霆 +1807 核 +1808 屿 +1809 换 +1810 股 +1811 产 +1812 呈 +1813 漏 +1814 興 +1815 铺 +1816 刑 +1817 省 +1818 裝 +1819 刁 +1820 曰 +1821 劉 +1822 察 +1823 除 +1824 齿 +1825 峥 +1826 牟 +1827 飘 +1828 律 +1829 鞋 +1830 禅 +1831 瞿 +1832 右 +1833 璟 +1834 滑 +1835 煤 +1836 滢 +1837 琨 +1838 逢 +1839 税 +1840 宮 +1841 状 +1842 納 +1843 谨 +1844 寄 +1845 弓 +1846 练 +1847 序 +1848 纱 +1849 恨 +1850 凱 +1851 寧 +1852 帶 +1853 境 +1854 局 +1855 操 +1856 妤 +1857 裂 +1858 猎 +1859 眠 +1860 泡 +1861 辞 +1862 i +1863 势 +1864 戎 +1865 室 +1866 順 +1867 透 +1868 享 +1869 演 +1870 裘 +1871 由 +1872 助 +1873 第 +1874 奋 +1875 储 +1876 伐 +1877 沪 +1878 9 +1879 磁 +1880 拍 +1881 盼 +1882 珈 +1883 贻 +1884 偷 +1885 混 +1886 仰 +1887 队 +1888 場 +1889 胤 +1890 呼 +1891 案 +1892 驹 +1893 还 +1894 铂 +1895 栾 +1896 腿 +1897 响 +1898 禧 +1899 溢 +1900 饼 +1901 4 +1902 馆 +1903 材 +1904 粮 +1905 姗 +1906 缺 +1907 桢 +1908 業 +1909 歆 +1910 惟 +1911 纹 +1912 祯 +1913 崖 +1914 预 +1915 肇 +1916 連 +1917 悲 +1918 唱 +1919 鹭 +1920 胸 +1921 杆 +1922 暴 +1923 園 +1924 准 +1925 汶 +1926 吳 +1927 钻 +1928 纤 +1929 氧 +1930 冶 +1931 脂 +1932 怨 +1933 島 +1934 爆 +1935 尽 +1936 夹 +1937 挂 +1938 肠 +1939 绵 +1940 崎 +1941 銀 +1942 措 +1943 算 +1944 陀 +1945 橋 +1946 执 +1947 职 +1948 徽 +1949 邑 +1950 瑪 +1951 荡 +1952 戒 +1953 旧 +1954 丑 +1955 浓 +1956 便 +1957 仑 +1958 歇 +1959 縣 +1960 围 +1961 纬 +1962 褚 +1963 丞 +1964 胆 +1965 辅 +1966 减 +1967 贯 +1968 圭 +1969 乘 +1970 率 +1971 別 +1972 藍 +1973 扇 +1974 萊 +1975 瘦 +1976 漢 +1977 n +1978 滿 +1979 榆 +1980 屹 +1981 廣 +1982 句 +1983 借 +1984 鞠 +1985 垂 +1986 骥 +1987 鐵 +1988 雞 +1989 號 +1990 胃 +1991 玩 +1992 雕 +1993 罕 +1994 墩 +1995 谊 +1996 贼 +1997 對 +1998 件 +1999 编 +2000 d +2001 嫂 +2002 葉 +2003 栓 +2004 湿 +2005 统 +2006 箱 +2007 庸 +2008 终 +2009 轉 +2010 吹 +2011 噶 +2012 炼 +2013 聯 +2014 谱 +2015 悬 +2016 甸 +2017 兩 +2018 委 +2019 徒 +2020 午 +2021 忘 +2022 藻 +2023 遇 +2024 師 +2025 數 +2026 激 +2027 經 +2028 炯 +2029 怒 +2030 珏 +2031 靈 +2032 熹 +2033 靜 +2034 兒 +2035 報 +2036 調 +2037 圩 +2038 袋 +2039 妆 +2040 各 +2041 祭 +2042 层 +2043 聲 +2044 陌 +2045 幕 +2046 帽 +2047 了 +2048 舌 +2049 碗 +2050 記 +2051 窑 +2052 丕 +2053 貝 +2054 盤 +2055 過 +2056 醇 +2057 紧 +2058 类 +2059 娣 +2060 嵘 +2061 弃 +2062 嵩 +2063 卖 +2064 侨 +2065 p +2066 块 +2067 束 +2068 绳 
+2069 橫 +2070 鄂 +2071 窗 +2072 粒 +2073 膏 +2074 灏 +2075 義 +2076 馥 +2077 藥 +2078 卧 +2079 夷 +2080 诸 +2081 侃 +2082 抱 +2083 絲 +2084 故 +2085 厨 +2086 喷 +2087 荔 +2088 俏 +2089 凶 +2090 斜 +2091 忍 +2092 關 +2093 完 +2094 皖 +2095 逃 +2096 榜 +2097 样 +2098 淫 +2099 運 +2100 喀 +2101 互 +2102 浆 +2103 結 +2104 侧 +2105 闯 +2106 抽 +2107 腊 +2108 秘 +2109 请 +2110 写 +2111 续 +2112 组 +2113 此 +2114 烁 +2115 吸 +2116 销 +2117 翊 +2118 漾 +2119 荫 +2120 進 +2121 ù +2122 键 +2123 囚 +2124 等 +2125 疏 +2126 弱 +2127 棒 +2128 渣 +2129 嫁 +2130 夺 +2131 链 +2132 懒 +2133 你 +2134 骁 +2135 励 +2136 胖 +2137 螺 +2138 恰 +2139 珉 +2140 须 +2141 墅 +2142 款 +2143 堆 +2144 轴 +2145 整 +2146 咪 +2147 注 +2148 救 +2149 網 +2150 勾 +2151 播 +2152 称 +2153 裸 +2154 频 +2155 棚 +2156 尿 +2157 珑 +2158 旻 +2159 害 +2160 枣 +2161 阵 +2162 备 +2163 稻 +2164 叫 +2165 就 +2166 攻 +2167 辣 +2168 邻 +2169 俐 +2170 昀 +2171 踏 +2172 肝 +2173 坛 +2174 像 +2175 夢 +2176 愿 +2177 斩 +2178 腹 +2179 苟 +2180 愁 +2181 樹 +2182 錢 +2183 蟹 +2184 傻 +2185 鹅 +2186 态 +2187 苇 +2188 筒 +2189 溫 +2190 諾 +2191 蕙 +2192 穿 +2193 紙 +2194 涧 +2195 奸 +2196 厂 +2197 鸥 +2198 琅 +2199 漆 +2200 昶 +2201 檀 +2202 险 +2203 昇 +2204 補 +2205 译 +2206 枕 +2207 悅 +2208 持 +2209 评 +2210 庵 +2211 黔 +2212 煞 +2213 拾 +2214 熟 +2215 试 +2216 题 +2217 浴 +2218 遠 +2219 摆 +2220 邬 +2221 枯 +2222 鞭 +2223 蔻 +2224 7 +2225 劍 +2226 吃 +2227 勉 +2228 纶 +2229 迁 +2230 伴 +2231 疯 +2232 使 +2233 肃 +2234 审 +2235 梭 +2236 他 +2237 拔 +2238 悟 +2239 穴 +2240 豐 +2241 勝 +2242 實 +2243 綠 +2244 玻 +2245 彻 +2246 告 +2247 蛮 +2248 抢 +2249 瓷 +2250 枢 +2251 系 +2252 峡 +2253 蘇 +2254 淘 +2255 负 +2256 s +2257 员 +2258 乎 +2259 邊 +2260 賽 +2261 歐 +2262 纵 +2263 哀 +2264 被 +2265 籍 +2266 肩 +2267 尺 +2268 圓 +2269 旅 +2270 漪 +2271 泗 +2272 莊 +2273 臧 +2274 標 +2275 朔 +2276 搜 +2277 塑 +2278 視 +2279 狱 +2280 铸 +2281 筑 +2282 附 +2283 剂 +2284 筋 +2285 柜 +2286 购 +2287 滚 +2288 驴 +2289 腳 +2290 墓 +2291 盆 +2292 骑 +2293 溜 +2294 垒 +2295 陰 +2296 始 +2297 废 +2298 赢 +2299 隔 +2300 粗 +2301 议 +2302 峪 +2303 蒸 +2304 傷 +2305 芊 +2306 砖 +2307 變 +2308 检 +2309 巾 +2310 充 +2311 免 +2312 版 +2313 拼 +2314 笼 +2315 袖 +2316 滔 +2317 鴻 +2318 貨 +2319 置 +2320 疮 +2321 灌 +2322 槽 +2323 厉 +2324 錦 +2325 瓶 +2326 企 +2327 栖 +2328 吧 +2329 睡 +2330 渭 +2331 梯 +2332 胥 +2333 织 +2334 價 +2335 荟 +2336 坏 +2337 唇 +2338 澈 +2339 臭 +2340 怜 +2341 赌 +2342 玫 +2343 柒 +2344 囊 +2345 慢 +2346 樓 +2347 穷 +2348 養 +2349 扫 +2350 僧 +2351 鸽 +2352 凰 +2353 燃 +2354 溶 +2355 绒 +2356 勿 +2357 亡 +2358 贴 +2359 燈 +2360 詞 +2361 宰 +2362 湯 +2363 鲸 +2364 帛 +2365 漠 +2366 饰 +2367 吻 +2368 條 +2369 惑 +2370 詩 +2371 做 +2372 u +2373 財 +2374 阅 +2375 移 +2376 忧 +2377 诱 +2378 麥 +2379 奚 +2380 串 +2381 級 +2382 奖 +2383 寂 +2384 剪 +2385 盗 +2386 偶 +2387 妈 +2388 驿 +2389 突 +2390 滴 +2391 煊 +2392 昔 +2393 往 +2394 限 +2395 帐 +2396 蛟 +2397 败 +2398 輝 +2399 椿 +2400 殺 +2401 酱 +2402 約 +2403 撞 +2404 痴 +2405 庐 +2406 寰 +2407 陪 +2408 苹 +2409 辽 +2410 霓 +2411 擎 +2412 澤 +2413 俗 +2414 嗣 +2415 拥 +2416 t +2417 碟 +2418 待 +2419 菡 +2420 缸 +2421 傳 +2422 阶 +2423 络 +2424 欠 +2425 兄 +2426 殊 +2427 枭 +2428 遂 +2429 難 +2430 環 +2431 课 +2432 危 +2433 巡 +2434 話 +2435 耘 +2436 樟 +2437 逐 +2438 候 +2439 遊 +2440 爪 +2441 钉 +2442 畫 +2443 當 +2444 疆 +2445 插 +2446 糕 +2447 薪 +2448 阻 +2449 缩 +2450 頂 +2451 割 +2452 袭 +2453 弯 +2454 挑 +2455 铨 +2456 見 +2457 葬 +2458 咒 +2459 倚 +2460 祎 +2461 贷 +2462 輪 +2463 筆 +2464 测 +2465 產 +2466 蜡 +2467 每 +2468 脫 +2469 腔 +2470 仟 +2471 叙 +2472 h +2473 肾 +2474 領 +2475 误 +2476 熠 +2477 邮 +2478 荃 +2479 ē +2480 稅 +2481 径 +2482 扁 +2483 臨 +2484 g +2485 绯 +2486 蓮 +2487 缝 +2488 伪 +2489 悉 +2490 碳 +2491 丫 +2492 魯 +2493 援 +2494 宙 +2495 蚁 +2496 換 +2497 費 +2498 莘 +2499 刊 +2500 區 +2501 疾 +2502 炬 +2503 己 +2504 巩 +2505 祈 +2506 伞 +2507 妥 +2508 孜 +2509 襄 +2510 拖 +2511 呆 +2512 汁 
+2513 猿 +2514 疑 +2515 赟 +2516 及 +2517 叉 +2518 缠 +2519 裤 +2520 硫 +2521 翘 +2522 丧 +2523 识 +2524 赐 +2525 頓 +2526 椰 +2527 戶 +2528 x +2529 浙 +2530 笃 +2531 壶 +2532 哉 +2533 饮 +2534 俪 +2535 碑 +2536 倫 +2537 潤 +2538 截 +2539 棍 +2540 规 +2541 餐 +2542 岙 +2543 稿 +2544 绘 +2545 骐 +2546 牢 +2547 累 +2548 葱 +2549 裙 +2550 衫 +2551 侍 +2552 哨 +2553 離 +2554 叹 +2555 祸 +2556 避 +2557 萃 +2558 蒿 +2559 哭 +2560 將 +2561 几 +2562 渐 +2563 决 +2564 供 +2565 斷 +2566 困 +2567 租 +2568 闷 +2569 灼 +2570 氯 +2571 扑 +2572 例 +2573 膠 +2574 間 +2575 橘 +2576 虛 +2577 飯 +2578 尉 +2579 蟲 +2580 赣 +2581 涼 +2582 灾 +2583 質 +2584 犯 +2585 % +2586 導 +2587 節 +2588 轨 +2589 拐 +2590 瀛 +2591 骞 +2592 沅 +2593 妾 +2594 骅 +2595 旁 +2596 觅 +2597 且 +2598 示 +2599 似 +2600 赏 +2601 粟 +2602 復 +2603 哑 +2604 觀 +2605 敢 +2606 只 +2607 烏 +2608 親 +2609 姨 +2610 豬 +2611 著 +2612 選 +2613 浚 +2614 兜 +2615 监 +2616 驾 +2617 并 +2618 蚕 +2619 針 +2620 磷 +2621 扩 +2622 烂 +2623 履 +2624 泼 +2625 闹 +2626 泾 +2627 办 +2628 吞 +2629 蛙 +2630 焊 +2631 坟 +2632 盒 +2633 愈 +2634 y +2635 焚 +2636 抓 +2637 偉 +2638 垚 +2639 烤 +2640 羚 +2641 淋 +2642 披 +2643 阙 +2644 m +2645 罡 +2646 慰 +2647 洼 +2648 髮 +2649 柄 +2650 燒 +2651 荻 +2652 弈 +2653 番 +2654 參 +2655 技 +2656 碱 +2657 捕 +2658 夸 +2659 逼 +2660 漂 +2661 鳞 +2662 慶 +2663 鸾 +2664 裳 +2665 樵 +2666 隊 +2667 懋 +2668 稀 +2669 預 +2670 验 +2671 缓 +2672 旱 +2673 函 +2674 稚 +2675 鲨 +2676 幅 +2677 佘 +2678 資 +2679 返 +2680 划 +2681 專 +2682 沖 +2683 忌 +2684 藩 +2685 璃 +2686 奏 +2687 陇 +2688 腸 +2689 鎮 +2690 廊 +2691 批 +2692 绫 +2693 签 +2694 幺 +2695 忻 +2696 璧 +2697 肽 +2698 涉 +2699 桶 +2700 苔 +2701 搭 +2702 替 +2703 種 +2704 把 +2705 鳳 +2706 減 +2707 苓 +2708 锤 +2709 優 +2710 煙 +2711 即 +2712 舰 +2713 颈 +2714 贱 +2715 钩 +2716 冻 +2717 獨 +2718 銅 +2719 卯 +2720 妞 +2721 碰 +2722 袍 +2723 赶 +2724 填 +2725 霁 +2726 债 +2727 闸 +2728 择 +2729 趙 +2730 胺 +2731 阜 +2732 絕 +2733 刮 +2734 罐 +2735 虐 +2736 扭 +2737 铝 +2738 钙 +2739 聘 +2740 汽 +2741 铅 +2742 牵 +2743 烽 +2744 棣 +2745 葯 +2746 恕 +2747 藝 +2748 售 +2749 極 +2750 壓 +2751 喉 +2752 皂 +2753 触 +2754 異 +2755 彈 +2756 菇 +2757 翅 +2758 垫 +2759 腦 +2760 寸 +2761 珩 +2762 锌 +2763 昏 +2764 膳 +2765 逝 +2766 绅 +2767 损 +2768 現 +2769 l +2770 肺 +2771 畏 +2772 伙 +2773 煦 +2774 挽 +2775 韓 +2776 涤 +2777 v +2778 霏 +2779 恐 +2780 炸 +2781 貓 +2782 鳥 +2783 芋 +2784 笠 +2785 冢 +2786 坂 +2787 叠 +2788 皋 +2789 腐 +2790 桓 +2791 噴 +2792 皆 +2793 蝉 +2794 崩 +2795 鋼 +2796 忙 +2797 疗 +2798 篇 +2799 鄉 +2800 跨 +2801 答 +2802 衛 +2803 涩 +2804 庫 +2805 處 +2806 驼 +2807 硝 +2808 堃 +2809 試 +2810 務 +2811 棕 +2812 孕 +2813 杖 +2814 爹 +2815 劇 +2816 椒 +2817 拙 +2818 兼 +2819 诡 +2820 册 +2821 應 +2822 栏 +2823 仿 +2824 抛 +2825 卒 +2826 访 +2827 枚 +2828 鲤 +2829 f +2830 卵 +2831 孽 +2832 蚀 +2833 认 +2834 歪 +2835 厦 +2836 钛 +2837 挖 +2838 哇 +2839 熏 +2840 涯 +2841 悍 +2842 咬 +2843 曉 +2844 竺 +2845 厝 +2846 說 +2847 鲲 +2848 遮 +2849 榮 +2850 弋 +2851 跟 +2852 臂 +2853 貴 +2854 禮 +2855 創 +2856 骄 +2857 讲 +2858 距 +2859 硅 +2860 灣 +2861 恆 +2862 權 +2863 臺 +2864 览 +2865 贫 +2866 圃 +2867 孑 +2868 磐 +2869 澎 +2870 醫 +2871 陸 +2872 刷 +2873 笋 +2874 属 +2875 贪 +2876 町 +2877 堰 +2878 闭 +2879 彰 +2880 账 +2881 已 +2882 評 +2883 侬 +2884 農 +2885 覆 +2886 拨 +2887 炒 +2888 洙 +2889 臉 +2890 媒 +2891 爬 +2892 捞 +2893 嫩 +2894 肚 +2895 鏡 +2896 驱 +2897 伸 +2898 甚 +2899 掛 +2900 垣 +2901 况 +2902 滞 +2903 匯 +2904 催 +2905 傑 +2906 ū +2907 總 +2908 桔 +2909 猜 +2910 炽 +2911 職 +2912 冒 +2913 莽 +2914 聽 +2915 骚 +2916 洒 +2917 曜 +2918 衰 +2919 绕 +2920 暄 +2921 诉 +2922 授 +2923 奢 +2924 題 +2925 晃 +2926 眸 +2927 踢 +2928 妄 +2929 護 +2930 簡 +2931 丈 +2932 灶 +2933 诊 +2934 罩 +2935 醋 +2936 桩 +2937 崗 +2938 绞 +2939 沧 +2940 裁 +2941 拆 +2942 镁 +2943 犁 +2944 判 +2945 尕 +2946 氢 +2947 鸠 +2948 劝 +2949 竖 +2950 飚 +2951 最 +2952 蹄 +2953 羡 +2954 陷 +2955 缨 +2956 旷 
+2957 页 +2958 翌 +2959 烛 +2960 筝 +2961 毁 +2962 戀 +2963 荀 +2964 陂 +2965 貼 +2966 鶴 +2967 讀 +2968 輕 +2969 档 +2970 抚 +2971 副 +2972 订 +2973 槍 +2974 凹 +2975 編 +2976 稼 +2977 拱 +2978 雏 +2979 碼 +2980 桌 +2981 霉 +2982 睦 +2983 骊 +2984 摸 +2985 證 +2986 茄 +2987 絮 +2988 匪 +2989 豚 +2990 酥 +2991 團 +2992 厅 +2993 获 +2994 鸦 +2995 押 +2996 沿 +2997 逗 +2998 愉 +2999 椅 +3000 卦 +3001 鞍 +3002 笨 +3003 寫 +3004 純 +3005 緣 +3006 竟 +3007 組 +3008 抄 +3009 滇 +3010 粪 +3011 鍋 +3012 淦 +3013 佬 +3014 泣 +3015 弼 +3016 俠 +3017 旸 +3018 浑 +3019 绥 +3020 设 +3021 薯 +3022 梧 +3023 亢 +3024 幹 +3025 症 +3026 舫 +3027 煮 +3028 咔 +3029 軟 +3030 賢 +3031 賣 +3032 狀 +3033 癌 +3034 氨 +3035 靠 +3036 細 +3037 揭 +3038 构 +3039 彧 +3040 帘 +3041 卤 +3042 秒 +3043 镭 +3044 潼 +3045 k +3046 韧 +3047 栩 +3048 熔 +3049 坞 +3050 污 +3051 遵 +3052 製 +3053 孫 +3054 羲 +3055 忽 +3056 勐 +3057 營 +3058 纷 +3059 殘 +3060 脊 +3061 寡 +3062 洵 +3063 仆 +3064 劈 +3065 辩 +3066 鐘 +3067 缤 +3068 禽 +3069 甬 +3070 勺 +3071 佃 +3072 茸 +3073 蛾 +3074 谁 +3075 虽 +3076 痰 +3077 凸 +3078 酮 +3079 腕 +3080 宵 +3081 穹 +3082 惡 +3083 計 +3084 r +3085 钓 +3086 抵 +3087 给 +3088 晕 +3089 課 +3090 許 +3091 員 +3092 综 +3093 茉 +3094 亂 +3095 啟 +3096 問 +3097 捐 +3098 烦 +3099 脆 +3100 備 +3101 棱 +3102 埋 +3103 泷 +3104 洽 +3105 珞 +3106 婦 +3107 羞 +3108 确 +3109 隨 +3110 犀 +3111 蚊 +3112 毫 +3113 謝 +3114 糊 +3115 颠 +3116 喵 +3117 胞 +3118 邸 +3119 軒 +3120 測 +3121 份 +3122 斧 +3123 弧 +3124 矛 +3125 冕 +3126 琉 +3127 狸 +3128 扒 +3129 甩 +3130 肆 +3131 柚 +3132 屎 +3133 庶 +3134 蓋 +3135 額 +3136 否 +3137 擊 +3138 鴨 +3139 旨 +3140 峙 +3141 騰 +3142 購 +3143 歸 +3144 遁 +3145 檢 +3146 缔 +3147 矮 +3148 煎 +3149 紋 +3150 浸 +3151 梗 +3152 瑰 +3153 闺 +3154 挡 +3155 砍 +3156 筹 +3157 涟 +3158 宥 +3159 纺 +3160 贸 +3161 聊 +3162 缅 +3163 沣 +3164 芃 +3165 銷 +3166 潞 +3167 溥 +3168 虱 +3169 矢 +3170 梳 +3171 输 +3172 晁 +3173 穎 +3174 獸 +3175 呂 +3176 飒 +3177 頻 +3178 析 +3179 帖 +3180 懷 +3181 旬 +3182 裡 +3183 焉 +3184 漁 +3185 層 +3186 个 +3187 跌 +3188 粘 +3189 役 +3190 揚 +3191 鵬 +3192 鳌 +3193 驻 +3194 罚 +3195 晞 +3196 乖 +3197 搏 +3198 岔 +3199 氮 +3200 琢 +3201 粹 +3202 碘 +3203 抹 +3204 骗 +3205 湄 +3206 玟 +3207 鸢 +3208 沸 +3209 誓 +3210 歡 +3211 削 +3212 臀 +3213 铠 +3214 滾 +3215 憨 +3216 框 +3217 耗 +3218 摘 +3219 责 +3220 障 +3221 赠 +3222 遺 +3223 瑄 +3224 搖 +3225 鷹 +3226 踪 +3227 歷 +3228 嶺 +3229 葳 +3230 瑤 +3231 倉 +3232 潔 +3233 拒 +3234 統 +3235 据 +3236 衬 +3237 麓 +3238 啦 +3239 怕 +3240 魄 +3241 窃 +3242 侵 +3243 為 +3244 薩 +3245 璨 +3246 署 +3247 蒼 +3248 叁 +3249 炭 +3250 類 +3251 炀 +3252 讨 +3253 聆 +3254 蝇 +3255 冤 +3256 轰 +3257 裔 +3258 粥 +3259 涨 +3260 沂 +3261 沼 +3262 決 +3263 悔 +3264 壽 +3265 夙 +3266 荼 +3267 ī +3268 按 +3269 担 +3270 堪 +3271 卑 +3272 尋 +3273 苯 +3274 垢 +3275 忱 +3276 濠 +3277 貌 +3278 骂 +3279 澍 +3280 靡 +3281 谜 +3282 館 +3283 璜 +3284 隱 +3285 拴 +3286 瞬 +3287 扰 +3288 违 +3289 铿 +3290 聿 +3291 瞻 +3292 犹 +3293 箫 +3294 酉 +3295 很 +3296 勞 +3297 岡 +3298 燮 +3299 蔺 +3300 薰 +3301 缚 +3302 锭 +3303 楓 +3304 绩 +3305 督 +3306 芥 +3307 茧 +3308 緊 +3309 坠 +3310 辜 +3311 辈 +3312 惨 +3313 搬 +3314 翀 +3315 幣 +3316 镐 +3317 涓 +3318 敛 +3319 锚 +3320 錯 +3321 凭 +3322 埔 +3323 劣 +3324 吏 +3325 糜 +3326 浊 +3327 術 +3328 積 +3329 却 +3330 刹 +3331 蒜 +3332 溯 +3333 餅 +3334 瞎 +3335 锴 +3336 钜 +3337 籽 +3338 掩 +3339 孩 +3340 簽 +3341 驚 +3342 肿 +3343 邝 +3344 谟 +3345 ě +3346 億 +3347 患 +3348 終 +3349 襟 +3350 跪 +3351 獅 +3352 没 +3353 浣 +3354 渚 +3355 痞 +3356 脾 +3357 滤 +3358 凄 +3359 歧 +3360 鎖 +3361 柠 +3362 態 +3363 擒 +3364 泄 +3365 皙 +3366 晒 +3367 陕 +3368 柿 +3369 锟 +3370 膝 +3371 握 +3372 濕 +3373 循 +3374 淹 +3375 敷 +3376 樣 +3377 規 +3378 挚 +3379 址 +3380 論 +3381 株 +3382 仗 +3383 稱 +3384 還 +3385 氟 +3386 辟 +3387 谛 +3388 谌 +3389 譜 +3390 锥 +3391 亏 +3392 阀 +3393 锯 +3394 蛊 +3395 撤 +3396 扯 +3397 钞 +3398 獎 +3399 錄 +3400 銘 
+3401 茫 +3402 崧 +3403 侣 +3404 乞 +3405 欺 +3406 瘤 +3407 篮 +3408 泠 +3409 阚 +3410 濑 +3411 钳 +3412 荊 +3413 咲 +3414 蝎 +3415 卸 +3416 耍 +3417 摄 +3418 惹 +3419 壬 +3420 辱 +3421 柑 +3422 顽 +3423 铉 +3424 祚 +3425 複 +3426 挥 +3427 蛤 +3428 沾 +3429 脏 +3430 找 +3431 圍 +3432 促 +3433 賓 +3434 朮 +3435 挤 +3436 郊 +3437 既 +3438 舅 +3439 給 +3440 咕 +3441 骋 +3442 夾 +3443 鄭 +3444 鈴 +3445 浒 +3446 酶 +3447 屁 +3448 茲 +3449 迫 +3450 焯 +3451 晰 +3452 戲 +3453 驗 +3454 舸 +3455 驭 +3456 肢 +3457 罢 +3458 嫡 +3459 栈 +3460 箐 +3461 这 +3462 銮 +3463 認 +3464 鬥 +3465 縮 +3466 愤 +3467 郜 +3468 仝 +3469 递 +3470 勢 +3471 ō +3472 贰 +3473 粵 +3474 痘 +3475 姦 +3476 缴 +3477 揽 +3478 恪 +3479 舵 +3480 艷 +3481 葡 +3482 鋒 +3483 叛 +3484 産 +3485 窩 +3486 嵌 +3487 敲 +3488 蓄 +3489 泻 +3490 畜 +3491 抒 +3492 韻 +3493 項 +3494 摊 +3495 疃 +3496 の +3497 烯 +3498 吓 +3499 戊 +3500 腺 +3501 褲 +3502 監 +3503 谣 +3504 廠 +3505 迭 +3506 鄢 +3507 谏 +3508 載 +3509 拂 +3510 茎 +3511 俱 +3512 斤 +3513 紀 +3514 颤 +3515 尝 +3516 沥 +3517 習 +3518 淞 +3519 昧 +3520 逍 +3521 嗨 +3522 榴 +3523 臥 +3524 嬌 +3525 側 +3526 券 +3527 渗 +3528 雜 +3529 閃 +3530 盜 +3531 艇 +3532 喬 +3533 详 +3534 秃 +3535 採 +3536 汛 +3537 呀 +3538 厌 +3539 喊 +3540 訂 +3541 訊 +3542 燊 +3543 栅 +3544 誠 +3545 夭 +3546 皱 +3547 蛛 +3548 矣 +3549 鳴 +3550 攸 +3551 麵 +3552 冼 +3553 儀 +3554 晉 +3555 濤 +3556 莓 +3557 齊 +3558 晦 +3559 竣 +3560 抖 +3561 w +3562 キ +3563 墻 +3564 媽 +3565 敗 +3566 淺 +3567 礁 +3568 荐 +3569 估 +3570 驳 +3571 舱 +3572 绰 +3573 宦 +3574 泵 +3575 寮 +3576 雌 +3577 脐 +3578 舊 +3579 續 +3580 弩 +3581 羌 +3582 拌 +3583 瓣 +3584 戟 +3585 髓 +3586 暑 +3587 婶 +3588 撕 +3589 豁 +3590 竿 +3591 隙 +3592 谓 +3593 铖 +3594 旌 +3595 蝦 +3596 秧 +3597 或 +3598 颢 +3599 兑 +3600 厥 +3601 鳄 +3602 暂 +3603 汾 +3604 钝 +3605 杠 +3606 買 +3607 苒 +3608 牆 +3609 炊 +3610 糠 +3611 矾 +3612 懂 +3613 侗 +3614 剛 +3615 壇 +3616 帳 +3617 櫃 +3618 毀 +3619 湧 +3620 捉 +3621 練 +3622 窖 +3623 緑 +3624 沽 +3625 馋 +3626 斥 +3627 郵 +3628 喇 +3629 垛 +3630 概 +3631 们 +3632 岂 +3633 腎 +3634 銳 +3635 岷 +3636 烙 +3637 掠 +3638 浜 +3639 泸 +3640 醬 +3641 沱 +3642 蔷 +3643 皎 +3644 榛 +3645 檐 +3646 閣 +3647 抬 +3648 顏 +3649 橡 +3650 镛 +3651 塊 +3652 盡 +3653 壯 +3654 靴 +3655 亥 +3656 酚 +3657 窄 +3658 肛 +3659 亘 +3660 糟 +3661 烘 +3662 貂 +3663 講 +3664 狠 +3665 窥 +3666 賭 +3667 賀 +3668 莞 +3669 箕 +3670 爺 +3671 喘 +3672 但 +3673 咖 +3674 織 +3675 い +3676 彿 +3677 唤 +3678 蕉 +3679 僵 +3680 熬 +3681 妓 +3682 踩 +3683 铲 +3684 匙 +3685 撑 +3686 弛 +3687 耻 +3688 丢 +3689 堵 +3690 膽 +3691 厘 +3692 辨 +3693 瓢 +3694 崴 +3695 篱 +3696 碾 +3697 畔 +3698 涝 +3699 膚 +3700 绛 +3701 黏 +3702 屑 +3703 衝 +3704 簧 +3705 杞 +3706 轲 +3707 贲 +3708 溝 +3709 烷 +3710 霧 +3711 塵 +3712 瘾 +3713 颉 +3714 凿 +3715 彝 +3716 诛 +3717 訪 +3718 鮮 +3719 覺 +3720 歲 +3721 窟 +3722 週 +3723 苞 +3724 濟 +3725 叟 +3726 爭 +3727 椎 +3728 療 +3729 眾 +3730 審 +3731 拋 +3732 棘 +3733 诀 +3734 鹃 +3735 倦 +3736 擦 +3737 暢 +3738 酬 +3739 蠢 +3740 聞 +3741 囧 +3742 從 +3743 脈 +3744 缆 +3745 陋 +3746 哪 +3747 酿 +3748 娆 +3749 屍 +3750 檬 +3751 捧 +3752 凛 +3753 靶 +3754 疣 +3755 餘 +3756 鹊 +3757 陣 +3758 昙 +3759 栎 +3760 鳖 +3761 镶 +3762 飄 +3763 烫 +3764 芜 +3765 垦 +3766 癣 +3767 蟾 +3768 萤 +3769 寓 +3770 診 +3771 蚌 +3772 霈 +3773 诈 +3774 負 +3775 吼 +3776 疹 +3777 縫 +3778 則 +3779 鹽 +3780 啊 +3781 捣 +3782 勘 +3783 俯 +3784 陡 +3785 叮 +3786 $ +3787 饱 +3788 寬 +3789 帥 +3790 漿 +3791 掘 +3792 棺 +3793 汞 +3794 钵 +3795 こ +3796 绸 +3797 括 +3798 濂 +3799 壞 +3800 躲 +3801 拦 +3802 錫 +3803 拟 +3804 钠 +3805 嘛 +3806 趋 +3807 遣 +3808 谐 +3809 墟 +3810 喧 +3811 榭 +3812 閉 +3813 筛 +3814 j +3815 渴 +3816 峨 +3817 嬰 +3818 巳 +3819 梢 +3820 漱 +3821 疤 +3822 祉 +3823 矽 +3824 痒 +3825 咽 +3826 邀 +3827 缀 +3828 庇 +3829 虔 +3830 盏 +3831 羿 +3832 抑 +3833 叨 +3834 弑 +3835 唛 +3836 侑 +3837 賊 +3838 稽 +3839 黨 +3840 妝 +3841 谍 +3842 蓁 +3843 ま +3844 蕃 
+3845 藜 +3846 赘 +3847 诞 +3848 眷 +3849 够 +3850 岫 +3851 釣 +3852 喃 +3853 樑 +3854 钮 +3855 鋪 +3856 牡 +3857 溴 +3858 缕 +3859 溺 +3860 溟 +3861 描 +3862 渺 +3863 藕 +3864 胚 +3865 刨 +3866 獵 +3867 琬 +3868 寝 +3869 稷 +3870 缎 +3871 锈 +3872 需 +3873 遍 +3874 醛 +3875 戬 +3876 噬 +3877 闰 +3878 蔣 +3879 協 +3880 響 +3881 顯 +3882 飾 +3883 厢 +3884 钗 +3885 毯 +3886 询 +3887 簪 +3888 堅 +3889 鼬 +3890 貢 +3891 遭 +3892 肘 +3893 燥 +3894 砸 +3895 趾 +3896 豔 +3897 蟒 +3898 淨 +3899 廟 +3900 唑 +3901 z +3902 诠 +3903 垭 +3904 龜 +3905 剥 +3906 辦 +3907 翱 +3908 挨 +3909 峽 +3910 紗 +3911 拘 +3912 绢 +3913 畴 +3914 蔼 +3915 隶 +3916 溃 +3917 濃 +3918 碌 +3919 宓 +3920 趴 +3921 浔 +3922 搞 +3923 挪 +3924 楞 +3925 邈 +3926 虑 +3927 捌 +3928 舉 +3929 嫔 +3930 漓 +3931 捻 +3932 逵 +3933 呢 +3934 砾 +3935 谬 +3936 琥 +3937 撮 +3938 準 +3939 嗜 +3940 它 +3941 議 +3942 於 +3943 執 +3944 顔 +3945 匣 +3946 焘 +3947 狭 +3948 涡 +3949 衔 +3950 靚 +3951 祠 +3952 雉 +3953 疼 +3954 镖 +3955 嚣 +3956 骸 +3957 ん +3958 証 +3959 恢 +3960 凑 +3961 丐 +3962 貞 +3963 蛹 +3964 呵 +3965 昼 +3966 蛉 +3967 翳 +3968 匀 +3969 侦 +3970 設 +3971 轧 +3972 損 +3973 盧 +3974 叩 +3975 這 +3976 跡 +3977 谕 +3978 迴 +3979 鳗 +3980 炕 +3981 珮 +3982 カ +3983 咀 +3984 搅 +3985 矫 +3986 矩 +3987 箍 +3988 渤 +3989 狩 +3990 苛 +3991 劼 +3992 濡 +3993 慌 +3994 勁 +3995 腫 +3996 般 +3997 酌 +3998 徕 +3999 廓 +4000 燎 +4001 颇 +4002 樽 +4003 槎 +4004 鑽 +4005 摔 +4006 诵 +4007 槿 +4008 琐 +4009 塌 +4010 锻 +4011 願 +4012 顧 +4013 萎 +4014 は +4015 膛 +4016 祛 +4017 檔 +4018 蠡 +4019 觸 +4020 虬 +4021 談 +4022 喝 +4023 娱 +4024 噪 +4025 胀 +4026 褐 +4027 疫 +4028 札 +4029 昉 +4030 呱 +4031 禪 +4032 債 +4033 屬 +4034 佶 +4035 垠 +4036 貿 +4037 葭 +4038 齡 +4039 萦 +4040 蕤 +4041 燚 +4042 # +4043 劑 +4044 彥 +4045 棗 +4046 紐 +4047 浇 +4048 汲 +4049 臼 +4050 咎 +4051 絨 +4052 裹 +4053 茬 +4054 厕 +4055 傾 +4056 釋 +4057 秽 +4058 颅 +4059 蹦 +4060 么 +4061 嘟 +4062 锣 +4063 腻 +4064 寐 +4065 妲 +4066 湃 +4067 醜 +4068 另 +4069 泮 +4070 幂 +4071 獄 +4072 滅 +4073 玳 +4074 氰 +4075 鞘 +4076 峭 +4077 鹂 +4078 嗅 +4079 ら +4080 瑙 +4081 咳 +4082 蝗 +4083 瓯 +4084 猷 +4085 樾 +4086 赎 +4087 她 +4088 朕 +4089 淀 +4090 頁 +4091 飙 +4092 羁 +4093 镒 +4094 喂 +4095 袜 +4096 钺 +4097 扉 +4098 曆 +4099 櫻 +4100 曳 +4101 辕 +4102 帧 +4103 誤 +4104 哄 +4105 漳 +4106 亓 +4107 隅 +4108 訴 +4109 螨 +4110 艮 +4111 識 +4112 適 +4113 诏 +4114 饵 +4115 俨 +4116 郦 +4117 坳 +4118 鵝 +4119 礦 +4120 褒 +4121 犇 +4122 隘 +4123 咯 +4124 赴 +4125 競 +4126 個 +4127 劃 +4128 殼 +4129 睛 +4130 究 +4131 兢 +4132 緩 +4133 纠 +4134 惧 +4135 践 +4136 躬 +4137 惯 +4138 稠 +4139 惩 +4140 秤 +4141 嚴 +4142 茁 +4143 濮 +4144 亩 +4145 憬 +4146 撩 +4147 赔 +4148 渎 +4149 镀 +4150 汴 +4151 婢 +4152 菩 +4153 鍾 +4154 锰 +4155 挠 +4156 泱 +4157 毗 +4158 丅 +4159 琮 +4160 痧 +4161 痣 +4162 堕 +4163 鄙 +4164 搓 +4165 な +4166 蕭 +4167 赦 +4168 耆 +4169 稍 +4170 險 +4171 胭 +4172 沢 +4173 婬 +4174 畈 +4175 炖 +4176 毋 +4177 蜗 +4178 煲 +4179 铧 +4180 並 +4181 廚 +4182 佈 +4183 衙 +4184 荧 +4185 钥 +4186 黯 +4187 雳 +4188 吨 +4189 铬 +4190 請 +4191 鎏 +4192 釉 +4193 栽 +4194 騎 +4195 磚 +4196 廢 +4197 郢 +4198 偃 +4199 賞 +4200 奪 +4201 鬓 +4202 鳍 +4203 乏 +4204 蹲 +4205 盯 +4206 ー +4207 く +4208 し +4209 ア +4210 寵 +4211 悶 +4212 構 +4213 煉 +4214 粿 +4215 絶 +4216 诫 +4217 狙 +4218 钾 +4219 敵 +4220 偿 +4221 锄 +4222 姫 +4223 幡 +4224 戳 +4225 澹 +4226 坯 +4227 濯 +4228 骈 +4229 嬉 +4230 砌 +4231 囡 +4232 峦 +4233 漕 +4234 闾 +4235 镍 +4236 罰 +4237 肋 +4238 遐 +4239 荤 +4240 窍 +4241 绾 +4242 怯 +4243 携 +4244 鹄 +4245 戌 +4246 凳 +4247 蕩 +4248 揉 +4249 柘 +4250 冗 +4251 須 +4252 蔽 +4253 焜 +4254 驯 +4255 騙 +4256 騷 +4257 恳 +4258 凈 +4259 籁 +4260 註 +4261 傣 +4262 凍 +4263 霭 +4264 爸 +4265 謀 +4266 酯 +4267 渍 +4268 駿 +4269 绎 +4270 粲 +4271 衷 +4272 葫 +4273 鬆 +4274 況 +4275 掃 +4276 撸 +4277 呗 +4278 碩 +4279 诘 +4280 贊 +4281 坨 +4282 芩 +4283 垌 +4284 茱 +4285 塚 +4286 洱 +4287 齒 +4288 嫚 
+4289 篆 +4290 瑯 +4291 贩 +4292 き +4293 啓 +4294 墊 +4295 潛 +4296 瀾 +4297 饥 +4298 笺 +4299 轿 +4300 糞 +4301 範 +4302 嘲 +4303 啶 +4304 繼 +4305 捆 +4306 拢 +4307 脓 +4308 渥 +4309 谅 +4310 迩 +4311 烹 +4312 瀑 +4313 姥 +4314 缦 +4315 蛆 +4316 毙 +4317 腥 +4318 痨 +4319 喪 +4320 に +4321 壤 +4322 饲 +4323 胄 +4324 淚 +4325 濱 +4326 矶 +4327 汰 +4328 ノ +4329 飲 +4330 媳 +4331 磬 +4332 砺 +4333 啼 +4334 瘟 +4335 扈 +4336 祀 +4337 頸 +4338 蘆 +4339 钨 +4340 馳 +4341 佣 +4342 鬧 +4343 舂 +4344 翩 +4345 蝠 +4346 挣 +4347 誘 +4348 蛰 +4349 佚 +4350 辙 +4351 邁 +4352 塗 +4353 賬 +4354 塬 +4355 埭 +4356 诰 +4357 圻 +4358 拗 +4359 耽 +4360 祿 +4361 璠 +4362 瓊 +4363 珣 +4364 た +4365 儲 +4366 棄 +4367 辑 +4368 灸 +4369 狡 +4370 綿 +4371 歼 +4372 糧 +4373 癸 +4374 撫 +4375 帷 +4376 镰 +4377 俩 +4378 垄 +4379 募 +4380 嗔 +4381 滥 +4382 鏈 +4383 僻 +4384 馍 +4385 娼 +4386 撇 +4387 崽 +4388 蚂 +4389 酪 +4390 怿 +4391 愫 +4392 廈 +4393 琏 +4394 械 +4395 些 +4396 恤 +4397 疝 +4398 榄 +4399 琚 +4400 り +4401 リ +4402 妒 +4403 杲 +4404 楣 +4405 槌 +4406 槟 +4407 孺 +4408 桧 +4409 桀 +4410 牲 +4411 戍 +4412 幫 +4413 旎 +4414 铣 +4415 躺 +4416 剃 +4417 锵 +4418 呜 +4419 嫌 +4420 剔 +4421 駕 +4422 谎 +4423 绚 +4424 眩 +4425 阉 +4426 駐 +4427 討 +4428 驅 +4429 腋 +4430 痹 +4431 冊 +4432 饿 +4433 磅 +4434 乍 +4435 毡 +4436 盔 +4437 簇 +4438 殖 +4439 説 +4440 篁 +4441 襲 +4442 攒 +4443 鮑 +4444 哆 +4445 遲 +4446 遷 +4447 禀 +4448 賴 +4449 邰 +4450 軌 +4451 奂 +4452 倌 +4453 荞 +4454 苡 +4455 苷 +4456 圳 +4457 莜 +4458 荪 +4459 菀 +4460 軸 +4461 羹 +4462 爐 +4463 確 +4464 讓 +4465 癬 +4466 獲 +4467 籃 +4468 垟 +4469 奮 +4470 擺 +4471 暈 +4472 瀬 +4473 蓟 +4474 溅 +4475 疥 +4476 届 +4477 綱 +4478 烬 +4479 嵐 +4480 雇 +4481 蹭 +4482 俺 +4483 敞 +4484 砲 +4485 涣 +4486 阑 +4487 聶 +4488 蹇 +4489 糯 +4490 災 +4491 淬 +4492 骡 +4493 吗 +4494 疲 +4495 錶 +4496 狎 +4497 漩 +4498 泫 +4499 泯 +4500 擂 +4501 鹫 +4502 枳 +4503 剩 +4504 韫 +4505 攘 +4506 怂 +4507 镕 +4508 讼 +4509 牝 +4510 譯 +4511 膘 +4512 惶 +4513 铵 +4514 钿 +4515 頔 +4516 硐 +4517 涎 +4518 驮 +4519 裆 +4520 褶 +4521 捍 +4522 绑 +4523 痈 +4524 訓 +4525 膀 +4526 懸 +4527 鴿 +4528 兀 +4529 貪 +4530 壕 +4531 隼 +4532 澡 +4533 躁 +4534 秩 +4535 蚝 +4536 哼 +4537 淤 +4538 盂 +4539 叽 +4540 違 +4541 遙 +4542 欄 +4543 诃 +4544 郗 +4545 劭 +4546 偌 +4547 倬 +4548 阡 +4549 苕 +4550 谒 +4551 莒 +4552 埕 +4553 輸 +4554 葩 +4555 蕨 +4556 爛 +4557 爲 +4558 燦 +4559 拽 +4560 讚 +4561 悼 +4562 籠 +4563 サ +4564 佔 +4565 搶 +4566 曌 +4567 紡 +4568 拷 +4569 緹 +4570 嚼 +4571 藉 +4572 韭 +4573 饺 +4574 綫 +4575 哺 +4576 脖 +4577 吵 +4578 め +4579 ち +4580 痢 +4581 嗟 +4582 馈 +4583 庾 +4584 獾 +4585 獐 +4586 鈺 +4587 蹬 +4588 磕 +4589 愣 +4590 脹 +4591 僚 +4592 噜 +4593 匿 +4594 婊 +4595 啤 +4596 尻 +4597 驷 +4598 骧 +4599 繪 +4600 嗪 +4601 赓 +4602 滟 +4603 鋁 +4604 扮 +4605 纾 +4606 撬 +4607 馃 +4608 朽 +4609 瘘 +4610 嗓 +4611 瑕 +4612 啡 +4613 と +4614 麝 +4615 删 +4616 汕 +4617 胧 +4618 際 +4619 轼 +4620 掰 +4621 讽 +4622 頌 +4623 瘫 +4624 镝 +4625 颓 +4626 涕 +4627 舷 +4628 慾 +4629 憂 +4630 癖 +4631 酣 +4632 鸳 +4633 歹 +4634 翡 +4635 帜 +4636 箴 +4637 箬 +4638 骤 +4639 痔 +4640 姻 +4641 舆 +4642 赃 +4643 嘿 +4644 觞 +4645 遼 +4646 唔 +4647 唧 +4648 桿 +4649 孃 +4650 倭 +4651 偕 +4652 芪 +4653 躍 +4654 縱 +4655 癡 +4656 萘 +4657 堇 +4658 輔 +4659 攝 +4660 據 +4661 忿 +4662 蓼 +4663 辭 +4664 碍 +4665 慷 +4666 か +4667 あ +4668 弊 +4669 啞 +4670 彎 +4671 灘 +4672 煩 +4673 缉 +4674 徑 +4675 綺 +4676 荚 +4677 竭 +4678 簿 +4679 倡 +4680 趁 +4681 釜 +4682 绷 +4683 む +4684 鄧 +4685 モ +4686 垮 +4687 宕 +4688 澧 +4689 撲 +4690 鋆 +4691 洄 +4692 蘑 +4693 樸 +4694 惘 +4695 该 +4696 戮 +4697 榔 +4698 滦 +4699 ゆ +4700 滄 +4701 娑 +4702 闳 +4703 嫖 +4704 篷 +4705 捏 +4706 湟 +4707 恼 +4708 阖 +4709 螟 +4710 膺 +4711 沦 +4712 泌 +4713 帼 +4714 玑 +4715 啃 +4716 鹦 +4717 鹞 +4718 婿 +4719 搁 +4720 惰 +4721 瑗 +4722 筷 +4723 ナ +4724 る +4725 嘶 +4726 枧 +4727 杵 +4728 肴 +4729 芍 +4730 暧 +4731 朦 +4732 绊 
+4733 枉 +4734 挫 +4735 奠 +4736 桅 +4737 潍 +4738 辖 +4739 暇 +4740 戾 +4741 龛 +4742 锷 +4743 嘻 +4744 q +4745 矜 +4746 焙 +4747 瑚 +4748 夯 +4749 ン +4750 蟠 +4751 覽 +4752 凋 +4753 酰 +4754 斬 +4755 貫 +4756 胰 +4757 陨 +4758 炙 +4759 謎 +4760 誌 +4761 鯨 +4762 鲈 +4763 匾 +4764 鳅 +4765 拯 +4766 僑 +4767 哒 +4768 恥 +4769 璘 +4770 谧 +4771 讷 +4772 佼 +4773 佗 +4774 畸 +4775 篡 +4776 窜 +4777 涇 +4778 芘 +4779 弁 +4780 壑 +4781 谯 +4782 茭 +4783 冽 +4784 賈 +4785 菽 +4786 燙 +4787 础 +4788 揣 +4789 鬃 +4790 赚 +4791 怠 +4792 筏 +4793 犊 +4794 畢 +4795 タ +4796 弢 +4797 彌 +4798 沒 +4799 瀨 +4800 綏 +4801 窘 +4802 悸 +4803 綾 +4804 枷 +4805 捡 +4806 颊 +4807 疽 +4808 沮 +4809 辊 +4810 箔 +4811 コ +4812 幔 +4813 チ +4814 粱 +4815 鄰 +4816 愧 +4817 扳 +4818 も +4819 鈣 +4820 靛 +4821 鍍 +4822 柵 +4823 艦 +4824 讳 +4825 涞 +4826 浏 +4827 恽 +4828 棵 +4829 峤 +4830 啪 +4831 虏 +4832 嗒 +4833 徵 +4834 硼 +4835 湫 +4836 怅 +4837 嫒 +4838 畦 +4839 鍵 +4840 蔑 +4841 翹 +4842 逯 +4843 渲 +4844 繳 +4845 鈞 +4846 眀 +4847 绶 +4848 钎 +4849 缙 +4850 琊 +4851 呛 +4852 禿 +4853 廳 +4854 懶 +4855 楔 +4856 疳 +4857 蠻 +4858 ラ +4859 咨 +4860 璎 +4861 擅 +4862 鑑 +4863 炅 +4864 腌 +4865 祟 +4866 薑 +4867 轸 +4868 暾 +4869 腮 +4870 玦 +4871 獻 +4872 ろ +4873 ロ +4874 傢 +4875 憩 +4876 吠 +4877 睢 +4878 偽 +4879 憋 +4880 蠟 +4881 钼 +4882 捂 +4883 倘 +4884 韋 +4885 掏 +4886 瓮 +4887 镯 +4888 睇 +4889 烃 +4890 慘 +4891 癞 +4892 癫 +4893 殉 +4894 谚 +4895 骇 +4896 颌 +4897 颍 +4898 饕 +4899 耙 +4900 ひ +4901 酩 +4902 榨 +4903 辐 +4904 刈 +4905 責 +4906 逾 +4907 绽 +4908 蒯 +4909 蚤 +4910 鲫 +4911 麸 +4912 迂 +4913 鲷 +4914 臆 +4915 贮 +4916 佞 +4917 瑀 +4918 痳 +4919 係 +4920 吡 +4921 咩 +4922 呷 +4923 啉 +4924 擴 +4925 擔 +4926 衮 +4927 僖 +4928 嬴 +4929 趕 +4930 踫 +4931 鹵 +4932 邺 +4933 癢 +4934 輩 +4935 莳 +4936 萼 +4937 蘅 +4938 鳝 +4939 鳐 +4940 撰 +4941 瑩 +4942 瘋 +4943 慨 +4944 績 +4945 珅 +4946 哗 +4947 え +4948 シ +4949 墜 +4950 幾 +4951 憶 +4952 擾 +4953 煥 +4954 紛 +4955 桨 +4956 絡 +4957 仅 +4958 ス +4959 褂 +4960 阐 +4961 洺 +4962 橱 +4963 洩 +4964 贬 +4965 釘 +4966 呕 +4967 疟 +4968 や +4969 洮 +4970 っ +4971 氓 +4972 殴 +4973 迤 +4974 ユ +4975 て +4976 偲 +4977 掐 +4978 繩 +4979 臟 +4980 膨 +4981 漉 +4982 暹 +4983 鉻 +4984 妩 +4985 鉛 +4986 珥 +4987 邕 +4988 胁 +4989 楸 +4990 瓒 +4991 叭 +4992 戛 +4993 驶 +4994 炔 +4995 階 +4996 鑒 +4997 缮 +4998 腓 +4999 耸 +5000 腚 +5001 閘 +5002 桉 +5003 恃 +5004 楹 +5005 橹 +5006 蓑 +5007 栀 +5008 侶 +5009 籌 +5010 ね +5011 斓 +5012 畲 +5013 顫 +5014 铳 +5015 砥 +5016 蜕 +5017 锶 +5018 祜 +5019 铛 +5020 唾 +5021 嵇 +5022 袂 +5023 佯 +5024 殃 +5025 婳 +5026 扼 +5027 昨 +5028 赭 +5029 詠 +5030 侄 +5031 踝 +5032 傍 +5033 禺 +5034 貧 +5035 缶 +5036 霾 +5037 邯 +5038 蜚 +5039 翥 +5040 掷 +5041 罔 +5042 蝽 +5043 襪 +5044 怎 +5045 諸 +5046 斛 +5047 誼 +5048 鲛 +5049 媞 +5050 漲 +5051 吖 +5052 叱 +5053 譚 +5054 譽 +5055 漸 +5056 鸮 +5057 郅 +5058 芗 +5059 贏 +5060 貸 +5061 亵 +5062 俎 +5063 剎 +5064 俘 +5065 篙 +5066 気 +5067 荭 +5068 莪 +5069 萸 +5070 蒽 +5071 マ +5072 夼 +5073 藓 +5074 牽 +5075 鱗 +5076 繆 +5077 钒 +5078 珐 +5079 穩 +5080 脯 +5081 珪 +5082 さ +5083 じ +5084 け +5085 エ +5086 ク +5087 彊 +5088 挌 +5089 暉 +5090 棟 +5091 踞 +5092 艰 +5093 缄 +5094 酵 +5095 较 +5096 糾 +5097 糙 +5098 お +5099 メ +5100 釀 +5101 喔 +5102 啾 +5103 篓 +5104 掳 +5105 拧 +5106 哦 +5107 氫 +5108 つ +5109 摹 +5110 悖 +5111 嗝 +5112 沔 +5113 與 +5114 眯 +5115 衢 +5116 娉 +5117 剖 +5118 嫦 +5119 嬷 +5120 湮 +5121 繫 +5122 舖 +5123 鈔 +5124 醚 +5125 庖 +5126 馒 +5127 潋 +5128 逻 +5129 聋 +5130 纖 +5131 潺 +5132 遛 +5133 滲 +5134 绉 +5135 绀 +5136 磺 +5137 菓 +5138 顷 +5139 玠 +5140 淒 +5141 挟 +5142 痫 +5143 鹬 +5144 鹳 +5145 閱 +5146 偵 +5147 胯 +5148 璀 +5149 娶 +5150 甑 +5151 辘 +5152 魇 +5153 ル +5154 嶋 +5155 榻 +5156 杈 +5157 昵 +5158 黍 +5159 塍 +5160 丟 +5161 恣 +5162 れ +5163 袒 +5164 挞 +5165 锂 +5166 旖 +5167 铄 +5168 掀 +5169 砦 +5170 舔 +5171 燧 +5172 稔 +5173 漬 +5174 蜒 +5175 裾 +5176 瀘 
+5177 暫 +5178 嚎 +5179 蚧 +5180 匆 +5181 掖 +5182 铱 +5183 詢 +5184 擋 +5185 燉 +5186 壺 +5187 販 +5188 爻 +5189 蜥 +5190 翦 +5191 仄 +5192 螂 +5193 砧 +5194 厮 +5195 粑 +5196 匝 +5197 吁 +5198 豎 +5199 蝴 +5200 蛀 +5201 剌 +5202 歳 +5203 遜 +5204 咚 +5205 渦 +5206 讴 +5207 谤 +5208 抠 +5209 僮 +5210 俑 +5211 廂 +5212 撥 +5213 芨 +5214 诩 +5215 芫 +5216 巽 +5217 苣 +5218 茴 +5219 荏 +5220 苴 +5221 賤 +5222 鹹 +5223 祕 +5224 逮 +5225 薏 +5226 矗 +5227 ǐ +5228 禍 +5229 瘡 +5230 緻 +5231 涪 +5232 唬 +5233 イ +5234 钡 +5235 雹 +5236 們 +5237 兇 +5238 兌 +5239 勛 +5240 剝 +5241 揮 +5242 擼 +5243 敘 +5244 殤 +5245 灑 +5246 烜 +5247 揪 +5248 綜 +5249 拣 +5250 絞 +5251 柬 +5252 秸 +5253 緒 +5254 埂 +5255 逛 +5256 逞 +5257 滁 +5258 麽 +5259 揍 +5260 岘 +5261 袄 +5262 坷 +5263 繞 +5264 瞒 +5265 聰 +5266 髋 +5267 屌 +5268 颁 +5269 啄 +5270 傘 +5271 疵 +5272 嬅 +5273 崂 +5274 徙 +5275 呐 +5276 噻 +5277 彗 +5278 闱 +5279 寥 +5280 嚓 +5281 潢 +5282 瞄 +5283 婺 +5284 骜 +5285 骠 +5286 纨 +5287 鈎 +5288 嵬 +5289 阆 +5290 庠 +5291 悯 +5292 剁 +5293 瞧 +5294 缜 +5295 酋 +5296 癲 +5297 叼 +5298 バ +5299 疸 +5300 楝 +5301 闊 +5302 搔 +5303 瑷 +5304 ト +5305 戗 +5306 陝 +5307 娛 +5308 柺 +5309 蔥 +5310 爰 +5311 獒 +5312 蠕 +5313 杳 +5314 脲 +5315 閑 +5316 孰 +5317 薊 +5318 橄 +5319 褥 +5320 胪 +5321 腱 +5322 仍 +5323 膈 +5324 赊 +5325 竑 +5326 刪 +5327 孖 +5328 擁 +5329 坍 +5330 壩 +5331 捨 +5332 锉 +5333 跋 +5334 ハ +5335 熄 +5336 沓 +5337 湍 +5338 惕 +5339 焖 +5340 钏 +5341 钴 +5342 馅 +5343 発 +5344 凪 +5345 曬 +5346 癜 +5347 耦 +5348 窈 +5349 奄 +5350 簾 +5351 蠓 +5352 螭 +5353 臾 +5354 吱 +5355 鯊 +5356 氛 +5357 咋 +5358 徹 +5359 噩 +5360 乜 +5361 孬 +5362 揖 +5363 鼐 +5364 醪 +5365 撼 +5366 蚰 +5367 蛎 +5368 鲟 +5369 帚 +5370 蔗 +5371 厍 +5372 鬱 +5373 诣 +5374 羯 +5375 蜓 +5376 盅 +5377 誕 +5378 蜻 +5379 剡 +5380 簌 +5381 筵 +5382 酊 +5383 怔 +5384 贿 +5385 み +5386 忒 +5387 叻 +5388 吒 +5389 撷 +5390 遞 +5391 廁 +5392 俚 +5393 贇 +5394 勖 +5395 夔 +5396 苋 +5397 诤 +5398 塾 +5399 賠 +5400 谲 +5401 淵 +5402 鼾 +5403 莼 +5404 輯 +5405 菰 +5406 滯 +5407 薮 +5408 揆 +5409 辯 +5410 髯 +5411 瑠 +5412 皑 +5413 盎 +5414 哎 +5415 祷 +5416 ウ +5417 償 +5418 厭 +5419 嘆 +5420 嚇 +5421 嬿 +5422 嶽 +5423 憑 +5424 憲 +5425 攤 +5426 桜 +5427 檯 +5428 渾 +5429 湉 +5430 澀 +5431 綉 +5432 綸 +5433 緯 +5434 疚 +5435 倔 +5436 笹 +5437 硃 +5438 瀉 +5439 妨 +5440 ム +5441 栢 +5442 猥 +5443 膩 +5444 悌 +5445 鉆 +5446 悚 +5447 屆 +5448 铆 +5449 崮 +5450 嗦 +5451 箩 +5452 屡 +5453 饷 +5454 涿 +5455 娲 +5456 娓 +5457 娈 +5458 姊 +5459 撈 +5460 拈 +5461 鎂 +5462 讫 +5463 録 +5464 嵊 +5465 猶 +5466 吝 +5467 霹 +5468 溱 +5469 羨 +5470 琵 +5471 恂 +5472 琤 +5473 疊 +5474 凜 +5475 堑 +5476 珲 +5477 甦 +5478 梆 +5479 筐 +5480 穰 +5481 瓠 +5482 饒 +5483 鸪 +5484 疱 +5485 鹉 +5486 猩 +5487 痂 +5488 嘘 +5489 瘀 +5490 閨 +5491 閩 +5492 惦 +5493 侩 +5494 敕 +5495 桠 +5496 赉 +5497 伺 +5498 殓 +5499 犟 +5500 唆 +5501 雛 +5502 淄 +5503 勍 +5504 レ +5505 飕 +5506 獭 +5507 蘿 +5508 讹 +5509 ワ +5510 飨 +5511 頑 +5512 趟 +5513 侮 +5514 蝕 +5515 惋 +5516 碛 +5517 熵 +5518 钤 +5519 硒 +5520 飏 +5521 蟬 +5522 睑 +5523 稞 +5524 盞 +5525 擬 +5526 勸 +5527 擇 +5528 駝 +5529 窠 +5530 耒 +5531 裱 +5532 ず +5533 憾 +5534 曈 +5535 蜃 +5536 ヒ +5537 簸 +5538 憎 +5539 鰲 +5540 敝 +5541 謂 +5542 柞 +5543 醴 +5544 蠹 +5545 蚶 +5546 翕 +5547 雎 +5548 雒 +5549 跖 +5550 啬 +5551 誦 +5552 铀 +5553 蜷 +5554 蹊 +5555 蹼 +5556 誇 +5557 蜢 +5558 跷 +5559 謙 +5560 咱 +5561 伫 +5562 ミ +5563 呓 +5564 诒 +5565 倏 +5566 鄱 +5567 倜 +5568 芾 +5569 茆 +5570 阪 +5571 谄 +5572 谙 +5573 芡 +5574 隗 +5575 芎 +5576 茯 +5577 荇 +5578 濾 +5579 龐 +5580 菘 +5581 菟 +5582 齋 +5583 蕲 +5584 掬 +5585 扪 +5586 轟 +5587 燭 +5588 捶 +5589 幢 +5590 ǎ +5591 鳕 +5592 皺 +5593 縛 +5594 扛 +5595 穂 +5596 ゴ +5597 セ +5598 ギ +5599 噹 +5600 墳 +5601 奬 +5602 姍 +5603 嫄 +5604 慮 +5605 様 +5606 灝 +5607 槛 +5608 伎 +5609 綁 +5610 澗 +5611 痉 +5612 剿 +5613 撅 +5614 緋 +5615 睫 +5616 筍 +5617 舶 +5618 菠 +5619 矇 +5620 怖 
+5621 猖 +5622 ǔ +5623 郴 +5624 椽 +5625 オ +5626 暘 +5627 獣 +5628 羔 +5629 庒 +5630 掂 +5631 鉀 +5632 灞 +5633 鍛 +5634 颗 +5635 麂 +5636 浯 +5637 鋅 +5638 鋸 +5639 寞 +5640 併 +5641 銜 +5642 峒 +5643 喙 +5644 嗯 +5645 忏 +5646 滏 +5647 繡 +5648 沌 +5649 臘 +5650 沭 +5651 阈 +5652 姒 +5653 苺 +5654 滂 +5655 淙 +5656 汩 +5657 媾 +5658 艶 +5659 嫱 +5660 莆 +5661 曝 +5662 錐 +5663 撂 +5664 逄 +5665 逑 +5666 馏 +5667 囿 +5668 嘀 +5669 弭 +5670 啮 +5671 皿 +5672 泺 +5673 纏 +5674 噗 +5675 歉 +5676 玎 +5677 悄 +5678 珙 +5679 缬 +5680 缭 +5681 擠 +5682 愷 +5683 恍 +5684 鸩 +5685 餌 +5686 鹑 +5687 蠶 +5688 疖 +5689 瘕 +5690 榈 +5691 椤 +5692 闇 +5693 辫 +5694 瑭 +5695 氪 +5696 榫 +5697 昴 +5698 昝 +5699 拭 +5700 殒 +5701 腈 +5702 枞 +5703 枋 +5704 隧 +5705 腩 +5706 妊 +5707 蓆 +5708 楮 +5709 枸 +5710 辇 +5711 臊 +5712 窮 +5713 琯 +5714 禛 +5715 恙 +5716 ネ +5717 捅 +5718 飓 +5719 眺 +5720 虧 +5721 勵 +5722 顛 +5723 螞 +5724 飽 +5725 幌 +5726 蟻 +5727 搪 +5728 砣 +5729 镫 +5730 晤 +5731 蘊 +5732 萄 +5733 蘋 +5734 碣 +5735 頤 +5736 诬 +5737 镗 +5738 梟 +5739 瘿 +5740 蚜 +5741 衲 +5742 聃 +5743 馮 +5744 駒 +5745 颀 +5746 蟆 +5747 螽 +5748 螈 +5749 哟 +5750 堯 +5751 滘 +5752 颞 +5753 颚 +5754 颛 +5755 衄 +5756 徳 +5757 炘 +5758 該 +5759 詳 +5760 囍 +5761 孵 +5762 鯉 +5763 諜 +5764 亟 +5765 蛄 +5766 蚺 +5767 袅 +5768 衾 +5769 踵 +5770 斟 +5771 孛 +5772 箧 +5773 羟 +5774 笏 +5775 蛏 +5776 跛 +5777 鴉 +5778 蛭 +5779 鱿 +5780 蹴 +5781 仵 +5782 暨 +5783 蜈 +5784 酐 +5785 鲑 +5786 髒 +5787 篩 +5788 觚 +5789 鯛 +5790 瀝 +5791 摺 +5792 哝 +5793 呦 +5794 喏 +5795 哌 +5796 咻 +5797 瀟 +5798 髻 +5799 俣 +5800 賺 +5801 贈 +5802 滬 +5803 郄 +5804 蹤 +5805 墉 +5806 俟 +5807 傩 +5808 偎 +5809 凼 +5810 荜 +5811 陟 +5812 贛 +5813 隍 +5814 邛 +5815 垡 +5816 荠 +5817 摧 +5818 萁 +5819 莨 +5820 蒌 +5821 嶼 +5822 稗 +5823 掇 +5824 蕈 +5825 鳢 +5826 鞣 +5827 鞅 +5828 瑋 +5829 竊 +5830 籤 +5831 蛔 +5832 猾 +5833 粄 +5834 が +5835 ジ +5836 * +5837 伕 +5838 厠 +5839 嘯 +5840 姮 +5841 廬 +5842 搾 +5843 潑 +5844 讥 +5845 絳 +5846 喚 +5847 铰 +5848 硷 +5849 絢 +5850 す +5851 搀 +5852 掺 +5853 硯 +5854 毆 +5855 濁 +5856 峄 +5857 幛 +5858 哩 +5859 喋 +5860 啵 +5861 婪 +5862 烩 +5863 猝 +5864 迸 +5865 ヤ +5866 洹 +5867 鋭 +5868 撃 +5869 拇 +5870 膿 +5871 臍 +5872 鉤 +5873 悻 +5874 嗑 +5875 嗖 +5876 喑 +5877 饬 +5878 琶 +5879 懵 +5880 噫 +5881 忡 +5882 怵 +5883 孀 +5884 姘 +5885 潦 +5886 怆 +5887 砰 +5888 蔫 +5889 藐 +5890 乒 +5891 嫫 +5892 骓 +5893 孢 +5894 纡 +5895 孪 +5896 沆 +5897 泔 +5898 錘 +5899 怏 +5900 庹 +5901 抡 +5902 銹 +5903 巅 +5904 恸 +5905 遒 +5906 遨 +5907 狞 +5908 淏 +5909 癱 +5910 绡 +5911 纭 +5912 扦 +5913 玢 +5914 缢 +5915 缥 +5916 珧 +5917 躯 +5918 畿 +5919 鸫 +5920 鸱 +5921 鸨 +5922 樞 +5923 懈 +5924 衅 +5925 鹗 +5926 惺 +5927 餓 +5928 蠱 +5929 痱 +5930 匈 +5931 榉 +5932 楦 +5933 ど +5934 よ +5935 龋 +5936 戢 +5937 笆 +5938 雫 +5939 隴 +5940 擘 +5941 杓 +5942 牍 +5943 嚷 +5944 樯 +5945 砷 +5946 轭 +5947 栉 +5948 觑 +5949 闖 +5950 柩 +5951 腭 +5952 捎 +5953 樨 +5954 枰 +5955 鑄 +5956 閥 +5957 滷 +5958 焗 +5959 嘔 +5960 蛻 +5961 胳 +5962 勳 +5963 歙 +5964 蘚 +5965 瞰 +5966 螢 +5967 わ +5968 蠔 +5969 斫 +5970 砭 +5971 旃 +5972 钇 +5973 褪 +5974 烊 +5975 淌 +5976 铍 +5977 铐 +5978 鸵 +5979 熨 +5980 铤 +5981 铢 +5982 镔 +5983 顆 +5984 癒 +5985 僱 +5986 媗 +5987 琇 +5988 嘗 +5989 竦 +5990 癀 +5991 秆 +5992 衿 +5993 竜 +5994 螃 +5995 蟮 +5996 罂 +5997 螳 +5998 傭 +5999 夠 +6000 蝼 +6001 驕 +6002 噎 +6003 ぴ +6004 侖 +6005 訾 +6006 嬛 +6007 謠 +6008 蜘 +6009 酢 +6010 趸 +6011 醍 +6012 フ +6013 汎 +6014 匕 +6015 氐 +6016 蚓 +6017 蚬 +6018 鲢 +6019 諧 +6020 蚴 +6021 訣 +6022 綦 +6023 謊 +6024 鳩 +6025 驢 +6026 蛳 +6027 窒 +6028 瘴 +6029 笳 +6030 鲵 +6031 嘱 +6032 貘 +6033 睾 +6034 佤 +6035 詐 +6036 篾 +6037 蛸 +6038 貔 +6039 簋 +6040 窺 +6041 卻 +6042 唏 +6043 咧 +6044 慣 +6045 歎 +6046 烔 +6047 鷺 +6048 べ +6049 贅 +6050 刍 +6051 蹟 +6052 黒 +6053 艽 +6054 堀 +6055 鷄 +6056 垅 +6057 勰 +6058 坭 +6059 谔 +6060 凫 +6061 賜 +6062 谠 +6063 俸 +6064 垓 
+6065 黴 +6066 邴 +6067 圪 +6068 賦 +6069 荥 +6070 剋 +6071 僕 +6072 陛 +6073 較 +6074 莅 +6075 荨 +6076 茛 +6077 菖 +6078 轄 +6079 薹 +6080 捺 +6081 骰 +6082 掸 +6083 禎 +6084 々 +6085 腑 +6086 竅 +6087 玙 +6088 玕 +6089 ご +6090 う +6091 せ +6092 ぎ +6093 グ +6094 倖 +6095 厲 +6096 唸 +6097 姪 +6098 姉 +6099 寢 +6100 崟 +6101 悽 +6102 柊 +6103 棧 +6104 殯 +6105 湊 +6106 湜 +6107 潰 +6108 骷 +6109 紳 +6110 ソ +6111 粳 +6112 紹 +6113 綢 +6114 綴 +6115 叢 +6116 洸 +6117 膊 +6118 惭 +6119 豺 +6120 姵 +6121 躇 +6122 癮 +6123 溼 +6124 岬 +6125 釆 +6126 鬣 +6127 啜 +6128 喱 +6129 喽 +6130 だ +6131 ダ +6132 麾 +6133 猗 +6134 搂 +6135 艙 +6136 悒 +6137 愕 +6138 懊 +6139 睹 +6140 脣 +6141 慵 +6142 悴 +6143 懦 +6144 涑 +6145 晾 +6146 噌 +6147 噤 +6148 忖 +6149 饴 +6150 馀 +6151 饽 +6152 遽 +6153 邃 +6154 迥 +6155 淅 +6156 闩 +6157 肅 +6158 嘈 +6159 鎬 +6160 苾 +6161 嗳 +6162 迳 +6163 汨 +6164 闼 +6165 媪 +6166 粕 +6167 骝 +6168 嬲 +6169 孳 +6170 辔 +6171 挛 +6172 狹 +6173 逖 +6174 阊 +6175 嶂 +6176 帏 +6177 釵 +6178 罵 +6179 鄒 +6180 嘹 +6181 恻 +6182 阗 +6183 醃 +6184 沚 +6185 诅 +6186 佺 +6187 曠 +6188 绗 +6189 绂 +6190 谴 +6191 菈 +6192 缌 +6193 缗 +6194 缑 +6195 缟 +6196 鏢 +6197 荳 +6198 嬸 +6199 衹 +6200 衆 +6201 衎 +6202 鸷 +6203 痿 +6204 戦 +6205 椋 +6206 瞪 +6207 ド +6208 蒨 +6209 煽 +6210 苫 +6211 啥 +6212 鑲 +6213 吶 +6214 瓤 +6215 榷 +6216 戡 +6217 闌 +6218 隸 +6219 氙 +6220 ニ +6221 吮 +6222 藴 +6223 榧 +6224 虢 +6225 椴 +6226 瘸 +6227 慑 +6228 栲 +6229 肓 +6230 隻 +6231 蔆 +6232 殡 +6233 槲 +6234 晌 +6235 轱 +6236 桁 +6237 杼 +6238 蔵 +6239 觐 +6240 扔 +6241 桷 +6242 牯 +6243 牒 +6244 胗 +6245 艘 +6246 蔬 +6247 鳃 +6248 棂 +6249 闢 +6250 棹 +6251 贽 +6252 膻 +6253 瀏 +6254 僅 +6255 昳 +6256 漣 +6257 婭 +6258 愆 +6259 恚 +6260 虓 +6261 黝 +6262 铟 +6263 蝸 +6264 黠 +6265 秣 +6266 飼 +6267 餃 +6268 罹 +6269 磴 +6270 砻 +6271 锑 +6272 頰 +6273 锢 +6274 礴 +6275 頒 +6276 煨 +6277 绦 +6278 焓 +6279 頜 +6280 砗 +6281 碓 +6282 眦 +6283 碇 +6284 迢 +6285 镉 +6286 秭 +6287 镞 +6288 誊 +6289 钯 +6290 睨 +6291 欽 +6292 鱧 +6293 礙 +6294 玪 +6295 瘪 +6296 餮 +6297 衽 +6298 唁 +6299 衩 +6300 袢 +6301 耜 +6302 鸯 +6303 疡 +6304 馭 +6305 峯 +6306 ズ +6307 氦 +6308 び +6309 踊 +6310 虻 +6311 颦 +6312 颏 +6313 颔 +6314 滓 +6315 遏 +6316 濒 +6317 ピ +6318 鲎 +6319 龈 +6320 霎 +6321 醌 +6322 誡 +6323 伧 +6324 馗 +6325 廿 +6326 蚣 +6327 蹙 +6328 虺 +6329 笈 +6330 蜞 +6331 裟 +6332 剽 +6333 蚱 +6334 築 +6335 褻 +6336 蛐 +6337 鲳 +6338 鲂 +6339 菏 +6340 糸 +6341 羧 +6342 仉 +6343 笪 +6344 繇 +6345 靥 +6346 赳 +6347 鲅 +6348 粼 +6349 糁 +6350 粽 +6351 鲶 +6352 稣 +6353 伢 +6354 踹 +6355 鰐 +6356 蝙 +6357 螯 +6358 糍 +6359 佧 +6360 鰻 +6361 淩 +6362 濺 +6363 弒 +6364 楽 +6365 嬤 +6366 呔 +6367 卟 +6368 擢 +6369 哏 +6370 哧 +6371 呤 +6372 咄 +6373 咛 +6374 璽 +6375 盪 +6376 囤 +6377 讪 +6378 诳 +6379 诜 +6380 岿 +6381 鵡 +6382 俦 +6383 鹼 +6384 麩 +6385 踐 +6386 郇 +6387 埙 +6388 郯 +6389 ボ +6390 茏 +6391 艿 +6392 俳 +6393 阱 +6394 侏 +6395 俾 +6396 茚 +6397 茕 +6398 偈 +6399 苌 +6400 荑 +6401 貶 +6402 脔 +6403 壅 +6404 鷓 +6405 谶 +6406 鷗 +6407 邳 +6408 羸 +6409 垸 +6410 苜 +6411 鸚 +6412 佥 +6413 荦 +6414 苻 +6415 搗 +6416 鱔 +6417 穀 +6418 龑 +6419 荽 +6420 輿 +6421 葶 +6422 蓍 +6423 蓦 +6424 菅 +6425 蓥 +6426 憐 +6427 婠 +6428 蘖 +6429 轎 +6430 抻 +6431 掾 +6432 捋 +6433 辻 +6434 鱷 +6435 鱘 +6436 谆 +6437 瑢 +6438 蹈 +6439 祤 +6440 瘉 +6441 縷 +6442 繃 +6443 窅 +6444 竇 +6445 玘 +6446 玗 +6447 粧 +6448 秾 +6449 ┳ +6450 畝 +6451 ざ +6452 ザ +6453 ケ +6454 摈 +6455 伝 +6456 嚒 +6457 墮 +6458 妺 +6459 幀 +6460 廯 +6461 彙 +6462 摯 +6463 枊 +6464 櫥 +6465 檸 +6466 汙 +6467 潯 +6468 煒 +6469 煖 +6470 そ +6471 骶 +6472 緝 +6473 締 +6474 緬 +6475 紺 +6476 厩 +6477 経 +6478 鞑 +6479 げ +6480 澆 +6481 谗 +6482 掣 +6483 踌 +6484 侈 +6485 烝 +6486 尓 +6487 曖 +6488 翛 +6489 圜 +6490 嘧 +6491 喟 +6492 喹 +6493 嗷 +6494 唰 +6495 垃 +6496 纜 +6497 诲 +6498 樺 +6499 甯 +6500 泞 +6501 т +6502 婁 +6503 浍 +6504 浠 +6505 浈 +6506 淖 +6507 с +6508 愦 
+6509 惆 +6510 憧 +6511 惴 +6512 悭 +6513 銑 +6514 髅 +6515 聾 +6516 沤 +6517 憔 +6518 涔 +6519 洳 +6520 溧 +6521 汜 +6522 汊 +6523 崆 +6524 溏 +6525 譬 +6526 彘 +6527 逅 +6528 氖 +6529 渌 +6530 驸 +6531 驽 +6532 沏 +6533 骀 +6534 骟 +6535 嬗 +6536 苪 +6537 尜 +6538 纣 +6539 鉬 +6540 鈍 +6541 犸 +6542 嗤 +6543 囔 +6544 囝 +6545 馔 +6546 逋 +6547 屐 +6548 孱 +6549 銲 +6550 涮 +6551 鉑 +6552 溲 +6553 狍 +6554 嘤 +6555 庥 +6556 砒 +6557 潴 +6558 湔 +6559 抿 +6560 阏 +6561 罷 +6562 狷 +6563 帑 +6564 恹 +6565 妁 +6566 潸 +6567 澌 +6568 馁 +6569 錠 +6570 鄖 +6571 鋤 +6572 徂 +6573 窿 +6574 廪 +6575 妣 +6576 奀 +6577 岀 +6578 绺 +6579 髑 +6580 で +6581 缃 +6582 顼 +6583 莖 +6584 泅 +6585 荘 +6586 莙 +6587 鸶 +6588 皈 +6589 鸲 +6590 喰 +6591 疴 +6592 鹈 +6593 痤 +6594 鹧 +6595 瘊 +6596 汹 +6597 疔 +6598 饃 +6599 濫 +6600 榀 +6601 楂 +6602 楫 +6603 閲 +6604 閻 +6605 磋 +6606 杌 +6607 璁 +6608 瑁 +6609 冴 +6610 掙 +6611 氷 +6612 辍 +6613 闿 +6614 蕪 +6615 纂 +6616 毽 +6617 氅 +6618 氘 +6619 槁 +6620 枘 +6621 薬 +6622 肼 +6623 桄 +6624 鐲 +6625 薈 +6626 栊 +6627 墒 +6628 觇 +6629 闕 +6630 觎 +6631 薔 +6632 橐 +6633 暝 +6634 胍 +6635 嗽 +6636 胫 +6637 辂 +6638 犍 +6639 挈 +6640 膣 +6641 檩 +6642 噁 +6643 濬 +6644 唄 +6645 琲 +6646 痠 +6647 歩 +6648 黜 +6649 悫 +6650 碜 +6651 矸 +6652 欖 +6653 殳 +6654 旄 +6655 欹 +6656 毂 +6657 诽 +6658 兲 +6659 虜 +6660 咁 +6661 瞽 +6662 睽 +6663 撐 +6664 澪 +6665 碉 +6666 囷 +6667 飢 +6668 锘 +6669 蠅 +6670 蠍 +6671 磲 +6672 顱 +6673 屉 +6674 紊 +6675 韌 +6676 眄 +6677 盹 +6678 镬 +6679 镂 +6680 颙 +6681 煅 +6682 斡 +6683 钫 +6684 秕 +6685 秫 +6686 哮 +6687 睐 +6688 钲 +6689 睚 +6690 瀕 +6691 駛 +6692 ぱ +6693 駁 +6694 駄 +6695 嘜 +6696 満 +6697 蟥 +6698 簟 +6699 吭 +6700 吩 +6701 雩 +6702 霰 +6703 鰱 +6704 ぶ +6705 ブ +6706 謹 +6707 戸 +6708 醮 +6709 醅 +6710 蹩 +6711 ふ +6712 澱 +6713 铡 +6714 諒 +6715 卅 +6716 囟 +6717 貍 +6718 鼋 +6719 鼍 +6720 罄 +6721 舐 +6722 蝈 +6723 鲣 +6724 鬲 +6725 乩 +6726 笄 +6727 蜱 +6728 翮 +6729 郧 +6730 笕 +6731 蜩 +6732 蛩 +6733 鲩 +6734 錾 +6735 蹶 +6736 騁 +6737 箜 +6738 鲮 +6739 跆 +6740 仨 +6741 赝 +6742 豊 +6743 匮 +6744 涸 +6745 笥 +6746 粢 +6747 赧 +6748 瞩 +6749 跤 +6750 睁 +6751 伉 +6752 襯 +6753 诌 +6754 筚 +6755 筌 +6756 騏 +6757 豉 +6758 糗 +6759 剀 +6760 瀞 +6761 嘢 +6762 呋 +6763 邏 +6764 吲 +6765 咂 +6766 唠 +6767 吆 +6768 哔 +6769 啕 +6770 甌 +6771 讦 +6772 诟 +6773 殆 +6774 鼯 +6775 侪 +6776 ほ +6777 郓 +6778 诶 +6779 谀 +6780 倮 +6781 黉 +6782 黙 +6783 坻 +6784 兖 +6785 莛 +6786 苄 +6787 貳 +6788 贖 +6789 陬 +6790 谖 +6791 偻 +6792 兕 +6793 傥 +6794 畚 +6795 鶯 +6796 隈 +6797 谥 +6798 谪 +6799 鵲 +6800 僭 +6801 赑 +6802 谮 +6803 嘩 +6804 畑 +6805 攜 +6806 癥 +6807 価 +6808 莠 +6809 荩 +6810 萜 +6811 軼 +6812 媄 +6813 薨 +6814 薤 +6815 蕖 +6816 藁 +6817 迖 +6818 藿 +6819 蘼 +6820 奁 +6821 揄 +6822 尬 +6823 拶 +6824 燴 +6825 狛 +6826 磡 +6827 磧 +6828 磯 +6829 ǒ +6830 鳔 +6831 鳟 +6832 鳏 +6833 鳎 +6834 鲀 +6835 鲹 +6836 瑣 +6837 唉 +6838 皜 +6839 皞 +6840 惮 +6841 郸 +6842 祘 +6843 揩 +6844 繚 +6845 袱 +6846 珝 +6847 珰 +6848 禱 +6849 畬 +6850 ぐ +6851 睏 +6852 乚 +6853 倓 +6854 倞 +6855 僞 +6856 儷 +6857 儘 +6858 啫 +6859 嚮 +6860 噯 +6861 埇 +6862 埗 +6863 垵 +6864 塢 +6865 奭 +6866 妠 +6867 帰 +6868 恵 +6869 憤 +6870 挾 +6871 摳 +6872 攪 +6873 暦 +6874 暐 +6875 柈 +6876 枂 +6877 棲 +6878 棨 +6879 樁 +6880 槓 +6881 檳 +6882 毐 +6883 洑 +6884 湲 +6885 潁 +6886 瀆 +6887 讣 +6888 ゼ +6889 嫉 +6890 絵 +6891 饯 +6892 ゾ +6893 壘 +6894 絃 +6895 抉 +6896 絆 +6897 嫻 +6898 箏 +6899 箓 +6900 剐 +6901 徊 +6902 槻 +6903 碴 +6904 搽 +6905 诧 +6906 伋 +6907 矯 +6908 矻 +6909 瞅 +6910 堿 +6911 啰 +6912 瘁 +6913 岽 +6914 嶙 +6915 岌 +6916 岜 +6917 釗 +6918 啻 +6919 嗫 +6920 啷 +6921 斃 +6922 猬 +6923 夥 +6924 舛 +6925 猊 +6926 鈦 +6927 髀 +6928 跺 +6929 涘 +6930 遄 +6931 澶 +6932 浥 +6933 炁 +6934 浞 +6935 銨 +6936 洧 +6937 髂 +6938 ツ +6939 臑 +6940 泖 +6941 慊 +6942 ッ +6943 娒 +6944 辆 +6945 嗉 +6946 谰 +6947 峁 +6948 夤 +6949 岵 +6950 獠 +6951 獬 +6952 愠 
+6953 崃 +6954 拎 +6955 崤 +6956 忉 +6957 怃 +6958 遴 +6959 迓 +6960 娩 +6961 羈 +6962 嗲 +6963 馇 +6964 囵 +6965 庑 +6966 漯 +6967 麈 +6968 挎 +6969 屾 +6970 涙 +6971 妫 +6972 髌 +6973 テ +6974 徠 +6975 芣 +6976 茀 +6977 鄀 +6978 嚅 +6979 忤 +6980 潆 +6981 瞥 +6982 艱 +6983 嫘 +6984 骘 +6985 苨 +6986 翯 +6987 犴 +6988 馓 +6989 怙 +6990 逦 +6991 沩 +6992 囫 +6993 怦 +6994 羼 +6995 嵋 +6996 嵴 +6997 繭 +6998 囹 +6999 圉 +7000 鈕 +7001 怛 +7002 乓 +7003 咆 +7004 阒 +7005 鉗 +7006 徇 +7007 鉚 +7008 嶷 +7009 豳 +7010 咙 +7011 您 +7012 彷 +7013 妪 +7014 漭 +7015 噢 +7016 戕 +7017 鉅 +7018 鰾 +7019 旵 +7020 麋 +7021 绱 +7022 纰 +7023 鐐 +7024 莀 +7025 菂 +7026 橇 +7027 锹 +7028 缡 +7029 鏘 +7030 鏜 +7031 鏽 +7032 甾 +7033 哾 +7034 昫 +7035 饑 +7036 ば +7037 鸺 +7038 鹁 +7039 鹌 +7040 鹩 +7041 鹨 +7042 餡 +7043 疠 +7044 橢 +7045 鏖 +7046 撻 +7047 閪 +7048 榘 +7049 椹 +7050 魃 +7051 囪 +7052 鑵 +7053 鑼 +7054 锜 +7055 溉 +7056 痊 +7057 颧 +7058 葷 +7059 応 +7060 旮 +7061 辚 +7062 陞 +7063 蕗 +7064 忞 +7065 膑 +7066 胱 +7067 氚 +7068 氲 +7069 牖 +7070 霂 +7071 霑 +7072 魉 +7073 瓴 +7074 殁 +7075 赡 +7076 桎 +7077 赈 +7078 肱 +7079 脘 +7080 槠 +7081 肫 +7082 閏 +7083 菴 +7084 桤 +7085 枨 +7086 槭 +7087 樗 +7088 桕 +7089 觌 +7090 腴 +7091 樘 +7092 雑 +7093 闘 +7094 隠 +7095 雖 +7096 萵 +7097 蕁 +7098 橛 +7099 轵 +7100 栌 +7101 纫 +7102 桴 +7103 桫 +7104 柝 +7105 朐 +7106 薙 +7107 橼 +7108 甥 +7109 辄 +7110 脍 +7111 蕎 +7112 甪 +7113 単 +7114 実 +7115 昰 +7116 窯 +7117 旼 +7118 沨 +7119 岺 +7120 濰 +7121 塱 +7122 汭 +7123 疙 +7124 婍 +7125 戆 +7126 怼 +7127 砜 +7128 砀 +7129 頃 +7130 魍 +7131 懲 +7132 戩 +7133 撿 +7134 齑 +7135 熳 +7136 鞏 +7137 囂 +7138 虤 +7139 滎 +7140 瞑 +7141 钭 +7142 畎 +7143 畋 +7144 顎 +7145 魑 +7146 戯 +7147 铯 +7148 铫 +7149 檄 +7150 蠄 +7151 旒 +7152 锛 +7153 砟 +7154 酞 +7155 炝 +7156 炻 +7157 钹 +7158 罾 +7159 盥 +7160 铼 +7161 锪 +7162 戽 +7163 嚏 +7164 硇 +7165 黻 +7166 黼 +7167 铊 +7168 铌 +7169 镪 +7170 锸 +7171 頗 +7172 盱 +7173 铑 +7174 钕 +7175 镱 +7176 飆 +7177 腆 +7178 祢 +7179 祧 +7180 詈 +7181 铗 +7182 镏 +7183 颯 +7184 蝨 +7185 禳 +7186 钶 +7187 淆 +7188 牺 +7189 恓 +7190 玨 +7191 鱈 +7192 攏 +7193 嘚 +7194 黢 +7195 я +7196 褡 +7197 窨 +7198 窕 +7199 駱 +7200 囱 +7201 襦 +7202 裥 +7203 讶 +7204 耋 +7205 耵 +7206 裨 +7207 聒 +7208 褙 +7209 褓 +7210 馴 +7211 慜 +7212 浐 +7213 蟀 +7214 髙 +7215 ビ +7216 売 +7217 を +7218 勻 +7219 蚩 +7220 蚨 +7221 驍 +7222 舀 +7223 覓 +7224 黥 +7225 籀 +7226 臬 +7227 魟 +7228 詭 +7229 岞 +7230 霪 +7231 隹 +7232 龇 +7233 髦 +7234 е +7235 愔 +7236 懼 +7237 謡 +7238 貅 +7239 醺 +7240 酴 +7241 髡 +7242 崭 +7243 鴣 +7244 鴦 +7245 へ +7246 ヘ +7247 踅 +7248 蠊 +7249 龉 +7250 醭 +7251 驛 +7252 筲 +7253 襞 +7254 鯽 +7255 踽 +7256 锗 +7257 跄 +7258 蹉 +7259 芈 +7260 蹑 +7261 跗 +7262 跚 +7263 麴 +7264 鮀 +7265 誅 +7266 仞 +7267 驟 +7268 鬍 +7269 鲭 +7270 蹰 +7271 跎 +7272 仃 +7273 蝾 +7274 酝 +7275 読 +7276 跸 +7277 叵 +7278 驤 +7279 髄 +7280 貉 +7281 跹 +7282 蠛 +7283 篝 +7284 篪 +7285 筘 +7286 蝮 +7287 蛴 +7288 蝤 +7289 蜇 +7290 龊 +7291 鲋 +7292 鲽 +7293 鮫 +7294 蘸 +7295 跻 +7296 豸 +7297 踉 +7298 踟 +7299 狰 +7300 赜 +7301 刎 +7302 驪 +7303 鬚 +7304 鲞 +7305 骉 +7306 喳 +7307 篤 +7308 訶 +7309 髖 +7310 蜉 +7311 蹀 +7312 佝 +7313 乸 +7314 沺 +7315 琍 +7316 琎 +7317 巖 +7318 禦 +7319 ╯ +7320 淪 +7321 咦 +7322 擀 +7323 甙 +7324 呖 +7325 咝 +7326 哞 +7327 哽 +7328 哓 +7329 呲 +7330 哕 +7331 咿 +7332 ╰ +7333 漷 +7334 禩 +7335 檜 +7336 鷲 +7337 髭 +7338 囑 +7339 诂 +7340 凇 +7341 诨 +7342 侉 +7343 佻 +7344 伲 +7345 鸞 +7346 鄞 +7347 郫 +7348 鄯 +7349 鼩 +7350 ぼ +7351 軀 +7352 墀 +7353 転 +7354 酃 +7355 籴 +7356 倥 +7357 坩 +7358 坼 +7359 垆 +7360 茑 +7361 鹀 +7362 矍 +7363 坌 +7364 谑 +7365 陔 +7366 匍 +7367 茈 +7368 陲 +7369 傧 +7370 茼 +7371 芟 +7372 鵰 +7373 谘 +7374 亳 +7375 垩 +7376 隰 +7377 谡 +7378 邙 +7379 袤 +7380 儆 +7381 酆 +7382 鹮 +7383 賁 +7384 賃 +7385 儋 +7386 圮 +7387 苘 +7388 賄 +7389 鸛 +7390 埝 +7391 坜 +7392 鱒 +7393 乂 +7394 朧 +7395 沄 +7396 痺 
+7397 穢 +7398 譞 +7399 擷 +7400 ポ +7401 嬢 +7402 葑 +7403 莴 +7404 莩 +7405 菝 +7406 蒺 +7407 蓐 +7408 菔 +7409 輓 +7410 蒹 +7411 蒴 +7412 輛 +7413 軻 +7414 齲 +7415 傀 +7416 拮 +7417 薜 +7418 蕻 +7419 轅 +7420 蓿 +7421 捩 +7422 摒 +7423 奘 +7424 匏 +7425 揿 +7426 尴 +7427 抟 +7428 摁 +7429 辮 +7430 挹 +7431 搦 +7432 辺 +7433 谞 +7434 睜 +7435 搐 +7436 鳜 +7437 鱸 +7438 骺 +7439 鞲 +7440 鳙 +7441 鲉 +7442 鲘 +7443 鳉 +7444 鳑 +7445 讐 +7446 瑝 +7447 瑨 +7448 镑 +7449 皚 +7450 皦 +7451 盁 +7452 祙 +7453 痋 +7454 瘈 +7455 瘍 +7456 瘓 +7457 璣 +7458 瓏 +7459 瓘 +7460 笒 +7461 珖 +7462 豢 +7463 籟 +7464 粬 +7465 ё +7466 禵 +7467 ┛ +7468 ┃ +7469 甡 +7470 ぷ +7471 ぁ +7472 ぇ +7473 ガ +7474 + +7475 狈 +7476 盶 +7477 眬 +7478 睒 +7479 侘 +7480 亅 +7481 仮 +7482 亶 +7483 仏 +7484 偓 +7485 値 +7486 倻 +7487 儉 +7488 叡 +7489 厳 +7490 啱 +7491 噠 +7492 嚕 +7493 嘰 +7494 噭 +7495 噸 +7496 垈 +7497 垕 +7498 墾 +7499 墎 +7500 墘 +7501 姸 +7502 姈 +7503 嫲 +7504 嫆 +7505 嫊 +7506 嫋 +7507 崁 +7508 崈 +7509 崚 +7510 崢 +7511 幍 +7512 帯 +7513 巻 +7514 彫 +7515 弶 +7516 悪 +7517 慬 +7518 懇 +7519 憙 +7520 憫 +7521 挻 +7522 拝 +7523 斂 +7524 攢 +7525 攬 +7526 昞 +7527 暠 +7528 晝 +7529 晧 +7530 晸 +7531 杺 +7532 梶 +7533 梼 +7534 槺 +7535 槃 +7536 槑 +7537 櫞 +7538 櫚 +7539 殭 +7540 殲 +7541 洣 +7542 浛 +7543 洨 +7544 湰 +7545 湴 +7546 湳 +7547 淸 +7548 渼 +7549 漖 +7550 瀧 +7551 瀮 +7552 煢 +7553 焼 +7554 ぜ +7555 兗 +7556 惣 +7557 紓 +7558 紘 +7559 紜 +7560 紮 +7561 鹘 +7562 絹 +7563 綑 +7564 ň +7565 肪 +7566 ぞ +7567 溇 +7568 綻 +7569 継 +7570 緞 +7571 糰 +7572 侥 +7573 続 +7574 綽 +7575 綝 +7576 攫 +7577 絜 +7578 緈 +7579 絪 +7580 綰 +7581 紈 +7582 絎 +7583 紉 +7584 惫 +7585 児 +7586 姽 +7587 暪 +7588 筧 +7589 恫 +7590 ゲ +7591 瞋 +7592 睬 +7593 瞞 +7594 睺 +7595 硚 +7596 碁 +7597 砳 +7598 砕 +7599 珹 +7600 癆 +7601 嚜 +7602 惇 +7603 潽 +7604 嗎 +7605 幄 +7606 釁 +7607 釐 +7608 髁 +7609 劻 +7610 屃 +7611 浟 +7612 羱 +7613 郷 +7614 喁 +7615 唷 +7616 鄘 +7617 喈 +7618 鄲 +7619 骼 +7620 咐 +7621 惱 +7622 繽 +7623 纻 +7624 缷 +7625 罈 +7626 佲 +7627 団 +7628 夆 +7629 猕 +7630 飧 +7631 鈿 +7632 鉄 +7633 屄 +7634 嵚 +7635 聳 +7636 ゅ +7637 ュ +7638 嬪 +7639 濞 +7640 寤 +7641 瀹 +7642 鍊 +7643 鍙 +7644 冧 +7645 変 +7646 庝 +7647 鋇 +7648 洇 +7649 浃 +7650 洌 +7651 鋰 +7652 镊 +7653 臏 +7654 ャ +7655 悝 +7656 惬 +7657 銖 +7658 溍 +7659 谩 +7660 脅 +7661 峋 +7662 浼 +7663 徉 +7664 徨 +7665 郃 +7666 噱 +7667 釧 +7668 邂 +7669 洫 +7670 溽 +7671 悛 +7672 忝 +7673 徭 +7674 怄 +7675 馄 +7676 鈉 +7677 徘 +7678 鈊 +7679 銬 +7680 鎌 +7681 ㄆ +7682 芵 +7683 苼 +7684 苽 +7685 茋 +7686 崛 +7687 嗌 +7688 馐 +7689 馑 +7690 彖 +7691 咫 +7692 涫 +7693 婀 +7694 驺 +7695 芻 +7696 媲 +7697 骛 +7698 苃 +7699 骖 +7700 骢 +7701 苧 +7702 苭 +7703 鎧 +7704 罠 +7705 舎 +7706 镣 +7707 犰 +7708 犷 +7709 嗵 +7710 忪 +7711 錳 +7712 泐 +7713 阄 +7714 銶 +7715 狁 +7716 腘 +7717 狒 +7718 狨 +7719 狲 +7720 嘭 +7721 怍 +7722 怩 +7723 溆 +7724 湓 +7725 郟 +7726 翾 +7727 猄 +7728 噙 +7729 氵 +7730 狴 +7731 狳 +7732 帔 +7733 噘 +7734 儡 +7735 滠 +7736 猇 +7737 狺 +7738 癇 +7739 礳 +7740 昪 +7741 渃 +7742 瀋 +7743 – +7744 廋 +7745 х +7746 ょ +7747 ョ +7748 冪 +7749 绔 +7750 缁 +7751 绁 +7752 暻 +7753 菉 +7754 菫 +7755 玷 +7756 缛 +7757 鏗 +7758 荌 +7759 缣 +7760 莔 +7761 缰 +7762 缯 +7763 鏟 +7764 缵 +7765 莢 +7766 鐮 +7767 痙 +7768 俤 +7769 揹 +7770 梔 +7771 疍 +7772 黟 +7773 婐 +7774 氿 +7775 鸬 +7776 稹 +7777 皤 +7778 穑 +7779 饋 +7780 饹 +7781 餍 +7782 π +7783 袆 +7784 鹆 +7785 鹇 +7786 鹋 +7787 疰 +7788 痃 +7789 鹕 +7790 鹚 +7791 痦 +7792 痼 +7793 鹪 +7794 瘌 +7795 餚 +7796 餛 +7797 疬 +7798 饅 +7799 瘙 +7800 杄 +7801 珽 +7802 夐 +7803 涢 +7804 楱 +7805 椁 +7806 棰 +7807 閬 +7808 熒 +7809 嗚 +7810 欒 +7811 韪 +7812 鑰 +7813 铚 +7814 葇 +7815 涥 +7816 滉 +7817 戋 +7818 検 +7819 浭 +7820 澥 +7821 蔪 +7822 囯 +7823 氹 +7824 氆 +7825 毵 +7826 耄 +7827 毳 +7828 氡 +7829 査 +7830 薦 +7831 藠 +7832 氩 +7833 氤 +7834 槊 +7835 杪 +7836 桡 +7837 蔭 +7838 曷 +7839 脬 +7840 閎 
+7841 萩 +7842 晷 +7843 殚 +7844 殛 +7845 呻 +7846 菶 +7847 杷 +7848 隕 +7849 蒄 +7850 镚 +7851 閔 +7852 雋 +7853 轫 +7854 娠 +7855 鐸 +7856 柰 +7857 腠 +7858 胛 +7859 腼 +7860 薺 +7861 轳 +7862 蔔 +7863 胙 +7864 蒾 +7865 轹 +7866 檎 +7867 牦 +7868 媵 +7869 膂 +7870 胝 +7871 胴 +7872 檫 +7873 雝 +7874 蓇 +7875 曩 +7876 犒 +7877 臌 +7878 関 +7879 蒟 +7880 挲 +7881 胼 +7882 辋 +7883 檗 +7884 巉 +7885 昮 +7886 碏 +7887 嶧 +7888 済 +7889 滸 +7890 籬 +7891 琀 +7892 獺 +7893 ╥ +7894 璱 +7895 峣 +7896 抜 +7897 渋 +7898 璉 +7899 ы +7900 図 +7901 栱 +7902 眵 +7903 韡 +7904 懑 +7905 韮 +7906 韾 +7907 蓖 +7908 呁 +7909 浲 +7910 滌 +7911 彀 +7912 欤 +7913 鞕 +7914 ヌ +7915 斕 +7916 蹋 +7917 ь +7918 嗩 +7919 畹 +7920 罘 +7921 顒 +7922 顓 +7923 顗 +7924 顥 +7925 埼 +7926 蝲 +7927 氾 +7928 颼 +7929 锃 +7930 罴 +7931 锝 +7932 砉 +7933 锕 +7934 砝 +7935 礤 +7936 礓 +7937 藺 +7938 钽 +7939 蠲 +7940 锱 +7941 镦 +7942 锲 +7943 锨 +7944 钚 +7945 顳 +7946 焐 +7947 嗡 +7948 颋 +7949 蟞 +7950 蘄 +7951 挝 +7952 镲 +7953 頼 +7954 硌 +7955 頽 +7956 锺 +7957 眙 +7958 椭 +7959 眭 +7960 碚 +7961 碡 +7962 祗 +7963 铙 +7964 锖 +7965 镌 +7966 镓 +7967 頡 +7968 碲 +7969 禊 +7970 颱 +7971 蟯 +7972 蟄 +7973 韜 +7974 頦 +7975 蝀 +7976 蟈 +7977 磔 +7978 忑 +7979 颶 +7980 磙 +7981 忐 +7982 燹 +7983 蠣 +7984 玧 +7985 獼 +7986 甁 +7987 禟 +7988 桲 +7989 譴 +7990 祃 +7991 窵 +7992 琺 +7993 俶 +7994 昺 +7995 渕 +7996 ∕ +7997 鱉 +7998 廙 +7999 琻 +8000 玭 +8001 ﹏ +8002 縉 +8003 烴 +8004 聍 +8005 癔 +8006 瘛 +8007 瘵 +8008 瘠 +8009 駙 +8010 駟 +8011 喲 +8012 癃 +8013 皴 +8014 裢 +8015 耧 +8016 裊 +8017 褛 +8018 聩 +8019 褊 +8020 褫 +8021 颃 +8022 媜 +8023 昽 +8024 梠 +8025 ㎡ +8026 嚭 +8027 埡 +8028 簕 +8029 簫 +8030 黧 +8031 篦 +8032 笞 +8033 蟋 +8034 蟑 +8035 螬 +8036 髪 +8037 в +8038 虼 +8039 颥 +8040 蚍 +8041 蚋 +8042 驊 +8043 驎 +8044 円 +8045 捯 +8046 曇 +8047 眶 +8048 滛 +8049 烎 +8050 魘 +8051 艋 +8052 舢 +8053 魷 +8054 詮 +8055 婗 +8056 滝 +8057 龃 +8058 鲼 +8059 觥 +8060 龌 +8061 鰭 +8062 謬 +8063 鮜 +8064 酽 +8065 醢 +8066 醯 +8067 酡 +8068 鯇 +8069 鯖 +8070 辗 +8071 眨 +8072 圾 +8073 髫 +8074 卮 +8075 丨 +8076 艟 +8077 黾 +8078 艄 +8079 虿 +8080 龀 +8081 罅 +8082 箦 +8083 蜮 +8084 鲠 +8085 鲥 +8086 雠 +8087 誥 +8088 趵 +8089 趼 +8090 蹂 +8091 趺 +8092 嘏 +8093 蜴 +8094 鲦 +8095 襜 +8096 諦 +8097 箸 +8098 笮 +8099 襠 +8100 笊 +8101 箅 +8102 蜿 +8103 鍪 +8104 鏊 +8105 亻 +8106 豨 +8107 鯤 +8108 箪 +8109 筇 +8110 箢 +8111 蛲 +8112 蝻 +8113 籼 +8114 諭 +8115 鲱 +8116 躅 +8117 仂 +8118 諮 +8119 簁 +8120 鯧 +8121 謐 +8122 誰 +8123 鳯 +8124 訫 +8125 豈 +8126 蝰 +8127 粞 +8128 鯪 +8129 鲴 +8130 鮪 +8131 笤 +8132 笾 +8133 蝌 +8134 螋 +8135 蝓 +8136 趄 +8137 糌 +8138 鲇 +8139 鲆 +8140 鲻 +8141 鲺 +8142 鲐 +8143 躞 +8144 貊 +8145 伥 +8146 魆 +8147 鰍 +8148 鮭 +8149 鮍 +8150 髎 +8151 諷 +8152 鳶 +8153 筮 +8154 騮 +8155 詔 +8156 鯰 +8157 鮰 +8158 筅 +8159 篼 +8160 蝥 +8161 蜊 +8162 糅 +8163 酎 +8164 踮 +8165 刿 +8166 諺 +8167 鬢 +8168 骕 +8169 鴛 +8170 糨 +8171 鳊 +8172 巔 +8173 噐 +8174 攔 +8175 丷 +8176 烺 +8177 眘 +8178 譙 +8179 疭 +8180 丼 +8181 奡 +8182 н +8183 ┻ +8184 邨 +8185 哚 +8186 呃 +8187 咤 +8188 呙 +8189 逨 +8190 哳 +8191 呶 +8192 唢 +8193 哂 +8194 啁 +8195 咣 +8196 唿 +8197 玹 +8198 眛 +8199 匱 +8200 噓 +8201 嶶 +8202 鼹 +8203 掹 +8204 鷥 +8205 蹿 +8206 ぺ +8207 広 +8208 讧 +8209 趨 +8210 鶉 +8211 鶗 +8212 ベ +8213 佴 +8214 佾 +8215 鼷 +8216 堺 +8217 燐 +8218 麀 +8219 鸰 +8220 ホ +8221 鄣 +8222 郛 +8223 郏 +8224 鼽 +8225 慄 +8226 嬡 +8227 屲 +8228 堞 +8229 堠 +8230 劬 +8231 芄 +8232 鄄 +8233 艹 +8234 谂 +8235 诿 +8236 貯 +8237 劾 +8238 茔 +8239 鹍 +8240 陉 +8241 訇 +8242 鬯 +8243 荛 +8244 苁 +8245 鵪 +8246 鸊 +8247 偬 +8248 厶 +8249 賚 +8250 垴 +8251 贠 +8252 谝 +8253 邗 +8254 儇 +8255 苤 +8256 冫 +8257 圹 +8258 埸 +8259 黿 +8260 邶 +8261 埤 +8262 茌 +8263 谵 +8264 贍 +8265 瑅 +8266 眞 +8267 亀 +8268 坵 +8269 檞 +8270 玏 +8271 沇 +8272 縴 +8273 凖 +8274 淉 +8275 齷 +8276 龕 +8277 傒 +8278 栞 +8279 蓣 +8280 荸 +8281 荬 +8282 轂 +8283 萋 +8284 萏 
+8285 菹 +8286 蓠 +8287 蒡 +8288 葜 +8289 甍 +8290 軽 +8291 軾 +8292 瓃 +8293 倆 +8294 巣 +8295 玓 +8296 淶 +8297 慆 +8298 兀 +8299 м +8300 掁 +8301 栟 +8302 迍 +8303 蘧 +8304 轆 +8305 蕺 +8306 蘩 +8307 掴 +8308 捭 +8309 耷 +8310 掼 +8311 拊 +8312 拚 +8313 捃 +8314 込 +8315 盃 +8316 疇 +8317 偁 +8318 燻 +8319 牤 +8320 牘 +8321 犽 +8322 犢 +8323 磥 +8324 磦 +8325 磻 +8326 磾 +8327 ▕ +8328 ▽ +8329 鼢 +8330 鳓 +8331 靼 +8332 鞯 +8333 鲃 +8334 鲊 +8335 鲏 +8336 鲖 +8337 鲙 +8338 鲯 +8339 鲾 +8340 鳀 +8341 鳠 +8342 讃 +8343 譆 +8344 讎 +8345 讖 +8346 瑒 +8347 瑧 +8348 瑫 +8349 瑮 +8350 瑱 +8351 瑸 +8352 癭 +8353 皛 +8354 癩 +8355 皰 +8356 皸 +8357 礬 +8358 祼 +8359 禇 +8360 痎 +8361 瘄 +8362 瘆 +8363 瘣 +8364 瘧 +8365 瘨 +8366 瘺 +8367 瓔 +8368 瓚 +8369 瓛 +8370 瓟 +8371 縹 +8372 繄 +8373 繅 +8374 繕 +8375 穇 +8376 穫 +8377 窊 +8378 窓 +8379 窣 +8380 笉 +8381 笣 +8382 玚 +8383 玔 +8384 珄 +8385 珌 +8386 珎 +8387 珛 +8388 珵 +8389 粙 +8390 粨 +8391 粩 +8392 Т +8393 О +8394 Л +8395 Э +8396 Б +8397 秬 +8398 稈 +8399 ┗ +8400 ━ +8401 畤 +8402 畯 +8403 ぉ +8404 ぃ +8405 プ +8406 ィ +8407 ゥ +8408 眧 +8409 盷 +8410 眴 +8411 亊 +8412 伒 +8413 亇 +8414 仱 +8415 亜 +8416 亹 +8417 仐 +8418 仚 +8419 偞 +8420 偍 +8421 倕 +8422 倗 +8423 倧 +8424 倴 +8425 倵 +8426 倶 +8427 儈 +8428 僝 +8429 僜 +8430 儔 +8431 儚 +8432 劏 +8433 劵 +8434 劊 +8435 劌 +8436 剺 +8437 叇 +8438 叆 +8439 厔 +8440 厙 +8441 厷 +8442 喛 +8443 啣 +8444 啈 +8445 圇 +8446 嚢 +8447 嚨 +8448 嚟 +8449 噲 +8450 噵 +8451 噽 +8452 嚀 +8453 嚐 +8454 堊 +8455 垿 +8456 埈 +8457 埆 +8458 垻 +8459 垾 +8460 垏 +8461 垝 +8462 垞 +8463 垱 +8464 垳 +8465 夨 +8466 塤 +8467 壆 +8468 墐 +8469 娮 +8470 娀 +8471 妧 +8472 妶 +8473 姀 +8474 嫤 +8475 嫰 +8476 嫽 +8477 媭 +8478 媱 +8479 嫀 +8480 嫃 +8481 嫏 +8482 嫑 +8483 嫕 +8484 嫙 +8485 尨 +8486 宍 +8487 尅 +8488 尪 +8489 孿 +8490 寔 +8491 寗 +8492 寭 +8493 寯 +8494 寳 +8495 対 +8496 専 +8497 峘 +8498 崀 +8499 嶗 +8500 嵂 +8501 崼 +8502 嵒 +8503 崍 +8504 崐 +8505 巂 +8506 巸 +8507 巹 +8508 巿 +8509 帀 +8510 帡 +8511 帩 +8512 彯 +8513 惙 +8514 惢 +8515 悧 +8516 悩 +8517 悰 +8518 悵 +8519 慇 +8520 憚 +8521 憣 +8522 憭 +8523 掲 +8524 抃 +8525 拏 +8526 拠 +8527 拤 +8528 拫 +8529 拵 +8530 拸 +8531 挏 +8532 挐 +8533 挓 +8534 摽 +8535 摜 +8536 摫 +8537 搥 +8538 摑 +8539 敻 +8540 攣 +8541 攱 +8542 攲 +8543 攽 +8544 敐 +8545 暱 +8546 晙 +8547 晛 +8548 晫 +8549 晳 +8550 暁 +8551 暅 +8552 枴 +8553 柅 +8554 枹 +8555 枺 +8556 朶 +8557 枌 +8558 枒 +8559 枓 +8560 枙 +8561 枟 +8562 枦 +8563 枱 +8564 棸 +8565 椏 +8566 梋 +8567 棫 +8568 棻 +8569 梾 +8570 棅 +8571 樋 +8572 槱 +8573 榎 +8574 榿 +8575 槀 +8576 槇 +8577 槈 +8578 槉 +8579 槏 +8580 槖 +8581 槜 +8582 槝 +8583 橿 +8584 檁 +8585 櫌 +8586 檵 +8587 檻 +8588 櫆 +8589 櫈 +8590 毎 +8591 歔 +8592 殢 +8593 洦 +8594 泘 +8595 泑 +8596 洶 +8597 洴 +8598 浉 +8599 洰 +8600 洢 +8601 泚 +8602 洈 +8603 湋 +8604 湙 +8605 湚 +8606 湞 +8607 潿 +8608 澂 +8609 澔 +8610 潄 +8611 潏 +8612 潟 +8613 灤 +8614 灕 +8615 瀅 +8616 瀰 +8617 瀼 +8618 灃 +8619 灄 +8620 煬 +8621 煾 +8622 煡 +8623 煓 +8624 嚰 +8625 糬 +8626 ń +8627 幗 +8628 摻 +8629 絔 +8630 綋 +8631 綎 +8632 痪 +8633 樉 +8634 緙 +8635 緱 +8636 緲 +8637 糀 +8638 紸 +8639 綣 +8640 紂 +8641 綬 +8642 鞴 +8643 肮 +8644 柟 +8645 箋 +8646 箖 +8647 箠 +8648 筯 +8649 筶 +8650 筼 +8651 刽 +8652 筜 +8653 嚥 +8654 嵅 +8655 毑 +8656 瞼 +8657 瞾 +8658 矅 +8659 矖 +8660 矚 +8661 瞐 +8662 睞 +8663 瞕 +8664 瞤 +8665 С +8666 嫵 +8667 槼 +8668 砢 +8669 硞 +8670 硤 +8671 硨 +8672 硵 +8673 矰 +8674 盿 +8675 巃 +8676 焌 +8677 礎 +8678 禔 +8679 朅 +8680 楢 +8681 И +8682 嫪 +8683 敧 +8684 獦 +8685 п +8686 屺 +8687 岣 +8688 岖 +8689 幞 +8690 醽 +8691 醾 +8692 醿 +8693 釄 +8694 釈 +8695 Я +8696 伭 +8697 偭 +8698 熈 +8699 羶 +8700 羾 +8701 翃 +8702 翚 +8703 о +8704 傕 +8705 愢 +8706 曕 +8707 涏 +8708 鄚 +8709 唳 +8710 喾 +8711 啖 +8712 鄴 +8713 尷 +8714 椑 +8715 熇 +8716 繾 +8717 繹 +8718 纮 +8719 缊 +8720 罌 +8721 罍 +8722 呪 +8723 炩 +8724 熲 +8725 鈄 +8726 猞 +8727 獍 +8728 猸 
+8729 狻
[... vocabulary entries 8730–16646 elided: one "+<id> <token>" pair per line — mostly rare/traditional CJK characters, plus Greek and Cyrillic letters (Α–Ω, А–Я), full-width ASCII (<, =, >, (, )), CJK punctuation (《, 》, 〖), box-drawing and block symbols (─, ┐, ▇), circled/ideographic numerals (①, ㈠, Ⅲ), and a run of control/whitespace tokens around ids 10763–10790 ...]
+16647 呄
+16648 喯 +16649 囅 +16650 埾 +16651 壪 +16652 娤 +16653 嬒 +16654 嵪 +16655 幭 +16656 懴 +16657 捪 +16658 斚 +16659 曄 +16660 栂 +16661 椣 +16662 櫹 +16663 毾 +16664 澫 +16665 炏 +16666 熛 +16667 蟕 +16668 蟐 +16669 蟸 +16670 蟔 +16671 蟏 +16672 蟵 +16673 螥 +16674 蟎 +16675 蟺 +16676 蟼 +16677 蟽 +16678 蟿 +16679 蠀 +16680 蠁 +16681 蠆 +16682 蠇 +16683 蠈 +16684 蠉 +16685 蠋 +16686 蠌 +16687 蠎 +16688 蠏 +16689 蠐 +16690 蠒 +16691 蠗 +16692 蠘 +16693 蠚 +16694 蠜 +16695 蠠 +16696 锊 +16697 韆 +16698 鞟 +16699 蚢 +16700 虯 +16701 愍 +16702 砹 +16703 霢 +16704 蘟 +16705 灬 +16706 爨 +16707 炷 +16708 蝍 +16709 蜛 +16710 钆 +16711 颽 +16712 蟖 +16713 螦 +16714 镟 +16715 镥 +16716 镤 +16717 锬 +16718 锩 +16719 铈 +16720 韇 +16721 蚥 +16722 虰 +16723 靊 +16724 霣 +16725 蘠 +16726 藼 +16727 蝏 +16728 蜝 +16729 颾 +16730 蟗 +16731 韈 +16732 鞢 +16733 蚦 +16734 虲 +16735 靋 +16736 霤 +16737 藽 +16738 頲 +16739 蝐 +16740 蜟 +16741 钋 +16742 颿 +16743 顲 +16744 蟘 +16745 螩 +16746 韉 +16747 鞤 +16748 蚫 +16749 虳 +16750 眇 +16751 靌 +16752 霥 +16753 藾 +16754 頳 +16755 蝑 +16756 蜠 +16757 铕 +16758 飀 +16759 螪 +16760 镄 +16761 韊 +16762 鞥 +16763 蚭 +16764 虴 +16765 黹 +16766 靍 +16767 霦 +16768 蘣 +16769 蘀 +16770 頴 +16771 蝒 +16772 蜤 +16773 钌 +16774 飁 +16775 蟚 +16776 锼 +16777 鞦 +16778 蚮 +16779 虵 +16780 靎 +16781 蘤 +16782 蘁 +16783 頵 +16784 蝔 +16785 飂 +16786 螰 +16787 鞧 +16788 蚲 +16789 虶 +16790 靏 +16791 霨 +16792 蘥 +16793 蘂 +16794 頖 +16795 蜧 +16796 飃 +16797 螱 +16798 韍 +16799 蚳 +16800 虷 +16801 靐 +16802 霩 +16803 蘦 +16804 蘃 +16805 蝖 +16806 蜨 +16807 颒 +16808 韎 +16809 鞩 +16810 虸 +16811 眍 +16812 靑 +16813 霫 +16814 蘨 +16815 煳 +16816 蝘 +16817 蜪 +16818 钔 +16819 飅 +16820 蟟 +16821 螴 +16822 锿 +16823 锾 +16824 韏 +16825 鞪 +16826 蚸 +16827 靔 +16828 霬 +16829 蘪 +16830 蝚 +16831 蜫 +16832 螶 +16833 鞬 +16834 蚹 +16835 蚄 +16836 靕 +16837 霮 +16838 蘫 +16839 頺 +16840 頚 +16841 蝛 +16842 蜬 +16843 飇 +16844 颣 +16845 螷 +16846 蚻 +16847 蚅 +16848 靗 +16849 霯 +16850 蜭 +16851 飈 +16852 蟣 +16853 韒 +16854 蚼 +16855 蚆 +16856 靘 +16857 霱 +16858 蘉 +16859 蝝 +16860 蜯 +16861 飉 +16862 颩 +16863 蟤 +16864 螹 +16865 鞱 +16866 蚽 +16867 蚇 +16868 蘮 +16869 頝 +16870 蝞 +16871 蜰 +16872 飊 +16873 颪 +16874 蟦 +16875 螻 +16876 镅 +16877 韔 +16878 鞳 +16879 蚾 +16880 眢 +16881 眚 +16882 霴 +16883 蘯 +16884 煺 +16885 頾 +16886 頞 +16887 蜲 +16888 钪 +16889 钬 +16890 螼 +16891 镎 +16892 韕 +16893 鞵 +16894 蚿 +16895 蚉 +16896 靝 +16897 蘰 +16898 頿 +16899 頟 +16900 蝡 +16901 蜳 +16902 飌 +16903 蟨 +16904 螾 +16905 韖 +16906 蛁 +16907 蚎 +16908 靟 +16909 蝢 +16910 蜵 +16911 飍 +16912 颭 +16913 蟩 +16914 螿 +16915 韗 +16916 鞷 +16917 蛂 +16918 蚏 +16919 靣 +16920 霷 +16921 蘲 +16922 顁 +16923 蜶 +16924 蟫 +16925 蟁 +16926 韘 +16927 鞸 +16928 蛃 +16929 蚐 +16930 靤 +16931 霺 +16932 蘳 +16933 顂 +16934 頢 +16935 蝧 +16936 飐 +16937 蟂 +16938 钸 +16939 韙 +16940 鞹 +16941 蛅 +16942 蚑 +16943 霻 +16944 蘴 +16945 蘐 +16946 頣 +16947 蜹 +16948 颰 +16949 蟭 +16950 蟃 +16951 韚 +16952 鞺 +16953 蛈 +16954 蚒 +16955 睃 +16956 碥 +16957 靧 +16958 霼 +16959 禚 +16960 顄 +16961 蝩 +16962 蜺 +16963 稆 +16964 稃 +16965 韛 +16966 鞻 +16967 蛌 +16968 蚔 +16969 霽 +16970 蘶 +16971 蘓 +16972 蝪 +16973 蜼 +16974 颲 +16975 蟰 +16976 蟅 +16977 蚖 +16978 靪 +16979 霿 +16980 蘷 +16981 蘔 +16982 蝫 +16983 蜽 +16984 蟇 +16985 韝 +16986 鞽 +16987 蛒 +16988 蚗 +16989 靫 +16990 靀 +16991 蘹 +16992 蘕 +16993 顇 +16994 飜 +16995 颴 +16996 韞 +16997 鞾 +16998 蛓 +16999 蚘 +17000 靁 +17001 蘺 +17002 顈 +17003 頨 +17004 蝭 +17005 蝁 +17006 飝 +17007 颵 +17008 蟉 +17009 韟 +17010 鞿 +17011 蛕 +17012 蚙 +17013 靭 +17014 蘻 +17015 顉 +17016 頩 +17017 蝯 +17018 飠 +17019 蟴 +17020 蟌 +17021 铪 +17022 钷 +17023 頎 +17024 蜖 +17025 鞝 +17026 虪 +17027 顮 +17028 螤 +17029 餇 +17030 磈 +17031 ╦ +17032 鱦 +17033 譲 +17034 琷 +17035 癹 +17036 礿 +17037 痡 +17038 璲 +17039 縥 +17040 穓 +17041 竕 +17042 秊 
+17043 ﹋ +17044 甹 +17045 眏 +17046 乯 +17047 俲 +17048 僯 +17049 刯 +17050 卝 +17051 唈 +17052 坖 +17053 塲 +17054 妀 +17055 宩 +17056 峧 +17057 廽 +17058 抝 +17059 搄 +17060 攋 +17061 昷 +17062 梛 +17063 檍 +17064 歫 +17065 沯 +17066 渏 +17067 漥 +17068 瀓 +17069 碕 +17070 ↗ +17071 鱆 +17072 譐 +17073 琂 +17074 癑 +17075 礘 +17076 疛 +17077 璊 +17078 稪 +17079 窲 +17080 禞 +17081 ㎎ +17082 盝 +17083 丣 +17084 侸 +17085 僇 +17086 凧 +17087 咼 +17088 嘕 +17089 圝 +17090 塉 +17091 奐 +17092 婮 +17093 孞 +17094 岼 +17095 嶫 +17096 廕 +17097 怞 +17098 慗 +17099 扟 +17100 揓 +17101 擩 +17102 朖 +17103 桱 +17104 楯 +17105 橨 +17106 淛 +17107 滼 +17108 濲 +17109 烰 +17110 磌 +17111 ╧ +17112 譳 +17113 琸 +17114 祂 +17115 痥 +17116 縦 +17117 穔 +17118 竗 +17119 籯 +17120 秌 +17121 ﹌ +17122 甼 +17123 眐 +17124 乲 +17125 俴 +17126 僰 +17127 刱 +17128 卥 +17129 唊 +17130 噆 +17131 坘 +17132 塳 +17133 妅 +17134 峩 +17135 巏 +17136 弅 +17137 恔 +17138 慿 +17139 択 +17140 搆 +17141 杒 +17142 梜 +17143 榢 +17144 檏 +17145 歬 +17146 沰 +17147 渒 +17148 漦 +17149 瀔 +17150 焝 +17151 ↘ +17152 琄 +17153 疜 +17154 璌 +17155 縆 +17156 稫 +17157 窴 +17158 獽 +17159 籏 +17160 ㎏ +17161 甂 +17162 侹 +17163 僈 +17164 凨 +17165 匥 +17166 咾 +17167 嘖 +17168 奒 +17169 婯 +17170 孠 +17171 岾 +17172 嶬 +17173 廗 +17174 怟 +17175 扠 +17176 揔 +17177 擪 +17178 昁 +17179 朘 +17180 楰 +17181 欿 +17182 汯 +17183 淜 +17184 滽 +17185 濳 +17186 烱 +17187 磍 +17188 ╨ +17189 鱨 +17190 琹 +17191 璴 +17192 縧 +17193 竘 +17194 籰 +17195 秎 +17196 ﹍ +17197 甽 +17198 眑 +17199 乴 +17200 俵 +17201 刲 +17202 卨 +17203 唋 +17204 噇 +17205 坙 +17206 塴 +17207 妉 +17208 宭 +17209 峫 +17210 巐 +17211 弆 +17212 恖 +17213 憀 +17214 抣 +17215 搇 +17216 昹 +17217 杔 +17218 榣 +17219 檒 +17220 歭 +17221 沴 +17222 渓 +17223 漧 +17224 焞 +17225 碙 +17226 ↙ +17227 譒 +17228 癓 +17229 礚 +17230 疞 +17231 璍 +17232 稬 +17233 獿 +17234 籐 +17235 ㎜ +17236 盠 +17237 丩 +17238 侺 +17239 僉 +17240 匧 +17241 哃 +17242 圠 +17243 塋 +17244 婰 +17245 孡 +17246 峀 +17247 嶭 +17248 廘 +17249 怢 +17250 慙 +17251 扡 +17252 揕 +17253 擫 +17254 朙 +17255 桳 +17256 楲 +17257 橪 +17258 歀 +17259 汱 +17260 淟 +17261 濴 +17262 烲 +17263 磎 +17264 ╩ +17265 鱩 +17266 譵 +17267 癿 +17268 祄 +17269 痬 +17270 璵 +17271 穖 +17272 竚 +17273 籱 +17274 秏 +17275 ﹎ +17276 甿 +17277 眒 +17278 乵 +17279 僲 +17280 刴 +17281 卪 +17282 唌 +17283 噈 +17284 坢 +17285 妋 +17286 媘 +17287 峬 +17288 巑 +17289 恗 +17290 抦 +17291 搈 +17292 攎 +17293 梞 +17294 榤 +17295 檓 +17296 歮 +17297 沵 +17298 漨 +17299 瀖 +17300 焟 +17301 譓 +17302 琈 +17303 癕 +17304 礛 +17305 疢 +17306 璏 +17307 縈 +17308 稭 +17309 窶 +17310 玀 +17311 籑 +17312 ㎝ +17313 甅 +17314 丮 +17315 侻 +17316 匨 +17317 哅 +17318 嘙 +17319 圡 +17320 塎 +17321 奙 +17322 孧 +17323 峂 +17324 嶮 +17325 怣 +17326 扢 +17327 揗 +17328 昅 +17329 朚 +17330 桵 +17331 楳 +17332 歁 +17333 汳 +17334 淢 +17335 濵 +17336 烳 +17337 ╪ +17338 譶 +17339 皀 +17340 痭 +17341 璶 +17342 縩 +17343 穘 +17344 竛 +17345 秐 +17346 畁 +17347 乶 +17348 僴 +17349 刵 +17350 唍 +17351 噉 +17352 塶 +17353 妌 +17354 媙 +17355 宯 +17356 峮 +17357 巒 +17358 弉 +17359 恘 +17360 抧 +17361 搉 +17362 昻 +17363 杗 +17364 榥 +17365 歯 +17366 沶 +17367 渘 +17368 焠 +17369 碞 +17370 ∟ +17371 鱊 +17372 譔 +17373 琋 +17374 癗 +17375 疦 +17376 璑 +17377 窷 +17378 籒 +17379 禢 +17380 ㎞ +17381 甆 +17382 丯 +17383 侼 +17384 凬 +17385 匩 +17386 哊 +17387 圢 +17388 塏 +17389 奛 +17390 婲 +17391 孨 +17392 峃 +17393 嶯 +17394 怤 +17395 慛 +17396 扤 +17397 揘 +17398 擭 +17399 朜 +17400 桸 +17401 楴 +17402 歂 +17403 汵 +17404 淣 +17405 漀 +17406 濶 +17407 ︸ +17408 ● +17409 ゑ +17410 Ⅰ +17411 ヱ +17412 凂 +17413 咇 +17414 嗰 +17415 囻 +17416 堮 +17417 夞 +17418 婑 +17419 嬹 +17420 岏 +17421 嶑 +17422 庱 +17423 忨 +17424 戱 +17425 旕 +17426 栺 +17427 楍 +17428 橊 +17429 欛 +17430 汃 +17431 涶 +17432 滖 +17433 燅 +17434 馺 +17435 馸 +17436 駘 +17437 瘳 
+17438 駖 +17439 馹 +17440 馵 +17441 馌 +17442 駞 +17443 駢 +17444 駥 +17445 駦 +17446 駨 +17447 駩 +17448 駪 +17449 駬 +17450 駮 +17451 駰 +17452 駴 +17453 駵 +17454 駶 +17455 駸 +17456 ㄑ +17457 ρ +17458 ⊙ +17459 ┭ +17460 ⒀ +17461 Q +17462 パ +17463 傃 +17464 冄 +17465 勓 +17466 呇 +17467 囇 +17468 堁 +17469 娧 +17470 嬔 +17471 屟 +17472 嵮 +17473 幯 +17474 徰 +17475 愌 +17476 捬 +17477 撗 +17478 栄 +17479 椦 +17480 樠 +17481 氀 +17482 浹 +17483 溠 +17484 澭 +17485 裛 +17486 裗 +17487 褈 +17488 裑 +17489 裖 +17490 褅 +17491 袬 +17492 裓 +17493 褉 +17494 褋 +17495 褌 +17496 褍 +17497 褎 +17498 褏 +17499 褑 +17500 褔 +17501 褖 +17502 褗 +17503 褘 +17504 褜 +17505 褝 +17506 褞 +17507 褟 +17508 褤 +17509 褧 +17510 褨 +17511 褩 +17512 褬 +17513 褭 +17514 褱 +17515 褳 +17516 褵 +17517 窆 +17518 馎 +17519 袮 +17520 窳 +17521 衤 +17522 袯 +17523 馽 +17524 馛 +17525 裞 +17526 裠 +17527 袲 +17528 馿 +17529 馝 +17530 袳 +17531 耖 +17532 耔 +17533 耠 +17534 馞 +17535 裦 +17536 袴 +17537 馟 +17538 裧 +17539 袵 +17540 駂 +17541 馠 +17542 裩 +17543 袶 +17544 駃 +17545 馡 +17546 裪 +17547 袸 +17548 耥 +17549 耢 +17550 裉 +17551 馢 +17552 袹 +17553 駅 +17554 馣 +17555 裬 +17556 袺 +17557 駆 +17558 馤 +17559 裭 +17560 袻 +17561 駇 +17562 馦 +17563 裮 +17564 袽 +17565 駈 +17566 馧 +17567 裯 +17568 袾 +17569 駉 +17570 袿 +17571 裼 +17572 裵 +17573 裀 +17574 駋 +17575 馫 +17576 裶 +17577 裃 +17578 駌 +17579 裷 +17580 裄 +17581 裺 +17582 裇 +17583 駎 +17584 裻 +17585 駏 +17586 馯 +17587 裿 +17588 駑 +17589 褀 +17590 裌 +17591 褁 +17592 裍 +17593 駓 +17594 褃 +17595 駔 +17596 褄 +17597 裐 +17598 駹 +17599 褷 +17600 磑 +17601 ╫ +17602 鱫 +17603 琽 +17604 皁 +17605 祇 +17606 痮 +17607 璷 +17608 穙 +17609 玱 +17610 籵 +17611 秓 +17612 ﹐ +17613 畂 +17614 眔 +17615 乷 +17616 俹 +17617 僶 +17618 刼 +17619 卭 +17620 坥 +17621 塷 +17622 妎 +17623 宱 +17624 巓 +17625 憃 +17626 抩 +17627 搊 +17628 攐 +17629 杘 +17630 榦 +17631 歰 +17632 沷 +17633 漮 +17634 碠 +17635 ∣ +17636 譕 +17637 琌 +17638 癘 +17639 礝 +17640 疧 +17641 璒 +17642 縊 +17643 稯 +17644 玂 +17645 禣 +17646 丱 +17647 侽 +17648 凮 +17649 匫 +17650 哋 +17651 圤 +17652 塐 +17653 奜 +17654 峅 +17655 嶰 +17656 廜 +17657 怬 +17658 扥 +17659 揙 +17660 擮 +17661 昈 +17662 朞 +17663 桹 +17664 橭 +17665 歄 +17666 汷 +17667 淥 +17668 濷 +17669 烵 +17670 骱 +17671 『 +17672 Ш +17673 ┖ +17674 ⒑ +17675 : +17676 伜 +17677 偤 +17678 吅 +17679 喓 +17680 姾 +17681 嫼 +17682 尯 +17683 嵑 +17684 幒 +17685 徍 +17686 懞 +17687 捄 +17688 摵 +17689 柡 +17690 櫤 +17691 毢 +17692 澓 +17693 灪 +17694 篳 +17695 篰 +17696 簙 +17697 簗 +17698 篲 +17699 篬 +17700 篯 +17701 簘 +17702 篅 +17703 篭 +17704 簚 +17705 簛 +17706 簜 +17707 簝 +17708 簣 +17709 簤 +17710 簥 +17711 簨 +17712 簩 +17713 簬 +17714 簭 +17715 簮 +17716 簯 +17717 簰 +17718 簱 +17719 簲 +17720 簳 +17721 簴 +17722 簵 +17723 簶 +17724 簹 +17725 簺 +17726 簻 +17727 簼 +17728 ◇ +17729 侒 +17730 傮 +17731 凅 +17732 勼 +17733 咉 +17734 圀 +17735 夡 +17736 婓 +17737 嬻 +17738 岓 +17739 庴 +17740 忬 +17741 愺 +17742 戵 +17743 掦 +17744 擉 +17745 旙 +17746 曮 +17747 栿 +17748 楏 +17749 橌 +17750 欝 +17751 汅 +17752 涹 +17753 滙 +17754 濗 +17755 烍 +17756 燇 +17757 骭 +17758 蟓 +17759 骩 +17760 骫 +17761 髛 +17762 螫 +17763 髝 +17764 髠 +17765 髢 +17766 髤 +17767 髥 +17768 髧 +17769 髨 +17770 髩 +17771 髬 +17772 髱 +17773 髲 +17774 髳 +17775 髵 +17776 髶 +17777 髸 +17778 髺 +17779 髼 +17780 髽 +17781 髾 +17782 髿 +17783 鬀 +17784 鬂 +17785 鬅 +17786 ㄓ +17787 τ +17788 ∮ +17789 ┯ +17790 ⒂ +17791 S +17792 佊 +17793 傆 +17794 冇 +17795 呌 +17796 囉 +17797 堄 +17798 娪 +17799 嵱 +17800 幱 +17801 徲 +17802 懹 +17803 撚 +17804 斢 +17805 栍 +17806 椨 +17807 櫽 +17808 氂 +17809 浻 +17810 溣 +17811 澯 +17812 炗 +17813 熡 +17814 觍 +17815 觺 +17816 觃 +17817 覿 +17818 觷 +17819 觹 +17820 觻 +17821 觽 +17822 觾 +17823 觿 +17824 訁 +17825 訃 +17826 訄 +17827 訆 +17828 訉 +17829 訋 +17830 訌 +17831 訍 +17832 訐 
+17833 訒 +17834 訔 +17835 訖 +17836 託 +17837 訛 +17838 訜 +17839 ︱ +17840 ◎ +17841 Ⅱ +17842 ヲ +17843 侐 +17844 凃 +17845 咈 +17846 嗱 +17847 囼 +17848 婒 +17849 嬺 +17850 庲 +17851 忩 +17852 愹 +17853 掤 +17854 擈 +17855 楎 +17856 欜 +17857 汄 +17858 涷 +17859 濖 +17860 烌 +17861 騸 +17862 騶 +17863 騕 +17864 騗 +17865 騵 +17866 虍 +17867 駺 +17868 騖 +17869 騹 +17870 騺 +17871 騻 +17872 騼 +17873 騿 +17874 驂 +17875 驄 +17876 驆 +17877 驇 +17878 驉 +17879 驌 +17880 驑 +17881 驒 +17882 驓 +17883 驔 +17884 驖 +17885 驘 +17886 ㄒ +17887 σ +17888 ∫ +17889 б +17890 ┮ +17891 ⒁ +17892 R +17893 佉 +17894 勔 +17895 囈 +17896 壱 +17897 娨 +17898 嬕 +17899 屢 +17900 嵰 +17901 幰 +17902 徱 +17903 愐 +17904 撘 +17905 斠 +17906 栆 +17907 椧 +17908 樢 +17909 櫼 +17910 氁 +17911 浺 +17912 溡 +17913 澮 +17914 炓 +17915 襚 +17916 襘 +17917 襼 +17918 襹 +17919 襙 +17920 襕 +17921 襗 +17922 襺 +17923 褸 +17924 襽 +17925 襾 +17926 覀 +17927 覂 +17928 覅 +17929 覇 +17930 覈 +17931 覉 +17932 覊 +17933 覌 +17934 覎 +17935 覐 +17936 覑 +17937 覒 +17938 覔 +17939 覕 +17940 覗 +17941 覘 +17942 覙 +17943 覛 +17944 覜 +17945 覝 +17946 覞 +17947 覟 +17948 覠 +17949 ︳ +17950 ◆ +17951 Ⅳ +17952 ヴ +17953 侓 +17954 勽 +17955 咊 +17956 嗶 +17957 圁 +17958 堲 +17959 婔 +17960 嬼 +17961 岕 +17962 嶔 +17963 庺 +17964 忯 +17965 愻 +17966 桇 +17967 楐 +17968 橍 +17969 欞 +17970 涺 +17971 鬬 +17972 鬪 +17973 糇 +17974 舭 +17975 鬫 +17976 舡 +17977 鬩 +17978 魗 +17979 魙 +17980 鬇 +17981 簦 +17982 鬨 +17983 舯 +17984 魛 +17985 魜 +17986 魝 +17987 魞 +17988 魠 +17989 魡 +17990 魢 +17991 魣 +17992 魤 +17993 魥 +17994 魦 +17995 魧 +17996 魨 +17997 魩 +17998 魪 +17999 魫 +18000 魬 +18001 魭 +18002 魮 +18003 魰 +18004 魲 +18005 魳 +18006 魴 +18007 魶 +18008 魸 +18009 魹 +18010 魺 +18011 ㄔ +18012 髟 +18013 υ +18014 ≡ +18015 г +18016 ┰ +18017 ⒃ +18018 T +18019 佋 +18020 傇 +18021 勗 +18022 呍 +18023 囋 +18024 壴 +18025 娫 +18026 屧 +18027 嵲 +18028 幵 +18029 愒 +18030 栐 +18031 椩 +18032 樤 +18033 櫾 +18034 氃 +18035 浽 +18036 溤 +18037 澰 +18038 熢 +18039 訿 +18040 詜 +18041 訽 +18042 訹 +18043 訞 +18044 詟 +18045 詤 +18046 詥 +18047 詧 +18048 詨 +18049 詪 +18050 詫 +18051 詬 +18052 詯 +18053 詴 +18054 詵 +18055 詶 +18056 詷 +18057 詸 +18058 詺 +18059 詻 +18060 詾 +18061 詿 +18062 ■ +18063 Ⅵ +18064 ヶ +18065 傱 +18066 咑 +18067 嗹 +18068 圅 +18069 夦 +18070 嬾 +18071 忲 +18072 愽 +18073 戹 +18074 掱 +18075 旜 +18076 曵 +18077 桍 +18078 橏 +18079 濚 +18080 烐 +18081 鯺 +18082 鰚 +18083 鯹 +18084 鰗 +18085 鰙 +18086 觯 +18087 鯸 +18088 鰛 +18089 鰜 +18090 鰝 +18091 鰞 +18092 鰠 +18093 鰡 +18094 鰢 +18095 鰦 +18096 鰧 +18097 鰨 +18098 鰪 +18099 鰮 +18100 鰯 +18101 鰰 +18102 鰳 +18103 鰴 +18104 鰵 +18105 鰷 +18106 鰹 +18107 鰺 +18108 ㄖ +18109 χ +18110 ≈ +18111 ┲ +18112 ⒅ +18113 V +18114 佒 +18115 冎 +18116 勚 +18117 呏 +18118 堉 +18119 屩 +18120 嵵 +18121 徶 +18122 捴 +18123 斨 +18124 栔 +18125 椫 +18126 樦 +18127 欀 +18128 浿 +18129 溨 +18130 澲 +18131 炛 +18132 熤 +18133 謃 +18134 謁 +18135 謢 +18136 謤 +18137 謥 +18138 謧 +18139 謩 +18140 謪 +18141 謮 +18142 謯 +18143 謰 +18144 謱 +18145 謵 +18146 謶 +18147 謷 +18148 謸 +18149 謺 +18150 謻 +18151 謼 +18152 謽 +18153 謾 +18154 謿 +18155 譀 +18156 譁 +18157 譂 +18158 譃 +18159 譄 +18160 黪 +18161 ︴ +18162 □ +18163 Ⅴ +18164 ヵ +18165 傰 +18166 匁 +18167 咍 +18168 嗸 +18169 圂 +18170 堳 +18171 夣 +18172 嬽 +18173 岝 +18174 嶕 +18175 庻 +18176 愼 +18177 掯 +18178 旛 +18179 桋 +18180 楑 +18181 橎 +18182 欟 +18183 汋 +18184 涻 +18185 滜 +18186 鮚 +18187 酲 +18188 鮺 +18189 鮸 +18190 鮗 +18191 鮙 +18192 鮷 +18193 鮹 +18194 酾 +18195 醵 +18196 魼 +18197 鮘 +18198 鮻 +18199 鮽 +18200 鮿 +18201 鯀 +18202 鯁 +18203 鯃 +18204 鯄 +18205 鯆 +18206 鯈 +18207 鯋 +18208 鯍 +18209 鯎 +18210 鯏 +18211 鯐 +18212 鯑 +18213 鯒 +18214 鯓 +18215 鯕 +18216 鯗 +18217 鯙 +18218 鯚 +18219 ㄕ +18220 φ +18221 ≌ +18222 д +18223 ┱ +18224 ⒄ +18225 U +18226 佌 +18227 傉 
+18228 冋 +18229 呎 +18230 喺 +18231 囌 +18232 堈 +18233 壵 +18234 娬 +18235 嬚 +18236 屨 +18237 嵳 +18238 幷 +18239 徴 +18240 愓 +18241 懻 +18242 捳 +18243 撜 +18244 斦 +18245 樥 +18246 氄 +18247 浾 +18248 溦 +18249 炚 +18250 熣 +18251 諂 +18252 諀 +18253 誟 +18254 誁 +18255 諃 +18256 諄 +18257 諅 +18258 諆 +18259 諉 +18260 諌 +18261 諍 +18262 諎 +18263 諑 +18264 諓 +18265 諔 +18266 諕 +18267 諗 +18268 諘 +18269 諙 +18270 諛 +18271 諝 +18272 諞 +18273 諟 +18274 諠 +18275 諡 +18276 諢 +18277 ▲ +18278 Ⅷ +18279 侙 +18280 傴 +18281 凐 +18282 匄 +18283 嗻 +18284 堷 +18285 夬 +18286 婙 +18287 孁 +18288 嶘 +18289 庿 +18290 忴 +18291 慀 +18292 掵 +18293 擑 +18294 桒 +18295 楕 +18296 橒 +18297 欨 +18298 涾 +18299 滧 +18300 濜 +18301 烒 +18302 燌 +18303 鴡 +18304 鴟 +18305 鳾 +18306 鴀 +18307 鴞 +18308 鴠 +18309 鳣 +18310 鴢 +18311 鴤 +18312 鴥 +18313 鴫 +18314 鴬 +18315 鴰 +18316 鴱 +18317 鴴 +18318 鴶 +18319 鴸 +18320 鴹 +18321 鴽 +18322 鴾 +18323 鵀 +18324 鵁 +18325 ㄘ +18326 ∝ +18327 ж +18328 ┴ +18329 X +18330 佖 +18331 傌 +18332 冐 +18333 勜 +18334 呚 +18335 嗀 +18336 囏 +18337 娯 +18338 嬝 +18339 嵷 +18340 庁 +18341 捸 +18342 撠 +18343 曍 +18344 栘 +18345 権 +18346 欂 +18347 氊 +18348 涁 +18349 澵 +18350 豟 +18351 豝 +18352 貇 +18353 貄 +18354 豞 +18355 丿 +18356 豙 +18357 豜 +18358 貃 +18359 貆 +18360 谸 +18361 丌 +18362 豛 +18363 乇 +18364 貎 +18365 貏 +18366 貑 +18367 貒 +18368 貕 +18369 貗 +18370 貙 +18371 貚 +18372 貛 +18373 貜 +18374 貟 +18375 貣 +18376 貤 +18377 貥 +18378 丶 +18379 篴 +18380 篈 +18381 觓 +18382 覣 +18383 竽 +18384 騛 +18385 褹 +18386 鬭 +18387 鬉 +18388 舄 +18389 鯝 +18390 謅 +18391 諥 +18392 鮝 +18393 魽 +18394 誂 +18395 酹 +18396 鴄 +18397 鳤 +18398 豠 +18399 谹 +18400 劐 +18401 羝 +18402 銎 +18403 劓 +18404 篵 +18405 骲 +18406 驜 +18407 觔 +18408 覤 +18409 騜 +18410 鬮 +18411 鬊 +18412 鯾 +18413 鮞 +18414 誃 +18415 鴅 +18416 谺 +18417 篶 +18418 篊 +18419 骳 +18420 觕 +18421 覥 +18422 騝 +18423 襝 +18424 鬰 +18425 鬋 +18426 訡 +18427 鯿 +18428 鯟 +18429 謈 +18430 鲧 +18431 魿 +18432 誧 +18433 誄 +18434 鴆 +18435 鳦 +18436 谻 +18437 篸 +18438 篋 +18439 骴 +18440 驞 +18441 觗 +18442 鬌 +18443 詃 +18444 鯠 +18445 謉 +18446 鮠 +18447 躔 +18448 豥 +18449 匦 +18450 篹 +18451 篍 +18452 骵 +18453 觘 +18454 騟 +18455 襡 +18456 褽 +18457 鬳 +18458 詄 +18459 絷 +18460 鰁 +18461 鋈 +18462 鮡 +18463 鮁 +18464 誩 +18465 誆 +18466 鴈 +18467 豦 +18468 谽 +18469 厣 +18470 骹 +18471 觙 +18472 騠 +18473 騀 +18474 襢 +18475 褾 +18476 鬴 +18477 鬎 +18478 詅 +18479 訤 +18480 鰂 +18481 鯢 +18482 謋 +18483 諪 +18484 谾 +18485 篻 +18486 篏 +18487 骻 +18488 驡 +18489 觛 +18490 覩 +18491 騡 +18492 襣 +18493 褿 +18494 鬵 +18495 鬐 +18496 鰃 +18497 謌 +18498 諫 +18499 鮣 +18500 鮃 +18501 鴊 +18502 鳪 +18503 篽 +18504 篐 +18505 骽 +18506 觝 +18507 騂 +18508 襤 +18509 襀 +18510 鬶 +18511 鬑 +18512 詇 +18513 訦 +18514 鰄 +18515 謍 +18516 鮤 +18517 鮄 +18518 誋 +18519 豩 +18520 豀 +18521 篿 +18522 篒 +18523 骾 +18524 驣 +18525 觟 +18526 騣 +18527 騃 +18528 襥 +18529 襂 +18530 鬷 +18531 鬒 +18532 詉 +18533 訧 +18534 敉 +18535 纛 +18536 鰅 +18537 鯥 +18538 鐾 +18539 鮅 +18540 蹯 +18541 鴌 +18542 鳬 +18543 豂 +18544 篔 +18545 骿 +18546 騤 +18547 騄 +18548 襧 +18549 襃 +18550 訨 +18551 鰆 +18552 鯦 +18553 謏 +18554 鮆 +18555 誮 +18556 鴍 +18557 鳭 +18558 豭 +18559 豃 +18560 篕 +18561 髃 +18562 驥 +18563 觡 +18564 覭 +18565 騅 +18566 襅 +18567 鬹 +18568 鬕 +18569 詋 +18570 訩 +18571 鰇 +18572 諯 +18573 鮧 +18574 鮇 +18575 誎 +18576 鴎 +18577 鳮 +18578 豮 +18579 豄 +18580 簂 +18581 篖 +18582 驦 +18583 騦 +18584 騆 +18585 襆 +18586 鬺 +18587 鬖 +18588 詌 +18589 謑 +18590 諰 +18591 誏 +18592 鴏 +18593 豯 +18594 簃 +18595 髆 +18596 驧 +18597 觤 +18598 騧 +18599 鬽 +18600 鬗 +18601 詍 +18602 誱 +18603 誐 +18604 鴐 +18605 豰 +18606 簄 +18607 篘 +18608 髇 +18609 觧 +18610 覰 +18611 篑 +18612 笱 +18613 騨 +18614 騈 +18615 襫 +18616 襈 +18617 鬾 +18618 詎 +18619 訬 +18620 鰊 +18621 謓 +18622 諲 
+18623 鮊 +18624 誑 +18625 鴑 +18626 鳱 +18627 豱 +18628 篛 +18629 驩 +18630 篚 +18631 騩 +18632 騉 +18633 襬 +18634 鬿 +18635 鬙 +18636 詏 +18637 艉 +18638 鰋 +18639 鯫 +18640 謔 +18641 諳 +18642 誳 +18643 誒 +18644 鹾 +18645 躜 +18646 鳲 +18647 豲 +18648 刂 +18649 簆 +18650 篜 +18651 觩 +18652 騪 +18653 騊 +18654 襭 +18655 魀 +18656 訮 +18657 鰌 +18658 鯬 +18659 謕 +18660 諴 +18661 鮌 +18662 誴 +18663 誔 +18664 鴓 +18665 豍 +18666 簈 +18667 篞 +18668 髊 +18669 觪 +18670 騋 +18671 襮 +18672 襋 +18673 鬛 +18674 詑 +18675 訯 +18676 鯭 +18677 諵 +18678 誵 +18679 豵 +18680 簉 +18681 篟 +18682 髍 +18683 驲 +18684 觬 +18685 覴 +18686 騬 +18687 襌 +18688 魊 +18689 詒 +18690 訰 +18691 鯮 +18692 諶 +18693 鮎 +18694 誶 +18695 誖 +18696 鳵 +18697 豶 +18698 簊 +18699 篠 +18700 觭 +18701 覵 +18702 騭 +18703 騍 +18704 襰 +18705 襍 +18706 詓 +18707 鰏 +18708 鮯 +18709 鮏 +18710 誷 +18711 豷 +18712 簍 +18713 篢 +18714 觮 +18715 襎 +18716 魌 +18717 誸 +18718 豻 +18719 簎 +18720 髐 +18721 骍 +18722 覷 +18723 簏 +18724 鬠 +18725 糈 +18726 鯱 +18727 諹 +18728 鮱 +18729 踣 +18730 鴘 +18731 鳸 +18732 豼 +18733 豒 +18734 刳 +18735 簐 +18736 骎 +18737 騐 +18738 襳 +18739 襐 +18740 魐 +18741 鬡 +18742 詖 +18743 鰒 +18744 鮲 +18745 誚 +18746 鴙 +18747 豽 +18748 豓 +18749 簑 +18750 篧 +18751 骔 +18752 觲 +18753 覹 +18754 騱 +18755 騑 +18756 襴 +18757 襑 +18758 魒 +18759 鰓 +18760 謜 +18761 鮳 +18762 鮓 +18763 鴚 +18764 鳺 +18765 豾 +18766 簒 +18767 篨 +18768 騲 +18769 騒 +18770 襵 +18771 襒 +18772 魓 +18773 鬤 +18774 詘 +18775 鰔 +18776 鯴 +18777 鮴 +18778 鮔 +18779 誜 +18780 鳻 +18781 豿 +18782 骙 +18783 觵 +18784 覻 +18785 簖 +18786 魕 +18787 訷 +18788 鰕 +18789 鯵 +18790 諽 +18791 鮕 +18792 誽 +18793 鴜 +18794 鳼 +18795 貀 +18796 豗 +18797 簔 +18798 篫 +18799 髗 +18800 觶 +18801 覼 +18802 騔 +18803 魖 +18804 鬦 +18805 訸 +18806 謟 +18807 鮶 +18808 鮖 +18809 誾 +18810 貁 +18811 豘 +18812 酤 +18813 鳋 +18814 觜 +18815 籂 +18816 覡 +18817 魻 +18818 誀 +18819 譅 +18820 鵂 +18821 貭 +18822 磒 +18823 ╬ +18824 譸 +18825 琾 +18826 皃 +18827 祊 +18828 痯 +18829 璸 +18830 穚 +18831 竝 +18832 玴 +18833 籶 +18834 秔 +18835 ﹑ +18836 畃 +18837 眕 +18838 俻 +18839 僷 +18840 刾 +18841 唒 +18842 噋 +18843 坧 +18844 塸 +18845 妏 +18846 媝 +18847 宲 +18848 峱 +18849 弍 +18850 恜 +18851 憄 +18852 抪 +18853 昿 +18854 杙 +18855 檖 +18856 歱 +18857 漰 +18858 瀙 +18859 焢 +18860 碢 +18861 ≒ +18862 鱌 +18863 譖 +18864 癙 +18865 礟 +18866 疨 +18867 璓 +18868 縋 +18869 稰 +18870 窹 +18871 玃 +18872 籔 +18873 禤 +18874 ㏄ +18875 丳 +18876 侾 +18877 働 +18878 匬 +18879 哖 +18880 嘝 +18881 圥 +18882 塒 +18883 奝 +18884 婸 +18885 孭 +18886 峆 +18887 嶱 +18888 怭 +18889 慞 +18890 扨 +18891 擯 +18892 朠 +18893 桺 +18894 楶 +18895 歅 +18896 汸 +18897 淧 +18898 漃 +18899 濸 +18900 烶 +18901 磓 +18902 譹 +18903 皅 +18904 祋 +18905 痲 +18906 璹 +18907 縬 +18908 穛 +18909 竡 +18910 玵 +18911 籷 +18912 秖 +18913 ﹒ +18914 畄 +18915 眖 +18916 乹 +18917 俼 +18918 僸 +18919 剄 +18920 卶 +18921 唓 +18922 噏 +18923 坬 +18924 塹 +18925 妐 +18926 宷 +18927 峲 +18928 巕 +18929 恞 +18930 憅 +18931 抭 +18932 搎 +18933 攓 +18934 晀 +18935 杚 +18936 榪 +18937 檘 +18938 泀 +18939 渜 +18940 瀜 +18941 焣 +18942 碤 +18943 ≦ +18944 鱍 +18945 癚 +18946 礠 +18947 疩 +18948 璔 +18949 縌 +18950 玅 +18951 籕 +18952 ㏎ +18953 盦 +18954 丵 +18955 俀 +18956 僎 +18957 凲 +18958 匭 +18959 哘 +18960 嘠 +18961 圦 +18962 塓 +18963 奞 +18964 婹 +18965 孮 +18966 峇 +18967 嶲 +18968 廞 +18969 怮 +18970 扱 +18971 擰 +18972 昋 +18973 朡 +18974 桻 +18975 楺 +18976 歈 +18977 漄 +18978 濹 +18979 烸 +18980 磖 +18981 ╮ +18982 鱮 +18983 譺 +18984 皉 +18985 祌 +18986 璻 +18987 縭 +18988 穜 +18989 竢 +18990 玶 +18991 籸 +18992 秗 +18993 ﹔ +18994 畆 +18995 眗 +18996 乺 +18997 俽 +18998 剅 +18999 卹 +19000 唕 +19001 坮 +19002 塺 +19003 妑 +19004 媟 +19005 宺 +19006 弐 +19007 恟 +19008 憆 +19009 抮 +19010 搑 +19011 杛 +19012 梤 +19013 榬 +19014 檙 +19015 渞 +19016 漴 +19017 焤 
+19018 碦 +19019 ≧ +19020 鱎 +19021 琑 +19022 癛 +19023 礡 +19024 疪 +19025 稲 +19026 窻 +19027 玆 +19028 籖 +19029 ㏑ +19030 俁 +19031 僐 +19032 凴 +19033 哛 +19034 嘡 +19035 圧 +19036 塕 +19037 奟 +19038 婻 +19039 峈 +19040 嶳 +19041 怰 +19042 慠 +19043 扲 +19044 揜 +19045 昍 +19046 朢 +19047 桼 +19048 楻 +19049 歊 +19050 汻 +19051 漅 +19052 磗 +19053 鱯 +19054 瑂 +19055 皊 +19056 痵 +19057 穝 +19058 竤 +19059 玸 +19060 籹 +19061 秙 +19062 ﹕ +19063 畇 +19064 乻 +19065 俿 +19066 僺 +19067 剆 +19068 唖 +19069 噑 +19070 坰 +19071 塻 +19072 妔 +19073 媠 +19074 宻 +19075 巗 +19076 恠 +19077 憇 +19078 抯 +19079 搒 +19080 攕 +19081 晄 +19082 杝 +19083 梥 +19084 檚 +19085 歴 +19086 焥 +19087 碨 +19088 ⊿ +19089 鱏 +19090 琒 +19091 癝 +19092 璖 +19093 縎 +19094 稴 +19095 窼 +19096 玈 +19097 籗 +19098 ㏒ +19099 盨 +19100 凷 +19101 匰 +19102 哠 +19103 圫 +19104 塖 +19105 孲 +19106 峉 +19107 嶴 +19108 怱 +19109 慡 +19110 扴 +19111 揝 +19112 朣 +19113 桽 +19114 橲 +19115 歋 +19116 汼 +19117 烻 +19118 ㄟ +19119 ∵ +19120 ⑦ +19121 _ +19122 佭 +19123 傔 +19124 冞 +19125 勥 +19126 呥 +19127 嗊 +19128 囘 +19129 堖 +19130 夁 +19131 娺 +19132 屵 +19133 嵾 +19134 庍 +19135 愡 +19136 戇 +19137 撨 +19138 斶 +19139 曔 +19140 栠 +19141 椷 +19142 樳 +19143 欉 +19144 氝 +19145 涍 +19146 溸 +19147 澾 +19148 炦 +19149 熯 +19150 擗 +19151 攥 +19152 遾 +19153 擐 +19154 擤 +19155 邆 +19156 邇 +19157 邉 +19158 邌 +19159 邍 +19160 邎 +19161 邐 +19162 邒 +19163 邔 +19164 邖 +19165 邘 +19166 邚 +19167 邜 +19168 邞 +19169 邟 +19170 邠 +19171 邤 +19172 邥 +19173 邧 +19174 邫 +19175 邭 +19176 邲 +19177 邷 +19178 邼 +19179 邽 +19180 邿 +19181 遖 +19182 逜 +19183 哜 +19184 吣 +19185 遚 +19186 逤 +19187 逥 +19188 遝 +19189 逧 +19190 遟 +19191 逩 +19192 逪 +19193 遡 +19194 逫 +19195 遤 +19196 逬 +19197 遦 +19198 逰 +19199 遧 +19200 遪 +19201 逳 +19202 遫 +19203 逴 +19204 唪 +19205 咴 +19206 啧 +19207 遬 +19208 逷 +19209 遯 +19210 逹 +19211 遰 +19212 逺 +19213 遱 +19214 逽 +19215 逿 +19216 遳 +19217 遀 +19218 遶 +19219 遆 +19220 遈 +19221 啐 +19222 郀 +19223 磘 +19224 譼 +19225 瑃 +19226 皌 +19227 縯 +19228 穞 +19229 竧 +19230 秚 +19231 ﹖ +19232 畉 +19233 乼 +19234 倀 +19235 僼 +19236 卼 +19237 唗 +19238 噒 +19239 坱 +19240 塼 +19241 妕 +19242 媡 +19243 宼 +19244 峵 +19245 巘 +19246 弔 +19247 恡 +19248 憈 +19249 抰 +19250 搕 +19251 攖 +19252 晅 +19253 杢 +19254 梩 +19255 榯 +19256 檛 +19257 泃 +19258 渢 +19259 焧 +19260 ═ +19261 鱐 +19262 琓 +19263 癟 +19264 礣 +19265 疶 +19266 璗 +19267 縏 +19268 稵 +19269 窽 +19270 玊 +19271 ㏕ +19272 乀 +19273 僒 +19274 凾 +19275 圱 +19276 奣 +19277 婽 +19278 孴 +19279 峊 +19280 嶵 +19281 廡 +19282 怲 +19283 扵 +19284 揟 +19285 朤 +19286 楾 +19287 橳 +19288 歍 +19289 汿 +19290 淭 +19291 濼 +19292 烼 +19293 ╱ +19294 皍 +19295 祏 +19296 痷 +19297 璾 +19298 縰 +19299 穟 +19300 竨 +19301 玼 +19302 籾 +19303 秛 +19304 ﹗ +19305 眜 +19306 乽 +19307 倁 +19308 剈 +19309 卽 +19310 唘 +19311 媢 +19312 寀 +19313 巙 +19314 弖 +19315 憉 +19316 抲 +19317 攗 +19318 晆 +19319 杣 +19320 梪 +19321 榰 +19322 泆 +19323 瀠 +19324 碪 +19325 ║ +19326 鱑 +19327 譛 +19328 琔 +19329 癠 +19330 礥 +19331 疷 +19332 縐 +19333 稶 +19334 窾 +19335 玌 +19336 籙 +19337 ︰ +19338 甎 +19339 乁 +19340 俇 +19341 刄 +19342 匲 +19343 哢 +19344 嘦 +19345 圲 +19346 塙 +19347 婾 +19348 峌 +19349 怳 +19350 慤 +19351 扷 +19352 揢 +19353 昒 +19354 楿 +19355 橴 +19356 沀 +19357 淯 +19358 漊 +19359 濽 +19360 烾 +19361 → +19362 侜 +19363 傶 +19364 凓 +19365 匉 +19366 咜 +19367 嗿 +19368 堹 +19369 婜 +19370 岤 +19371 嶛 +19372 忷 +19373 慂 +19374 扂 +19375 旡 +19376 曻 +19377 桗 +19378 楘 +19379 橔 +19380 欪 +19381 汑 +19382 淂 +19383 滫 +19384 鷃 +19385 鷁 +19386 鷡 +19387 鷀 +19388 鶣 +19389 鶿 +19390 鷢 +19391 鷤 +19392 鷧 +19393 鷩 +19394 鷪 +19395 鷫 +19396 鷬 +19397 鷭 +19398 鷰 +19399 鷳 +19400 鷴 +19401 鷵 +19402 鷷 +19403 鷽 +19404 鷾 +19405 鸀 +19406 鸁 +19407 ㄚ +19408 ≮ +19409 ② +19410 Z +19411 傏 +19412 冓 
+19413 呞 +19414 堏 +19415 壼 +19416 娳 +19417 嬟 +19418 屭 +19419 徻 +19420 愙 +19421 戁 +19422 捼 +19423 撢 +19424 斱 +19425 曏 +19426 栚 +19427 椱 +19428 涄 +19429 溭 +19430 澸 +19431 炡 +19432 赻 +19433 赹 +19434 赱 +19435 赸 +19436 趠 +19437 贎 +19438 讠 +19439 赲 +19440 趢 +19441 趤 +19442 趦 +19443 趧 +19444 趩 +19445 趪 +19446 趫 +19447 趬 +19448 趭 +19449 趮 +19450 趯 +19451 趰 +19452 趲 +19453 趶 +19454 趷 +19455 趹 +19456 趻 +19457 趽 +19458 跀 +19459 跁 +19460 跇 +19461 跈 +19462 跉 +19463 跍 +19464 跒 +19465 跓 +19466 ※ +19467 Ⅸ +19468 侚 +19469 凒 +19470 匇 +19471 嗼 +19472 堸 +19473 夰 +19474 婛 +19475 孂 +19476 嶚 +19477 廀 +19478 忶 +19479 慁 +19480 戼 +19481 擓 +19482 旟 +19483 曺 +19484 桖 +19485 楖 +19486 橓 +19487 欩 +19488 汏 +19489 淁 +19490 濝 +19491 烓 +19492 燍 +19493 鵿 +19494 鵞 +19495 鵾 +19496 鶀 +19497 鵃 +19498 鶂 +19499 鶄 +19500 鶅 +19501 鶆 +19502 鶇 +19503 鶊 +19504 鶋 +19505 鶏 +19506 鶑 +19507 鶓 +19508 鶔 +19509 鶕 +19510 鶖 +19511 鶙 +19512 鶚 +19513 鶜 +19514 鶞 +19515 鶠 +19516 鶡 +19517 ㄙ +19518 ≠ +19519 з +19520 ┵ +19521 Y +19522 傎 +19523 冑 +19524 呝 +19525 嗁 +19526 囐 +19527 堎 +19528 壻 +19529 娰 +19530 嵸 +19531 庂 +19532 徺 +19533 愘 +19534 捹 +19535 撡 +19536 斮 +19537 栙 +19538 椯 +19539 樫 +19540 欃 +19541 涃 +19542 澷 +19543 炠 +19544 熧 +19545 賎 +19546 侔 +19547 賍 +19548 賋 +19549 賩 +19550 賫 +19551 貮 +19552 賮 +19553 賯 +19554 賰 +19555 賱 +19556 賲 +19557 賳 +19558 賵 +19559 賶 +19560 賷 +19561 賸 +19562 賹 +19563 賻 +19564 賾 +19565 賿 +19566 贁 +19567 贋 +19568 ← +19569 Ⅺ +19570 { +19571 侞 +19572 匊 +19573 咞 +19574 嘂 +19575 圎 +19576 夳 +19577 婝 +19578 孄 +19579 岥 +19580 嶜 +19581 忹 +19582 扄 +19583 掻 +19584 擕 +19585 旣 +19586 桘 +19587 楙 +19588 橕 +19589 欫 +19590 汒 +19591 淃 +19592 濢 +19593 烕 +19594 鸴 +19595 鸧 +19596 鸃 +19597 麁 +19598 麃 +19599 麄 +19600 麅 +19601 麆 +19602 麉 +19603 麊 +19604 麌 +19605 麍 +19606 麎 +19607 麏 +19608 麐 +19609 麑 +19610 麔 +19611 麕 +19612 麖 +19613 麘 +19614 麙 +19615 麚 +19616 麛 +19617 麜 +19618 麞 +19619 麠 +19620 麡 +19621 麢 +19622 麣 +19623 麤 +19624 麧 +19625 麨 +19626 ㄛ +19627 ≯ +19628 й +19629 ┷ +19630 ③ +19631 [ +19632 佦 +19633 傐 +19634 冔 +19635 勠 +19636 呟 +19637 嗃 +19638 囒 +19639 堐 +19640 嬠 +19641 屰 +19642 嵺 +19643 庅 +19644 徾 +19645 戂 +19646 捽 +19647 斲 +19648 曐 +19649 栛 +19650 椲 +19651 樭 +19652 欅 +19653 氎 +19654 涆 +19655 溮 +19656 澺 +19657 熪 +19658 踾 +19659 踻 +19660 郐 +19661 踎 +19662 踇 +19663 踋 +19664 踼 +19665 跕 +19666 踈 +19667 踿 +19668 蹃 +19669 蹅 +19670 蹆 +19671 蹌 +19672 蹍 +19673 蹎 +19674 蹏 +19675 蹔 +19676 蹕 +19677 蹖 +19678 蹗 +19679 蹘 +19680 蹚 +19681 蹛 +19682 蹜 +19683 蹝 +19684 蹞 +19685 蹡 +19686 蹢 +19687 蹧 +19688 蹨 +19689 蹫 +19690 ↑ +19691 Ⅻ +19692 | +19693 侟 +19694 傸 +19695 凕 +19696 匋 +19697 咟 +19698 堻 +19699 夵 +19700 岦 +19701 嶞 +19702 廃 +19703 忺 +19704 扅 +19705 掽 +19706 擖 +19707 旤 +19708 朁 +19709 桙 +19710 楛 +19711 橖 +19712 汓 +19713 淈 +19714 滭 +19715 濣 +19716 烖 +19717 燑 +19718 鼅 +19719 鼃 +19720 黖 +19721 黓 +19722 鼂 +19723 鼄 +19724 麫 +19725 鼆 +19726 鼇 +19727 鼈 +19728 鼉 +19729 鼊 +19730 鼌 +19731 鼏 +19732 鼑 +19733 鼒 +19734 鼔 +19735 鼕 +19736 鼖 +19737 鼘 +19738 鼚 +19739 鼛 +19740 鼜 +19741 鼝 +19742 鼟 +19743 鼡 +19744 鼣 +19745 鼥 +19746 鼦 +19747 鼧 +19748 鼪 +19749 鼫 +19750 鼮 +19751 ㄜ +19752 ≤ +19753 к +19754 ┸ +19755 ④ +19756 \ +19757 佨 +19758 呠 +19759 囓 +19760 堒 +19761 娷 +19762 庈 +19763 徿 +19764 戃 +19765 捾 +19766 斳 +19767 曑 +19768 椳 +19769 樮 +19770 欆 +19771 氒 +19772 溰 +19773 澻 +19774 熫 +19775 躟 +19776 躝 +19777 堋 +19778 躿 +19779 堙 +19780 墚 +19781 堍 +19782 埽 +19783 躙 +19784 軃 +19785 軄 +19786 軆 +19787 軉 +19788 軐 +19789 軓 +19790 軔 +19791 軕 +19792 軗 +19793 軘 +19794 軚 +19795 軞 +19796 軡 +19797 軣 +19798 鶤 +19799 赼 +19800 卩 +19801 阝 +19802 阢 +19803 鵄 +19804 賏 +19805 汆 +19806 馘 +19807 鸻 
+19808 踑 +19809 跘 +19810 坫 +19811 躠 +19812 蹵 +19813 塥 +19814 芰 +19815 苊 +19816 冁 +19817 鶥 +19818 赽 +19819 贐 +19820 鵥 +19821 鵅 +19822 鸼 +19823 鸅 +19824 踒 +19825 跙 +19826 黚 +19827 麭 +19828 躡 +19829 蹷 +19830 鷆 +19831 赾 +19832 贑 +19833 谇 +19834 鵆 +19835 賑 +19836 鸆 +19837 踓 +19838 跜 +19839 麮 +19840 蹸 +19841 鶧 +19842 赿 +19843 陴 +19844 鵧 +19845 貲 +19846 踕 +19847 垧 +19848 黡 +19849 麯 +19850 躣 +19851 鶨 +19852 趀 +19853 贓 +19854 鵨 +19855 鵈 +19856 勹 +19857 鹐 +19858 鸈 +19859 坶 +19860 凵 +19861 廴 +19862 黣 +19863 麰 +19864 躤 +19865 蹺 +19866 鶩 +19867 趂 +19868 贔 +19869 鵩 +19870 鵉 +19871 賔 +19872 鹒 +19873 鸉 +19874 踗 +19875 跢 +19876 黤 +19877 躥 +19878 蹻 +19879 鶪 +19880 趃 +19881 鵊 +19882 賕 +19883 貵 +19884 鹓 +19885 踘 +19886 黦 +19887 躦 +19888 蹽 +19889 鷋 +19890 鶫 +19891 趆 +19892 鵋 +19893 賖 +19894 跦 +19895 麳 +19896 躧 +19897 蹾 +19898 鷌 +19899 鶬 +19900 趇 +19901 贗 +19902 賗 +19903 亠 +19904 鹖 +19905 鸌 +19906 跧 +19907 垲 +19908 黫 +19909 躨 +19910 鷍 +19911 鶭 +19912 趈 +19913 贘 +19914 鵭 +19915 賘 +19916 鹙 +19917 鸍 +19918 踛 +19919 跩 +19920 黬 +19921 麶 +19922 躩 +19923 躂 +19924 鷎 +19925 鶮 +19926 趉 +19927 鵎 +19928 賙 +19929 貹 +19930 鹝 +19931 鸎 +19932 踜 +19933 跭 +19934 黭 +19935 麷 +19936 躃 +19937 鷏 +19938 趌 +19939 鵯 +19940 鵏 +19941 貺 +19942 鹟 +19943 踠 +19944 跮 +19945 黮 +19946 麹 +19947 躭 +19948 躄 +19949 鷐 +19950 趍 +19951 鵐 +19952 賛 +19953 鸐 +19954 踡 +19955 跰 +19956 黰 +19957 麺 +19958 鶱 +19959 趎 +19960 贜 +19961 踤 +19962 跱 +19963 黱 +19964 麼 +19965 躰 +19966 趏 +19967 鵒 +19968 賝 +19969 裒 +19970 僦 +19971 鹢 +19972 鸒 +19973 踥 +19974 跲 +19975 墼 +19976 黲 +19977 麿 +19978 躱 +19979 鶳 +19980 鵓 +19981 鹥 +19982 鸓 +19983 踦 +19984 跴 +19985 黳 +19986 黀 +19987 鷔 +19988 趒 +19989 赒 +19990 鵴 +19991 鵔 +19992 賟 +19993 鸔 +19994 黁 +19995 躋 +19996 鷕 +19997 鶵 +19998 趓 +19999 赗 +20000 鵕 +20001 鸕 +20002 踨 +20003 黵 +20004 黂 +20005 躌 +20006 鷖 +20007 鵶 +20008 鵖 +20009 鹲 +20010 鸖 +20011 黶 +20012 躶 +20013 趖 +20014 赥 +20015 鵷 +20016 鸗 +20017 踭 +20018 跿 +20019 黷 +20020 黅 +20021 躷 +20022 躎 +20023 鷘 +20024 鶸 +20025 趗 +20026 赨 +20027 谫 +20028 鵸 +20029 鵘 +20030 氽 +20031 冱 +20032 鸘 +20033 踰 +20034 踀 +20035 埯 +20036 黸 +20037 黆 +20038 躸 +20039 躑 +20040 茳 +20041 鷙 +20042 鶹 +20043 趘 +20044 赩 +20045 鵹 +20046 鵙 +20047 鸙 +20048 踲 +20049 黺 +20050 黇 +20051 躹 +20052 躒 +20053 鷚 +20054 鵺 +20055 鵚 +20056 賥 +20057 賅 +20058 鹷 +20059 踳 +20060 黽 +20061 躻 +20062 躓 +20063 鷛 +20064 趚 +20065 赬 +20066 鵻 +20067 鹸 +20068 踃 +20069 黊 +20070 躼 +20071 鷜 +20072 鶼 +20073 趛 +20074 赮 +20075 鵜 +20076 賧 +20077 鸜 +20078 踶 +20079 踄 +20080 鼀 +20081 黋 +20082 躖 +20083 鷝 +20084 鶽 +20085 趜 +20086 赯 +20087 鵽 +20088 賨 +20089 鹺 +20090 踷 +20091 踆 +20092 鼁 +20093 黌 +20094 躾 +20095 跔 +20096 鶢 +20097 麪 +20098 蹱 +20099 軤 +20100 ╲ +20101 譾 +20102 皏 +20103 祐 +20104 痸 +20105 穠 +20106 玽 +20107 籿 +20108 秜 +20109 ﹙ +20110 畍 +20111 眝 +20112 乿 +20113 倂 +20114 僾 +20115 剉 +20116 卾 +20117 唙 +20118 噕 +20119 坴 +20120 塿 +20121 妚 +20122 媣 +20123 寁 +20124 峷 +20125 巚 +20126 弙 +20127 恦 +20128 抳 +20129 攙 +20130 晇 +20131 杤 +20132 梫 +20133 榲 +20134 檝 +20135 渧 +20136 漹 +20137 瀡 +20138 焩 +20139 碫 +20140 ╒ +20141 琕 +20142 疺 +20143 縑 +20144 稸 +20145 籚 +20146 禫 +20147 ¬ +20148 甐 +20149 盫 +20150 俈 +20151 僔 +20152 刅 +20153 匳 +20154 哣 +20155 嘨 +20156 圴 +20157 奦 +20158 媀 +20159 峍 +20160 怴 +20161 慥 +20162 扸 +20163 揤 +20164 擵 +20165 昖 +20166 梀 +20167 橵 +20168 歏 +20169 淰 +20170 漋 +20171 磜 +20172 ╳ +20173 鱳 +20174 譿 +20175 瑆 +20176 祑 +20177 瓀 +20178 縲 +20179 穡 +20180 粀 +20181 秝 +20182 ﹚ +20183 畐 +20184 倃 +20185 僿 +20186 厀 +20187 唚 +20188 噖 +20189 墂 +20190 妛 +20191 媤 +20192 寃 +20193 峸 +20194 巜 +20195 弚 +20196 恮 +20197 抴 +20198 搘 +20199 晈 +20200 梬 +20201 榳 +20202 渨 
+20203 漺 +20204 焪 +20205 碬 +20206 ╓ +20207 譝 +20208 琖 +20209 礧 +20210 疻 +20211 璚 +20212 稺 +20213 竁 +20214 籛 +20215 禬 +20216 ¦ +20217 甒 +20218 盬 +20219 乄 +20220 俉 +20221 刉 +20222 匴 +20223 哤 +20224 圵 +20225 塛 +20226 峎 +20227 廤 +20228 怶 +20229 慦 +20230 扺 +20231 揥 +20232 昗 +20233 朩 +20234 梂 +20235 橶 +20236 漌 +20237 濿 +20238 焀 +20239 磝 +20240 ▁ +20241 鱴 +20242 瑇 +20243 皒 +20244 祒 +20245 痻 +20246 瓁 +20247 縳 +20248 竫 +20249 粁 +20250 秞 +20251 ﹛ +20252 眡 +20253 倄 +20254 厁 +20255 唜 +20256 噚 +20257 坸 +20258 墄 +20259 妜 +20260 媥 +20261 寈 +20262 峹 +20263 巟 +20264 弜 +20265 恱 +20266 憍 +20267 抶 +20268 搙 +20269 攛 +20270 杧 +20271 梮 +20272 榵 +20273 檟 +20274 歺 +20275 泋 +20276 渪 +20277 漻 +20278 瀤 +20279 焫 +20280 ╔ +20281 琗 +20282 礨 +20283 璛 +20284 縓 +20285 稾 +20286 竂 +20287 玐 +20288 禭 +20289 甔 +20290 盭 +20291 乆 +20292 俋 +20293 僗 +20294 刋 +20295 匵 +20296 哫 +20297 嘪 +20298 圶 +20299 塜 +20300 奨 +20301 媂 +20302 孹 +20303 峏 +20304 廥 +20305 怷 +20306 扻 +20307 昘 +20308 梄 +20309 橷 +20310 歑 +20311 沊 +20312 淴 +20313 漍 +20314 瀀 +20315 焁 +20316 磞 +20317 ▂ +20318 讁 +20319 皔 +20320 祔 +20321 痽 +20322 瓂 +20323 穣 +20324 珁 +20325 粂 +20326 秠 +20327 ﹜ +20328 畒 +20329 眣 +20330 倅 +20331 剏 +20332 厃 +20333 唝 +20334 噛 +20335 坹 +20336 墆 +20337 媦 +20338 寉 +20339 峺 +20340 巠 +20341 弝 +20342 恲 +20343 抷 +20344 搚 +20345 晊 +20346 杫 +20347 梱 +20348 榶 +20349 檡 +20350 歽 +20351 泍 +20352 瀥 +20353 焬 +20354 碮 +20355 ╕ +20356 譟 +20357 琘 +20358 礩 +20359 痀 +20360 璝 +20361 縔 +20362 竃 +20363 籝 +20364 ℡ +20365 盰 +20366 乊 +20367 俌 +20368 僘 +20369 刌 +20370 匶 +20371 哬 +20372 嘫 +20373 圷 +20374 奩 +20375 媃 +20376 孻 +20377 峐 +20378 嶻 +20379 廦 +20380 怸 +20381 慪 +20382 扽 +20383 揧 +20384 擸 +20385 昚 +20386 朰 +20387 梇 +20388 橸 +20389 沋 +20390 漎 +20391 瀁 +20392 焂 +20393 ↓ +20394 } +20395 傹 +20396 匌 +20397 咠 +20398 嘄 +20399 堼 +20400 夶 +20401 婟 +20402 孆 +20403 嶟 +20404 廄 +20405 忼 +20406 慅 +20407 扆 +20408 掿 +20409 擙 +20410 旪 +20411 朂 +20412 桚 +20413 楜 +20414 橗 +20415 欭 +20416 滮 +20417 烗 +20418 齕 +20419 齗 +20420 齵 +20421 鼲 +20422 齖 +20423 齹 +20424 齺 +20425 齻 +20426 齼 +20427 齽 +20428 齾 +20429 龂 +20430 龎 +20431 龏 +20432 龒 +20433 龓 +20434 龖 +20435 龗 +20436 龝 +20437 龞 +20438 龡 +20439 郎 +20440 凉 +20441 裏 +20442 ㄝ +20443 鬏 +20444 ≥ +20445 л +20446 ぽ +20447 ⑤ +20448 ] +20449 佪 +20450 呡 +20451 娸 +20452 屳 +20453 嵼 +20454 庉 +20455 忀 +20456 愝 +20457 戄 +20458 捿 +20459 撦 +20460 斴 +20461 曒 +20462 椵 +20463 樰 +20464 欇 +20465 涊 +20466 溳 +20467 熭 +20468 輅 +20469 莰 +20470 輣 +20471 輀 +20472 輂 +20473 輠 +20474 輢 +20475 荮 +20476 軥 +20477 輁 +20478 輥 +20479 輦 +20480 輧 +20481 輫 +20482 輬 +20483 輭 +20484 輮 +20485 輲 +20486 輳 +20487 輴 +20488 輵 +20489 輶 +20490 輹 +20491 輽 +20492 轀 +20493 轃 +20494 菥 +20495 莶 +20496 齛 +20497 鼳 +20498 齜 +20499 齝 +20500 鼵 +20501 輈 +20502 軨 +20503 鼶 +20504 軩 +20505 齟 +20506 鼸 +20507 輊 +20508 萆 +20509 鼺 +20510 輌 +20511 軬 +20512 齢 +20513 齣 +20514 輎 +20515 軮 +20516 齤 +20517 齁 +20518 輏 +20519 齥 +20520 齂 +20521 輐 +20522 軰 +20523 齃 +20524 輑 +20525 軱 +20526 齧 +20527 齅 +20528 輒 +20529 軲 +20530 齨 +20531 齆 +20532 軳 +20533 齇 +20534 軴 +20535 蔌 +20536 齈 +20537 齫 +20538 齉 +20539 輖 +20540 齬 +20541 軷 +20542 輘 +20543 齮 +20544 齌 +20545 輙 +20546 軹 +20547 齯 +20548 齍 +20549 輚 +20550 軺 +20551 葙 +20552 蓰 +20553 蒇 +20554 蒈 +20555 齰 +20556 齎 +20557 齱 +20558 齏 +20559 蔟 +20560 齴 +20561 軿 +20562 蒉 +20563 隣 +20564 磟 +20565 ▃ +20566 瑉 +20567 皕 +20568 痾 +20569 穤 +20570 竮 +20571 珃 +20572 粃 +20573 秡 +20574 ﹝ +20575 眤 +20576 亃 +20577 剒 +20578 厇 +20579 噝 +20580 坺 +20581 墇 +20582 寊 +20583 峼 +20584 弞 +20585 恴 +20586 抸 +20587 搝 +20588 晍 +20589 杬 +20590 梲 +20591 歾 +20592 泎 +20593 渮 +20594 漽 +20595 焭 +20596 碯 +20597 ╖ 
+20598 譠 +20599 琙 +20600 癦 +20601 痁 +20602 縕 +20603 竄 +20604 籞 +20605 ㈱ +20606 甖 +20607 盳 +20608 乑 +20609 僙 +20610 刏 +20611 哯 +20612 圸 +20613 塟 +20614 孼 +20615 峑 +20616 廧 +20617 慫 +20618 抁 +20619 揨 +20620 昛 +20621 朲 +20622 梈 +20623 橺 +20624 歓 +20625 沍 +20626 瀂 +20627 焃 +20628 齄 +20629 〓 +20630  ̄ +20631 侢 +20632 傼 +20633 凗 +20634 咡 +20635 嘅 +20636 圑 +20637 夻 +20638 孇 +20639 岨 +20640 廅 +20641 怇 +20642 扊 +20643 揀 +20644 旫 +20645 桛 +20646 楟 +20647 欮 +20648 淊 +20649 烚 +20650 燓 +20651 ㄞ +20652 ∞ +20653 ⑥ +20654 ^ +20655 佫 +20656 傓 +20657 冝 +20658 呣 +20659 嗈 +20660 囖 +20661 夀 +20662 娹 +20663 嬣 +20664 屴 +20665 嵽 +20666 庌 +20667 忁 +20668 愞 +20669 戅 +20670 撧 +20671 斵 +20672 曓 +20673 椶 +20674 樲 +20675 欈 +20676 氜 +20677 涋 +20678 溵 +20679 澽 +20680 炥 +20681 熮 +20682 轥 +20683 轣 +20684 迆 +20685 轤 +20686 瞢 +20687 轢 +20688 迃 +20689 迉 +20690 迊 +20691 迋 +20692 迌 +20693 迒 +20694 迗 +20695 迚 +20696 迠 +20697 迡 +20698 迣 +20699 迧 +20700 迬 +20701 迯 +20702 迱 +20703 迲 +20704 迶 +20705 迺 +20706 迻 +20707 迼 +20708 迾 +20709 迿 +20710 逇 +20711 逈 +20712 逌 +20713 逎 +20714 逓 +20715 逕 +20716 嗀 +20717 轪 +20718 掎 +20719 掊 +20720 轇 +20721 﨏 +20722 辌 +20723 﨑 +20724 辒 +20725 扌 +20726 辝 +20727 﨔 +20728 辠 +20729 轋 +20730 礼 +20731 轌 +20732 辢 +20733 辤 +20734 尢 +20735 揞 +20736 揎 +20737 﨡 +20738 辥 +20739 轐 +20740 﨤 +20741 辧 +20742 轑 +20743 﨧 +20744 辪 +20745 轒 +20746 辬 +20747 轓 +20748 轔 +20749 搌 +20750 挢 +20751 轕 +20752 轗 +20753 辳 +20754 捱 +20755 轙 +20756 辵 +20757 轚 +20758 撄 +20759 辷 +20760 辸 +20761 轝 +20762 掭 +20763 撖 +20764 逘 +20765 礌 +20766 瑺 +20767 禒 +20768 癄 +20769 繝 +20770 窢 +20771 珷 +20772 粻 +20773 稜 +20774 仩 +20775 儬 +20776 劆 +20777 啝 +20778 嚑 +20779 垹 +20780 墵 +20781 姞 +20782 尃 +20783 崰 +20784 帬 +20785 彔 +20786 悹 +20787 憼 +20788 挔 +20789 摖 +20790 敔 +20791 暊 +20792 枲 +20793 槧 +20794 櫊 +20795 殸 +20796 洜 +20797 潬 +20798 灎 +20799 煚 +20800 牬 +20801 燸 +20802 牗 +20803 爚 +20804 狑 +20805 牞 +20806 爘 +20807 牔 +20808 牥 +20809 牭 +20810 牎 +20811 牱 +20812 牳 +20813 牜 +20814 牷 +20815 燵 +20816 爗 +20817 爙 +20818 牕 +20819 牰 +20820 牣 +20821 燖 +20822 牑 +20823 牏 +20824 牐 +20825 牓 +20826 燶 +20827 牨 +20828 爜 +20829 爞 +20830 爟 +20831 爠 +20832 爡 +20833 爢 +20834 爣 +20835 爤 +20836 爥 +20837 爧 +20838 爩 +20839 爫 +20840 爮 +20841 爯 +20842 爳 +20843 爴 +20844 爼 +20845 牀 +20846 牃 +20847 牅 +20848 牉 +20849 牊 +20850 牸 +20851 牻 +20852 牼 +20853 牴 +20854 牪 +20855 牫 +20856 燗 +20857 牚 +20858 犪 +20859 犩 +20860 犂 +20861 犫 +20862 犅 +20863 犲 +20864 犱 +20865 犮 +20866 犆 +20867 犳 +20868 犉 +20869 燽 +20870 燘 +20871 燾 +20872 犵 +20873 犌 +20874 燿 +20875 狆 +20876 犘 +20877 爀 +20878 燛 +20879 犻 +20880 犐 +20881 犺 +20882 犎 +20883 爁 +20884 爂 +20885 燝 +20886 爃 +20887 燞 +20888 爄 +20889 犿 +20890 犾 +20891 犖 +20892 狅 +20893 犗 +20894 爅 +20895 燡 +20896 爇 +20897 燢 +20898 爈 +20899 爉 +20900 狇 +20901 犙 +20902 燨 +20903 牶 +20904 狊 +20905 犛 +20906 狉 +20907 犚 +20908 狋 +20909 犜 +20910 狏 +20911 狌 +20912 犝 +20913 狓 +20914 爌 +20915 爎 +20916 燫 +20917 燬 +20918 犨 +20919 燯 +20920 狕 +20921 狔 +20922 狖 +20923 犤 +20924 狘 +20925 犥 +20926 燰 +20927 爓 +20928 燱 +20929 爔 +20930 燳 +20931 狚 +20932 犦 +20933 爖 +20934 牋 +20935 OOV_NUM +20936 OOV_ALPHA +20937 OOV_ALNUM +20938 OOV_HANZ +20940 OOV diff --git a/PaddleNLP/lexical_analysis/downloads.sh b/PaddleNLP/lexical_analysis/downloads.sh new file mode 100644 index 00000000..4a9433e8 --- /dev/null +++ b/PaddleNLP/lexical_analysis/downloads.sh @@ -0,0 +1,43 @@ +#!/bin/bash + +# download baseline model file to ./model_baseline/ +if [ -d ./model_baseline/ ] +then + echo "./model_baseline/ directory already existed, ignore download" +else + wget --no-check-certificate 
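[Editorial aside, not part of the original patch: the file above is a plain id-to-token table, and downstream code only needs the reverse lookup plus an out-of-vocabulary fallback. A minimal loading sketch follows, assuming whitespace-separated "<id> <token>" lines as shown; the filename "word.dic" and the fallback policy are illustrative assumptions, not taken from this repository.]

    # Minimal sketch (assumptions noted above): load "<id> <token>" lines.
    def load_dict(path):
        """Map token -> id; ids need not be contiguous."""
        tok2id = {}
        with open(path) as f:
            for line in f:
                fields = line.rstrip("\n").split(None, 1)  # split on first whitespace run
                if len(fields) == 2:
                    tok2id[fields[1]] = int(fields[0])
        return tok2id

    tok2id = load_dict("word.dic")                   # hypothetical filename
    unk = tok2id["OOV"]                              # generic out-of-vocabulary bucket
    ids = [tok2id.get(ch, unk) for ch in u"镾蓅X"]   # unseen tokens fall back to OOV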
diff --git a/PaddleNLP/lexical_analysis/downloads.sh b/PaddleNLP/lexical_analysis/downloads.sh
new file mode 100644
index 00000000..4a9433e8
--- /dev/null
+++ b/PaddleNLP/lexical_analysis/downloads.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+
+# download baseline model file to ./model_baseline/
+if [ -d ./model_baseline/ ]
+then
+    echo "./model_baseline/ directory already exists, skip download"
+else
+    wget --no-check-certificate https://baidu-nlp.bj.bcebos.com/lexical_analysis-1.0.0.tar.gz
+    tar xvf lexical_analysis-1.0.0.tar.gz
+    /bin/rm lexical_analysis-1.0.0.tar.gz
+fi
+
+# download dataset file to ./data/
+if [ -d ./data/ ]
+then
+    echo "./data/ directory already exists, skip download"
+else
+    wget --no-check-certificate https://baidu-nlp.bj.bcebos.com/lexical_analysis-dataset-1.0.0.tar.gz
+    tar xvf lexical_analysis-dataset-1.0.0.tar.gz
+    /bin/rm lexical_analysis-dataset-1.0.0.tar.gz
+fi
+
+# download ERNIE pretrained model to ./pretrained/
+if [ -d ./pretrained/ ]
+then
+    echo "./pretrained/ directory already exists, skip download"
+else
+    mkdir ./pretrained/ && cd ./pretrained/
+    wget --no-check-certificate https://baidu-nlp.bj.bcebos.com/ERNIE_stable-1.0.1.tar.gz
+    tar xvf ERNIE_stable-1.0.1.tar.gz
+    /bin/rm ERNIE_stable-1.0.1.tar.gz
+    cd ../
+fi
+
+# download finetuned model file to ./model_finetuned/
+if [ -d ./model_finetuned/ ]
+then
+    echo "./model_finetuned/ directory already exists, skip download"
+else
+    wget --no-check-certificate https://baidu-nlp.bj.bcebos.com/lexical_analysis_finetuned-1.0.0.tar.gz
+    tar xvf lexical_analysis_finetuned-1.0.0.tar.gz
+    /bin/rm lexical_analysis_finetuned-1.0.0.tar.gz
+fi
diff --git a/PaddleNLP/lexical_analysis/evaluate.py b/PaddleNLP/lexical_analysis/evaluate.py
new file mode 100644
index 00000000..108dafa9
--- /dev/null
+++ b/PaddleNLP/lexical_analysis/evaluate.py
@@ -0,0 +1,266 @@
+#coding=utf-8
+"""
+Evaluate word segmentation quality for LAC and other open-source wordseg tools.
+"""
+from __future__ import print_function
+from __future__ import division
+
+import sys
+import os
+
+
+def to_unicode(string):
+    """ string compatibility for python2 & python3 """
+    if sys.version_info.major == 2 and isinstance(string, str):
+        return string.decode("utf-8")
+    else:
+        return string
+
+
+def to_set(words):
+    """ turn a word list into a set of (offset, word) pairs, so a predicted
+    word only counts as correct when both its text and its position match """
+    off = 0
+    s = set()
+    for w in words:
+        if w:
+            s.add((off, w))
+        off += len(w)
+    return s
+
+
+def cal_fscore(standard, result, split_delim=" "):
+    """ calculate precision/recall/fscore for wordseg
+    Param: standard, list of str, ground-truth labels, e.g. ["a b c", "d ef g"]
+    Param: result, list of str, predicted result, e.g. ["ab c", "d e fg"]
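+
+    Worked example (editorial addition, not in the original): with the two
+    inputs above, the only (offset, word) pairs shared by standard and
+    result are "c" in the first sentence and "d" in the second, so
+    std=6, rst=5, cor=2, giving p=0.40000, r=0.33333, f1=0.36364.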
["ab c", "d e fg"] + """ + assert len(standard) == len(result) + std, rst, cor = 0, 0, 0 + for s, r in zip(standard, result): + s = to_set(s.rstrip().split(split_delim)) + r = to_set(r.rstrip().split(split_delim)) + std += len(s) + rst += len(r) + cor += len(s & r) + p = 1.0 * cor / rst + r = 1.0 * cor / std + f = 2 * p * r / (p + r) + + print("std, rst, cor = %d, %d, %d" % (std, rst, cor)) + print("precision = %.5f, recall = %.5f, f1 = %.5f" % (p, r, f)) + #print("| | %.5f | %.5f | %.5f |" % (p, r, f)) + print("") + + return p, r, f + + +def load_testdata(datapath="./data/test_data/test_part"): + """none""" + sentences = [] + sent_seg_list = [] + for line in open(datapath): + sent, label = line.strip().split("\t") + sentences.append(sent) + + sent = to_unicode(sent) + label = label.split(" ") + assert len(sent) == len(label) + + # parse segment + words = [] + current_word = "" + for w, l in zip(sent, label): + if l.endswith("-B"): + if current_word != "": + words.append(current_word) + current_word = w + elif l.endswith("-I"): + current_word += w + elif l.endswith("-O"): + if current_word != "": + words.append(current_word) + words.append(w) + current_word = "" + else: + raise ValueError("wrong label: " + l) + if current_word != "": + words.append(current_word) + sent_seg = " ".join(words) + sent_seg_list.append(sent_seg) + print("got %d lines" % (len(sent_seg_list))) + return sent_seg_list, sentences + + +def get_lac_result(): + """ + get LAC predicted result by: + `sh run.sh | tail -n 100 > result.txt` + """ + sent_seg_list = [] + for line in open("./result.txt"): + line = line.strip().split(" ") + words = [pair.split("/")[0] for pair in line] + labels = [pair.split("/")[1] for pair in line] + sent_seg = " ".join(words) + sent_seg = to_unicode(sent_seg) + sent_seg_list.append(sent_seg) + return sent_seg_list + + +def get_jieba_result(sentences): + """ + Ref to: https://github.com/fxsjy/jieba + Install by `pip install jieba` + """ + import jieba + preds = [] + for sentence in sentences: + sent_seg = " ".join(jieba.lcut(sentence)) + sent_seg = to_unicode(sent_seg) + preds.append(sent_seg) + return preds + + +def get_thulac_result(sentences): + """ + Ref to: http://thulac.thunlp.org/ + Install by: `pip install thulac` + """ + import thulac + preds = [] + lac = thulac.thulac(seg_only=True) + for sentence in sentences: + sent_seg = lac.cut(sentence, text=True) + sent_seg = to_unicode(sent_seg) + preds.append(sent_seg) + return preds + + +def get_pkuseg_result(sentences): + """ + Ref to: https://github.com/lancopku/pkuseg-python + Install by: `pip3 install pkuseg` + You should noticed that pkuseg-python only support python3 + """ + import pkuseg + seg = pkuseg.pkuseg() + preds = [] + for sentence in sentences: + sent_seg = " ".join(seg.cut(sentence)) + sent_seg = to_unicode(sent_seg) + preds.append(sent_seg) + return preds + + +def get_hanlp_result(sentences): + """ + Ref to: https://github.com/hankcs/pyhanlp + Install by: pip install pyhanlp + (Before using pyhanlp, you need to download the model manully.) 
+ """ + from pyhanlp import HanLP + preds = [] + for sentence in sentences: + arraylist = HanLP.segment(sentence) + sent_seg = " ".join([term.toString().split("/")[0] for term in arraylist]) + sent_seg = to_unicode(sent_seg) + preds.append(sent_seg) + return preds + + +def get_nlpir_result(sentences): + """ + Ref to: https://github.com/tsroten/pynlpir + Install by `pip install pynlpir` + Run `pynlpir update` to update License + """ + import pynlpir + pynlpir.open() + preds = [] + for sentence in sentences: + sent_seg = " ".join(pynlpir.segment(sentence, pos_tagging=False)) + sent_seg = to_unicode(sent_seg) + preds.append(sent_seg) + return preds + + +def get_ltp_result(sentences): + """ + Ref to: https://github.com/HIT-SCIR/pyltp + 1. Install by `pip install pyltp` + 2. Download models from http://ltp.ai/download.html + """ + from pyltp import Segmentor + segmentor = Segmentor() + model_path = "./ltp_data_v3.4.0/cws.model" + if not os.path.exists(model_path): + raise IOError("LTP Model do not exist! Download it first!") + segmentor.load(model_path) + preds = [] + for sentence in sentences: + sent_seg = " ".join(segmentor.segment(sentence)) + sent_seg = to_unicode(sent_seg) + preds.append(sent_seg) + segmentor.release() + + return preds + + +def print_array(array): + """print some case""" + for i in [1, 10, 20, 30, 40]: + print("case " + str(i) + ": \t" + array[i]) + + +def evaluate_all(): + """none""" + standard, sentences = load_testdata() + print_array(standard) + + # evaluate lac + preds = get_lac_result() + print("lac result:") + print_array(preds) + cal_fscore(standard=standard, result=preds) + + # evaluate jieba + preds = get_jieba_result(sentences) + print("jieba result") + print_array(preds) + cal_fscore(standard=standard, result=preds) + + # evaluate thulac + preds = get_thulac_result(sentences) + print("thulac result") + print_array(preds) + cal_fscore(standard=standard, result=preds) + + # evaluate pkuseg, but pyuseg only support python3 + if sys.version_info.major == 3: + preds = get_pkuseg_result(sentences) + print("pkuseg result") + print_array(preds) + cal_fscore(standard=standard, result=preds) + + # evaluate HanLP + preds = get_hanlp_result(sentences) + print("HanLP result") + print_array(preds) + cal_fscore(standard=standard, result=preds) + + # evaluate NLPIR + preds = get_nlpir_result(sentences) + print("NLPIR result") + print_array(preds) + cal_fscore(standard=standard, result=preds) + + # evaluate LTP + preds = get_ltp_result(sentences) + print("LTP result") + print_array(preds) + cal_fscore(standard=standard, result=preds) + + +if __name__ == "__main__": + import ipdb + #ipdb.set_trace() + evaluate_all() diff --git a/PaddleNLP/lexical_analysis/gru-crf-model.png b/PaddleNLP/lexical_analysis/gru-crf-model.png new file mode 100644 index 0000000000000000000000000000000000000000..1ca7d5cf0c5381294915dc13bfb4fbd2bf7e99dc GIT binary patch literal 62360 zcmc$`RZt#F`v-`-yF+ky2ofx~2Mre7-QC^Y-QC^Y-614Ma0nh;cXG~`oPX6`?!{iD zDmCv+_cPMnKWoB1$%!Mv;lY7`fFMdrh$w=9fC&QscwwM`BM?61>>wZ{Ad(_N%C4X% zSzZCf(%UlWIW+z5dq%a9u4Am?en%mjs8P zw7-pT`nDqf)5AaQs)_?`R#@0qGymy?vKI>F?so`l9ivCQsy} z$qD&jMUr)UOsnq0%${Y|GXhQWr22*>EuC*X^UV`B@nhleZA`MJX_^j1g2xiCqo_@HT?5W3gxJFd0|NY>w5{qWyg@7zYwt=8GoOMU&SzmfZx>T}X& zZC^#w3T3^~79dS^r|&pH3)9sN7S(CI!`JmFLqp!!R!}S?fat!l zdKM9n=skSe>Uy2i411^DC9JDUvccaKR8-&%u`Qo3oAh1Hm9;KrFl0ll#nXtz`2fE) z@jM1R<(!O~9K!E)W~;>s)SP?uM-8e#?gYC4B853>QrwFww723e`1RBmxPcXIUyBE# zRi{^hPGkfT-CTj{hbmEY6WrUGqi{6Dw@*K*g7*)0^^sTGDe$`%{XbACP~E=Gj+dQNnZNLIG;It^8%tIAX*^!s6cfc@ 
z=Wh(=w}Dm30ndQIVHw$Nj0p#2LDA`2+Mo4|o?%d4W+TtN6!;(1@p2{N95?|g*CR1AHrv`LfJbreA zR>=6f;*mu|G1#=)eljY?0foV$IjvZxMCp&(<3n#Rj)|;N;SUWu8VYm3pg8&>LlzBx zZ$k7nINK{LD>|)Sf=EvYV$9#D{Ih~5NsFH~N2gAm9IliZm#M5oiM;%7gx8yqv7B%` zJi16`)@mD0H^de6c*h*ISb!@u8*I~6l9LojddPYZ(uGXU%-w~k-!pS06Lxngvdc6L zVZ_;Xi!Cq`R2zzlZK*nw$=%w$9+@Lh|6|Q#JqCj*8L={?CfoIk$6Z_;#(Xg4?U&i_ zvwxqLV?hL#UYJ|IGDU3^Ja`XZbma$cTYbem$+~;0U%54fHJj#JYVhM*SCAcKKGpyk z)@CqzgPw3!N|V7#HO_H=YC_o1Owi!}&Gr4$grr1`Gx!8;f6bvI-)i`LT??&699saZ zoVJF(#^z=IY0t|x9D>=eB{ZVWR$w zlvxQjcf_izz^B} z3#D(zA$xS(1xJpVR_u}MzVgi1p=8Da+J={mTj>wX?;R&~)P`!!6sznOo@azcNdlM6 z@$memab0p?$#+OhJrP5waI4urv@s}M&Uh&Wq`_|`({C?&d(ou2sJ6XhbkNuLcQNqk zB~vNYP&~-vUaGj(3ly~U&Y~^v)r1XaiOB+OQ6dW(!UP2YjVKn}qHp!3#!AaXgoQ=M zl`d{jNg_3ewygWH|9QzbRIlq}SU}eWM@DYk6V#4aM5Xxr18_pDPT$k%+ zH1dlBSOP{9ukW98*dB-CygdR7#)<0knf#|=UTZoznmJQKh>62N1DmHo|I47xCAQ1* zZxqnp3=RG=IOv-FOYr^c06Zw#HHyx; z+g9m zwA#v}N>P16oQwSu=^w67sJo*`!$G6`Mi8}J6nb_kT;Kq!A(<)c{<1RL?x=5~!2DR_ z6y8*R<>w^r`ZU9&uh_L+LI*YzDZZn0@rDsSNGqfCo`CI)T+q;9HCat6VH$h!S149m zrZZWaC}(soS<#M*hmRAb(+RC_;h~{R>MemL zDmj6H8}()!yTksV-CF^UI~mq^^ncO`#wm>4kzT@0vjkgZd(ZLhz6)^K3#2pi%scdJ zTLuiiJ}U(XS*jrQ1>(nMMt7VCqDyi{F_(unT{m+1@0y{MU{di!#(g6| zfw!OW=(@uVfyGuW>!jB~{;!Wbqcf_a7N1~om7k@1zRhQ@n#pGT-E=r%y@;<)O`yg* zPj2Re5qeE5>Et}0%0{Ia4T;0$Wgi~9P%lp+am1O=BN|rXz`8bW*EAB%@#%)*M1>(a zAY%B87+Lv=9R(a2uQ}1eqr72A8rPb_ zZm@CA5+zVOdwK#hC>{SQ&>XOP;7lPwcsJ&7=#RA}(`6o-p3SN7$xa6$_RGghA+-G1 z6Tc{8Lar0O?!b8;^wdZf`R1Eoi7EQev4qf&;K?U;s-nrU0|^T-&Qu-;9@~Bzq*RusPm))-q2*YqiS2!9lep4 z#J@t!Ry@a)l;eqSZAB2}r%7dc=*aQItZGOu>E#Li$+fM(?xu@U??D8SMz_&wlR(w> zY+oNLJA2vB2$$sLl+^C#AwuJyR+M43&+YYjb;sLUJZKvV zXs9kRFR|fQ z4T{aBZ-&vI#_hvd>s37IIP(fFo916xdiQhWYj#H^T*iC`Hg}1B@Je^;Pb*GnbkIM6 z9u~6g#!s^oO33}QOWyjeN57BU6C)Y5X-D8uln`I2we=TMF*&G|Cm3<(FSbhg7Uu1Z zf|F!rpu@a`_@(vf;#3U}QQc5rS&98h*C$AJHofVkBlY%%zmps3IC@u{y9Y-1FYgCN zi@wo+k<7WQ5eNXVm@X#;RA-)84k1gQF@A zP27+c?3Rxe(nb-xuD2W{8Ga`vAm{U23q^`~^pGs8_ZbMM(F808zgKjN} zm+ROXd_Cn!v72zw*_8fh^w5|`#2r>6NtD$~ZIZ9Ly4tzZv#s_U2L$|Zn*NewvAiT! z8QKZ6ae8O8?Pl+_LJ&qxyk+#*c6u)P3PbiJsHjo1^Nji_!Z*{tfZa*l@f0HL_s{+Y zyC`ts`0Ta9*my3LpJ?Q{Gj1o>X9o6N(= z8lp-YRHCcF)mFyQ>l-suv0v;P7+c!DYYER?`KgeGCxGZW*28JNDKER}Kax0|XCQS) z?%^a9QLM4@^{#=K^{FEedlH#0w6rrpS~DLa49G*S8blOQq58s2^*`MdkBB!Rm+T#y7-(&c3eG5c|B zt8Szi+G?t4N_~W0)KyTm)#~gPoHw?V{`x{|O6ZU?m}5f*Gyg4Jn!kZ}DwqF4xyN~KL(_Y^HDTShya(TE4LhJP^I>`T!DHP85C+CwT5ReQ> zqfvU{&e+$R>Gor(@!$Uzd;J7K!cK37ZnVq0e;`yl!Wf`evz_0rDJ6+haBx7%-qd^x zDt=G;_YEX23#gABl!$m;4FAeX4!u^d-;PY3y#H_qWq`2A=Vki4<%2@0=cp53)!?fg zI^*E(V87*0tv7BLC4>D|e!2NS2nPmao7(+Lq4=TZD=g1c znV_JdDey)%h?v6n&cBJc_!spVnZg0Ktp_L(5qiaM!@u*Fx+M)EC?)$T(FK0~{CS6| zH|?+gRw<|*oYYu;djp(N88Iv1z5Datl+j$y4?~hwEf+ygf1bzcXf=%x>M-SqGidXP z-29cE2d4G6NAo_(iMgMe+WcH=>H zPd+Reg8|PMmw@9vxjSkz0eot2mY5hwh6w{Y#oDjEI-&trXUBGs(y+Y0&+@SpFqZ*< z9W`EV_ow;;rdX>SyO<)~86C%ta8@=pXfrX1cu;_r)C1n#^#>5H!KK#%@+J>&YOIY{ zI)Yb!0|Wc=-d=bj^Q&CoSEecls*BtCaaDKoC6^G1pd$hq8Cgh3h&XV-1(VSPb^x-B z({~XLe8bp>3At+PX%%5|5s&KY8jt&Hhnbm~kl;h*biUxSGFBVgIN}yurlg)?Ek`>}vAZ&Fkbp-fU$#Tn-9Gy35;-mARd) zt_oh@+*?!b5fL#tqdY## zQL6BW6;drX_1*>glGg185p?G6(OZf){50*MR`X61jJQf=y{@%Mm6Nm;HSLXg_E$I?t(n-6WRSg1bkr=tJv>RQIi+3{OD)pLvNM32|R#o@F@s zT+MrC-etTa6nT?m*yvoO#7DHB`VtIQ4{SbzLmfuqs*W!#0YCcc$PY2^ru1`>gBr{7 z6b&%h5;x3^cCkGw89~CyRMh;K9NlD?rtz?~dQ*qzw|cX?(<&PYgMJHc8@SUIiAks=gl!!%Oq-9`L*n;0? 
zhJswrfA{Q5BCR5x`!3+!18%`|DmJxD1cdwD1I?m)VV(f^iL;BTfHuE6By^6*3ufM4YgH=$67r;7wPlRF8(L8h#9)Ne3=$dmM${{eAM z=m?a)jXPh`rGWms{x*m`s1*pqbe1Ss&!b6&HVu^@Wa4{k_(BD{YMM^Ei8m+_mJa6X z(hDoD2jl67&79D@1duq7rpoo1D}LbO21CHv!h3KE+mp@82G@Tm;P;V>nR>c2xGcdv zzIwY?fo}whMQ;tnfm0#3T8q&aN}0mHS=g#BK5JC}#~NmN z6ifs$K#Zm8WD>-K@K}E?KWq5+ckXer!gTE=j{i}SLvWij;gs>?!fyyd=vdAE!3=~X zM=T?U3Dc+kU~y77fPb;0|6#$b%n>e0^uu9>SEzU|3hg}|1y4EAH@*PZ$&HcEqbLZn ze*B6&Di&^F&={{dThs{=PNPsIvHoN#VpAoa7Xf4j(bQprz$A+@{KHX<@&azN%4mhTc!Hy0-292JfjM6IKCCf6Ma;LgGs;%V#(oBS z@);4VQ)`vC>1lteXk~H^VJSMEP2!OzH9gQ=gU+|3!q*W zTbgm~`l%Ry2N@xvxYEa2ppI-r*G~$Vt#_%b z#KqCYtHkx|im)pEV17)8Xz!^Cn~s{NT(UlF;r!vRH!8K3*}3S-^L>1f_owuZ-aAT$ zh{G74@CA~ly*q4s^zlbRS#wlQh8VSCMK%D#NHWI5)zc=%03|C&e7NWMAk_2IA;@k( zLvZ_wL_H>LfffP7c9e6>XxQqt9CrIvYB^Sx3^lA%hN)NLbrt`AM1E^1+cwA0S30A^fr^F0Z9WLm zD%OnEpRIfwNWg2AV6oWTP=8Q6w_XKg>Tm#BkoO&d_r4Qw$*f~vl$n<&^JFohjxV($ zb=ucH0LMT<6YQR2C4d*J{>Wr8Tl6X!3{w?`?9g~EUveQ=rSsiM#nTJ3WAGWF(Dhd$ z5GfQ`U<@*$Qvn3RZj~0Y{I22udI2WB#`eb3M+d(h}f=P;vV2K zC}5-da5$d}uGnvbz<|*igo+c_f9G4IQ2`5j zKRz7|i{6#qw}9|BPLMt4=OS1Srm*szzMKET2CQqM8vs~uMhxF)Kl(EOavYm`i1fL$ z9Kfuy25onTO{#e0Vm{3}4UT5n#Q{Ed!$mxnV^bIvx*TJDiVi5lSCH(KF;L zY;U%mtEKY3JzoV$VvdWXVm`t~3t-{V=Pc}1z)-aSM{5l54(uMZ^mJvcd(?npm9zxL z(Fg?I8;$q`Dgy*z`g-%3Q!@aQtKG!*2|c19FwFsk-uc-UDkLflF&=W3qnh=p7=BSA zeEm?E<%8|CIf$y095{#ysu@^qfAeiY>(}k&0rQp*1=0lf)-$JA!HY3PcGOL_^DcbD z5bwK7#A2rMhRw8mP=Qw-fO)cMIny8DZu%@iSY~v}qaPqgu77;62%PE=U_akmvae-tm}T;YS;%k-mG6YhaiR8_;)2NR|8rB;_;K6eIU zzIxtst%r?!aXw@A9LH@wKW-x_Ry;`qFXr|Y)g>RjPrA}Y&3j`c(6lnlm5wo{T(bo? z2G}+AZx1|euMg%h1dfGt+Pt@({n?z==%Mi?8C>%U;O`Jz%VYcVq;6^A>VSXAyMkPJ zN4L`FE;`m{92p#Jx@ejgA+znxC9!Dhy>jMe;Za8J5=z4{yk|UqJp$FEXSnN&UqXgt z)zZX}gX6i!TkrExS-6}jFN52u=;@%zh$c{ozix`CwMn*s-5|-B>HekI zC@S>TL^3O}C`4sHVUTvPRiOX-tslUr$9yO+APfEPkZmN40L{aN*2p?&(y56q&IxD0 zPwa{C@Nj2;^7~y@S1WZR^56Af14QLsOmoqG9B8sE@_YmH4I!CrpBAAvOUi&;`JZwC zA%Z{n{3R^c}v^hgYO+vQAExs@pBJTm|CESFjtBVZ=9$V_J{+}N zg@NVpb$1kEsM2(6YRgYjiW$yVR6j)K`7#*zO}d|)bmFM$bVp_Uju6A9gzV2)*#VO* zj-=+I4DcjK0TzYTnEld2_yRS!-G886L5r<9*!fmdE-}cbR)R*$?gR8KV zLJ|w(@lZ&CP+R{=@w&@;FBYe9Ckm(iqVNvyM}BPQ0oJ+@XcZBDxL(Tzft*yJS~I0z zJgJZl#|y18=>hdmI|T4ciu~6Jj2dCU!Kf>I+zy`$#`$wW21F*IeKq{s)<_aT^?Q(N z_-e4u^N5v-%(po=!~Vohj+|A+uDF6%+^&(Rk)6o@5z;={0?O+uK&Q!M4AH{s>Q6|Y zYue0IN<08+L3yI|wFaBYhk1ujZaW3ZRAE}H((PEz?eKcq<$9D3aO^lj!r8!IOo@a^ z#^Si%O9?GK0}c+&A?bp2c<*046~3Bxq5|b+wdHCNr`|Mh37X$QY~M`z6Lnbyw8@rL z9v4>MtzP(GQ={PfZq5P5qabEYXsO;Fm?mhC3=g+}m$~;P-U*U+qw|jN)wgV3@e@9D z7vDt6_NUMS&b(F_HahfKsEd18n0k8&a5C_Xp5HlEkPHe{NPnc$zdmp$-g;UMqRxsN zu|IgVqetq1=#ehqbA_|&ZZEeO&CLaO=_;@*Cs!$hB2C{NK}e05TTfh_i-BWO4962o0b25$V;;6qxduSAc|x<!qH+jfCs{j-lzd?(61w!od6-x24u)iV1R~E-j4M) z^Xm4B-1lDXeU@X}Qvwc(?Blg)iN$934ezH@AZsb%MWZia-NR+tG5ipKrF%Bxe-s1| zLaOi9<5k1z7CjVOLyy(BIPtA=%`P+T-qmGzsCr~e7velS92fgmVB7+2qTv*FcLDf))nVk zJnP(h&$2AqI1r+z;z981{OT|MrG!fh1Ei-FO3mDcJ~ta*KlS~IBE@y}wWHt1Zs<0k zVuf+vvD&TDc|ieYTFc$%dZM!J78=L)y2mKso|!8a0@ydkM%ivTOxp1f%8FQ``_bR8 zH;y*E86(tK7U~E=H9)8|Qt3oE@oPQDvGwhz>;3P;-N6fFe>%s1Ddr@U0^7)b({SLIO=dfF3vo}C8wv^MlW8gr*)y_A)I4!!Chrbq zMSne@1#uO(`Kgk|BabLFeFEZ|zdE~YJ*Z2E@+@h-{Sp?P_PU~&KP_5@0v46rn200}e%XNO~DYtp@^K82hq z@SWq*cSIi$uURlH!OLCy5l~z?X|_r?maq(8j`zAvLSbMBLp-sqsSKVLoAIbKg@&}a zmAIw8pKIMKZvwK`Tc%g#dJY@8B#bbFn3LD;?u)-Umr&^aY$N9)dq#KNS+@)D{HVMg zw>=!9VeCF5(1i28t;|@ps_DQqS^a@#&mP~&CXKcssf4f(^E8^SyEv{n;&Y}w(>sNI zcW?}s)8bbYKd>nHM|QY(>vTidSG$(YTF0f0-$|0-*0y0&zQZ|_bRi+ScE=TJ+SH|* zFKN9h(`~he?PY!S;)?n$R{hzKPyv0ERXUEaS950RH^AjVSf zIoPgxQTs7@>ochv<^3|!u#0K0Bz(#Dc2*1WChe~|te?*79&m_LPy z3PO9;rXcB~<$d;?99)He(DqKc5aDQ!H3>aGE?dzcfW^^PFG6fg#lZ8RX6(QNv>dVD 
z>T?&{{o&1COfsv0tjemw%0S7zf`Adz7{dyHF_~r>qL8m}OxFJA^1$=cq%69zK*$yM z(_sOa2M-X&;Q<3(#VqE~i500J6$ImqEbq(IIOzZ+M zrf1)ll@Yc9NvYT8wdRw>YpvZe$X~ah0hX;f+|fS{PhhE#OG3y>bAKYl+=0f+45PwA zq;ApY&NdbIVdWFK_9*Z4ncI4-tW;lIbO?|GWG7#}r!So8A@^k&D^w-Nt#B(=B6{+` zXEOuQs>53DL*Bg5vs!=j06^PpY{de`q&4X(#j;vD@O2zCluW!-<%B8lq&t&)PMpBK z98!nnfQ;+i8`96Q6s5476C~`IQmk) z^S%R}9Yww3eOnKFB9oTFB|q6MksWyr%riFi#teEa2DRLvl5Sq}L4qppJ6DNS3FePU zTfY&hylVvAAlEVFRnwOl z%522!nYR_XQz&}=FfuLUg{t^7=F1&XRU3e=EoXQ~wyYe7xcvi}{27oFlQK;W24-xc zcpFsbr@%3?9~Vn#VX`gbM6|nXkK~NOBimBc2m0o!1HjpdOzuSyx%K;y;+Z%D6_X=y z)fXVaQ=dy!rE_sodI*sPDkiff8rV0x1*O@W#%%e}T_C1)6LOn}fKEGgU+*zAyRbuM zR2g22+-FKTZ`^wnB}m*)DowZ@FdGE931@{UDxa{p2^+z8G3i7wX%*7|5?s{b?x1b} z;|~x-xKJ4|N5LYa^5tBv8j)cnQQ|#9d@QxB!hCOqW|gsYC~0aP<*8TfxFw)Y{^Cpw zUWOX8Jhtyg5Z2QJ8DK8;Q+1)6k5!)5@R=;}HE z8l(jb3iFw}2mptUtLAxaNpRw*s1V8$|)!tgBZ2N(O zfty@zqS24dgSb!7yM$#NKRbXPl|h$JSd&*j3Z{d?7qrIsodrL{lV{{iMYaxzZQS#I zrE+g=;9js>e!2u4fLs&BCON9c>*e3tG^h+eU<`f#xHK#ja4}j4OM-D)Un6O~0$?Bt z$&+RO#gDKLDt#Y`)5&v{ee@O?(C^IRPOhO6!wR#{fBAzt;f}ZC#+mSB;!%gJ2@Dw^ z`lgjqw_hrO#cm_(Vm9=3U^3R97uaIc26Hf7!!=_flmGUL;!2SA%{_qfI4$SOWolqd z@xP^2>|s%0Aj^DD`-j=1;DNL8{Dyl(I|dh%T>gbDoECv&*JQ~c{vSXeBm;w4XAxNa z(eB61cd$Z`IW6QWLY6;bY}D??+3ngv`9f%8_Y}IQBE95Q{9n)?>30d|bagS>GLF)l zA0Cd!8fSN$&m?XgOzag)C5>=clRrH2jKCjs9sTVg02+d)*L{9I%}h}+O78W?gr|1h zVL00ds#jtT0^qn3){#_|vW>gK`ewwF753~YAXcrhk}R2UH2gx1N+;XR`%ZudhV8bB@LE{E{)A=i^W2rmbrb=Ja`XI#9bny8LhDu&czA|(l zVq3??Uh$>opt*Vrwws9O;M^j~R@s89||;Y)=Q&Sj&cF zJThJEhfo3HkPv637t=r!>VJl`ZbHT7oQv%S5lm+ohT4j;aspdij)0m%_gyMo;`b{9 zcj;_O1=APfpe~sZ?!O1~BNE?zxn)uNd2pd*JcT?}_$Z(b$u+NYh%j}(DNxh=0fR1% z)+}mE9P*eixKw!vK&)lr59&ZvF(cX8C+eaxLN*|MJ%Gf@;LPzP^-Z4-ZLz?cCqZc- zoeLS)$0#WKdd4JLo-%}{IRDo+7{B>u>sAC)LPxG>NRJEp+)G55yNfZt5>ZLTT zu{}2jl2zH1P+&u)B9cjzR7G#{CuD&BNx8qW7+YeOshfZpl5zxUxQLF4f7OfHVeq)vImS~prl!bxAVQEg3S4v`o5bVh0K|O z<)sG76yO+R@mG0(g|>i>XZFbk=v-oD+U-1wIC{RG#wSMrq$=IA(SuHK_O#4I*urLO zFqQh@c8@*rWp_q0;Ou=+oso@$VK7N+Bj?4JeD2B^bSkr}--gzTz5ZqzTkOK+K@Z2N zf`?VhlUD%Jq2yUnrm3;Edt<4M_h+hBKYv(MjSTuXxlPgVBr^AP zEK9L2-uFV@;7&6JJ(Jzhmx2dN*j{YC#>!;K@=A1U6)_5X8emA7_t12)32HzDtu~=5 zKbLV6V&DmA2ATHAreT%!s31=Hfaij{a>eb$BkEqCdFCElpEa`!1xSPZhrV^=Pz*xm zeXdsbVO;&O8pafrO@BraWjWEm%As@zI8Shah=~JYwzQ^j1O8xbb??m?MMTetb{45g z>mipTPj~GFA%hLuHWDy!?n6k|rH${bN+IOfku6o~I)8EZzG@ zKp@vZ00)ITerf3iz?k}HyDlfj43)L26VOfsYw*|&fXz?dG`vGos~ruYc_9ERbbNN& z{2jgmaNq+Lhk)jen!DpL5GqH^c588LTvC8gWb2lq+Q*zv2Orgv+WBIAT|O|2n<-GO z@Cpg_BhCKppOswo9iF{hQrPI@zyn|b5|^y2Our&#>*Q9NBiyJcbNy;6-y;{ z&B>tK>n_Q?OL&O_k;Y4&1@sdWF@Lx~YIC2*p;^ab(DqNJ;2x0oDwqDGmbO_1v=wti z$7gRkjhx6*&Xf!M??GqDW;;|VGQQ3N)pqdg7t&-XDBz$_4u~(B?M|^I_W*#JmQyA! 
zs>PoFJbdo97Grh?ess;LXVd!eMQGNOO zCVZTf5OM(b#2798yD~Ic3>HOcc8ZE`y8jG$50gx-3qubTS{9)K-09XZdw()JRNV3H z5-dUP-KjR|!BA`S@h;I?`*!a%EJAO~pebcf$BH1YCC`-3Fj1zRkkA?LTR4&d8dxy8!A^}D0`Z3;_Xu(^1Aysimv7!<7(?bk z))ZkMup~^ZXolW#Mc|bEVyyKn7)B_{n46#{jhqz92Tbj)28S*FlfX~`XkmT zF|ZjFrgk}C%N&KfP~eH|3!brRgDP4}bbTQY%Ym40%E+cS04ikzaF(eD6GPjwBM8-T zAZBnr)FR^f`(<&D;=`PYLNm}+fZM^KscmXt6a5L5Ia-m!DKpBZa0#&4U!p*q}hc;JuhvbQeJ9E5^Ih%&~v_>89eychTrRejw3c}{!<^;?+Gj~(9iLL z;p*!8u_Vf``rb8lQb_iZbPc}?hpiFrA>yA8_>8d(zMDtN0e{E&dWZNa(Ax086sz(s z(`Ur#-QW#A;j3=P=N3-8pa?0b>BlW~?#Lh@SAM`jy4bWoIF}0SC8-Quo zPorS=mM%z)GfC+VG4}=!>>foV^j&}R;w7wh1zoK$xKgnQ5~IPUOVze>+9aZ0I#wSl zFDTDpG7SihMQ_&Q_eElRQonEO0=R=5TdVYCa#!7G4A?+< zH&ZnWng)8vkkrCES`+}+WETfV3AmVcR1tt%xb^IHy?apTW-OpdLzJ`xK&jCV*J>5OD1aP4O70 zonX80J!7ahUi;$;sNYXaU(V+NeJ}jhd8nEAx%1aNbi%VJH^CDHf?xc=s&>Brd%TL$ z?=?`pNv+w{h{xbw<{G$*9HvnDDKeJNuxvgAH3@}#(j5Lam97AINlfvEnA(bco#aik zTNQ;-mKc^P#?_i!t`R$!MP48JtP6=0f;h~hFRJodO~kCg-fxk^iu?L7hVV4pLMJSX)R|GnF}Dx4j7ZW0Vh&^;4)uPf@EuBc+e`o!sY| zGH%yF_tUi;OXo|9vNrnsEYm31Uu5rMgCMw z5e0nMTecdquX9*nz|jen8 z{zf4I#3%^DAH`=K50RSqpjYzlgoB(t{bd69?s%$1Pr^3PT89C$RQk)yP$`%f+opTr zIuh$RJ@Q_qb z#y7Ai!|i~nmo&T58?XkHSuSX2xL)w_NyJ+C$;C=UF|aU#HjTz9sKK=z?fJ)#u@g)v zeS?A;?`u2K@NsLOU}66EGfjdVwhfGKAC&RO~7!7Y=L(3L0FgN)+JBi_R2DrO+!2Os{Bs6=o_u{kW#BW z-*C9HB7V5YBS8oG`?YyN2bab@>C7hd@X{T2S{$Ap@b_FNHRhtbw8-rTw>hq=`)Kr4 z|2WJrB}NyE$M@x_x&zKfi~0b_Qtvz^dfUxpyU@rh4+Pl=@vh6DV*9x4wJo@*i^Gykln~swUAkAt|w>Wa%w(`xWLM2Dv|(+jxQ?l#j*DBC`TFOWEushHn$(5~ejWsk z9y}0BCKV1JMK@~pkjQ@Z`E{hBb-LfSpRKBUYh90`SO5M;3YVEeeU~kB9k{{r8yqIJ zUJ+W3<#U1pN3;eYa?OM}uwBGow+9Md0EdLmDwb+6CW$}{&mz!;2sf92e#H9J1UOq{ z06i1=j8R>VY#AgsJYNHAZ_;(A#tPVdP+=bsf%Y6*1WusoXTi(?&L+olbumyk<5a<> zW~^tSpU^}+&STaTKR>VO+}|y!-~&5KS~cteM-Bg1bo1j?XrW5F*+v_sUT$HnOu*-5 z-#N0R`WQ({-yKCn3*~lXFbFATTi%a~xb=T>>1xVj^YGM5JSB={)k1SH8RDUmPFdah z!uVNwrP#i23^&~c$0?}--}k;sn#J%waC^47UU=~&^-gNZkj))`A!f}O?#GspFRPn% zfoh&iBrXUI#kJRLGMsX+Tl6-bzcs=4JXsPLGgBa~HY`#%j0lWVVD@D*Ha`)2697h< zbW00r1?*$pmc?iA=7Gk1%1?w^f)rz=z01p<0Ya2%_d>_o?Gi%j^T8XAif@^s^Fh`(&7Fx zsMB_}&Y&kUPfRQs$kR8oPPjQ4l_i(Am*k0L)-J7_w?EX_<+GkrqDyDhZ>%N&9_V&n zgZQ5)QtXlq;XhY+CG({|x6F#KWx?pDk8ioVw@OrWflH*&m)lA|xxd$-Of^oMLA)kk z(%e1&65r#O>hr_9jULtAe!gX|8vwD;|zlHur!#>;~Lk1Tec2-CZ3e z*S7%aH>uGOq?Nor>I}s}B8mljP$*`CkKyLCo3{u}orGX->vzjP{% z{C~^`e#9WP0{gBkPNw9*&Q)8+?^nEfa94#t&w_E^ye7oz#S-}7za5}x`>^9{Jm2@7 zO7S<@YMwSNTv$nT%YBj?m94_bRb$QS2;pI@qVwgv^s5a;*=yI&^%ZU_1DHF8wbXuB z3M?%gBM%7-YAcpZx*SROC|lEZAGGa|yW3BSJc(emNqOd=>Yi z$v{)GJpnEWgg5DcZKGSUC?K|oMQL^iSXQBo_)^1SwSBPoa!jFVGzl0&45?l3qHArN zc0ILW;5Z&x%f8D_hYvTpB1=n z=RR&=z@XJHen5s3{;n*}D;%$xHo#|xDV7Q{fQ<1mhJ}I-xF6)u*zaY2MWx%+2fKX&)s$`&-#XHWH6PRU3bL5Z3R)0%-LcPv`C2vxM?&?f*H;o zl7@1=66bE?;o*0KVM&mudVrdahe#~Tto0Vop-TL=UVjZ6f?nf0M1m!E;%h!B4#qaM zq<^Xl!Wj&Y%U*@H$g(XJDgvok=6ncOA!dNq_S8)&Ehk zlpDC(wVK?2X{RrH-kLkn1>b^y{!E(7{S70&>H6(4u=yjrh&Uo{yqO}zc>ZAK&cUu% z|FU@MXAyaY*@)u_T_j!yjW5gK+7HoDgOnhcb5M8E#6ddC4wfpodKUgK_bb+ysIAaNLcec67A-4Jzl#rGvX`O_$78`|s~# zP4Jk}V0*LhpPqbSlm}T@9XiZJB2y|hLLVBuhq}UD)SrBH%JND#ub8?Yc$HH@A-XT- zHuHEFK2y8&N?d_C%y&l3`R3Xr^&+Kilz-1KVOzrx?ST4s-R^}Lr_-&oL^=IKYZi|Z z4~wIk^ttwTpVgPyYz^LcU6zl>o#Myu;d9D^olZ zBJO;=9rZ8IEY;Jn<%nYGbVWzE1AUBpz6q|jq*`}PiM*38FMUYnF6h;7NOf!4=A}CD zy%g$B9bdiw^#PS~EX1b45tZ^XMDUD1lunxbzQWN;t@bUk+SI4Xlw>%1Q?G0#Gqv*f z&$HrQ2pfFKz80F3G@iGA#3^xYrtIHpPS6)n{%B;6Pv5nRxR@4^?7!NMncFCOF`>bQ z>j(cU*v9x|RXW$SPn~i=M=p zjV96;;B6m0=C&Ca@YT5OCJyE5)~N(^CfvK+W~Xn@7stubmt#dg)NP5ZMwT1@j`d}) zxLv9^^L!UF1Yd_JjNgmCzk80@xyJjgPxRoqm)?}3`!8uqi>s^f&PBhpViV(E6M?XE zBgj7`Vc1#htoTPdf~|PP30{7ueM@xS_%_1z{Kq+rMqfJ0h(S-}0edcH2~BOlNJ^7* 
zNwK5NG1VjftW2V=_(fg0pFEU7}GVO(4kTfWvJZJoJ<#x0xpgx5aJ^%T4=@+pz2&_6M$#dszu4oS2 zpBx8%n#YdRn8J?l)mr!1s=x2{?!iF<9sxLH=l0VM&P+b=n|U~`dsOq+6}Am5BB)4r zb5nuoI755Rv-i~D&t$Nae?oAV)+_wD!_rrUgADz=t=mXT=Ev5nQ6Z=A78n)@xRrT& zW8Q@L-iGD=9uMsyZe|kQUev0#dTC%f`1(bJz{4=^w9YJszQg&Lzf*VDlL@Cy!wC|7 z7iE=`+vD)jFTOjIFXvMlE8X%2dREU4k3#yd%yCs)Z)!^C&E@8p^$yq_z(74L?CRq& zMDxbHufD|&x$_d=AwemcL~AHneIro@yCV?(a zhdt!$p#r0jyd&fAA?)#^St;(`a7C=HqPgqAT|WXKF;&~^CXLfLwHWxNk+=@B9@Q64)eq*Iif`TC@HWItq%lGA($kQ8YV1t}3!@V)kPp7WdY543CC zBE}p-7;}s5mD0GdtW zugiawL`|X@&>e=V*_dhMA8V|HNb4UwrjZ4;q_-Jz06Zy9=el{{DFX8W&USStO9FMq z=Oo0k|F)zp4m&$_`fHJg{;#Bzfw|xA+?eIrHW3i4SJ=QC*ao2=-Y04}Vwydz9 z7g`)6Z(CYKz9rjpDr^Z#L}M_q`S<0%`e z0(Je(XR0`Nl-P}vo9fG~X`cfjs^%%?62Q`JdOsTX8SRVOLm{E6>tFBxjpW>MC9rC{ z?Tj0TnV?$m9^?rlh{0_~15HACGbMOar@k`8)v_>Za-Tl2PE{UfpM67vn&HOXnSz+I zY;}i3*H1geY$Z2&Vx}#h9IH0F(!IJo0E6yI2T`nQX`0eaq7Rq;oHcIQ6&!;ksk%P(ymktp! z^3DYE)}&tKShBmXmbU$j41VGun)fpa04Kcl!tHvicoMcG5b;aBwGom<_Ar#fzN|Kq z!Ilhrw#oh&BqV3i6KMfX{B4VYnaHZBu0(wDZLI7Qf~&e1^mj{VF}CQqYOoPerVQFFYPsmw;--r{C91BP^H5{crf+*}|r(m_? zEN&_&L#V)*&}cC5)BX+0_V3$e(vFm9oqkCD4o(n71_(@PQ{{eF^`I3+047X&&R!D2 z37)JOP_#rQ@N<0$KgtAy=N7cU-Rq^TTM}C^WAREf;uMsQ5U&PN= z++X)8vC0y>M+kBA5gx=}@L6cZM5OhIM`hLJdD2{iD6s4v3r!cG)DolFe6+egDcROz zuVCC51X)j6v-w3fm!X<_wQAo51VW~n-kdpVDCN(s&-3V7GJfNnWpMuf>O7CE20~h? z#4MXx0C-8vP1V8zFKLTD2!12oc(eKwwE$`>-CY5j?iNUD4VJ_vc&Gupdy22{yIGA< z3L~4(l$|mK9s36BebJIrz*j_&8nd{4gRfxn{%}KMphd600HK$TSq*Fr>ZD3&99Ruy zN|qAJDBV}G720fFd|}6)i6jd{r-_5ZTJ>EH3mLE=+zNP&Qq9onA;nO#WJ552`mpfP zpd(0AtC&X$^Mq_;^dV!;ZtN@CKGHLAi&v}?LuDNv^d|>D-~%?go$cDtPvl7@R&cvL zO9-zA`t)9Lx?FCel#+5;e+vfIYxiPMkFBta=Gq!H=1c*&=B%Pq*!Z{q zZid#!y+kTnZM}KNdCWV-HEUqbR|SKu9Ua46nX5~p9$@DuPvm~eQyzMt-92fAIB(Bd z$p;?$Ri3B*N7%XrV8e91Mz<|87mw$(&ffdZRScl#AHfd;h1^5Udi zw*jxev{Zu0`0)5+-BMCF6l1A^#iT!$A%qjSL~9opI2TCZiQ~`vbqmfV_SpwfG-R5b zI41v~mJrvQ=DEt8b z(&%$GK9tK6>wf`!C`+l0?mZ3|#(R%U*-oV{V=Hv7EIO~wSA<833Up3AIauBrs+c;m z{Ee3}DT`U>d-m4>ENEFRh{YSvReDuhe2glV1Rt4R+2QMMQ8+TGG4@yNBDIU#HV>c| z(eYFJPx3x%eS?DE>j`QZQem53dKjD5%T+M!-F#3ksB;;Hca3t=(yNmZR}Rft)h0OU#d33nsNkOc=@;y!Hv#0r6$(5 znTdq%QHWAR8xxznDG2GnECqLfM%_A6Y566=X%^jqR|7g@ac&VB|9wi|^CjGi4-fT5(`-)V_W`^jE&o%eo^|rKFLuIT@L!QK}P3 zi}kJZsbbeJ&Z%lZ7vtfPg?&IeVoWRH%;8~7G(O#*{p07XQviIyk^h$QbL+?eb zT8{;HFUM*MD$xfI4s7{GEHI$!qr7a6}znikVNmGxoJRng2~ za5OImpfN)ytXMUTRBv4S?KI`jk6t5+ zS}lrj=o0(zKB@$CFKPuDtYrVTC)39!MijEgJr4Ch`_jT8n3leZ<2`&DFMl$Q$-+G5 z^TN4_0o4sgVtu24er`0^KTn~X?+FldA38>iCU$`j`C7*#RoJukJF|A^(b^^&HCwEq zM27GpsZ~Q_N1Y}(K1e~*_S7TKQ9RIA-88(%p7HVC#^%Ul70jB-eJYPss#@EKR|cia6VelswC&E?C% zb%<0{F>P5DC*#|Zxmw_L$QKL}xHfnwOFNLz=fJ&F-8|8UwTH}{Snh|vj_`UfJp}q@ zAiFLPlg3m+pV#f}_g>gr{h4YnZzU-FKKr_JenWz5aGFZv&(I=bjww`PPQgCnX5ZT2 zNUXFQ=4qiYrC-a=;<7E%=l39~G9Mzk^|8<@sb}Ouu-Ma9pJSKc6stH%b)=c_1XJK6 zaWOm%d0ufXe8lO!5q-de@m-<6M7Pcy`fr#sDhJ_LUlT3+E` zB=E?*u=;+-Rluh0V0$bplmYE(@C?ON@J{-b7W=0NweA8$!;WG~k;Ydf>~DGpEJcY= zqst%PkvUeGqi&J#SaU^Uo7u~E&8dFAH(dHUGSmN2S=6*3DHQ%!H8>@Wl&#|};X#@V zTEGtm1sZseMzh!C)~9i)Cje)|AfVbM*2G?)ppW?WS}8UmeNs+rRsygE?_F1>wah;^ z!J}PN?$39T&&opoU{eXOg%=b05j%Rrksz(I-$a&*2XS<6S$mI<=Po}_;zPWj6Lqa) zy4=^{GZH<&Jp)o>PPRu_WFH+pP9WbXSGQo|_`nJ-J=U^tpEj!P@#lulF`my;$%!$4 z1NGrs_UMCpRl=o4VvmA;}z5m-N}zef-A)2wo%wCL;cRZx>}e6k7<*r?ts_% z9j|bEg4gA#mE6|b0g8LgQ4kZbXgHI?M8EHX%1K|Jaaig?+&!ET9K4qDy#(OLM0iTa zBVCG`q}!Oc`Ng6#Vw41#4lwX8VFjTpG4icMTI0e0KrN`{#hPzhKt*2q4>$-VXNM=V z`6|NMR(?b@d15Z?h7nfxDREuRR>Hx-j&-(qCOfglr_CYPZVx}X>A*Q^M#?f z<;EHH5UvQqMRI4WBx&?YbmBG((H%toEx*wV7&%G~h5fOlytmWp=k-NSZ9O3^h{a>A za}dgZ|4yo0`#|FTU?ia&l@z+Fn3w7lsPE^_rNY}qKpo^11Y35xLM6+sb67%XV5mu{ 
zsvp$?;hQ)TDbM1~419O?;kFu*PYtQPh^RA%`g5a(gdA*ft3~FCd?^td`W|k2Y5(!z z0_j$yh{#Hn`PQV_pRbiKm}^|r%Yj*>h2^9r(r-MlhWL0(X>PV)4q=vp5kEd_360vL8Lo|Lw zcKkMm`{zJ~jbbXE-nG=d@u)aGr_`vyq@bsNGS_3dN0R*&xQD)AS%4s}@9uPPTo8r^ zWmpd%VJ}h{v46ZyZ(6K`si<8F-?NN!NEd!Xa69+_Fp3XsbryAqN-Kej5@x^CeAN>Y z&8PXjX}vllE+Xh#UF)c%B=_Zm9aZ@_{42P=hw;y6aP8(Ec6fq`CPSb%!a1RwsaYx; zp6^EOhb*4ZJwGc$cg$hUzW89vf|=r(v(oy#OQrJmP>c0Sux^>D%3|GT?CzCzUzI!t z56JMMa5{}zsZMW8T8{ zDMqJp;_*Rbb%ceF79Uo#iz>JWYU<1KOB{psC9gi_@b)T;9;m6P*`BT1*(E&8W7bnp z`WK$Tf!K&2Uv?c!U%E*sCQbjc`KHSq!GFi4;R-%6Ewz9%c&FoXq_M@6uu5XOwBU~% zsQG$!4D+E8ZL#!8<`5633z0sjR%EBMp+(p}+@<{~s6i+ZYa#jfYOGBW-R>Mmhv+nl z&#GAh0v_%5Sjm zPl$yRce6V}#@ER~Cz9s(+9k-5FJXK*`c$)|51rhU?z>jtpTX0}ftL_Sq0d35eIrCv z0UV1Dya(uHr|wa4hsvxi_Bw-vO`!;*6`5o7mU4?4-QjOzTI2#mS46Of-<3jOFpA!D zF9FtDi-6|1sv(&9F7Cp!xBj4pWA*ccBVD*%@LbtZZrZ2>brjGH1L}^hHdS=HlvFpB zBbysAtst1np8s+LzpA}{6l?{yk6@E0 zAr6N@YLs{W$+D(v^v392rF>#E-b}G`s&1>A1m{o8k^vHHf>HVxtC}71LOa+C0-K8386t=P28ggaUkUX3>!*_hrX{h z50DHImizg!S`^0cQ?5)eINx%73tYrOoRQOfQ>T=i+ek25z>=OIG45N$ecmta9z&p3 zy!rYmd)JlTr8$VA)Sk|xHzuHXl+>8V*PVsCA(H5jO*1_6PqX~;ZtAt6voNu=2vDI8 zMXcIk+u8pub7JfjaX)KhlvqMFp_Zo);QnB0>@>@p$YI>(Z|-ojLvG2!?*Y2Qg0Xhj z0~F!(=@&S$DEn^0bVCV5Aq@*u5{IUZFcc1TgloyeIKe{Q{CtAm?+LH}(Uu7cGXc)v z&ql$Q16?r(rr~iu0IK$zKOT4DDj+%*^^aqjUd@uF(lA;W z5QVj=d+G@uOy~DcibnZ2O(?hp!!~CF%x}2vzyUIN>Y#k~W-0|L{>@|21oCujJONS& zD>Gw@dy%Q~VG!(`9clyglZ(U@6S0DWJ8(g@LkW(rz%^1;^3pzeK6gRElL@fl#Bnyo>GLJb-6`Jq*G5@*`AOqc-5k+()V1>!z9WwxmKNeaW9yCRS zG~=*SUUnd9Slk)DPQIi*mK^J*xPFqRY=qz;*+Aw^A{vo6jV*+`q;^{FZtHw4v#u-& z&Sd{v=9pPfhOkuEzr67)Z$U#n;vDJBYK%fTXT_NJ$z2>}DwE^h0mKYvZMty`nd*mB z+t_;S+U*aL5`^O`-!_8B^c~ zVx1KnVUfO4CPu5$w>nCpqem(mrM zIF>KB3hxpLFkzsUTe9x^;T7N1eIfS_Ai6p-ipEeTL8jayKzK+0^i_8We?C^~a|%DB zBKqYBp`F{Z`zm_3cM$?K7}00AfZ6ceH%RRhMsr0?!;2s2aoVh#e8QuSve|PFqInbx znJKtBaEdqJY728HbX`tcu&wLGe_cOE7LlWgicL zFQlRtc@9|#95^0|)SQyx6U2!O&u4oOKi;8X;j#jx(c1IJaeo%@`KVT;XlMO;uUUBZPD*g+bqbAp1-(^ zV3l5v4&^T8cDA4i4^(2zPA`fdbusvNeU%I_>2rCve{`^Yf_U;qDFinTP8TaDm#U{? 
zxt#(oI0(l>GC%`VAVGb$6G_o>i4lAWutnyBM__VIh8dCls^xSH>Xef2TL#a!fi^uE zt|VF*9}>MFXR}BLn*1a>7PG)wm@3>6(e#IzX7%4Quwo|O(6 zep`^W)MPN9-GuCnOBZ*0Ft*jO{2caXDg+I3{f7pfIcGmx;8WCKc}+%=2S)~)7Gz<~ z#Lbj*fsgUMl2J^pTu~P_7l*7`d(W|32#{g)t91T|Ka+Y(2%dm>+;>NuGz5#AbVA5RtnuAo3xQ7^ zFge++_DAfxw%g#DkH32SKIW-Z;w4e zs{C9^hJk|9Bv7exqva`J$KlEWBdgT*`E&BZ^z7iH9_$@|!?K@|^R`KjxXugpb*@QB z=(jESa_3o3uHUv~?i8$BOCLT0Yvm54t&WrTCSV_0tJ4?NjCH8EOd4Me!M~}njj;L~ z&(!SPYm4!N!L0D^u56zOmaSD1ztCV3=2A5(vnUGxFRGeSA;?B3>`QA_i6<$6AO~(^ zwed*AI+ao_d(zCvlv)Cb$evBJTUuDAnen!u$kvI7HeKSC`gk%>YCW!wVb0X*qtL9FDB37-gNx9^ne`r>O}+WK25aSN2MOEL zs@)fK@)5HeJr3=d9QWl`PhB?vlQaVGUL7uMj#X#tEZf^jaqaz~2|G6K?Y*({Uj^}q zse=GR6K)~uE!67O>4J8;52pG*f8J<)5%3ArXHZtxHqggEG={Tc5olAx9@upABhZso zCZvg+fIsPpbBq*SjcQ{v_ssGwRr(EXhxek(sd?Ek-K<&--LGJtGmR>wPa&u7S zu0L3}cHn4y^RX*9NIDNO8iZ8XHrfnC6sB(XLyXP#Rpd;odDMp76spXUsCfz`Ilk4R zn=kMpL50R>qbUNrSgbh55o8ufo#Lgi{V40&<8OTb$!Ee)n{O*N-~0UjeUhQ=m$1T+ z-%iwD9N_WcFHZUt_C9c}Wv<*+#50i;f=Ps06xgv`3TG1mt-nQ3zBFiednkoZ@Nfo5 zP(!OTH%jZaJGAc}5A4#vaWUh*a7NX@OvsH@W7NVh64=HdU_A)G>FoEfm_#ELbNVZ5 z`kAtrI|iPeG+GB~2hI=9&U_n1jC8x1ffhAPp&Ns?5=gf1F=JEop6GF}ieN8XQ(R8D zW??D;4m?ZlCCPB{j&1j(w?tQR<5<5_`QI?8C={&f#&N74D<(-rnz7auMn|#_LxvUu zfmhv~N_(MF#|O$<`__Xr?a2I2K+jd66Vj$c2TvUZ?X@St8%^bit5njnagiwv=^`LQ zq-R#i-B=%DtL#O2BCPXXeg%UHPG#Vny~g@)SU6QCI7qM-QKN;U*bB7V&`9(faT3EQ z=G~PR%lVd*oFa9~T43%S7q78Je~?$5m2i+)sz2j>0TJ`GC76ID<5h8r|Z7F40$E2!diB#-qeT zBQ)T`U|Bn7c)Z8NwHlNA?CkT1jK#RvCf-2AA{JiO)2Bwl5ZF@CsjgW5?4g&-&)fa&Ckm_F4Ax($V(Ujkg9Ng0FyrVTGTY3ZV?gaS9q*2u<+ zBEa`wCabNa7Lj5WS{e66QS=uy$l7BrVzXF?P8X3VMZAr~W)s(K{NRc8t@4K#lD8>- z&q0P}r{|#Pix>SmXBNqq4G*`m+VoRI9B@#$eOA3G)=BEm%kJMldQ8bV?&BZQ6s*um zosky99K}fC-i*D-oi0n?8BFO{DOCEtu{XITa|NvwKd_k1hKb|k7t%TI7Hd3*j8S01 zmROxDV5L|v9&h!;VDwZVx&V=cU6WDQ>#Kg*-!P+cxTVj8a(I>UbqFaVHZA(^xxh9W zVXV?NRZ6WCbkM@rRO^ZB=NkJJlyh>rf?)Z4`YVOGqmiz1M#fLK&*AqTO=pD>Sas63pKaoJ&_gpQ&% zidN}hbg|2G*f?4Es~3O9iHS>=mYW1ahnR_64Q^37Gv4F21+%*yt1hbn8&+z0J%{?u zDxJ_X@J4G*qUJYc0-@K>H3>S&CWyibjROyG2mBfs% zNLJ*y5d!$hk1(H;87?>jJ@)OXwCx=0aShKtAV)`HkAXf^0rh8pqCgkx=@TXt zDIE+=!=1Z!Yk8yccmh`ys(zo^H)JV| zrA$YO{|0q$_LO@*`>GP$8e>`0R|q`RmSDjaL&d;XM~sF80?TrZ>ZvH{W}$keq#M+| z>uIf;6wHA|E0eBpWdu zNIcs7J&1GMp1)Whw4ty0;b0;y=XE(06w8;O(9ew3Mbw+i#VhO{kC2&KxqZT$-WU(Sd>oh6J5}w};f3Q(U{DZ_S8K`@K2k7vNp*mHHQC-lN{lmnm%dKSmx!KxEXv%y3bVBMCrk6^}ATk?b*f zh~tpc>i0qyggnNDopdhB3o7~7@4nsJh;@Om6HkXu{!(I36x6wa&Uq3~ULxyn5i`6G z@bP&jB!uA0}#h*6{`t~_zhl>;NjXMl2LTD1(*}IUWWPe zQVndtyirILMp5X=5^gyRMN*uCNUuBfs5cnTZL*v* z`!DLqoHc6JSiEchq=UJgsuw5DyNEV^sn=T~Fbij#=0VyIp^h%w%DW9#ibE|5XvydV zL@Q?3F=tQt?!8v_$X(A{v-i)Qi{b*MT- zK0D6x+tkkKoXc(`8N!<>DjRdepi^Reiegue*H#KxjYU`VgKA1mlV<%zsVE*hl@DrF zw-(v0UX=8r{DFW*a>!!Mc#x{+RwJqY8-Z_QtGVb?r4ipFnXmCJ{GKxyOCt`-jnixL ztsG+!eL}EXt;8zPQguq(d)WO6BNGGU1W8uXppql1NW%8;+(-I58V&KH&c8r1;ZwJ2 zs?ai-29dbAj)7Jk{PK7j9c?ygC)jelYhS}|07C|aM@!GWR~9$#Em=iR^EO~cc-nAm zpeW#%=*6`8w-9=`{!F2lt=aDC#Usy?lYXP%7#es(g|-6a*Dqr?>198i{})|?kf?~x zMAzp+*@vJY=^tWIl(uOwij4$n$90W;~9`Tv&ex!xR4zTMl{sm8WRm2{Cf&10b;Q9)s!)q zK_w*(6&&)fNJ7{%LC-@4Nd&@O|E_~h^|a;TR`LLbNJWzY+=%QyN@3{b+4wSM2LR8Y zTk9u>=@{^=PSX^H*%Zs=a3YE({Yi_Lhtj+uepag;Q$EQs{<9K_!riC>{BUd-Tul{G z`i(QS2#9Q%_-{PO{crjsH|-j@i`ietheDjhXrS7dMl{y&cV+NJ+nX`9m>RKBGj94s zpQOf;M@@Z?{qzFPgt``;PP%*qe0Un+mG-+`@>ge>Nj>Nu%hO~i;XL-t!740ilK(ND z`9&fPNl0rA&%pxtwv(XIg9di7#NP*h!FVq0-gW7)1>sEk%R`Pi2cO@6hF)SHQC_r< z2di&ir*qt)w*@K{bt*O}P2vc^WNJd$votS=A@Qu0GDrzRsbg7IBJVf84Gr#d)rK7V zjj)`4fS&FbGw?TZbHhROWM@!|K0S_{0eRX^@J~4R)h3V%)QkndolAD@g^}^jjnIRB zTFD{z28%OHuZoZj3oYj+0TDam)WZX9G@++1n=V?;1&@wJIS(W>JsuhBOx4>U&l0bF_Ok 
z$4s;HC!K=a=cyAVm4v2X9f@S5thnnFo4?=LwfL!QSc9R)BdUAyD<}xE>GS(vHEGTD z{P8(j@5y*JYQz+nEqq@_wUB7*41#=!89gEItrVn{#B|3E;cKv({=C=$u0&?zeVeZ#`E*omueS6v%?#$YFQT9%fgE+Gp|+PVrzpVj z`?$mw#4C<969LP^z$|y;g?j+Q;lisI)m>Nse5!S3X7#_6xJ{k3-2IiUQKY0 zU|6$8p+29P*(Bbo1W*9k0g+TCTghE4#)3s5ZsbpN_zgChtYy(%(q9~I#6;x6KlD9W z7CEF^yZKkOfBK(Etp)DeRUP{e_=d`+hTSx^>MCknC)#;7J_jB?BGzcOJJX;RvCAbG zC7DYbp5!%h7xX?q2yOV`zoo{{+hP|NFbkfq9)I+^lTR# zUS)0S1&jC0!RbcUEdyV7nbU^}M+~#rZunA?019ibx2$?~VDL(05ugbgCBzmy0DM0! zmVTXX^V&)RDC8Cmh2l4Q2>5O%ja-2gmx%1d1yYBLXuXx<^b~?fYfmJd$85hb`WQ~q z<_XD_{vL$+QA4AXcJ$8pkr1kkUXfbdD<9qZaLm$v6tjaj5_Dvp_yFCcd? zYtg#4x0jmNq|T!t@VC2rc=h+w7y{aDah@DSmD&Y621pBOA~c;aVIdMit2rCyfVR}e-z6lZ9 zG*IaivxOJi^s2TMkb7*UG8=dN+DQ~*T!JbSD8jxE-1k{Ps6=(EqB`2D_+9aAEGcLE zQ2YRrbp%^Hmq37jn;TQTh2dzpPb7tpuG7Q#duJT24;DdMg0TWZy1#(=K1D=}CQ>B* z{_!ShxHPb3S}g=1IJxsRFw9y~s7@O)!{Vc4U%)2%GCG3%-iWH-iF&w zNP{YmVLV(qA)Y)K8_PqSKZ317wh1|RdTJlbr3CmjxG@;LL*Z3#51KHwOasMhC-VXV zwFZ80Kl;YeXh-xL0NPh^*ynIL4`0Z&xq*<#{OHf?DY?wQo}s^Jss*Dg+B}P?1g*kW z+55{2ax~kzl|grUMq^h`lS&b0<|pO*P!x@lxDD!7?CG96l6W<_oes04vSNN^iN*}S zjAQ7H{ODp~#aO9?!n`f7c*yuY9_BWF%o4y@EjlIXuTmX&mY-6Une@EQ@TEd_i=5*3 z)IcUnAz9lDigg5e+D_$S!!&#eRX zh-%$NubVXF-wT@qcz>um{^ZsBnx|B*;l%UM8&uG%{X)qY`;`+>L$z}0Xp41oi#1X{ z3JQxZ*Kc``Tr2R8-|7DDpuuRy(jZ95O;l5mLaS+}?!N-Jf%b>I@+Gi2$R{PLYGmyu zL16KQNHjxWkj{m*c2eqr9L76GoC87u*{*H&jaZbL zw*CK;VT;QA7iO_P+53_v##tyT_(jMJtY_vNe;8_8j5ZvWwFjK(8XceUJ%gA{wjJ@r zX2Px&%k-1DXO9uXGs~#QyuJFx5W!V{^BfLKJ9ps)f5C_p(Cwm4`;vPwDm2e~bX^X< ziPV3Swq>mZ!Qf2fZRzVzw;M2&v{{u(!-*xgQlB=6_3b8YSH>%FgUw=^1Y<@5D{ZA= zOUeL*6|H_eHP?_y#eLzs6R3t#dM}Dm4*e?qK~K_2d(ba;C*|2zkI!XHwhA|^=!X`$ zdk$7vWu!s1?s83rjF7aCn1`O_AJj(Qkc=NBei7%1nK^}f)zk(*M}Y&IookP2aI0s(_n9h# zO_!iYT(~WXDJ}9J2I#(ls2HqTQDw4qOay<={;YcDxRmOVT4v#+-RDJqS6__MgATLA zIV+gXfT4-}ST4^He3rhHlqBYdl#p-^x@H;Gn`t7!V#g~5pM=NkVi4i(_({QMG^tei z3RIM_ovnYrkWCU*w6e8j#tKlah40S6LoWlr_;?lf-m?&xTNda_d^H099uMt!>>_$t zxJX}i{mb9{E%b_MSBm~g&prvte5fum=1epAE4&N%E!pYL6fU5NIJ4}PaTUF1Wk!oC zAj~d}QwUc<702n*#dp=YLrKqBRgX^G!il_SZ`tFrg0x)c)}W0C|FkCdzdsSN6vVM6 z+KsB#UOgYXS(n8a9Cw9IGapyPQWnzk7RjuS$5L+DulB@+8&DN-u>y7))$Ho|hdv zWoDZfwVua&7_Z=%I*q@onbN34M1@^_`H~gJiQ@(fqZfHt z%d}ZHV(|v^_?&|bLRFOF9Yl6giKTiPLKC6{Lx6kn??uR4^;2;6t^vBvs~l5-Gro8- z`+su87+o<+h{L+*1O=qBkDK3juyVmjo^cB63jLYg5sq$ z_FPea_1t`9%gv;1z3ChxDj5YW?bWeQ8~HO{6K373U<2o{-ny-FPzj}|gs@X2I+jyhU7`Q*l9fhvA9w0rZGu8&d`xT2F#!AI84yI>jRlWZ7R>}Ld|Lb> zmd5Z#YpRsjNh&D4aRCrPi425K`L*CiphiNJUY8G9mhEnwDQ6rRFOOVjG9L2JDDg>p zEB3mo;3-(6l_xr3*o33-w+FI4Kit?jLnWlbH#v^dGD=WEVVRl_4RTDtE$7bwAkJm<~C z&_;Mo>w>|_1{~C_nQC0zWKw#h*qH6W{Xg`XJEr>u%D=q8D3Nn zRe&J>G15w}XmD)A3sKQOtgk*B1?w8uidIZ=o1$_d-Gu8u6Jwxqnz*Pt1BKGCp* zF2X8QoNY};d91X>J~c`C(+>r5?xS0BNc|v4 zTzRr+r0!_?;(9btPum=a{mEIsQSF8h1-0;nlLeY+*!Wo9n{$Ui*&axr}#p?&d*dqf+6cb%b zJyH@uEd@G%u9Vkpk4`IQ&XP15;~DM?*-s0);cCD1Z4dp|)$GjwnpHU>jLXn!v;P7@ z`gz8RU$BwtlrLqq?`lr7%N zphA5-!@u17Q0wA)akQu4@)25phJWKfBMXk+*qE3P&)9C(c(zjxHVb-scl7V2dbWJA zr^bM{s3xz6M>3q*omk-`nVd-qp)%!+YE%8UM;6(16DhCd;UlVHf8gH)CaMP}{bUCs zmWzpGwb1r}AN@J2E8jxhn;#aB#M@7zNNB;S~MwNXhJO2C1dRs-$yv=Kb@W&c9)8zj>tjG&nlc zRaCaMQ5v3HdZ&ofuo?QM!)28s>%xwjiL-sqOli80_vK4b5o4zPlEL6=Tf8!7bm(o6 zz_{Y5cdg{E7zA#}@K^ZuU(0t2<7|tgu~rP!TfXO)N_x2KP*sI9jl#W8Z*|3Lg@!61 zAmEf2-1w<$!*9JCRZ>c7BUxfiFv&r(K*1d=jp~ZHvTMvvxM?&pthKduhDAtNH-zK; z%Oh`*u&7fX5O+wK=aAn||2&dthR&@3`Rqz)o2o45=p?$8Kt|1e`68M^A3YN$gwDy& ze*>~P#V;4pN+aA@4d~*TTC9lF6A;;(B&8|6p<3S^#(8*9do;c;mCrby4W=LxCkXp` zdz&0~&C^MXP>r_D6);`X6>Myn`-#y6mRG0Kk^`yZE!U`Tcc?DG6S+ zUe?si%*^8Hyv^)DqOrE+^T*{C70le+xV_dAw#}O$7b2RR0SUUOW|FZEk{`}IdGNSm zd)p4_qp+MRkR%0g#{TH^G&48g*P;2>9sc}MAD^_zNyQ^ly1E09dt8d0t$zA+41UnB 
zsaV9bmYeH_$E&#~-xKET01d{*#&+qm_xnBC+?)SJ6^+&E9$K%*Lpa<&aMKv_i2uF& zT%Vnd4HGw-c}slij+Zvlb1G!r8zpmU;$314zrAwe+Dz?tO+KVCr5DR0Ki6lW!SePs ztxf#vHZ>V8&c0JSc;7sVR_d4QmH8jGm9CwH3J%|ZV ztL{(CqCCYS=15^wv%tI#xr@I^%^F_9=K1eBSVcu8M<*VaBCgYE#Cv`Ku?DxvWLxS1FFuRPfl(A z25QCV8^)Ao7&v&*7!-%kd#|IO;^yC6#7;|Cypc%q_deIBsN z*Ygj@$W01R%XB)U6z|e7v-0Dl1#~w}*iQGh-uf;kbP+uEYfoC7N`8VN%-vnUdg^IQ z1Ge5;4}EJ5XL-XW$FfcK#A!LbtM<7l+lU*mm zpqdwV#k=P`0rtS=#3r zPJmLKI1rA~Kw%Q6M&qMV8o?rYh}JH6NY74zk6L?*ifbxq@$j!?O7{-O%CSY|ZqD?MZm8Rq*8&nT9Ac;;9xVD*b&ahZqqf55I_}`ny7VV=w zic>vgEBw;@MC@%t(QgHZ^O!)Ai_qDI44Tf`@C0Ek*HRqw(V$50yzh0JW zVq!)<w3 z&dTsJ@dw1mH^Hy1+>1x{H+-J? zj#K>`)ysYH-!sUn1H=19u`z0*w;y*&)Uipi%$pyQ)*?2BgtmUe6s)VOgrV3$W+r}Q zt%l7ShL-(FY@z50Ym!%jI6=xv4O`t#D&KRsM~sH#rnpSkQ1epEqi?y3)wnQgbBDVA zyq|HrHDiNYKTu)gqk%|al~o-cx(*Ykd#!Y>5Luj9JU?hh2%UuiD#MC$tH6}O=MPR!myFU|bMH9)6klT`XkwNg>Pv5k% z3Rby+%s`fyJC6&PDwU=u)FnvDv+@k&=!Ya2(zLL#Q&s)0$O=h+Yd?r6YceI;$>7%U z%Zj2)Nz;^5VEYjC_3Kah1|d1>yXXrxR%10^ujS8=zKWBaw(}3{@%11&L&bL2%fRwfvUDDxxzxQPF2_#eMJA7u*Whj(UkOI&->HGprXCa;f1K z9((t4Q1|4{-cy;qLZ$QI>y4fDIh^0z+r-8GpLb*!c41Vvz4&)^Z&YBm@fU+QehgkN zaC70!U&0>7Yg;nh)$F6I7<175?~{-75aZ=2ig(R^D$dVU!K*obUi>T4#0Z+*y%(ZC zop`Sf1-RCJylF!5)t!(Mc1UcyK7BlLQB85d7h)-gR`~Vyl;L%ggfogg&ke)T6{(=j z0}4{;h9zGu&`$ zbw>$T=E0;|;(kTYdztpo+ZnZ4S@RHRH-Y{{;%@8a=pklpl{U|h8=w7OX~Ixuc%M=C zA1|Br!pWs%Za(gP!YR7JJ2BK-4+hk|SJ#9?I9m#*Rxzb@Zic7QimO(h*LlkFn+Rn6 z@1((qx`px58AC&~*r9cb=){X^tssajGw8Q<_8kWN5W{fksaelQ=2ep(>i4i+LaFMd zar**0eEL5+GOA@N9pBd={L9h8Rx+=vck@b9SW9L1E{j^J*91A$2gYC;o;(c0F~W=W ze|kY`v?{ze6B333iaf@g<7D6Ce!DS<(sYYz@%X3j2o39?3(5q)s7Fb!OLD~V=_b>K zv#tQf6RL#zF8Vz=U#h1#Q^&tz{4xsP#6*{9VyfqlRM2|-zk#ZWq@jtJvAB~I?F(JE zNAYa=4AIX^KjQ{Fv6h!JQhcqnzr`lyWupHXaiza0fBR`9zl>}uCbID$^oQogmGq6x zv*MeQH3#8o2gQ3E+bCtx)~7FiF&b8_*AK@}%$=m{m~QN41yjv6iEd8*=>K*4<@R9e z!$d-_^-U%WqhaIE_izu?QdHf7{=c@aGAyd^>rz8Yceiv%cPb@Fi4rm(-AGAy2@KsK z-JQ~cfFNB0N;5RlHH7e9{y*WU}ay#&eRgD?@^?J}s`T@;}g~C&3sya--vkxs1 zfiH1nRB9GhMt=TPo)WiQ)o);!@>w%xnK6V?(n!}m=Xr@$Ma@koK2n)6PFAu1%gUhE zbSGEPkyYP#Q!!~K$0Du@%FEtf_$=BD^$b2xv7>8fe3wy4NBgGioZeRzpK&#s0ZW1x5FnF7qb}dT75|szI8$;}$;Uw(7ZbtS1ZsE`f5D~N`5VSBo{8XwH5!BVy@6lCTd+(*=M)N8Z zIu6j36D@6p1|V0A0{9vN;x(t$f6J7r;_1}};q#?MHlzgWXs!(U0LTpxglx_?Icb2N zdgO0V`R@T4AP7iXSXj_12yz|+a@*nA@LNn{F>GUu5V$AV0V@hD3^7~?pFe*FOkiXH za7(Uoykj~6RDbox@>%jhk1D`=e&0@;dlTd5(h{|!`w^_v`QyjXyiyTeu!ZQrCQ;Jd=b8EzuGC=-XuyZ@dv$yw6}`` zwrpE%10adP(MhDHWq?$EJ;z;@!~=QY1%615v{H!7#N3fg4>4ozK(hG_rxbNbj&e@b zbGG$OLP936?5R(0N1!i_$X~hZdFqls?k*Q!!A(`e~F5)%okH z;C;5NSL;u7;Il}55swP_PNh_EG+9GI(BjnXdTE)@pJvs6l14DB#-&Iey0-XtZ|St42b~>LrpH_5qe?u*QUr(Gmskn@U+X z8yNx1!VO5@@d{++?zYs~TP}T2dGGpUnXY7*lQVbtAAp^xox<<1#}%1!iF3xqBx@I? 
z^mIka`y@oacQeWE?k@Z9-_$76X_(Pdo91P^#6D#ssH!pW|3bD20OB=Xu%FxPg_*+X zdx_*83|`>0{3S?&?1q`PtXeEIQ&ZU6cE2+3K3p1EEcW7{$}HP0{3-{6B6upDwGoUF zSLjzMtea!n2fj;|{b1!oEd=S9e3p0Ukp-u9DTEHRNUh}K^+dscd7UEW#FV2X~2f8&~)Hk?UqeLMBFmf()zyqR-h`Oe-I z{5??Cy5RUi)m{o%zJs{7*dTE!rK9!{<_h1V{h|vFUtmP6z+X(|Lp(^<*j`6`brL(1onkarN!meihowIOO-Llq1&2CH?Qj(1 z*SmTAWjJTZ&)zOpA+Z~4+~EkUwvi%~Rm6<{54QLXuGh{XNWOwpUkBBI3)Zy zy=8lrD*D3yQ^G{tlS2Ep#dCR3qFsUVbsILFmjQ=K6g;uqlKMSO z6PdFL>jIz2tzTJg#%LxDNt>%Jcv3v+cFJ~|dFmbOv7;g;$R1-*CW}TNF?$j(n0_9` z8RgCy@k(_<9VDzl@I55u;%&vKj#R7*-zuU(OxuI7`S?6Ef4T zU)OV&o}T5~8icmTW{P{XT;)084ZV^$w4-?ul|;woePh)TzM-OIJnBQk#0g^Fv!LNZ zPF9uMJ=n`&OUZZxX3p~8sS-rO36BumS3uS`lkbDBEiPOpeE$PdSqAnlAl%1LYOpnA<$v&gK*Ic(X{u_myP6!>QHAWOQQ$X0n!#dS`}_Z?n#HfzjQ z*)cY0kEi8FgVRGgxkPdJtY}s3Zb&|(FECHkyq@IwtA+V&Gk2i#wC%1Ovfr?yBtrde zf|%{<(%6DG)BmH-358d6aG%u;Zz<@_4q#|$a^A)R98B5{GGH3TDf7LeuBE0Ah<0{% z6JLq>zI4V>OL&C=DICCl_ENG70pHF{dezkDZ0EI{y!kB+n++i+xWlHQs4Ie+B{nwp z@a&Ak=B3AFu5Zr;V%4{&KtN;K=hFXw&3F5->IfL% zdGMe+J66-+RrFv4GDz&@uK=63w1cJUdB{LRn*cvQKbcAv%^Se`q1Ie$2eLFPcL@RN zWQ9&mX#vTzk&zL%)2-Oo2qz~evcS1wAk~Ugw|%$VHU3>Uvhh{NR-`7EK1IJc+}5o%WT)ET9|q z1mon7IW^U&YX8`uUTeA;OStS1q)Z8Cg=anU-Gy>=B;e$U|CQ-S< zukLI!S4{_Yv+&)-Y5r`79LwQ0! zqMxt-!Fp<1NK1p?N?PXRA;QM?rTaNVNQ{2>%yF(}%&KpPo*eTX=H6Iu%eC}hZgt8) znA{VJ4COBVz8X@lX?avuoetc93(4<)Qb>BN`P7&*)dzIY*cGef!L23kb(F!e%IW^) zm-T5+g{n8-ScuO-72=RrVNvy`^fHZ6zvwJu<9gfYq zVN#ypvl2(wz@1=8Zk0*-^$lcAU+Cu3suJ57rulEat)~eu>WB`o`}{Vly5Nt5YVF8c zMuZWBFlFL@lX^0wvL;^s!%o?5IQ=4Anqt3uwBf3q&bxCD7USQxEww~BOQdCfBUQQH z(WV^!C4k2NA*kS@hia1hBvVkm+^kYhv-1&JX=P7 z2>tO6JLEo>zKgr_0EZaVwvd{z)pF>~Kn#W%QgU)gK#JB z<+`}70ncJA8y={y7zT(>b^I&9f(+ZriyqPj8)@CUgn?oR*uXhJZ=*0l*2$R}V&JST zf0~(XtdEaRSVRQxwUeORp`o=bB0iwL4VSm!;o;F-eR{Kk9|L5!ZCBoOlRZA2 zLZ>6@7)rn6RY2M!Bv1JSJTb4o8UMdvNFgN2`XR@xpMv-wUpO~DpgEW!3|l?U#2yNa z8r;UPl6fFuhD`VBC{o(b$C=+^0i%*8iKl`dm_+AGAke93d}WU_S5w>v?te1gzj1!4 zsAF)?@GZ0-BVZ_eFG&7RGkpj+?|mql#>zvGZaLs9n9vl-Xc^IBzu2w#y!`f+@W=7J z68B`^Kq4?gF!yEuAUlJhIFcJl>DehEe}go_j~$ZvCRePl)U(0O_#0ijWqGp~HEd}@ z?%k%->g850h@T}UkN&<&Kk~Uw6Hs8T`!RlDC!3ioI;WF2E|ne^5R|a#c6&n#_(zdl zoWJ&Bn0>4$B!7{raZ;LD`ra>QZp8PgyV07ueO1ZT++t`1@0!T zB;yl1tjzGkw*S(3QI?b`1#xzPV`IbkR+cLI$!B4v zD4io93U}DkDU1%x>zw6O4LYY^;&3KR-ju%?Ll#kOq1 zC2JLDx4f{pKpde8{yw;{7~E#B>D}R*6ZD#h<2qdOcRK=u7fz)3lG{qd3l())+U_Bh z`0APk0*1~fR8)+yS6hN!Cwoua+D|yrv_8}iG!lhO@DL~EMJ`>thfxLAmh!xq^c5lv z?m82TEF_#!IHJ%0@jw~Thd|v6l<(5lsly$5{TpQ(FejkARMb@{6 zYnaeMRW;;$7yNLaDk4twECGi;VeM)pi~L;ur)2IzXlRqpG^yS+MeIa+qkGhyTl4aH zFvrB-u|<31_Goe{yV;WTmf3UK7ag8B7pvBWid&dkuYVEMQQ#1Y;J3~J%0ONkeq5aM ztyWZ&BJAu$XEuI<2)~HWKkpWF5C&`QF`?VonV3*FH7)K4f5MVa!csoFKqb)TOw@H*coQ~nt!zDx%9KBdy z0fv5iayBu}{YSAp81Qi@zkF|Z8^HfnA`~2C5(b|#8s%lPL$5X zemkScb^%VwH*S%XhtyvpgtLWAh#DM>|4P(oU@b@I1qps-d7UMq0$K2D*ow)JrV+Yy zjvC$7yrY&IFDeH1(zk1fcx+4zLaGef(^O1fr93&|TN6@sx%;gP*G*d);XA6(%zlJs zk*-vZnp7d&NN}Y5@=R-u$z7Jk>UQyU+;jFMpLviIc0zCA(oC+IBIyL)dt9!M{=6p4 z@A49O#(PCn(-C?K3XR6FbKXky>sjpv%HGK~CmHidz~oQeUR&1xU8%V4_B`asvuq?w z%{Zp$UnNXr&WWMLrAy!;WTV5^h_fNoOmZ#D$UM8SD{$Uj(^pRP*RKq!t2;7(1@`l& zqnAP~?jNgrkJqo*+0{&Ob2G4w&-Yph@fgcuv5CrxEK==s{}+)HcG0)F8NKj3Q&;o) zR4v>uQ%m0lzZZT*Vo&`BLVOzitu3c2Z?3C@8Gfs%!z^+un37PR0R=2Rdy1P4XiGe6 z5!A~YhR{iHh~crr_&RGfd-kc|rr@jIr?fL_(>fuup2P((_q?n3U=VSB$w8j>sC&x9 z8}pQ13?8+$``77CtOk$QALtjD67CkBvSpzU5etS4e#zd4H5VKFMpQ_R9>ktmHvRR= z;$#~O0LaMZ&9)u=rUR@b&n%G>cyM8-r|-0`{|aViMVDjg=e#h<7eQG|Ne9~QoxmkU z*{0GDC(*}E$h-6yDEU!1HX|#JDV22wVJLsndeXCM->i={%o_#w$^3bGT$tot2gcWD zt_qOpO9abYsDp@2@)|i`O(=vTzAjFZhD=I(LS1Xp6*Y`9i~6~EPo`*c&1NU5loNuz zNAoT``<%{;XWyGKRsBU_8T8RiK?J)-%e(x#SbB#MJ0t|P(5k?QkKohv8aPR> 
zd6FM!-y2Ic9Xw?2v`mpDEVQtwO%}<5zGv6JQ@!I0#1tSu-4myL<++FHIycv_*lW^$D>~xFS@RW7FrU{jK3A_} zq2&Z;%KevOR<^efIYyF=f7vUiQ%fD<5GszN=x;SPdp_O<$Z*XvUm9d;%h^3t(jtB< zFLq@#9MHSb6m_!iUH-#ffEc!`{Bx|#4z8`mIpphCYN~^*Peylj-2*PlYIB_^sK*n` zdMcm{C8^#Ku+iCCBhK7OC|Cpf8t}rqeIW2{8(-1mHuIiNL_DoHMj0xF(n;Aw#XT6Z3QX}C@NXeR8?m0K-#-UgDE%l)I>vrE$Hk=Rl{-c zYx!!k41DIqDEbmjaQJWZgFMj|4y|vx$?+njvN*$FTJ$RZ&oPg?$_T5nW8=Bmjcwx+ z83odiV;IlTU21g@&Z$&tZck~wuSXZrKsaa>LQvMfrkRBuA;R>!#ycezEa1M6BDoou z3a`Pk2Q9Z2O{YNBwc6pjiuzA8hdPrN`I*_tw?fF0*i<2qCH79myLFOXqjt$$Vbk_wfMI)2SMI_`7{j|75dT(N!%|Tf=A4dI;p#CR zq`yB>IpEO<=KMus`v?`Kd{BFOfo}uJNkoGFh~ePZkgJe=oTl!8U54kS2vzgsMt}8B zUcBKFai$KI=2ulv-uP?48J|@e{@j3v_57#bMoR6OM9<~Ry1<(l94G1(^KRUt5rNWJ zqL7BJx}$0xr?!2~Cxl`P%gmc^jB4C)hUUZCB}clp*gj9(Dui^XHwxKos%ktMD5j8fj_ZMDAPriXyJEDR#pdv-XZHgw0&yO~c)AE00 z3T^jjAX(6w{(f^efam|%