From 5c8136b0970471d7fa19e0420767e6f2a00bc602 Mon Sep 17 00:00:00 2001
From: KP <109694228@qq.com>
Date: Mon, 8 Nov 2021 15:33:38 +0800
Subject: [PATCH] Add speech models. (#1678)
---
docs/docs_ch/get_start/mac_quickstart.md | 2 +-
.../audio/asr/deepspeech2_aishell/README.md | 153 +
.../audio/asr/deepspeech2_aishell/__init__.py | 0
.../assets/conf/augmentation.json | 1 +
.../assets/conf/deepspeech2.yaml | 68 +
.../assets/data/mean_std.json | 1 +
.../deepspeech2_aishell/assets/data/vocab.txt | 4301 ++++++++++++++
.../deepspeech2_aishell/deepspeech_tester.py | 81 +
.../audio/asr/deepspeech2_aishell/module.py | 92 +
.../asr/deepspeech2_aishell/requirements.txt | 12 +
.../asr/deepspeech2_librispeech/README.md | 153 +
.../asr/deepspeech2_librispeech/__init__.py | 0
.../assets/conf/augmentation.json | 1 +
.../assets/conf/deepspeech2.yaml | 68 +
.../deepspeech_tester.py | 81 +
.../asr/deepspeech2_librispeech/module.py | 93 +
.../deepspeech2_librispeech/requirements.txt | 11 +
.../audio/asr/u2_conformer_aishell/README.md | 156 +
.../asr/u2_conformer_aishell/__init__.py | 0
.../assets/conf/augmentation.json | 1 +
.../assets/conf/conformer.yaml | 102 +
.../assets/data/mean_std.json | 1 +
.../assets/data/vocab.txt | 4233 ++++++++++++++
.../audio/asr/u2_conformer_aishell/module.py | 73 +
.../asr/u2_conformer_aishell/requirements.txt | 12 +
.../u2_conformer_tester.py | 80 +
.../asr/u2_conformer_librispeech/README.md | 156 +
.../asr/u2_conformer_librispeech/__init__.py | 0
.../assets/conf/augmentation.json | 1 +
.../assets/conf/conformer.yaml | 116 +
.../assets/data/bpe_unigram_5000.model | Bin 0 -> 325121 bytes
.../assets/data/bpe_unigram_5000.vocab | 5000 ++++++++++++++++
.../assets/data/mean_std.json | 1 +
.../assets/data/vocab.txt | 5002 +++++++++++++++++
.../asr/u2_conformer_librispeech/module.py | 74 +
.../u2_conformer_librispeech/requirements.txt | 12 +
.../u2_conformer_tester.py | 80 +
.../PANNs/cnn10/README.md | 141 +-
.../PANNs/cnn14/README.md | 141 +-
.../audio_classification/PANNs/cnn6/README.md | 137 +-
modules/audio/tts/fastspeech2_baker/README.md | 156 +
.../audio/tts/fastspeech2_baker/__init__.py | 0
.../default.yaml | 104 +
.../phone_id_map.txt | 268 +
.../pwg_baker_ckpt_0.4/pwg_default.yaml | 128 +
modules/audio/tts/fastspeech2_baker/module.py | 125 +
.../tts/fastspeech2_baker/requirements.txt | 1 +
.../audio/tts/fastspeech2_ljspeech/README.md | 156 +
.../tts/fastspeech2_ljspeech/__init__.py | 0
.../default.yaml | 104 +
.../phone_id_map.txt | 80 +
.../pwg_ljspeech_ckpt_0.5/pwg_default.yaml | 119 +
.../audio/tts/fastspeech2_ljspeech/module.py | 130 +
.../tts/fastspeech2_ljspeech/requirements.txt | 1 +
54 files changed, 21827 insertions(+), 182 deletions(-)
create mode 100644 modules/audio/asr/deepspeech2_aishell/README.md
create mode 100644 modules/audio/asr/deepspeech2_aishell/__init__.py
create mode 100644 modules/audio/asr/deepspeech2_aishell/assets/conf/augmentation.json
create mode 100644 modules/audio/asr/deepspeech2_aishell/assets/conf/deepspeech2.yaml
create mode 100644 modules/audio/asr/deepspeech2_aishell/assets/data/mean_std.json
create mode 100644 modules/audio/asr/deepspeech2_aishell/assets/data/vocab.txt
create mode 100644 modules/audio/asr/deepspeech2_aishell/deepspeech_tester.py
create mode 100644 modules/audio/asr/deepspeech2_aishell/module.py
create mode 100644 modules/audio/asr/deepspeech2_aishell/requirements.txt
create mode 100644 modules/audio/asr/deepspeech2_librispeech/README.md
create mode 100644 modules/audio/asr/deepspeech2_librispeech/__init__.py
create mode 100644 modules/audio/asr/deepspeech2_librispeech/assets/conf/augmentation.json
create mode 100644 modules/audio/asr/deepspeech2_librispeech/assets/conf/deepspeech2.yaml
create mode 100644 modules/audio/asr/deepspeech2_librispeech/deepspeech_tester.py
create mode 100644 modules/audio/asr/deepspeech2_librispeech/module.py
create mode 100644 modules/audio/asr/deepspeech2_librispeech/requirements.txt
create mode 100644 modules/audio/asr/u2_conformer_aishell/README.md
create mode 100644 modules/audio/asr/u2_conformer_aishell/__init__.py
create mode 100644 modules/audio/asr/u2_conformer_aishell/assets/conf/augmentation.json
create mode 100644 modules/audio/asr/u2_conformer_aishell/assets/conf/conformer.yaml
create mode 100644 modules/audio/asr/u2_conformer_aishell/assets/data/mean_std.json
create mode 100644 modules/audio/asr/u2_conformer_aishell/assets/data/vocab.txt
create mode 100644 modules/audio/asr/u2_conformer_aishell/module.py
create mode 100644 modules/audio/asr/u2_conformer_aishell/requirements.txt
create mode 100644 modules/audio/asr/u2_conformer_aishell/u2_conformer_tester.py
create mode 100644 modules/audio/asr/u2_conformer_librispeech/README.md
create mode 100644 modules/audio/asr/u2_conformer_librispeech/__init__.py
create mode 100644 modules/audio/asr/u2_conformer_librispeech/assets/conf/augmentation.json
create mode 100644 modules/audio/asr/u2_conformer_librispeech/assets/conf/conformer.yaml
create mode 100644 modules/audio/asr/u2_conformer_librispeech/assets/data/bpe_unigram_5000.model
create mode 100644 modules/audio/asr/u2_conformer_librispeech/assets/data/bpe_unigram_5000.vocab
create mode 100644 modules/audio/asr/u2_conformer_librispeech/assets/data/mean_std.json
create mode 100644 modules/audio/asr/u2_conformer_librispeech/assets/data/vocab.txt
create mode 100644 modules/audio/asr/u2_conformer_librispeech/module.py
create mode 100644 modules/audio/asr/u2_conformer_librispeech/requirements.txt
create mode 100644 modules/audio/asr/u2_conformer_librispeech/u2_conformer_tester.py
create mode 100644 modules/audio/tts/fastspeech2_baker/README.md
create mode 100644 modules/audio/tts/fastspeech2_baker/__init__.py
create mode 100644 modules/audio/tts/fastspeech2_baker/assets/fastspeech2_nosil_baker_ckpt_0.4/default.yaml
create mode 100644 modules/audio/tts/fastspeech2_baker/assets/fastspeech2_nosil_baker_ckpt_0.4/phone_id_map.txt
create mode 100644 modules/audio/tts/fastspeech2_baker/assets/pwg_baker_ckpt_0.4/pwg_default.yaml
create mode 100644 modules/audio/tts/fastspeech2_baker/module.py
create mode 100644 modules/audio/tts/fastspeech2_baker/requirements.txt
create mode 100644 modules/audio/tts/fastspeech2_ljspeech/README.md
create mode 100644 modules/audio/tts/fastspeech2_ljspeech/__init__.py
create mode 100644 modules/audio/tts/fastspeech2_ljspeech/assets/fastspeech2_nosil_ljspeech_ckpt_0.5/default.yaml
create mode 100644 modules/audio/tts/fastspeech2_ljspeech/assets/fastspeech2_nosil_ljspeech_ckpt_0.5/phone_id_map.txt
create mode 100644 modules/audio/tts/fastspeech2_ljspeech/assets/pwg_ljspeech_ckpt_0.5/pwg_default.yaml
create mode 100644 modules/audio/tts/fastspeech2_ljspeech/module.py
create mode 100644 modules/audio/tts/fastspeech2_ljspeech/requirements.txt
diff --git a/docs/docs_ch/get_start/mac_quickstart.md b/docs/docs_ch/get_start/mac_quickstart.md
index ba765fdf..f49160d1 100755
--- a/docs/docs_ch/get_start/mac_quickstart.md
+++ b/docs/docs_ch/get_start/mac_quickstart.md
@@ -192,7 +192,7 @@
-
## 第6步:飞桨预训练模型探索之旅
-- 恭喜你,到这里PaddleHub在windows环境下的安装和入门案例就全部完成了,快快开启你更多的深度学习模型探索之旅吧。[【更多模型探索,跳转飞桨官网】](https://www.paddlepaddle.org.cn/hublist)
+- 恭喜你,到这里PaddleHub在mac环境下的安装和入门案例就全部完成了,快快开启你更多的深度学习模型探索之旅吧。[【更多模型探索,跳转飞桨官网】](https://www.paddlepaddle.org.cn/hublist)
diff --git a/modules/audio/asr/deepspeech2_aishell/README.md b/modules/audio/asr/deepspeech2_aishell/README.md
new file mode 100644
index 00000000..a75ba672
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_aishell/README.md
@@ -0,0 +1,153 @@
+# deepspeech2_aishell
+
+|模型名称|deepspeech2_aishell|
+| :--- | :---: |
+|类别|语音-语音识别|
+|网络|DeepSpeech2|
+|数据集|AISHELL-1|
+|是否支持Fine-tuning|否|
+|模型大小|306MB|
+|最新更新日期|2021-10-20|
+|数据指标|中文CER 0.065|
+
+## 一、模型基本信息
+
+### 模型介绍
+
+DeepSpeech2是百度于2015年提出的适用于英文和中文的end-to-end语音识别模型。deepspeech2_aishell使用了DeepSpeech2离线模型的结构,模型主要由2层卷积网络和3层GRU组成,并在中文普通话开源语音数据集[AISHELL-1](http://www.aishelltech.com/kysjcp)上进行了预训练,该模型在其测试集上的CER指标为0.065。
+
+
+
+
+
+
+更多详情请参考[Deep Speech 2: End-to-End Speech Recognition in English and Mandarin](https://arxiv.org/abs/1512.02595)
+
+## 二、安装
+
+- ### 1、系统依赖
+
+ - libsndfile, swig >= 3.0
+ - Linux
+ ```shell
+ $ sudo apt-get install libsndfile swig
+ or
+ $ sudo yum install libsndfile swig
+ ```
+  - macOS
+ ```
+ $ brew install libsndfile swig
+ ```
+
+- ### 2、环境依赖
+ - swig_decoder:
+ ```
+ git clone https://github.com/PaddlePaddle/DeepSpeech.git && cd DeepSpeech && git reset --hard b53171694e7b87abe7ea96870b2f4d8e0e2b1485 && cd deepspeech/decoders/ctcdecoder/swig && sh setup.sh
+ ```
+
+ - paddlepaddle >= 2.1.0
+
+ - paddlehub >= 2.1.0 | [如何安装PaddleHub](../../../../docs/docs_ch/get_start/installation.rst)
+
+- ### 3、安装
+
+ - ```shell
+ $ hub install deepspeech2_aishell
+ ```
+ - 如您安装时遇到问题,可参考:[零基础windows安装](../../../../docs/docs_ch/get_start/windows_quickstart.md)
+ | [零基础Linux安装](../../../../docs/docs_ch/get_start/linux_quickstart.md) | [零基础MacOS安装](../../../../docs/docs_ch/get_start/mac_quickstart.md)
+
+
+## 三、模型API预测
+
+- ### 1、预测代码示例
+
+ ```python
+ import paddlehub as hub
+
+ # 采样率为16k,格式为wav的中文语音音频
+ wav_file = '/PATH/TO/AUDIO'
+
+ model = hub.Module(
+ name='deepspeech2_aishell',
+ version='1.0.0')
+ text = model.speech_recognize(wav_file)
+
+ print(text)
+ ```
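+
+  如果待识别音频的采样率不是16k,可以先将其重采样为16k再送入模型。下面是一个参考写法(仅作示意,假设已安装Python包`resampy`与`SoundFile`,文件路径均为示例路径):
+
+  ```python
+  import soundfile as sf
+  import resampy
+
+  # 读取原始音频;若为多声道则取均值转为单声道,若采样率不是16000Hz则重采样
+  data, sr = sf.read('/PATH/TO/RAW_AUDIO')
+  if data.ndim > 1:
+      data = data.mean(axis=1)
+  if sr != 16000:
+      data = resampy.resample(data, sr, 16000)
+  # 另存为16k采样率的wav文件(即上文示例中的wav_file),再进行识别
+  sf.write('/PATH/TO/AUDIO', data, 16000, format='WAV')
+  ```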
+
+- ### 2、API
+ - ```python
+ def check_audio(audio_file)
+ ```
+    - 检查输入的音频文件是否满足模型要求(采样率为16000Hz的wav格式文件)。
+
+ - **参数**
+
+ - `audio_file`:本地音频文件(*.wav)的路径,如`/path/to/input.wav`
+
+ - ```python
+ def speech_recognize(
+ audio_file,
+ device='cpu',
+ )
+ ```
+ - 将输入的音频识别成文字
+
+ - **参数**
+
+ - `audio_file`:本地音频文件(*.wav)的路径,如`/path/to/input.wav`
+    - `device`:预测时使用的设备,默认为`cpu`,如需使用gpu预测,请设置为`gpu`(用法可参考下方示例)。
+
+ - **返回**
+
+ - `text`:str类型,返回输入音频的识别文字结果。
+
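+  - 下面给出`device`参数的一个使用示意(仅供参考,假设本机已安装GPU版的PaddlePaddle且GPU可正常使用;`model`与`wav_file`为上文预测代码示例中已创建的对象):
+
+  - ```python
+    # 将device设置为'gpu'即可使用GPU进行预测
+    text = model.speech_recognize(wav_file, device='gpu')
+    print(text)
+    ```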
+
+## 四、服务部署
+
+- PaddleHub Serving可以部署一个在线的语音识别服务。
+
+- ### 第一步:启动PaddleHub Serving
+
+ - ```shell
+ $ hub serving start -m deepspeech2_aishell
+ ```
+
+ - 这样就完成了一个语音识别服务化API的部署,默认端口号为8866。
+
+  - **NOTE:** 如使用GPU预测,则需要在启动服务之前设置CUDA_VISIBLE_DEVICES环境变量,否则无需设置。
+
+- ### 第二步:发送预测请求
+
+  - 配置好服务端后,使用以下几行代码即可向服务端发送预测请求并获取预测结果
+
+ - ```python
+ import requests
+ import json
+
+ # 需要识别的音频的存放路径,确保部署服务的机器可访问
+ file = '/path/to/input.wav'
+
+    # 以key-value的方式指定传入预测方法的参数,此例中为"audio_file"
+ data = {"audio_file": file}
+
+ # 发送post请求,content-type类型应指定json方式,url中的ip地址需改为对应机器的ip
+ url = "http://127.0.0.1:8866/predict/deepspeech2_aishell"
+
+ # 指定post请求的headers为application/json方式
+ headers = {"Content-Type": "application/json"}
+
+ r = requests.post(url=url, headers=headers, data=json.dumps(data))
+ print(r.json())
+ ```
+
+## 五、更新历史
+
+* 1.0.0
+
+ 初始发布
+
+ ```shell
+ $ hub install deepspeech2_aishell
+ ```
diff --git a/modules/audio/asr/deepspeech2_aishell/__init__.py b/modules/audio/asr/deepspeech2_aishell/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/modules/audio/asr/deepspeech2_aishell/assets/conf/augmentation.json b/modules/audio/asr/deepspeech2_aishell/assets/conf/augmentation.json
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_aishell/assets/conf/augmentation.json
@@ -0,0 +1 @@
+{}
diff --git a/modules/audio/asr/deepspeech2_aishell/assets/conf/deepspeech2.yaml b/modules/audio/asr/deepspeech2_aishell/assets/conf/deepspeech2.yaml
new file mode 100644
index 00000000..ecbe9123
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_aishell/assets/conf/deepspeech2.yaml
@@ -0,0 +1,68 @@
+# https://yaml.org/type/float.html
+data:
+ train_manifest: data/manifest.train
+ dev_manifest: data/manifest.dev
+ test_manifest: data/manifest.test
+ min_input_len: 0.0
+ max_input_len: 27.0 # second
+ min_output_len: 0.0
+ max_output_len: .inf
+ min_output_input_ratio: 0.00
+ max_output_input_ratio: .inf
+
+collator:
+ batch_size: 64 # one gpu
+ mean_std_filepath: data/mean_std.json
+ unit_type: char
+ vocab_filepath: data/vocab.txt
+ augmentation_config: conf/augmentation.json
+ random_seed: 0
+ spm_model_prefix:
+ spectrum_type: linear
+ feat_dim:
+ delta_delta: False
+ stride_ms: 10.0
+ window_ms: 20.0
+ n_fft: None
+ max_freq: None
+ target_sample_rate: 16000
+ use_dB_normalization: True
+ target_dB: -20
+ dither: 1.0
+ keep_transcription_text: False
+ sortagrad: True
+ shuffle_method: batch_shuffle
+ num_workers: 2
+
+model:
+ num_conv_layers: 2
+ num_rnn_layers: 3
+ rnn_layer_size: 1024
+ use_gru: True
+ share_rnn_weights: False
+ blank_id: 0
+ ctc_grad_norm_type: instance
+
+training:
+ n_epoch: 80
+ accum_grad: 1
+ lr: 2e-3
+ lr_decay: 0.83
+ weight_decay: 1e-06
+ global_grad_clip: 3.0
+ log_interval: 100
+ checkpoint:
+ kbest_n: 50
+ latest_n: 5
+
+decoding:
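+  # Main parameters of the ctc_beam_search decoder:
+  # alpha / beta: weights of the external language model and of word insertion in the scorer.
+  # beam_size: beam width; cutoff_prob / cutoff_top_n: pruning of candidate characters.
+  # num_proc_bsearch: number of parallel processes used for beam search.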
+ batch_size: 128
+ error_rate_type: cer
+ decoding_method: ctc_beam_search
+ lang_model_path: data/lm/zh_giga.no_cna_cmn.prune01244.klm
+ alpha: 1.9
+ beta: 5.0
+ beam_size: 300
+ cutoff_prob: 0.99
+ cutoff_top_n: 40
+ num_proc_bsearch: 10
diff --git a/modules/audio/asr/deepspeech2_aishell/assets/data/mean_std.json b/modules/audio/asr/deepspeech2_aishell/assets/data/mean_std.json
new file mode 100644
index 00000000..6770184f
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_aishell/assets/data/mean_std.json
@@ -0,0 +1 @@
+{"mean_stat": [-13505966.65209869, -12778154.889588555, -13487728.30750011, -12897344.94123812, -12472281.490772562, -12631566.475106332, -13391790.349327326, -14045382.570026815, -14159320.465516506, -14273422.438486755, -14639805.161347123, -15145380.07768254, -15612893.133258691, -15938542.05012206, -16115293.502621327, -16188225.698757892, -16317206.280373082, -16500598.476283036, -16671564.297937019, -16804599.860397574, -16916423.142814968, -17011785.59439087, -17075067.62262626, -17154580.16740178, -17257812.961825978, -17355683.228599995, -17441455.258318607, -17473199.925130684, -17488835.5763828, -17491232.15414511, -17485000.29006962, -17499471.646940477, -17551398.97122984, -17641732.10682403, -17757209.077974595, -17843801.500521667, -17935647.58641936, -18020362.347413756, -18117633.806080323, -18232427.58935143, -18316024.35215119, -18378789.145393644, -18421147.25807373, -18445805.18294822, -18460946.27810118, -18467914.04034822, -18469404.319909714, -18469606.974339806, -18470754.294192698, -18458320.91921723, -18441354.111811973, -18428332.216321833, -18422281.413955193, -18433421.585668042, -18460521.025954794, -18494800.856363494, -18539532.288011573, -18583823.79899225, -18614474.56256926, -18646872.180154275, -18661137.85367877, -18673590.719379324, -18702967.62040798, -18736434.748098046, -18777912.13098326, -18794675.486509323, -18837225.856196072, -18874872.796128694, -18927340.44407057, -18994929.076545004, -19060701.164406348, -19118006.18996682, -19175792.05766062, -19230755.996405277, -19270174.594219487, -19334788.35904946, -19401456.988906194, -19484580.095938426, -19582040.4715673, -19696598.86662636, -19810401.513227757, -19931755.37941177, -20021867.47620737, -20082298.984455004, -20114708.336475413, -20143802.72793865, -20146821.988139726, -20165613.317683898, -20189938.602584295, -20220059.08673595, -20242848.528134122, -20250859.979931064, -20267382.93048284, -20267964.544716164, -20261372.89563879, -20252878.74023849, -20247550.771284755, -20231778.31093504, -20231376.103159923, -20236926.52293088, -20248068.41488535, -20255076.901920393, -20262924.167151034, -20263926.583205637, -20263790.273742784, -20268560.080967404, -20268997.150654405, -20269810.816284582, -20267771.864327505, -20256472.703380838, -20241790.559690386, -20241865.794732895, -20244924.716114976, -20249736.631184842, -20257257.816903576, -20268027.212145977, -20277399.95533857, -20281840.8112546, -20270512.52002465, -20255938.63066214, -20242421.685443826, -20241986.654626504, -20237836.034444932, -20231458.31132546, -20218092.819713395, -20204994.19634715, -20198880.142133974, -20197376.49014031, -20198117.60450857, -20197443.473929476, -20191142.03632657, -20174428.452719454, -20159204.32090646, -20137981.294740904, -20124944.79897834, -20112774.604521394, -20109389.248600915, -20115248.61302806, -20117743.853294585, -20123076.93515528, -20132224.95454374, -20147099.26793121, -20169581.367630124, -20190957.518733896, -20215197.057997894, -20242033.589256056, -20282032.217160087, -20316778.653784916, -20360354.215504933, -20425089.908502825, -20534553.0465662, -20737928.349233944, -21091705.14104186, -21646013.197923105, -22403182.076235127, -23313516.63322832, -24244679.879594248, -25027534.00417361, -25502455.708560493, -25665136.744125813, -26602318.88405537], "var_stat": [209924783.1093623, 185218712.4577822, 209991180.89829063, 196198511.40798286, 186098265.7827955, 191905798.58923203, 214281935.29191792, 235042114.51049897, 240179456.24597096, 244657890.3963041, 
256099586.32657292, 271849135.9872555, 287174069.13527167, 298171137.28863454, 304112589.91933817, 306553976.2206335, 310813670.30674237, 316958840.3099824, 322651440.3639528, 327213725.196089, 331252123.26114285, 334856188.3081607, 337217897.6545214, 340385427.82557064, 344400488.5633641, 348086880.08086526, 351349070.53148264, 352648076.18415344, 353409462.33704513, 353598061.4967693, 353405322.74993587, 353917215.6834277, 355784796.898883, 359222461.3224974, 363671441.7428676, 366908651.69908494, 370304677.0615045, 373477194.79721, 377174088.9808273, 381531608.6574547, 384703574.426059, 387104126.9474883, 388723211.11308575, 389687817.27351815, 390351031.4418706, 390659006.3690262, 390704649.89417714, 390702370.1919126, 390731862.59274197, 390216004.4126628, 389516083.054853, 389017745.636457, 388788872.1127645, 389269311.2239042, 390401819.5968815, 391842612.97859454, 393708801.05223197, 395569598.4694, 396868892.67152405, 398210915.02133286, 398743299.4753882, 399330344.88417244, 400565940.1325846, 401901693.4656316, 403513855.43933284, 404103248.96526104, 405986814.274556, 407507145.4104169, 409598353.6517908, 412453848.0248063, 415138273.0558441, 417479272.96907294, 419785633.3276395, 422003065.1681787, 423610264.8868346, 426260552.96545905, 428973536.3620236, 432368654.40899384, 436359561.5468266, 441119512.777527, 445884989.25794005, 451037422.65838546, 454872292.24179226, 457497136.8780015, 458904066.0675219, 460155836.4432799, 460272943.80738074, 461087498.6828549, 462144907.7850926, 463483598.81228757, 464530694.44478536, 464971538.85301507, 465771535.6019992, 465936698.93801653, 465741012.7287712, 465448625.0011534, 465296363.8603534, 464718299.2207512, 464720391.25778216, 465016640.5248736, 465564374.0248998, 465982788.8695927, 466425068.01245564, 466595649.90489674, 466707658.8296169, 467015570.78026086, 467099213.08769494, 467201640.15951264, 467163862.3709329, 466727597.56313753, 466174871.71213347, 466255498.45248336, 466439062.65458614, 466693130.99620277, 467068587.1422199, 467536070.1402474, 467955819.1549621, 468187227.1069643, 467742976.2778335, 467159585.250493, 466592359.52916145, 466583195.8099961, 466424348.9572719, 466155323.6074322, 465569620.1801811, 465021642.5158305, 464757658.6383867, 464713882.60103834, 464724239.2941314, 464679163.728191, 464407007.8705965, 463660736.0136739, 463001339.2385198, 462077058.47595775, 461505071.67199403, 460946277.95973784, 460816158.9197017, 461123589.268546, 461232998.1572812, 461445601.0442877, 461803238.28569543, 462436966.22005004, 463391404.7434971, 464299608.85523456, 465319405.3931429, 466432961.70208246, 468168080.3331244, 469640808.6809098, 471501539.22440934, 474301795.1694898, 479155711.93441755, 488314271.10405815, 504537056.23994666, 530509400.5201074, 566892036.4437443, 611792826.0442055, 658913502.9004005, 699716882.9169292, 725237302.8248898, 734259159.9571886, 789267050.8287783], "frame_num": 899422}
diff --git a/modules/audio/asr/deepspeech2_aishell/assets/data/vocab.txt b/modules/audio/asr/deepspeech2_aishell/assets/data/vocab.txt
new file mode 100644
index 00000000..e272b576
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_aishell/assets/data/vocab.txt
@@ -0,0 +1,4301 @@
+
+
+一
+丁
+七
+万
+丈
+三
+上
+下
+不
+与
+丐
+丑
+专
+且
+世
+丘
+丙
+业
+丛
+东
+丝
+丞
+丢
+两
+严
+丧
+个
+丫
+中
+丰
+串
+临
+丸
+丹
+为
+主
+丽
+举
+乃
+久
+么
+义
+之
+乌
+乍
+乎
+乏
+乐
+乒
+乓
+乔
+乖
+乘
+乙
+九
+乞
+也
+习
+乡
+书
+买
+乱
+乳
+乾
+了
+予
+争
+事
+二
+于
+亏
+云
+互
+五
+井
+亚
+些
+亟
+亡
+亢
+交
+亥
+亦
+产
+亨
+亩
+享
+京
+亭
+亮
+亲
+亳
+亵
+人
+亿
+什
+仁
+仄
+仅
+仇
+今
+介
+仍
+从
+仑
+仓
+仔
+仕
+他
+仗
+付
+仙
+仡
+代
+令
+以
+仨
+仪
+们
+仰
+仲
+件
+价
+任
+份
+仿
+企
+伉
+伊
+伍
+伎
+伏
+伐
+休
+众
+优
+伙
+会
+伞
+伟
+传
+伢
+伤
+伦
+伪
+伯
+估
+伴
+伶
+伸
+伺
+似
+伽
+佃
+但
+位
+低
+住
+佐
+佑
+体
+何
+佘
+余
+佛
+作
+佟
+你
+佣
+佩
+佬
+佳
+佶
+佼
+使
+侃
+侄
+侈
+例
+侍
+侑
+侗
+供
+依
+侠
+侣
+侥
+侦
+侧
+侨
+侬
+侮
+侯
+侵
+便
+促
+俄
+俊
+俏
+俐
+俗
+俘
+俚
+保
+俞
+信
+俨
+俩
+俪
+俭
+修
+俯
+俱
+俸
+俺
+俾
+倍
+倒
+倘
+候
+倚
+倜
+借
+倡
+倦
+倩
+倪
+债
+值
+倾
+假
+偏
+做
+停
+健
+偶
+偷
+偿
+傅
+傍
+傥
+储
+催
+傲
+傻
+像
+僚
+僧
+僮
+僵
+僻
+儒
+儿
+兀
+允
+元
+兄
+充
+兆
+先
+光
+克
+免
+兑
+兔
+兖
+党
+兜
+兢
+入
+全
+八
+公
+六
+兰
+共
+关
+兴
+兵
+其
+具
+典
+兹
+养
+兼
+兽
+冀
+内
+冈
+冉
+册
+再
+冒
+冕
+冗
+写
+军
+农
+冠
+冤
+冥
+冬
+冯
+冰
+冲
+决
+况
+冶
+冷
+冻
+净
+凄
+准
+凇
+凉
+凋
+凌
+减
+凑
+凝
+几
+凡
+凤
+凭
+凯
+凰
+凳
+凶
+凸
+凹
+出
+击
+函
+凿
+刀
+刁
+刃
+分
+切
+刊
+刑
+划
+列
+刘
+则
+刚
+创
+初
+删
+判
+刨
+利
+别
+刮
+到
+制
+刷
+券
+刹
+刺
+刻
+剁
+剂
+剃
+削
+前
+剐
+剑
+剔
+剖
+剥
+剧
+剩
+剪
+副
+割
+剽
+剿
+劈
+力
+劝
+办
+功
+加
+务
+劣
+动
+助
+努
+劫
+励
+劲
+劳
+劵
+势
+勃
+勇
+勉
+勋
+勒
+勘
+募
+勤
+勺
+勾
+勿
+匀
+包
+匆
+匈
+匕
+化
+北
+匙
+匝
+匠
+匡
+匣
+匪
+匮
+匹
+区
+医
+匾
+匿
+十
+千
+升
+午
+卉
+半
+华
+协
+卑
+卒
+卓
+单
+卖
+南
+博
+卜
+卞
+占
+卡
+卢
+卤
+卦
+卧
+卫
+卯
+印
+危
+卲
+即
+却
+卵
+卷
+卸
+卿
+厂
+厄
+厅
+历
+厉
+压
+厌
+厕
+厘
+厚
+原
+厢
+厥
+厦
+厨
+厩
+厮
+去
+县
+参
+又
+叉
+及
+友
+双
+反
+发
+叔
+取
+受
+变
+叙
+叛
+叠
+口
+古
+句
+另
+叨
+叩
+只
+叫
+召
+叭
+叮
+可
+台
+叱
+史
+右
+叵
+叶
+号
+司
+叹
+叼
+吁
+吃
+各
+吆
+合
+吉
+吊
+同
+名
+后
+吏
+吐
+向
+吓
+吕
+吗
+君
+吝
+吞
+吟
+否
+吧
+吨
+吩
+含
+听
+吭
+启
+吴
+吵
+吸
+吹
+吻
+吼
+吾
+吿
+呀
+呃
+呆
+呈
+告
+呐
+呕
+呗
+员
+呛
+呜
+呢
+呦
+周
+呲
+味
+呵
+呼
+命
+咀
+咄
+咋
+和
+咎
+咏
+咐
+咒
+咔
+咕
+咖
+咙
+咚
+咣
+咤
+咧
+咨
+咪
+咫
+咬
+咯
+咱
+咳
+咸
+咽
+哀
+品
+哄
+哆
+哇
+哈
+哉
+响
+哎
+哑
+哒
+哗
+哟
+哥
+哦
+哨
+哪
+哭
+哲
+哺
+哼
+哽
+唁
+唇
+唉
+唏
+唐
+唠
+唤
+唬
+售
+唯
+唱
+唾
+啃
+商
+啊
+啕
+啡
+啤
+啥
+啦
+啧
+啪
+啬
+啰
+啵
+啶
+啸
+啼
+喀
+喂
+善
+喆
+喇
+喉
+喊
+喔
+喘
+喜
+喝
+喧
+喱
+喵
+喷
+喻
+喽
+嗅
+嗑
+嗒
+嗓
+嗡
+嗣
+嗤
+嗦
+嗨
+嗬
+嗯
+嗲
+嗷
+嗽
+嘀
+嘈
+嘉
+嘎
+嘘
+嘛
+嘟
+嘭
+嘱
+嘲
+嘴
+嘶
+嘻
+噎
+噘
+器
+噩
+噪
+噬
+噱
+噼
+嚎
+嚏
+嚓
+嚣
+嚷
+嚼
+囊
+囚
+四
+回
+因
+团
+囤
+囧
+园
+困
+围
+固
+国
+图
+圃
+圆
+圈
+土
+圣
+在
+圩
+圪
+圭
+地
+圳
+场
+圾
+址
+坂
+均
+坊
+坍
+坎
+坏
+坐
+坑
+块
+坚
+坛
+坝
+坞
+坟
+坠
+坡
+坤
+坦
+坪
+坯
+坷
+垂
+垃
+垄
+垅
+型
+垌
+垒
+垛
+垡
+垢
+垣
+垤
+垦
+垫
+垮
+埃
+埋
+城
+埔
+埜
+域
+埠
+培
+基
+堂
+堆
+堕
+堡
+堤
+堪
+堰
+堵
+塌
+塍
+塑
+塔
+塘
+塞
+填
+塬
+塾
+境
+墅
+墓
+墙
+增
+墟
+墨
+墩
+壁
+壑
+壕
+壤
+士
+壮
+声
+壳
+壶
+壹
+处
+备
+复
+夏
+夕
+外
+夙
+多
+夜
+够
+大
+天
+太
+夫
+夭
+央
+夯
+失
+头
+夷
+夸
+夹
+夺
+奂
+奇
+奈
+奉
+奋
+奎
+奏
+契
+奔
+奕
+奖
+套
+奘
+奚
+奠
+奢
+奥
+女
+奴
+奶
+奸
+她
+好
+如
+妃
+妄
+妆
+妇
+妈
+妊
+妍
+妒
+妖
+妙
+妞
+妤
+妥
+妧
+妨
+妩
+妮
+妯
+妹
+妻
+姆
+姊
+始
+姐
+姑
+姓
+委
+姗
+姚
+姜
+姝
+姣
+姥
+姨
+姬
+姻
+姿
+威
+娃
+娄
+娅
+娇
+娌
+娘
+娜
+娟
+娠
+娥
+娩
+娱
+娴
+娶
+娼
+婀
+婆
+婉
+婕
+婚
+婧
+婪
+婴
+婵
+婶
+婷
+婿
+媒
+媚
+媛
+媞
+媲
+媳
+嫁
+嫂
+嫉
+嫌
+嫔
+嫖
+嫚
+嫡
+嫣
+嫦
+嫩
+嬉
+嬛
+嬷
+孀
+子
+孔
+孕
+字
+存
+孙
+孚
+孜
+孝
+孟
+孢
+季
+孤
+学
+孩
+孪
+孰
+孱
+孵
+孺
+宁
+它
+宅
+宇
+守
+安
+宋
+完
+宏
+宓
+宕
+宗
+官
+宙
+定
+宛
+宜
+宝
+实
+宠
+审
+客
+宣
+室
+宦
+宪
+宫
+宰
+害
+宴
+宵
+家
+宸
+容
+宽
+宾
+宿
+寂
+寄
+寅
+密
+寇
+富
+寐
+寒
+寓
+寝
+寞
+察
+寡
+寥
+寨
+寮
+寰
+寸
+对
+寺
+寻
+导
+寿
+封
+射
+将
+尊
+小
+少
+尔
+尖
+尘
+尚
+尝
+尤
+尧
+尬
+就
+尴
+尸
+尹
+尺
+尼
+尽
+尾
+尿
+局
+屁
+层
+居
+屈
+届
+屋
+屌
+屎
+屏
+屑
+展
+属
+屠
+屡
+履
+屯
+山
+屹
+屿
+岁
+岂
+岌
+岐
+岔
+岖
+岗
+岚
+岛
+岩
+岬
+岭
+岱
+岳
+岷
+岸
+峁
+峙
+峡
+峥
+峨
+峪
+峭
+峰
+峻
+崂
+崃
+崇
+崎
+崔
+崖
+崛
+崧
+崩
+崭
+崴
+嵋
+嵌
+嵘
+嵛
+嵩
+嶝
+巅
+巍
+川
+州
+巡
+巢
+工
+左
+巧
+巨
+巩
+巫
+差
+己
+已
+巴
+巷
+巾
+巿
+币
+市
+布
+帅
+帆
+师
+希
+帐
+帕
+帖
+帘
+帚
+帜
+帝
+带
+席
+帮
+帷
+常
+帼
+帽
+幂
+幄
+幅
+幌
+幕
+幢
+干
+平
+年
+并
+幸
+幺
+幻
+幼
+幽
+广
+庄
+庆
+庇
+床
+序
+庐
+库
+应
+底
+店
+庙
+庚
+府
+庞
+废
+度
+座
+庭
+庵
+庶
+康
+庸
+庾
+廉
+廊
+廓
+廖
+延
+廷
+建
+开
+异
+弃
+弄
+弈
+弊
+式
+弑
+弓
+引
+弗
+弘
+弛
+弟
+张
+弥
+弦
+弧
+弩
+弯
+弱
+弹
+强
+归
+当
+录
+彝
+形
+彤
+彦
+彩
+彪
+彬
+彭
+彰
+影
+彷
+役
+彻
+彼
+彿
+往
+征
+径
+待
+徇
+很
+徉
+徊
+律
+徐
+徒
+得
+徘
+徙
+御
+循
+微
+德
+徽
+心
+必
+忆
+忌
+忍
+忐
+忑
+志
+忘
+忙
+忠
+忧
+忪
+快
+忱
+念
+忻
+忽
+怀
+态
+怂
+怅
+怎
+怒
+怕
+怖
+怜
+思
+怠
+怡
+急
+怦
+性
+怨
+怪
+怯
+怵
+总
+恋
+恍
+恐
+恒
+恙
+恢
+恣
+恤
+恨
+恩
+恪
+恬
+恭
+息
+恰
+恳
+恶
+恸
+恺
+恼
+恿
+悄
+悉
+悌
+悍
+悔
+悖
+悚
+悟
+悠
+患
+悦
+您
+悬
+悯
+悲
+悴
+悸
+悼
+情
+惆
+惊
+惋
+惑
+惕
+惚
+惜
+惟
+惠
+惦
+惧
+惨
+惩
+惫
+惬
+惮
+惯
+惰
+想
+惶
+惹
+惺
+愁
+愈
+愉
+意
+愕
+愚
+感
+愤
+愧
+愿
+慈
+慌
+慎
+慑
+慕
+慢
+慧
+慨
+慰
+慷
+憋
+憔
+憧
+憨
+憩
+憬
+憷
+憾
+懂
+懈
+懊
+懋
+懒
+懵
+懿
+戈
+戎
+戏
+成
+我
+戒
+或
+战
+戚
+戛
+戟
+截
+戬
+戮
+戳
+戴
+户
+房
+所
+扁
+扇
+扉
+手
+才
+扎
+扑
+扒
+打
+扔
+托
+扛
+扣
+执
+扩
+扫
+扬
+扭
+扮
+扯
+扰
+扳
+扶
+批
+扼
+找
+承
+技
+抄
+抉
+把
+抑
+抒
+抓
+投
+抖
+抗
+折
+抚
+抛
+抠
+抡
+抢
+护
+报
+抨
+披
+抬
+抱
+抵
+抹
+押
+抽
+抿
+拄
+担
+拆
+拇
+拈
+拉
+拌
+拍
+拎
+拐
+拒
+拓
+拔
+拖
+拗
+拘
+拙
+招
+拜
+拟
+拢
+拣
+拥
+拦
+拧
+拨
+择
+括
+拭
+拮
+拯
+拱
+拳
+拴
+拷
+拼
+拽
+拾
+拿
+持
+挂
+指
+按
+挎
+挑
+挖
+挚
+挛
+挝
+挟
+挠
+挡
+挣
+挤
+挥
+挨
+挪
+挫
+振
+挺
+挽
+捂
+捅
+捆
+捉
+捍
+捎
+捏
+捐
+捕
+捞
+损
+捡
+换
+捣
+捧
+据
+捷
+捺
+捻
+掀
+掂
+授
+掉
+掌
+掏
+掐
+排
+掖
+掘
+掠
+探
+掣
+接
+控
+推
+掩
+措
+掬
+掮
+掰
+掳
+掴
+掷
+掺
+揄
+揉
+揍
+描
+提
+插
+握
+揣
+揩
+揪
+揭
+援
+揶
+揽
+搀
+搁
+搂
+搅
+搏
+搜
+搞
+搡
+搪
+搬
+搭
+携
+搽
+摁
+摄
+摆
+摇
+摊
+摒
+摔
+摘
+摧
+摩
+摸
+摹
+撂
+撇
+撑
+撒
+撕
+撞
+撤
+撩
+撬
+播
+撮
+撰
+撵
+撸
+撼
+擂
+擅
+操
+擎
+擒
+擘
+擞
+擦
+攀
+攒
+攥
+支
+收
+改
+攻
+放
+政
+故
+效
+敌
+敏
+救
+敖
+教
+敛
+敝
+敞
+敢
+散
+敦
+敬
+数
+敲
+整
+敷
+文
+斋
+斌
+斐
+斑
+斓
+斗
+料
+斛
+斜
+斟
+斡
+斤
+斥
+斧
+斩
+断
+斯
+新
+方
+施
+旁
+旅
+旋
+族
+旗
+无
+既
+日
+旦
+旧
+旨
+早
+旬
+旭
+旱
+时
+旷
+旺
+昀
+昂
+昆
+昊
+昌
+明
+昏
+易
+昔
+昕
+昙
+星
+映
+春
+昧
+昨
+昭
+是
+昱
+昵
+昼
+显
+晃
+晋
+晏
+晒
+晓
+晔
+晕
+晖
+晗
+晚
+晟
+晤
+晦
+晨
+普
+景
+晰
+晴
+晶
+智
+晾
+暂
+暄
+暇
+暑
+暖
+暗
+暧
+暨
+暮
+暴
+曙
+曝
+曦
+曰
+曲
+更
+曹
+曼
+曾
+替
+最
+月
+有
+朋
+服
+朐
+朔
+朗
+望
+朝
+期
+朦
+木
+未
+末
+本
+札
+术
+朱
+朴
+朵
+机
+朽
+杀
+杂
+权
+杆
+杉
+李
+杏
+材
+村
+杖
+杜
+杞
+束
+杠
+条
+来
+杨
+杭
+杯
+杰
+杳
+松
+板
+极
+构
+枉
+析
+枕
+林
+枚
+果
+枝
+枞
+枢
+枣
+枪
+枫
+枭
+枯
+架
+枷
+柄
+柏
+某
+染
+柔
+柚
+柜
+柞
+柠
+查
+柬
+柯
+柱
+柳
+柴
+柿
+栅
+标
+栈
+栋
+栏
+树
+栓
+栖
+栗
+校
+株
+样
+核
+根
+格
+栽
+栾
+桂
+桃
+框
+案
+桉
+桌
+桎
+桐
+桑
+桓
+桔
+档
+桥
+桦
+桩
+桶
+梁
+梅
+梓
+梗
+梦
+梧
+梨
+梭
+梯
+械
+梳
+梵
+检
+棉
+棋
+棍
+棒
+棕
+棘
+棚
+棠
+森
+棱
+棵
+棺
+椅
+椋
+植
+椎
+椒
+椰
+椿
+楂
+楔
+楚
+楞
+楠
+楣
+楷
+楼
+概
+榄
+榆
+榈
+榉
+榔
+榕
+榜
+榨
+榭
+榴
+榷
+榻
+槌
+槎
+槐
+槛
+槟
+槽
+槿
+樊
+樟
+模
+横
+樱
+橄
+橘
+橙
+橡
+橱
+檀
+檐
+檬
+欠
+次
+欢
+欣
+欧
+欲
+欺
+款
+歆
+歇
+歉
+歌
+止
+正
+此
+步
+武
+歧
+歪
+歹
+死
+殃
+殆
+殉
+殊
+残
+殒
+殓
+殖
+殚
+殡
+殭
+殴
+段
+殷
+殿
+毁
+毂
+毅
+毋
+母
+每
+毒
+毓
+比
+毕
+毗
+毙
+毛
+毫
+毯
+毽
+氏
+民
+氓
+气
+氛
+氟
+氢
+氦
+氧
+氨
+氪
+氮
+氯
+氰
+水
+永
+氾
+汀
+汁
+求
+汇
+汉
+汕
+汗
+汛
+汝
+汞
+江
+池
+污
+汤
+汪
+汰
+汲
+汴
+汶
+汹
+汽
+汾
+沁
+沂
+沃
+沅
+沈
+沉
+沏
+沐
+沓
+沙
+沛
+沟
+没
+沣
+沥
+沦
+沧
+沪
+沫
+沮
+沱
+河
+沸
+油
+治
+沼
+沽
+沾
+沿
+泄
+泉
+泊
+泌
+泓
+泔
+法
+泗
+泛
+泞
+泠
+泡
+波
+泣
+泥
+注
+泪
+泯
+泰
+泱
+泳
+泵
+泷
+泸
+泻
+泼
+泽
+泾
+洁
+洋
+洒
+洗
+洙
+洛
+洞
+津
+洪
+洱
+洲
+洵
+活
+洼
+洽
+派
+流
+浅
+浆
+浇
+浈
+浊
+测
+济
+浏
+浑
+浓
+浙
+浚
+浦
+浩
+浪
+浮
+浴
+海
+浸
+涂
+涅
+消
+涉
+涌
+涎
+涓
+涕
+涛
+涝
+涞
+涟
+涠
+涡
+涤
+润
+涧
+涨
+涩
+涮
+涯
+液
+涵
+涿
+淀
+淄
+淆
+淇
+淋
+淌
+淑
+淖
+淘
+淝
+淞
+淡
+淤
+淫
+淮
+深
+淳
+混
+淹
+添
+淼
+渀
+清
+渊
+渍
+渎
+渐
+渔
+渗
+渚
+渝
+渠
+渡
+渣
+渤
+渥
+温
+渭
+港
+渲
+渴
+游
+渺
+湃
+湄
+湍
+湖
+湘
+湛
+湾
+湿
+溃
+溅
+溉
+源
+溜
+溢
+溥
+溧
+溪
+溯
+溶
+溺
+滁
+滇
+滋
+滑
+滔
+滕
+滚
+滞
+满
+滢
+滤
+滥
+滨
+滩
+滴
+漂
+漆
+漏
+漓
+演
+漕
+漠
+漩
+漫
+漭
+漯
+漱
+漳
+漾
+潇
+潘
+潜
+潞
+潢
+潦
+潭
+潮
+潼
+澄
+澈
+澎
+澜
+澡
+澳
+激
+濑
+濒
+濠
+濡
+濮
+瀑
+瀚
+瀛
+灌
+灞
+火
+灭
+灯
+灰
+灵
+灶
+灸
+灼
+灾
+灿
+炅
+炉
+炊
+炎
+炒
+炕
+炖
+炙
+炜
+炫
+炬
+炭
+炮
+炯
+炳
+炷
+炸
+点
+炼
+炽
+烁
+烂
+烃
+烈
+烊
+烘
+烙
+烛
+烟
+烤
+烦
+烧
+烨
+烫
+热
+烯
+烷
+烹
+烽
+焉
+焊
+焕
+焖
+焘
+焚
+焦
+焯
+焰
+焱
+然
+煊
+煌
+煎
+煜
+煞
+煤
+煦
+照
+煮
+煲
+熄
+熊
+熏
+熔
+熙
+熟
+熠
+熨
+熬
+熹
+燃
+燊
+燎
+燕
+燥
+爆
+爪
+爬
+爱
+爵
+父
+爷
+爸
+爹
+爽
+片
+版
+牌
+牙
+牛
+牟
+牡
+牢
+牧
+物
+牲
+牵
+特
+牺
+牾
+犀
+犁
+犄
+犊
+犒
+犬
+犯
+状
+犷
+犹
+狂
+狄
+狈
+狐
+狒
+狗
+狙
+狞
+狠
+狡
+狩
+独
+狭
+狮
+狰
+狱
+狸
+狼
+猎
+猖
+猛
+猜
+猝
+猥
+猩
+猪
+猫
+猬
+献
+猴
+猾
+猿
+獒
+獗
+獾
+玄
+率
+玉
+王
+玖
+玛
+玟
+玥
+玩
+玫
+玮
+环
+现
+玲
+玳
+玺
+玻
+珀
+珉
+珊
+珍
+珏
+珑
+珜
+珠
+班
+珮
+珲
+珺
+球
+琅
+理
+琉
+琊
+琏
+琐
+琛
+琢
+琥
+琦
+琨
+琪
+琬
+琰
+琳
+琴
+琵
+琶
+琼
+瑁
+瑄
+瑕
+瑙
+瑚
+瑛
+瑜
+瑞
+瑟
+瑰
+瑶
+瑾
+璀
+璃
+璇
+璋
+璐
+璞
+璧
+璨
+瓜
+瓢
+瓣
+瓦
+瓮
+瓯
+瓶
+瓷
+甄
+甘
+甚
+甜
+生
+甥
+用
+甩
+甫
+甬
+甯
+田
+由
+甲
+申
+电
+男
+甸
+町
+画
+畅
+畊
+界
+畏
+畔
+留
+畜
+略
+番
+畴
+畸
+畿
+疃
+疆
+疏
+疑
+疗
+疚
+疝
+疤
+疫
+疯
+疲
+疵
+疹
+疼
+疾
+病
+症
+痉
+痊
+痒
+痕
+痘
+痛
+痣
+痪
+痫
+痰
+痱
+痴
+痹
+痼
+瘀
+瘁
+瘟
+瘠
+瘤
+瘦
+瘩
+瘪
+瘫
+瘸
+瘾
+癌
+癖
+癣
+癫
+登
+白
+百
+皂
+的
+皆
+皇
+皋
+皎
+皓
+皖
+皙
+皮
+皱
+盆
+盈
+益
+盎
+盐
+监
+盒
+盔
+盖
+盗
+盘
+盛
+盟
+目
+盯
+盲
+直
+相
+盹
+盼
+盾
+省
+眈
+眉
+看
+真
+眠
+眨
+眬
+眯
+眶
+眷
+眺
+眼
+着
+睁
+睐
+睛
+睡
+督
+睦
+睫
+睬
+睹
+睾
+睿
+瞄
+瞅
+瞌
+瞎
+瞒
+瞟
+瞧
+瞩
+瞪
+瞬
+瞰
+瞳
+瞻
+瞿
+矗
+矛
+矜
+矢
+矣
+知
+矩
+矫
+短
+矮
+石
+矶
+矸
+矿
+码
+砂
+砌
+砍
+砒
+研
+砖
+砚
+砝
+砥
+砰
+砲
+破
+砷
+砸
+砺
+砾
+础
+硅
+硕
+硚
+硝
+硫
+硬
+确
+碉
+碌
+碍
+碎
+碑
+碗
+碘
+碚
+碟
+碧
+碰
+碱
+碳
+碴
+碾
+磁
+磅
+磊
+磋
+磐
+磕
+磡
+磨
+磴
+磷
+磺
+礁
+示
+礼
+社
+祁
+祈
+祉
+祖
+祛
+祝
+神
+祠
+祢
+祥
+票
+祭
+祯
+祷
+祸
+祺
+禀
+禁
+禄
+禅
+福
+禧
+禹
+禺
+离
+禽
+禾
+秀
+私
+秃
+秆
+秉
+秋
+种
+科
+秒
+秘
+租
+秣
+秤
+秦
+秧
+秩
+积
+称
+秸
+移
+秽
+稀
+程
+稍
+税
+稚
+稠
+稣
+稳
+稻
+稼
+稽
+稿
+穆
+穗
+穴
+究
+穷
+空
+穿
+突
+窃
+窄
+窈
+窍
+窑
+窒
+窕
+窖
+窗
+窘
+窜
+窝
+窟
+窥
+窦
+窨
+窿
+立
+竖
+站
+竞
+竟
+章
+竣
+童
+竭
+端
+竲
+竹
+竺
+竽
+竿
+笃
+笈
+笋
+笑
+笔
+笙
+笛
+符
+笨
+第
+笼
+等
+筋
+筏
+筐
+筑
+筒
+答
+策
+筛
+筱
+筵
+筷
+筹
+签
+简
+箍
+箔
+箕
+算
+管
+箫
+箭
+箱
+篇
+篡
+篪
+篮
+篷
+簇
+簧
+簸
+簿
+籁
+籍
+米
+类
+籽
+粉
+粒
+粕
+粗
+粘
+粟
+粤
+粥
+粪
+粮
+粱
+粹
+粽
+精
+糊
+糕
+糖
+糗
+糙
+糟
+糯
+系
+紊
+素
+索
+紧
+紫
+累
+絮
+綦
+繁
+纠
+红
+纣
+纤
+约
+级
+纪
+纬
+纯
+纰
+纱
+纲
+纳
+纵
+纶
+纷
+纸
+纹
+纺
+纽
+线
+练
+组
+绅
+细
+织
+终
+绊
+绌
+绍
+绎
+经
+绑
+绒
+结
+绕
+绘
+给
+绚
+络
+绝
+绞
+统
+绢
+绣
+继
+绩
+绪
+续
+绮
+绯
+绰
+绳
+维
+绵
+绷
+绸
+综
+绽
+绿
+缀
+缄
+缅
+缆
+缇
+缉
+缓
+缔
+缕
+编
+缘
+缙
+缚
+缜
+缝
+缠
+缤
+缨
+缩
+缪
+缭
+缮
+缰
+缴
+缸
+缺
+罂
+罄
+罐
+网
+罕
+罗
+罚
+罡
+罢
+罩
+罪
+置
+署
+罹
+羁
+羊
+美
+羔
+羚
+羞
+羡
+羣
+群
+羲
+羹
+羽
+羿
+翁
+翅
+翌
+翔
+翘
+翟
+翠
+翡
+翩
+翰
+翱
+翻
+翼
+耀
+老
+考
+耄
+者
+耋
+而
+耍
+耐
+耒
+耕
+耗
+耘
+耳
+耶
+耷
+耸
+耻
+耽
+耿
+聂
+聆
+聊
+聋
+职
+联
+聘
+聚
+聪
+肃
+肆
+肇
+肉
+肋
+肌
+肖
+肘
+肚
+肛
+肝
+肠
+股
+肢
+肤
+肥
+肩
+肪
+肮
+肯
+育
+肴
+肺
+肾
+肿
+胀
+胁
+胃
+胆
+背
+胎
+胖
+胚
+胛
+胜
+胞
+胡
+胤
+胧
+胫
+胯
+胰
+胱
+胳
+胶
+胸
+胺
+能
+脂
+脆
+脉
+脊
+脍
+脏
+脐
+脑
+脖
+脚
+脯
+脱
+脸
+脾
+腆
+腊
+腋
+腌
+腐
+腑
+腓
+腔
+腕
+腥
+腩
+腮
+腰
+腱
+腹
+腺
+腻
+腼
+腾
+腿
+膀
+膊
+膏
+膑
+膛
+膜
+膝
+膨
+膳
+膺
+臀
+臂
+臃
+臆
+臣
+自
+臭
+至
+致
+臻
+舀
+舅
+舆
+舌
+舍
+舒
+舛
+舜
+舞
+舟
+航
+般
+舰
+舱
+舵
+舶
+舸
+船
+艇
+艋
+艘
+良
+艰
+色
+艳
+艺
+艾
+节
+芊
+芋
+芒
+芙
+芜
+芝
+芦
+芪
+芬
+芭
+芮
+芯
+花
+芳
+芷
+芸
+芽
+苇
+苍
+苏
+苑
+苗
+苛
+苟
+苡
+苣
+若
+苦
+苯
+英
+苹
+茁
+茂
+范
+茄
+茅
+茆
+茎
+茗
+茜
+茨
+茫
+茬
+茵
+茶
+茸
+茹
+荃
+荆
+荇
+草
+荐
+荒
+荔
+荚
+荞
+荟
+荡
+荣
+荤
+荧
+荫
+药
+荷
+荼
+莅
+莆
+莉
+莎
+莓
+莘
+莞
+莠
+莫
+莱
+莲
+莴
+获
+莹
+莺
+莽
+菁
+菇
+菊
+菌
+菜
+菠
+菡
+菩
+菱
+菲
+萃
+萄
+萋
+萌
+萍
+萎
+萝
+萤
+营
+萦
+萧
+萨
+萱
+落
+葆
+著
+葛
+葡
+董
+葩
+葫
+葬
+葱
+葵
+蒂
+蒋
+蒙
+蒜
+蒲
+蒸
+蒿
+蓁
+蓄
+蓉
+蓝
+蓟
+蓬
+蔑
+蔓
+蔗
+蔚
+蔡
+蔫
+蔬
+蔷
+蔺
+蔽
+蕉
+蕊
+蕙
+蕲
+蕴
+蕾
+薄
+薇
+薙
+薛
+薪
+薯
+薰
+藏
+藜
+藤
+藩
+藻
+蘑
+虎
+虏
+虐
+虑
+虚
+虞
+虫
+虱
+虹
+虽
+虾
+蚀
+蚁
+蚂
+蚊
+蚌
+蚓
+蚕
+蚝
+蚣
+蚯
+蛀
+蛆
+蛇
+蛋
+蛐
+蛙
+蛛
+蛟
+蛮
+蛰
+蜀
+蜂
+蜇
+蜈
+蜊
+蜒
+蜓
+蜕
+蜗
+蜘
+蜚
+蜜
+蜡
+蜥
+蜴
+蜷
+蜻
+蜿
+蝇
+蝉
+蝎
+蝗
+蝙
+蝠
+蝴
+蝶
+螂
+螃
+融
+螳
+螺
+蟀
+蟋
+蟑
+蟒
+蟹
+蠕
+蠢
+血
+衅
+行
+衍
+衔
+街
+衙
+衡
+衣
+补
+表
+衫
+衬
+衰
+衷
+袁
+袂
+袄
+袆
+袈
+袋
+袍
+袒
+袖
+袜
+被
+袭
+袱
+裁
+裂
+装
+裆
+裔
+裕
+裙
+裟
+裤
+裳
+裴
+裸
+裹
+褂
+褒
+褓
+褚
+褛
+褪
+褴
+褶
+襁
+襄
+襟
+西
+要
+覃
+覆
+见
+观
+规
+觅
+视
+览
+觉
+觊
+觎
+觐
+觑
+角
+解
+觥
+触
+言
+詹
+誉
+誓
+警
+譬
+计
+订
+认
+讧
+讨
+让
+讪
+训
+议
+讯
+记
+讲
+讳
+讶
+许
+讹
+论
+讼
+讽
+设
+访
+诀
+证
+评
+诅
+识
+诈
+诉
+诊
+词
+译
+诓
+试
+诗
+诙
+诚
+话
+诞
+诟
+诠
+诡
+询
+该
+详
+诧
+诩
+诫
+诬
+语
+误
+诱
+诲
+说
+诵
+诶
+请
+诸
+诺
+读
+诽
+课
+诿
+谀
+谁
+调
+谅
+谈
+谊
+谋
+谌
+谍
+谎
+谐
+谑
+谓
+谕
+谙
+谚
+谜
+谢
+谣
+谤
+谦
+谨
+谩
+谬
+谭
+谱
+谴
+谷
+豁
+豆
+豚
+象
+豪
+豫
+豹
+貅
+貉
+貌
+貔
+贝
+贞
+负
+贡
+财
+责
+贤
+败
+账
+货
+质
+贩
+贪
+贫
+贬
+购
+贮
+贯
+贱
+贴
+贵
+贷
+贸
+费
+贺
+贼
+贾
+贿
+赁
+赂
+赃
+资
+赋
+赌
+赎
+赏
+赐
+赔
+赖
+赘
+赚
+赛
+赝
+赞
+赠
+赡
+赢
+赣
+赤
+赦
+赫
+走
+赴
+赵
+赶
+起
+趁
+超
+越
+趋
+趟
+趣
+足
+趴
+趸
+趾
+跃
+跄
+跆
+跌
+跑
+跛
+距
+跟
+跤
+跨
+跪
+路
+跳
+践
+跷
+跺
+跻
+踉
+踊
+踏
+踝
+踞
+踢
+踩
+踪
+踵
+踹
+蹂
+蹄
+蹈
+蹊
+蹚
+蹦
+蹬
+蹭
+蹲
+蹴
+蹶
+蹼
+蹿
+躁
+躏
+身
+躬
+躯
+躲
+躺
+车
+轧
+轨
+轩
+转
+轮
+软
+轰
+轴
+轶
+轻
+载
+轿
+较
+辄
+辅
+辆
+辈
+辉
+辍
+辐
+辑
+输
+辖
+辗
+辘
+辙
+辛
+辜
+辞
+辟
+辣
+辨
+辩
+辫
+辰
+辱
+边
+辽
+达
+迁
+迂
+迄
+迅
+过
+迈
+迎
+运
+近
+返
+还
+这
+进
+远
+违
+连
+迟
+迢
+迥
+迪
+迫
+迭
+述
+迷
+迸
+迹
+追
+退
+送
+适
+逃
+逅
+逆
+选
+逊
+逍
+透
+逐
+递
+途
+逗
+通
+逛
+逝
+逞
+速
+造
+逡
+逢
+逮
+逵
+逸
+逻
+逼
+逾
+遁
+遂
+遇
+遍
+遏
+遐
+道
+遗
+遛
+遢
+遣
+遥
+遨
+遭
+遮
+遴
+遵
+避
+邀
+邂
+邃
+邋
+邑
+邓
+邛
+邝
+邢
+那
+邦
+邪
+邬
+邮
+邯
+邱
+邵
+邹
+邺
+邻
+郁
+郊
+郎
+郑
+郜
+郝
+郡
+部
+郫
+郭
+郸
+都
+鄂
+鄙
+鄞
+鄢
+酋
+酌
+配
+酒
+酗
+酝
+酣
+酪
+酬
+酯
+酱
+酵
+酶
+酷
+酸
+酿
+醇
+醉
+醋
+醍
+醐
+醒
+醛
+采
+釉
+释
+里
+重
+野
+量
+金
+釜
+鉴
+鏖
+鑫
+针
+钉
+钊
+钒
+钓
+钛
+钜
+钝
+钞
+钟
+钠
+钢
+钥
+钦
+钧
+钩
+钮
+钰
+钱
+钴
+钵
+钻
+钾
+铀
+铁
+铂
+铃
+铅
+铆
+铉
+铎
+铐
+铜
+铝
+铠
+铡
+铣
+铨
+铬
+铭
+铮
+铰
+铲
+银
+铸
+铺
+链
+铿
+销
+锁
+锂
+锄
+锅
+锆
+锈
+锋
+锌
+锏
+锐
+错
+锚
+锜
+锟
+锡
+锢
+锣
+锤
+锥
+锦
+锭
+键
+锯
+锰
+锵
+锷
+锹
+锻
+镀
+镁
+镇
+镉
+镊
+镍
+镐
+镑
+镖
+镜
+镯
+镳
+镶
+长
+门
+闪
+闫
+闭
+问
+闯
+闰
+闲
+闳
+间
+闵
+闷
+闸
+闹
+闺
+闻
+闽
+阀
+阁
+阂
+阅
+阎
+阐
+阔
+阙
+阚
+阜
+队
+阮
+阱
+防
+阳
+阴
+阵
+阶
+阻
+阿
+陀
+陂
+附
+际
+陆
+陇
+陈
+陋
+陌
+降
+限
+陕
+陡
+院
+除
+陨
+险
+陪
+陬
+陵
+陶
+陷
+隅
+隆
+隋
+隍
+随
+隐
+隔
+隘
+隙
+障
+隧
+隶
+隼
+隽
+难
+雀
+雁
+雄
+雅
+集
+雇
+雌
+雍
+雏
+雕
+雨
+雪
+雯
+雳
+零
+雷
+雾
+需
+霁
+霄
+霆
+震
+霈
+霉
+霍
+霎
+霏
+霖
+霜
+霞
+露
+霸
+霹
+霾
+靑
+青
+靓
+靖
+静
+靛
+非
+靠
+靡
+面
+革
+靳
+靴
+靶
+鞋
+鞍
+鞘
+鞠
+鞭
+韦
+韧
+韩
+韬
+音
+韵
+韶
+页
+顶
+顷
+项
+顺
+须
+顽
+顾
+顿
+颁
+颂
+预
+颅
+领
+颇
+颈
+颊
+颍
+颐
+频
+颓
+颖
+颗
+题
+颚
+颜
+额
+颠
+颤
+风
+飒
+飓
+飘
+飙
+飚
+飞
+食
+餐
+餮
+饕
+饥
+饪
+饭
+饮
+饰
+饱
+饲
+饵
+饶
+饺
+饼
+饽
+饿
+馀
+馅
+馆
+馈
+馊
+馋
+馑
+馒
+首
+馗
+香
+馥
+馨
+马
+驭
+驯
+驰
+驱
+驳
+驴
+驶
+驻
+驼
+驾
+驿
+骁
+骂
+骄
+骅
+骆
+骇
+骊
+骋
+验
+骏
+骐
+骑
+骗
+骚
+骜
+骤
+骥
+骨
+骷
+骸
+骼
+髅
+髋
+髌
+髓
+高
+髦
+鬼
+魁
+魂
+魄
+魅
+魇
+魏
+魔
+鱼
+鲁
+鲍
+鲜
+鲟
+鲤
+鲨
+鲶
+鲷
+鲸
+鳄
+鳅
+鳌
+鳖
+鳝
+鳞
+鸟
+鸠
+鸡
+鸣
+鸥
+鸦
+鸭
+鸯
+鸳
+鸵
+鸽
+鸾
+鸿
+鹃
+鹅
+鹊
+鹏
+鹜
+鹞
+鹤
+鹭
+鹰
+鹿
+麋
+麒
+麓
+麟
+麦
+麻
+麾
+黄
+黍
+黎
+黏
+黑
+黔
+默
+黛
+黝
+黯
+鼎
+鼓
+鼠
+鼻
+鼾
+齐
+齿
+龄
+龙
+龚
+龟
+a
+c
+k
+t
+
diff --git a/modules/audio/asr/deepspeech2_aishell/deepspeech_tester.py b/modules/audio/asr/deepspeech2_aishell/deepspeech_tester.py
new file mode 100644
index 00000000..6b1f8975
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_aishell/deepspeech_tester.py
@@ -0,0 +1,81 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Evaluation for DeepSpeech2 model."""
+import os
+import sys
+from pathlib import Path
+
+import paddle
+
+from deepspeech.frontend.featurizer.text_featurizer import TextFeaturizer
+from deepspeech.io.collator import SpeechCollator
+from deepspeech.models.ds2 import DeepSpeech2Model
+from deepspeech.utils import mp_tools
+from deepspeech.utils.utility import UpdateConfig
+
+
+class DeepSpeech2Tester:
+ def __init__(self, config):
+ self.config = config
+ self.collate_fn_test = SpeechCollator.from_config(config)
+ self._text_featurizer = TextFeaturizer(unit_type=config.collator.unit_type, vocab_filepath=None)
+
+ def compute_result_transcripts(self, audio, audio_len, vocab_list, cfg):
+ result_transcripts = self.model.decode(
+ audio,
+ audio_len,
+ vocab_list,
+ decoding_method=cfg.decoding_method,
+ lang_model_path=cfg.lang_model_path,
+ beam_alpha=cfg.alpha,
+ beam_beta=cfg.beta,
+ beam_size=cfg.beam_size,
+ cutoff_prob=cfg.cutoff_prob,
+ cutoff_top_n=cfg.cutoff_top_n,
+ num_processes=cfg.num_proc_bsearch)
+        # Detokenize: convert the decoded token sequences back into plain-text transcripts.
+ result_transcripts = [self._text_featurizer.detokenize(sentence) for sentence in result_transcripts]
+
+ return result_transcripts
+
+ @mp_tools.rank_zero_only
+ @paddle.no_grad()
+ def test(self, audio_file):
+ self.model.eval()
+ cfg = self.config
+ collate_fn_test = self.collate_fn_test
+ audio, _ = collate_fn_test.process_utterance(audio_file=audio_file, transcript=" ")
+ audio_len = audio.shape[0]
+ audio = paddle.to_tensor(audio, dtype='float32')
+ audio_len = paddle.to_tensor(audio_len)
+ audio = paddle.unsqueeze(audio, axis=0)
+ vocab_list = collate_fn_test.vocab_list
+ result_transcripts = self.compute_result_transcripts(audio, audio_len, vocab_list, cfg.decoding)
+ return result_transcripts
+
+ def setup_model(self):
+ config = self.config.clone()
+ with UpdateConfig(config):
+ config.model.feat_size = self.collate_fn_test.feature_size
+ config.model.dict_size = self.collate_fn_test.vocab_size
+
+ model = DeepSpeech2Model.from_config(config.model)
+ self.model = model
+
+ def resume(self, checkpoint):
+        """Load model parameters from the given checkpoint file."""
+ model_dict = paddle.load(checkpoint)
+ self.model.set_state_dict(model_dict)
diff --git a/modules/audio/asr/deepspeech2_aishell/module.py b/modules/audio/asr/deepspeech2_aishell/module.py
new file mode 100644
index 00000000..3e18e4b0
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_aishell/module.py
@@ -0,0 +1,92 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from pathlib import Path
+import sys
+
+import numpy as np
+from paddlehub.env import MODULE_HOME
+from paddlehub.module.module import moduleinfo, serving
+from paddlehub.utils.log import logger
+from paddle.utils.download import get_path_from_url
+
+try:
+ import swig_decoders
+except ModuleNotFoundError as e:
+ logger.error(e)
+ logger.info('The module requires additional dependencies: swig_decoders. '
+ 'please install via:\n\'git clone https://github.com/PaddlePaddle/DeepSpeech.git '
+ '&& cd DeepSpeech && git reset --hard b53171694e7b87abe7ea96870b2f4d8e0e2b1485 '
+ '&& cd deepspeech/decoders/ctcdecoder/swig && sh setup.sh\'')
+ sys.exit(1)
+
+import paddle
+import soundfile as sf
+
+# TODO: Remove system path when deepspeech can be installed via pip.
+sys.path.append(os.path.join(MODULE_HOME, 'deepspeech2_aishell'))
+from deepspeech.exps.deepspeech2.config import get_cfg_defaults
+from deepspeech.utils.utility import UpdateConfig
+from .deepspeech_tester import DeepSpeech2Tester
+
+LM_URL = 'https://deepspeech.bj.bcebos.com/zh_lm/zh_giga.no_cna_cmn.prune01244.klm'
+LM_MD5 = '29e02312deb2e59b3c8686c7966d4fe3'
+
+
+@moduleinfo(name="deepspeech2_aishell", version="1.0.0", summary="", author="Baidu", author_email="", type="audio/asr")
+class DeepSpeech2(paddle.nn.Layer):
+ def __init__(self):
+ super(DeepSpeech2, self).__init__()
+
+ # resource
+ res_dir = os.path.join(MODULE_HOME, 'deepspeech2_aishell', 'assets')
+ conf_file = os.path.join(res_dir, 'conf/deepspeech2.yaml')
+ checkpoint = os.path.join(res_dir, 'checkpoints/avg_1.pdparams')
+        # Download the language model manually because of its large size.
+ lm_path = os.path.join(res_dir, 'data', 'lm')
+ lm_file = os.path.join(lm_path, LM_URL.split('/')[-1])
+ if not os.path.isfile(lm_file):
+ logger.info(f'Downloading lm from {LM_URL}.')
+ get_path_from_url(url=LM_URL, root_dir=lm_path, md5sum=LM_MD5)
+
+ # config
+ self.model_type = 'offline'
+ self.config = get_cfg_defaults(self.model_type)
+ self.config.merge_from_file(conf_file)
+
+ # TODO: Remove path updating snippet.
+ with UpdateConfig(self.config):
+ self.config.collator.mean_std_filepath = os.path.join(res_dir, self.config.collator.mean_std_filepath)
+ self.config.collator.vocab_filepath = os.path.join(res_dir, self.config.collator.vocab_filepath)
+ self.config.collator.augmentation_config = os.path.join(res_dir, self.config.collator.augmentation_config)
+ self.config.decoding.lang_model_path = os.path.join(res_dir, self.config.decoding.lang_model_path)
+
+ # model
+ self.tester = DeepSpeech2Tester(self.config)
+ self.tester.setup_model()
+ self.tester.resume(checkpoint)
+
+ @staticmethod
+ def check_audio(audio_file):
+ sig, sample_rate = sf.read(audio_file)
+        assert sample_rate == 16000, 'Expected sample rate of input audio to be 16000, but got {}'.format(sample_rate)
+
+ @serving
+ def speech_recognize(self, audio_file, device='cpu'):
+        assert os.path.isfile(audio_file), 'File does not exist: {}'.format(audio_file)
+ self.check_audio(audio_file)
+
+ paddle.set_device(device)
+ return self.tester.test(audio_file)[0]
diff --git a/modules/audio/asr/deepspeech2_aishell/requirements.txt b/modules/audio/asr/deepspeech2_aishell/requirements.txt
new file mode 100644
index 00000000..e6f929d0
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_aishell/requirements.txt
@@ -0,0 +1,12 @@
+# system level: libsnd swig
+loguru
+yacs
+jsonlines
+scipy==1.2.1
+sentencepiece
+resampy==0.2.2
+SoundFile==0.9.0.post1
+soxbindings
+kaldiio
+typeguard
+editdistance
diff --git a/modules/audio/asr/deepspeech2_librispeech/README.md b/modules/audio/asr/deepspeech2_librispeech/README.md
new file mode 100644
index 00000000..a7d4aee0
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_librispeech/README.md
@@ -0,0 +1,153 @@
+# deepspeech2_librispeech
+
+|模型名称|deepspeech2_librispeech|
+| :--- | :---: |
+|类别|语音-语音识别|
+|网络|DeepSpeech2|
+|数据集|LibriSpeech|
+|是否支持Fine-tuning|否|
+|模型大小|518MB|
+|最新更新日期|2021-10-20|
+|数据指标|英文WER 0.072|
+
+## 一、模型基本信息
+
+### 模型介绍
+
+DeepSpeech2是百度于2015年提出的适用于英文和中文的end-to-end语音识别模型。deepspeech2_librispeech使用了DeepSpeech2离线模型的结构,模型主要由2层卷积网络和3层RNN组成,并在英文开源语音数据集[LibriSpeech ASR corpus](http://www.openslr.org/12/)上进行了预训练,该模型在其测试集上的WER指标为0.072。
+
+
+
+
+
+
+更多详情请参考[Deep Speech 2: End-to-End Speech Recognition in English and Mandarin](https://arxiv.org/abs/1512.02595)
+
+## 二、安装
+
+- ### 1、系统依赖
+
+ - libsndfile, swig >= 3.0
+ - Linux
+ ```shell
+ $ sudo apt-get install libsndfile swig
+ or
+ $ sudo yum install libsndfile swig
+ ```
+  - macOS
+ ```
+ $ brew install libsndfile swig
+ ```
+
+- ### 2、环境依赖
+ - swig_decoder:
+ ```
+    git clone https://github.com/PaddlePaddle/DeepSpeech.git && cd DeepSpeech && git reset --hard b53171694e7b87abe7ea96870b2f4d8e0e2b1485 && cd deepspeech/decoders/ctcdecoder/swig && sh setup.sh
+ ```
+
+ - paddlepaddle >= 2.1.0
+
+ - paddlehub >= 2.1.0 | [如何安装PaddleHub](../../../../docs/docs_ch/get_start/installation.rst)
+
+- ### 3、安装
+
+ - ```shell
+ $ hub install deepspeech2_librispeech
+ ```
+ - 如您安装时遇到问题,可参考:[零基础windows安装](../../../../docs/docs_ch/get_start/windows_quickstart.md)
+ | [零基础Linux安装](../../../../docs/docs_ch/get_start/linux_quickstart.md) | [零基础MacOS安装](../../../../docs/docs_ch/get_start/mac_quickstart.md)
+
+
+## 三、模型API预测
+
+- ### 1、预测代码示例
+
+ ```python
+ import paddlehub as hub
+
+ # 采样率为16k,格式为wav的英文语音音频
+ wav_file = '/PATH/TO/AUDIO'
+
+ model = hub.Module(
+ name='deepspeech2_librispeech',
+ version='1.0.0')
+ text = model.speech_recognize(wav_file)
+
+ print(text)
+ ```
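+
+  如果待识别音频的采样率不是16k,可以先将其重采样为16k再送入模型。下面是一个参考写法(仅作示意,假设已安装Python包`resampy`与`SoundFile`,文件路径均为示例路径):
+
+  ```python
+  import soundfile as sf
+  import resampy
+
+  # 读取原始音频;若为多声道则取均值转为单声道,若采样率不是16000Hz则重采样
+  data, sr = sf.read('/PATH/TO/RAW_AUDIO')
+  if data.ndim > 1:
+      data = data.mean(axis=1)
+  if sr != 16000:
+      data = resampy.resample(data, sr, 16000)
+  # 另存为16k采样率的wav文件(即上文示例中的wav_file),再进行识别
+  sf.write('/PATH/TO/AUDIO', data, 16000, format='WAV')
+  ```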
+
+- ### 2、API
+ - ```python
+ def check_audio(audio_file)
+ ```
+    - 检查输入的音频文件是否满足模型要求(采样率为16000Hz的wav格式文件)。
+
+ - **参数**
+
+ - `audio_file`:本地音频文件(*.wav)的路径,如`/path/to/input.wav`
+
+ - ```python
+ def speech_recognize(
+ audio_file,
+ device='cpu',
+ )
+ ```
+ - 将输入的音频识别成文字
+
+ - **参数**
+
+ - `audio_file`:本地音频文件(*.wav)的路径,如`/path/to/input.wav`
+    - `device`:预测时使用的设备,默认为`cpu`,如需使用gpu预测,请设置为`gpu`(用法可参考下方示例)。
+
+ - **返回**
+
+ - `text`:str类型,返回输入音频的识别文字结果。
+
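+  - 下面给出`device`参数的一个使用示意(仅供参考,假设本机已安装GPU版的PaddlePaddle且GPU可正常使用;`model`与`wav_file`为上文预测代码示例中已创建的对象):
+
+  - ```python
+    # 将device设置为'gpu'即可使用GPU进行预测
+    text = model.speech_recognize(wav_file, device='gpu')
+    print(text)
+    ```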
+
+## 四、服务部署
+
+- PaddleHub Serving可以部署一个在线的语音识别服务。
+
+- ### 第一步:启动PaddleHub Serving
+
+ - ```shell
+ $ hub serving start -m deepspeech2_librispeech
+ ```
+
+ - 这样就完成了一个语音识别服务化API的部署,默认端口号为8866。
+
+  - **NOTE:** 如使用GPU预测,则需要在启动服务之前设置CUDA_VISIBLE_DEVICES环境变量,否则无需设置。
+
+- ### 第二步:发送预测请求
+
+  - 配置好服务端后,使用以下几行代码即可向服务端发送预测请求并获取预测结果
+
+ - ```python
+ import requests
+ import json
+
+ # 需要识别的音频的存放路径,确保部署服务的机器可访问
+ file = '/path/to/input.wav'
+
+    # 以key-value的方式指定传入预测方法的参数,此例中为"audio_file"
+ data = {"audio_file": file}
+
+ # 发送post请求,content-type类型应指定json方式,url中的ip地址需改为对应机器的ip
+ url = "http://127.0.0.1:8866/predict/deepspeech2_librispeech"
+
+ # 指定post请求的headers为application/json方式
+ headers = {"Content-Type": "application/json"}
+
+ r = requests.post(url=url, headers=headers, data=json.dumps(data))
+ print(r.json())
+ ```
+
+## 五、更新历史
+
+* 1.0.0
+
+ 初始发布
+
+ ```shell
+ $ hub install deepspeech2_librispeech
+ ```
diff --git a/modules/audio/asr/deepspeech2_librispeech/__init__.py b/modules/audio/asr/deepspeech2_librispeech/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/modules/audio/asr/deepspeech2_librispeech/assets/conf/augmentation.json b/modules/audio/asr/deepspeech2_librispeech/assets/conf/augmentation.json
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_librispeech/assets/conf/augmentation.json
@@ -0,0 +1 @@
+{}
diff --git a/modules/audio/asr/deepspeech2_librispeech/assets/conf/deepspeech2.yaml b/modules/audio/asr/deepspeech2_librispeech/assets/conf/deepspeech2.yaml
new file mode 100644
index 00000000..c5c2e466
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_librispeech/assets/conf/deepspeech2.yaml
@@ -0,0 +1,68 @@
+# https://yaml.org/type/float.html
+data:
+ train_manifest: data/manifest.train
+ dev_manifest: data/manifest.dev-clean
+ test_manifest: data/manifest.test-clean
+ min_input_len: 0.0
+ max_input_len: 30.0 # second
+ min_output_len: 0.0
+ max_output_len: .inf
+ min_output_input_ratio: 0.00
+ max_output_input_ratio: .inf
+
+collator:
+ batch_size: 20
+ mean_std_filepath: data/mean_std.json
+ unit_type: char
+ vocab_filepath: data/vocab.txt
+ augmentation_config: conf/augmentation.json
+ random_seed: 0
+ spm_model_prefix:
+ spectrum_type: linear
+ target_sample_rate: 16000
+ max_freq: None
+ n_fft: None
+ stride_ms: 10.0
+ window_ms: 20.0
+ delta_delta: False
+ dither: 1.0
+ use_dB_normalization: True
+ target_dB: -20
+ keep_transcription_text: False
+ sortagrad: True
+ shuffle_method: batch_shuffle
+ num_workers: 2
+
+model:
+ num_conv_layers: 2
+ num_rnn_layers: 3
+ rnn_layer_size: 2048
+ use_gru: False
+ share_rnn_weights: True
+ blank_id: 0
+ ctc_grad_norm_type: instance
+
+training:
+ n_epoch: 50
+ accum_grad: 1
+ lr: 1e-3
+ lr_decay: 0.83
+ weight_decay: 1e-06
+ global_grad_clip: 5.0
+ log_interval: 100
+ checkpoint:
+ kbest_n: 50
+ latest_n: 5
+
+decoding:
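+  # Main parameters of the ctc_beam_search decoder:
+  # alpha / beta: weights of the external language model and of word insertion in the scorer.
+  # beam_size: beam width; cutoff_prob / cutoff_top_n: pruning of candidate characters.
+  # num_proc_bsearch: number of parallel processes used for beam search.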
+ batch_size: 128
+ error_rate_type: wer
+ decoding_method: ctc_beam_search
+ lang_model_path: data/lm/common_crawl_00.prune01111.trie.klm
+ alpha: 1.9
+ beta: 0.3
+ beam_size: 500
+ cutoff_prob: 1.0
+ cutoff_top_n: 40
+ num_proc_bsearch: 8
diff --git a/modules/audio/asr/deepspeech2_librispeech/deepspeech_tester.py b/modules/audio/asr/deepspeech2_librispeech/deepspeech_tester.py
new file mode 100644
index 00000000..6b1f8975
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_librispeech/deepspeech_tester.py
@@ -0,0 +1,81 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Evaluation for DeepSpeech2 model."""
+import os
+import sys
+from pathlib import Path
+
+import paddle
+
+from deepspeech.frontend.featurizer.text_featurizer import TextFeaturizer
+from deepspeech.io.collator import SpeechCollator
+from deepspeech.models.ds2 import DeepSpeech2Model
+from deepspeech.utils import mp_tools
+from deepspeech.utils.utility import UpdateConfig
+
+
+class DeepSpeech2Tester:
+ def __init__(self, config):
+ self.config = config
+ self.collate_fn_test = SpeechCollator.from_config(config)
+ self._text_featurizer = TextFeaturizer(unit_type=config.collator.unit_type, vocab_filepath=None)
+
+ def compute_result_transcripts(self, audio, audio_len, vocab_list, cfg):
+ result_transcripts = self.model.decode(
+ audio,
+ audio_len,
+ vocab_list,
+ decoding_method=cfg.decoding_method,
+ lang_model_path=cfg.lang_model_path,
+ beam_alpha=cfg.alpha,
+ beam_beta=cfg.beta,
+ beam_size=cfg.beam_size,
+ cutoff_prob=cfg.cutoff_prob,
+ cutoff_top_n=cfg.cutoff_top_n,
+ num_processes=cfg.num_proc_bsearch)
+        # Detokenize: convert the decoded token sequences back into plain-text transcripts.
+ result_transcripts = [self._text_featurizer.detokenize(sentence) for sentence in result_transcripts]
+
+ return result_transcripts
+
+ @mp_tools.rank_zero_only
+ @paddle.no_grad()
+ def test(self, audio_file):
+ self.model.eval()
+ cfg = self.config
+ collate_fn_test = self.collate_fn_test
+ audio, _ = collate_fn_test.process_utterance(audio_file=audio_file, transcript=" ")
+ audio_len = audio.shape[0]
+ audio = paddle.to_tensor(audio, dtype='float32')
+ audio_len = paddle.to_tensor(audio_len)
+ audio = paddle.unsqueeze(audio, axis=0)
+ vocab_list = collate_fn_test.vocab_list
+ result_transcripts = self.compute_result_transcripts(audio, audio_len, vocab_list, cfg.decoding)
+ return result_transcripts
+
+ def setup_model(self):
+ config = self.config.clone()
+ with UpdateConfig(config):
+ config.model.feat_size = self.collate_fn_test.feature_size
+ config.model.dict_size = self.collate_fn_test.vocab_size
+
+ model = DeepSpeech2Model.from_config(config.model)
+ self.model = model
+
+ def resume(self, checkpoint):
+        """Load model parameters from the given checkpoint file."""
+ model_dict = paddle.load(checkpoint)
+ self.model.set_state_dict(model_dict)
diff --git a/modules/audio/asr/deepspeech2_librispeech/module.py b/modules/audio/asr/deepspeech2_librispeech/module.py
new file mode 100644
index 00000000..c05d484f
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_librispeech/module.py
@@ -0,0 +1,93 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from pathlib import Path
+import sys
+
+import numpy as np
+from paddlehub.env import MODULE_HOME
+from paddlehub.module.module import moduleinfo, serving
+from paddlehub.utils.log import logger
+from paddle.utils.download import get_path_from_url
+
+try:
+ import swig_decoders
+except ModuleNotFoundError as e:
+ logger.error(e)
+ logger.info('The module requires additional dependencies: swig_decoders. '
+ 'please install via:\n\'git clone https://github.com/PaddlePaddle/DeepSpeech.git '
+ '&& cd DeepSpeech && git reset --hard b53171694e7b87abe7ea96870b2f4d8e0e2b1485 '
+ '&& cd deepspeech/decoders/ctcdecoder/swig && sh setup.sh\'')
+ sys.exit(1)
+
+import paddle
+import soundfile as sf
+
+# TODO: Remove system path when deepspeech can be installed via pip.
+sys.path.append(os.path.join(MODULE_HOME, 'deepspeech2_librispeech'))
+from deepspeech.exps.deepspeech2.config import get_cfg_defaults
+from deepspeech.utils.utility import UpdateConfig
+from .deepspeech_tester import DeepSpeech2Tester
+
+LM_URL = 'https://deepspeech.bj.bcebos.com/en_lm/common_crawl_00.prune01111.trie.klm'
+LM_MD5 = '099a601759d467cd0a8523ff939819c5'
+
+
+@moduleinfo(
+ name="deepspeech2_librispeech", version="1.0.0", summary="", author="Baidu", author_email="", type="audio/asr")
+class DeepSpeech2(paddle.nn.Layer):
+ def __init__(self):
+ super(DeepSpeech2, self).__init__()
+
+ # resource
+ res_dir = os.path.join(MODULE_HOME, 'deepspeech2_librispeech', 'assets')
+ conf_file = os.path.join(res_dir, 'conf/deepspeech2.yaml')
+ checkpoint = os.path.join(res_dir, 'checkpoints/avg_1.pdparams')
+        # Download the language model manually because of its large size.
+ lm_path = os.path.join(res_dir, 'data', 'lm')
+ lm_file = os.path.join(lm_path, LM_URL.split('/')[-1])
+ if not os.path.isfile(lm_file):
+ logger.info(f'Downloading lm from {LM_URL}.')
+ get_path_from_url(url=LM_URL, root_dir=lm_path, md5sum=LM_MD5)
+
+ # config
+ self.model_type = 'offline'
+ self.config = get_cfg_defaults(self.model_type)
+ self.config.merge_from_file(conf_file)
+
+ # TODO: Remove path updating snippet.
+ with UpdateConfig(self.config):
+ self.config.collator.mean_std_filepath = os.path.join(res_dir, self.config.collator.mean_std_filepath)
+ self.config.collator.vocab_filepath = os.path.join(res_dir, self.config.collator.vocab_filepath)
+ self.config.collator.augmentation_config = os.path.join(res_dir, self.config.collator.augmentation_config)
+ self.config.decoding.lang_model_path = os.path.join(res_dir, self.config.decoding.lang_model_path)
+
+ # model
+ self.tester = DeepSpeech2Tester(self.config)
+ self.tester.setup_model()
+ self.tester.resume(checkpoint)
+
+ @staticmethod
+ def check_audio(audio_file):
+ sig, sample_rate = sf.read(audio_file)
+        assert sample_rate == 16000, 'Expected sample rate of input audio to be 16000, but got {}'.format(sample_rate)
+
+ @serving
+ def speech_recognize(self, audio_file, device='cpu'):
+        assert os.path.isfile(audio_file), 'File does not exist: {}'.format(audio_file)
+ self.check_audio(audio_file)
+
+ paddle.set_device(device)
+ return self.tester.test(audio_file)[0]
diff --git a/modules/audio/asr/deepspeech2_librispeech/requirements.txt b/modules/audio/asr/deepspeech2_librispeech/requirements.txt
new file mode 100644
index 00000000..66d8ba6c
--- /dev/null
+++ b/modules/audio/asr/deepspeech2_librispeech/requirements.txt
@@ -0,0 +1,11 @@
+loguru
+yacs
+jsonlines
+scipy==1.2.1
+sentencepiece
+resampy==0.2.2
+SoundFile==0.9.0.post1
+soxbindings
+kaldiio
+typeguard
+editdistance
diff --git a/modules/audio/asr/u2_conformer_aishell/README.md b/modules/audio/asr/u2_conformer_aishell/README.md
new file mode 100644
index 00000000..bd0bc64f
--- /dev/null
+++ b/modules/audio/asr/u2_conformer_aishell/README.md
@@ -0,0 +1,156 @@
+# u2_conformer_aishell
+
+|模型名称|u2_conformer_aishell|
+| :--- | :---: |
+|类别|语音-语音识别|
+|网络|Conformer|
+|数据集|AISHELL-1|
+|是否支持Fine-tuning|否|
+|模型大小|284MB|
+|最新更新日期|2021-11-01|
+|数据指标|中文CER 0.055|
+
+## 一、模型基本信息
+
+### 模型介绍
+
+U2 Conformer模型是一种适用于英文和中文的end-to-end语音识别模型。u2_conformer_aishell采用conformer encoder加transformer decoder的模型结构,解码时先使用ctc prefix beam search进行第一遍打分得到候选结果,再利用attention decoder对候选结果进行第二遍打分,从而得到最终的识别结果。
+
+u2_conformer_aishell在中文普通话开源语音数据集[AISHELL-1](http://www.aishelltech.com/kysjcp)上进行了预训练,该模型在其测试集上的CER指标为0.055257。
+
+
+
+
+
+
+
+
+
+更多详情请参考:
+- [Unified Streaming and Non-streaming Two-pass End-to-end Model for Speech Recognition](https://arxiv.org/abs/2012.05481)
+- [Conformer: Convolution-augmented Transformer for Speech Recognition](https://arxiv.org/abs/2005.08100)
+
+## 二、安装
+
+- ### 1、系统依赖
+
+ - libsndfile
+ - Linux
+ ```shell
+ $ sudo apt-get install libsndfile
+ or
+ $ sudo yum install libsndfile
+ ```
+  - macOS
+ ```
+ $ brew install libsndfile
+ ```
+
+- ### 2、环境依赖
+
+ - paddlepaddle >= 2.1.0
+
+ - paddlehub >= 2.1.0 | [如何安装PaddleHub](../../../../docs/docs_ch/get_start/installation.rst)
+
+- ### 3、安装
+
+ - ```shell
+ $ hub install u2_conformer_aishell
+ ```
+ - 如您安装时遇到问题,可参考:[零基础windows安装](../../../../docs/docs_ch/get_start/windows_quickstart.md)
+ | [零基础Linux安装](../../../../docs/docs_ch/get_start/linux_quickstart.md) | [零基础MacOS安装](../../../../docs/docs_ch/get_start/mac_quickstart.md)
+
+
+## 三、模型API预测
+
+- ### 1、预测代码示例
+
+ ```python
+ import paddlehub as hub
+
+ # 采样率为16k,格式为wav的中文语音音频
+ wav_file = '/PATH/TO/AUDIO'
+
+ model = hub.Module(
+ name='u2_conformer_aishell',
+ version='1.0.0')
+ text = model.speech_recognize(wav_file)
+
+ print(text)
+ ```
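+
+  如果待识别音频的采样率不是16k,可以先将其重采样为16k再送入模型。下面是一个参考写法(仅作示意,假设已安装Python包`resampy`与`SoundFile`,文件路径均为示例路径):
+
+  ```python
+  import soundfile as sf
+  import resampy
+
+  # 读取原始音频;若为多声道则取均值转为单声道,若采样率不是16000Hz则重采样
+  data, sr = sf.read('/PATH/TO/RAW_AUDIO')
+  if data.ndim > 1:
+      data = data.mean(axis=1)
+  if sr != 16000:
+      data = resampy.resample(data, sr, 16000)
+  # 另存为16k采样率的wav文件(即上文示例中的wav_file),再进行识别
+  sf.write('/PATH/TO/AUDIO', data, 16000, format='WAV')
+  ```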
+
+- ### 2、API
+ - ```python
+ def check_audio(audio_file)
+ ```
+    - 检查输入的音频文件是否满足模型要求(采样率为16000Hz的wav格式文件)。
+
+ - **参数**
+
+ - `audio_file`:本地音频文件(*.wav)的路径,如`/path/to/input.wav`
+
+ - ```python
+ def speech_recognize(
+ audio_file,
+ device='cpu',
+ )
+ ```
+ - 将输入的音频识别成文字
+
+ - **参数**
+
+ - `audio_file`:本地音频文件(*.wav)的路径,如`/path/to/input.wav`
+    - `device`:预测时使用的设备,默认为`cpu`,如需使用gpu预测,请设置为`gpu`(用法可参考下方示例)。
+
+ - **返回**
+
+ - `text`:str类型,返回输入音频的识别文字结果。
+
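+  - 下面给出`device`参数的一个使用示意(仅供参考,假设本机已安装GPU版的PaddlePaddle且GPU可正常使用;`model`与`wav_file`为上文预测代码示例中已创建的对象):
+
+  - ```python
+    # 将device设置为'gpu'即可使用GPU进行预测
+    text = model.speech_recognize(wav_file, device='gpu')
+    print(text)
+    ```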
+
+## 四、服务部署
+
+- PaddleHub Serving可以部署一个在线的语音识别服务。
+
+- ### 第一步:启动PaddleHub Serving
+
+ - ```shell
+ $ hub serving start -m u2_conformer_aishell
+ ```
+
+ - 这样就完成了一个语音识别服务化API的部署,默认端口号为8866。
+
+  - **NOTE:** 如使用GPU预测,则需要在启动服务之前设置CUDA_VISIBLE_DEVICES环境变量,否则无需设置。
+
+- ### 第二步:发送预测请求
+
+  - 配置好服务端后,使用以下几行代码即可向服务端发送预测请求并获取预测结果
+
+ - ```python
+ import requests
+ import json
+
+ # 需要识别的音频的存放路径,确保部署服务的机器可访问
+ file = '/path/to/input.wav'
+
+    # 以key-value的方式指定传入预测方法的参数,此例中为"audio_file"
+ data = {"audio_file": file}
+
+ # 发送post请求,content-type类型应指定json方式,url中的ip地址需改为对应机器的ip
+ url = "http://127.0.0.1:8866/predict/u2_conformer_aishell"
+
+ # 指定post请求的headers为application/json方式
+ headers = {"Content-Type": "application/json"}
+
+ r = requests.post(url=url, headers=headers, data=json.dumps(data))
+ print(r.json())
+ ```
+
+## 五、更新历史
+
+* 1.0.0
+
+ 初始发布
+
+ ```shell
+ $ hub install u2_conformer_aishell
+ ```
diff --git a/modules/audio/asr/u2_conformer_aishell/__init__.py b/modules/audio/asr/u2_conformer_aishell/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/modules/audio/asr/u2_conformer_aishell/assets/conf/augmentation.json b/modules/audio/asr/u2_conformer_aishell/assets/conf/augmentation.json
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/modules/audio/asr/u2_conformer_aishell/assets/conf/augmentation.json
@@ -0,0 +1 @@
+{}
diff --git a/modules/audio/asr/u2_conformer_aishell/assets/conf/conformer.yaml b/modules/audio/asr/u2_conformer_aishell/assets/conf/conformer.yaml
new file mode 100644
index 00000000..b6925dfc
--- /dev/null
+++ b/modules/audio/asr/u2_conformer_aishell/assets/conf/conformer.yaml
@@ -0,0 +1,102 @@
+data:
+ train_manifest: data/manifest.train
+ dev_manifest: data/manifest.dev
+ test_manifest: data/manifest.test
+ min_input_len: 0.5
+ max_input_len: 20.0 # second
+ min_output_len: 0.0
+ max_output_len: 400.0
+ min_output_input_ratio: 0.05
+ max_output_input_ratio: 10.0
+
+collator:
+ vocab_filepath: data/vocab.txt
+ unit_type: 'char'
+ spm_model_prefix: ''
+ augmentation_config: conf/augmentation.json
+ batch_size: 64
+ raw_wav: True # use raw_wav or kaldi feature
+ spectrum_type: fbank #linear, mfcc, fbank
+ feat_dim: 80
+ delta_delta: False
+ dither: 1.0
+ target_sample_rate: 16000
+ max_freq: None
+ n_fft: None
+ stride_ms: 10.0
+ window_ms: 25.0
+ use_dB_normalization: False
+ target_dB: -20
+ random_seed: 0
+ keep_transcription_text: False
+ sortagrad: True
+ shuffle_method: batch_shuffle
+ num_workers: 2
+
+decoding:
+ alpha: 2.5
+ batch_size: 128
+ beam_size: 10
+ beta: 0.3
+ ctc_weight: 0.0
+ cutoff_prob: 1.0
+ cutoff_top_n: 0
+ decoding_chunk_size: -1
+ decoding_method: attention
+ error_rate_type: cer
+ lang_model_path: data/lm/common_crawl_00.prune01111.trie.klm
+ num_decoding_left_chunks: -1
+ num_proc_bsearch: 8
+ simulate_streaming: False
+model:
+ cmvn_file: data/mean_std.json
+ cmvn_file_type: json
+ decoder: transformer
+ decoder_conf:
+ attention_heads: 4
+ dropout_rate: 0.1
+ linear_units: 2048
+ num_blocks: 6
+ positional_dropout_rate: 0.1
+ self_attention_dropout_rate: 0.0
+ src_attention_dropout_rate: 0.0
+ encoder: conformer
+ encoder_conf:
+ activation_type: swish
+ attention_dropout_rate: 0.0
+ attention_heads: 4
+ cnn_module_kernel: 15
+ dropout_rate: 0.1
+ input_layer: conv2d
+ linear_units: 2048
+ normalize_before: True
+ num_blocks: 12
+ output_size: 256
+ pos_enc_layer_type: rel_pos
+ positional_dropout_rate: 0.1
+ selfattention_layer_type: rel_selfattn
+ use_cnn_module: True
+ input_dim: 0
+ model_conf:
+ ctc_weight: 0.3
+ ctc_dropoutrate: 0.0
+ ctc_grad_norm_type: instance
+ length_normalized_loss: False
+ lsm_weight: 0.1
+ output_dim: 0
+training:
+ accum_grad: 2
+ global_grad_clip: 5.0
+ log_interval: 100
+ n_epoch: 300
+ optim: adam
+ optim_conf:
+ lr: 0.002
+ weight_decay: 1e-06
+ scheduler: warmuplr
+ scheduler_conf:
+ lr_decay: 1.0
+ warmup_steps: 25000
+ checkpoint:
+ kbest_n: 50
+ latest_n: 5
diff --git a/modules/audio/asr/u2_conformer_aishell/assets/data/mean_std.json b/modules/audio/asr/u2_conformer_aishell/assets/data/mean_std.json
new file mode 100644
index 00000000..fff0005d
--- /dev/null
+++ b/modules/audio/asr/u2_conformer_aishell/assets/data/mean_std.json
@@ -0,0 +1 @@
+{"mean_stat": [533749178.75492024, 537379151.9412827, 553560684.251823, 587164297.7995199, 631868827.5506272, 662598279.7375823, 684377628.7270963, 695391900.076011, 692470493.5234187, 679434068.1698124, 666124153.9164762, 656323498.7897255, 665750586.0282139, 678693518.7836165, 681921713.5434498, 679622373.0941861, 669891550.4909347, 656595089.7941492, 653838531.0994304, 637678601.7858486, 628412248.7348012, 644835299.462052, 638840698.1892803, 646181879.4332589, 639724189.2981818, 642757470.3933163, 637471382.8647255, 642368839.4687729, 643414999.4559816, 647384269.1630985, 649348352.9727564, 649293860.0141628, 650234047.7200857, 654485430.6703687, 660474314.9996675, 667417041.2224753, 673157601.3226709, 675674470.304284, 675124085.6890339, 668017589.4583111, 670061307.6169846, 662625614.6886193, 663144526.4351237, 662504003.7634674, 666413530.1149732, 672263295.5639057, 678483738.2530766, 685387098.3034457, 692570857.529439, 699066050.4399202, 700784878.5879861, 701201520.50868, 702666292.305144, 705443439.2278953, 706070270.9023902, 705988909.8337733, 702843339.0362502, 699318566.4701376, 696089900.3030818, 687559674.541517, 675279201.9502573, 663676352.2301354, 662963751.7464145, 664300133.8414352, 666095384.4212626, 671682092.7777623, 676652386.6696675, 680097668.2490273, 683810023.0071762, 688701544.3655603, 692082724.9923568, 695788849.6782106, 701085780.0070009, 706389529.7959046, 711492753.1344281, 717637923.73355, 719691678.2081754, 715810733.4964175, 696362890.4862831, 604649423.9932467], "var_stat": [5413314850.92017, 5559847287.933615, 6150990253.613769, 6921242242.585692, 7999776708.347419, 8789877370.390867, 9405801233.462742, 9768050110.323652, 9759783206.942099, 9430647265.679018, 9090547056.72849, 8873147345.425886, 9155912918.518642, 9542539953.84679, 9653547618.806402, 9593434792.936714, 9316633026.420147, 8959273999.588833, 8863548125.445953, 8450615911.730164, 8211598033.615433, 8587083872.162145, 8432613574.987708, 8583943640.722399, 8401731458.393406, 8439359231.367369, 8293779802.711447, 8401506934.147289, 8427506949.839874, 8525176341.071184, 8577080109.482346, 8575106681.347283, 8594987363.896849, 8701703698.13697, 8854967559.695303, 9029484499.828356, 9168774993.437275, 9221457044.693224, 9194525496.858181, 8997085233.031223, 9024585998.805922, 8819398159.92156, 8807895653.788486, 8777245867.886335, 8869681168.825321, 9017397167.041729, 9173402827.38027, 9345595113.30765, 9530638054.282673, 9701241750.610865, 9749002220.142677, 9762753891.356327, 9802020174.527405, 9874432300.977995, 9883303068.689241, 9873499335.610315, 9780680890.924107, 9672603363.913414, 9569436761.47915, 9321842521.985804, 8968140697.297707, 8646348638.918655, 8616965457.523136, 8648620220.395298, 8702086138.675117, 8859213220.99842, 8999405313.087536, 9105949447.399998, 9220413227.016796, 9358601578.269663, 9451405873.00428, 9552727080.824707, 9695443509.54488, 9836687193.669691, 9970962418.410656, 10135881535.317768, 10189390919.400673, 10070483257.345238, 9532953296.22076, 7261219636.045063], "frame_num": 54068199}
diff --git a/modules/audio/asr/u2_conformer_aishell/assets/data/vocab.txt b/modules/audio/asr/u2_conformer_aishell/assets/data/vocab.txt
new file mode 100644
index 00000000..bf3f823b
--- /dev/null
+++ b/modules/audio/asr/u2_conformer_aishell/assets/data/vocab.txt
@@ -0,0 +1,4233 @@
+
+
+一
+丁
+七
+万
+丈
+三
+上
+下
+不
+与
+丐
+丑
+专
+且
+世
+丘
+丙
+业
+丛
+东
+丝
+丞
+丢
+两
+严
+丧
+个
+丫
+中
+丰
+串
+临
+丸
+丹
+为
+主
+丽
+举
+乃
+久
+么
+义
+之
+乌
+乍
+乎
+乏
+乐
+乒
+乓
+乔
+乖
+乘
+乙
+九
+乞
+也
+习
+乡
+书
+买
+乱
+乳
+乾
+了
+予
+争
+事
+二
+于
+亏
+云
+互
+五
+井
+亚
+些
+亟
+亡
+亢
+交
+亥
+亦
+产
+亨
+亩
+享
+京
+亭
+亮
+亲
+亳
+亵
+人
+亿
+什
+仁
+仄
+仅
+仇
+今
+介
+仍
+从
+仑
+仓
+仔
+仕
+他
+仗
+付
+仙
+仡
+代
+令
+以
+仨
+仪
+们
+仰
+仲
+件
+价
+任
+份
+仿
+企
+伉
+伊
+伍
+伎
+伏
+伐
+休
+众
+优
+伙
+会
+伞
+伟
+传
+伢
+伤
+伦
+伪
+伯
+估
+伴
+伶
+伸
+伺
+似
+伽
+佃
+但
+位
+低
+住
+佐
+佑
+体
+何
+佘
+余
+佛
+作
+佟
+你
+佣
+佩
+佬
+佳
+佶
+佼
+使
+侃
+侄
+侈
+例
+侍
+侑
+侗
+供
+依
+侠
+侣
+侥
+侦
+侧
+侨
+侬
+侮
+侯
+侵
+便
+促
+俄
+俊
+俏
+俐
+俗
+俘
+俚
+保
+俞
+信
+俨
+俩
+俪
+俭
+修
+俯
+俱
+俸
+俺
+俾
+倍
+倒
+倘
+候
+倚
+倜
+借
+倡
+倦
+倩
+倪
+债
+值
+倾
+假
+偏
+做
+停
+健
+偶
+偷
+偿
+傅
+傍
+傥
+储
+催
+傲
+傻
+像
+僚
+僧
+僮
+僵
+僻
+儒
+儿
+兀
+允
+元
+兄
+充
+兆
+先
+光
+克
+免
+兑
+兔
+兖
+党
+兜
+兢
+入
+全
+八
+公
+六
+兰
+共
+关
+兴
+兵
+其
+具
+典
+兹
+养
+兼
+兽
+冀
+内
+冈
+冉
+册
+再
+冒
+冕
+写
+军
+农
+冠
+冤
+冥
+冬
+冯
+冰
+冲
+决
+况
+冶
+冷
+冻
+净
+凄
+准
+凇
+凉
+凋
+凌
+减
+凑
+凝
+几
+凡
+凤
+凭
+凯
+凰
+凳
+凶
+凸
+凹
+出
+击
+函
+凿
+刀
+刁
+刃
+分
+切
+刊
+刑
+划
+列
+刘
+则
+刚
+创
+初
+删
+判
+刨
+利
+别
+刮
+到
+制
+刷
+券
+刹
+刺
+刻
+剁
+剂
+剃
+削
+前
+剐
+剑
+剔
+剖
+剥
+剧
+剩
+剪
+副
+割
+剽
+剿
+劈
+力
+劝
+办
+功
+加
+务
+劣
+动
+助
+努
+劫
+励
+劲
+劳
+劵
+势
+勃
+勇
+勉
+勋
+勒
+勘
+募
+勤
+勺
+勾
+勿
+匀
+包
+匆
+匈
+匕
+化
+北
+匙
+匝
+匠
+匡
+匣
+匪
+匮
+匹
+区
+医
+匾
+匿
+十
+千
+升
+午
+卉
+半
+华
+协
+卑
+卒
+卓
+单
+卖
+南
+博
+卜
+卞
+占
+卡
+卢
+卤
+卦
+卧
+卫
+卯
+印
+危
+卲
+即
+却
+卵
+卷
+卸
+卿
+厂
+厄
+厅
+历
+厉
+压
+厌
+厕
+厘
+厚
+原
+厢
+厥
+厦
+厨
+厩
+厮
+去
+县
+参
+又
+叉
+及
+友
+双
+反
+发
+叔
+取
+受
+变
+叙
+叛
+叠
+口
+古
+句
+另
+叨
+叩
+只
+叫
+召
+叭
+叮
+可
+台
+叱
+史
+右
+叵
+叶
+号
+司
+叹
+叼
+吁
+吃
+各
+吆
+合
+吉
+吊
+同
+名
+后
+吏
+吐
+向
+吓
+吕
+吗
+君
+吝
+吞
+吟
+否
+吧
+吨
+吩
+含
+听
+吭
+启
+吴
+吵
+吸
+吹
+吻
+吼
+吾
+吿
+呀
+呃
+呆
+呈
+告
+呐
+呕
+呗
+员
+呛
+呜
+呢
+呦
+周
+呲
+味
+呵
+呼
+命
+咀
+咄
+咋
+和
+咎
+咏
+咐
+咒
+咔
+咕
+咖
+咚
+咣
+咤
+咧
+咨
+咪
+咫
+咬
+咯
+咱
+咳
+咸
+咽
+哀
+品
+哄
+哆
+哇
+哈
+哉
+响
+哎
+哑
+哒
+哗
+哟
+哥
+哦
+哨
+哪
+哭
+哲
+哺
+哼
+哽
+唁
+唇
+唉
+唏
+唐
+唠
+唤
+唬
+售
+唯
+唱
+唾
+啃
+商
+啊
+啕
+啡
+啤
+啥
+啦
+啧
+啪
+啬
+啰
+啵
+啶
+啸
+啼
+喀
+喂
+善
+喆
+喇
+喉
+喊
+喔
+喘
+喜
+喝
+喧
+喱
+喵
+喷
+喻
+喽
+嗅
+嗑
+嗒
+嗓
+嗡
+嗣
+嗤
+嗦
+嗨
+嗬
+嗯
+嗲
+嗷
+嗽
+嘀
+嘉
+嘎
+嘘
+嘛
+嘟
+嘭
+嘱
+嘲
+嘴
+嘻
+噎
+器
+噩
+噪
+噬
+噱
+噼
+嚎
+嚏
+嚓
+嚣
+嚷
+嚼
+囊
+囚
+四
+回
+因
+团
+囤
+囧
+园
+困
+围
+固
+国
+图
+圆
+圈
+土
+圣
+在
+圩
+圪
+圭
+地
+圳
+场
+圾
+址
+坂
+均
+坊
+坍
+坎
+坏
+坐
+坑
+块
+坚
+坛
+坝
+坞
+坟
+坠
+坡
+坤
+坦
+坪
+坯
+坷
+垂
+垃
+垄
+垅
+型
+垌
+垒
+垛
+垢
+垣
+垤
+垦
+垫
+垮
+埃
+埋
+城
+埔
+埜
+域
+培
+基
+堂
+堆
+堕
+堡
+堤
+堪
+堰
+堵
+塌
+塑
+塔
+塘
+塞
+填
+塬
+塾
+境
+墅
+墓
+墙
+增
+墟
+墨
+墩
+壁
+壑
+壕
+壤
+士
+壮
+声
+壳
+壶
+壹
+处
+备
+复
+夏
+夕
+外
+夙
+多
+夜
+够
+大
+天
+太
+夫
+夭
+央
+夯
+失
+头
+夷
+夸
+夹
+夺
+奂
+奇
+奈
+奉
+奋
+奎
+奏
+契
+奔
+奕
+奖
+套
+奘
+奚
+奠
+奢
+奥
+女
+奴
+奶
+奸
+她
+好
+如
+妃
+妄
+妆
+妇
+妈
+妊
+妍
+妒
+妖
+妙
+妞
+妤
+妥
+妧
+妨
+妩
+妮
+妯
+妹
+妻
+姆
+姊
+始
+姐
+姑
+姓
+委
+姗
+姚
+姜
+姝
+姣
+姥
+姨
+姬
+姻
+姿
+威
+娃
+娄
+娅
+娇
+娌
+娘
+娜
+娟
+娠
+娥
+娩
+娱
+娴
+娶
+娼
+婀
+婆
+婉
+婕
+婚
+婧
+婪
+婴
+婵
+婶
+婷
+婿
+媒
+媚
+媛
+媞
+媲
+媳
+嫁
+嫂
+嫉
+嫌
+嫔
+嫖
+嫚
+嫣
+嫦
+嫩
+嬉
+嬛
+嬷
+孀
+子
+孔
+孕
+字
+存
+孙
+孚
+孜
+孝
+孟
+孢
+季
+孤
+学
+孩
+孪
+孰
+孱
+孵
+孺
+宁
+它
+宅
+宇
+守
+安
+宋
+完
+宏
+宓
+宕
+宗
+官
+宙
+定
+宛
+宜
+宝
+实
+宠
+审
+客
+宣
+室
+宦
+宪
+宫
+宰
+害
+宴
+宵
+家
+宸
+容
+宽
+宾
+宿
+寂
+寄
+寅
+密
+寇
+富
+寐
+寒
+寓
+寝
+寞
+察
+寡
+寥
+寨
+寮
+寰
+寸
+对
+寺
+寻
+导
+寿
+封
+射
+将
+尊
+小
+少
+尔
+尖
+尘
+尚
+尝
+尤
+尧
+尬
+就
+尴
+尸
+尹
+尺
+尼
+尽
+尾
+尿
+局
+屁
+层
+居
+屈
+届
+屋
+屌
+屎
+屏
+屑
+展
+属
+屠
+屡
+履
+屯
+山
+屹
+屿
+岁
+岂
+岌
+岐
+岔
+岖
+岗
+岚
+岛
+岩
+岬
+岭
+岱
+岳
+岷
+岸
+峁
+峙
+峡
+峥
+峨
+峪
+峭
+峰
+峻
+崂
+崃
+崇
+崎
+崔
+崖
+崛
+崧
+崩
+崭
+崴
+嵋
+嵌
+嵘
+嵛
+嵩
+嶝
+巅
+巍
+川
+州
+巡
+巢
+工
+左
+巧
+巨
+巩
+巫
+差
+己
+已
+巴
+巷
+巾
+巿
+币
+市
+布
+帅
+帆
+师
+希
+帐
+帕
+帖
+帘
+帚
+帜
+帝
+带
+席
+帮
+帷
+常
+帼
+帽
+幂
+幄
+幅
+幌
+幕
+幢
+干
+平
+年
+并
+幸
+幺
+幻
+幼
+幽
+广
+庄
+庆
+庇
+床
+序
+庐
+库
+应
+底
+店
+庙
+庚
+府
+庞
+废
+度
+座
+庭
+庵
+康
+庸
+庾
+廉
+廊
+廓
+廖
+延
+廷
+建
+开
+异
+弃
+弄
+弈
+弊
+式
+弓
+引
+弗
+弘
+弛
+弟
+张
+弥
+弦
+弧
+弩
+弯
+弱
+弹
+强
+归
+当
+录
+彝
+形
+彤
+彦
+彩
+彪
+彬
+彭
+彰
+影
+彷
+役
+彻
+彼
+彿
+往
+征
+径
+待
+徇
+很
+徉
+徊
+律
+徐
+徒
+得
+徘
+徙
+御
+循
+微
+德
+徽
+心
+必
+忆
+忌
+忍
+忐
+忑
+志
+忘
+忙
+忠
+忧
+忪
+快
+忱
+念
+忽
+怀
+态
+怂
+怎
+怒
+怕
+怖
+怜
+思
+怠
+怡
+急
+怦
+性
+怨
+怪
+怯
+怵
+总
+恋
+恍
+恐
+恒
+恙
+恢
+恣
+恤
+恨
+恩
+恪
+恬
+恭
+息
+恰
+恳
+恶
+恸
+恺
+恼
+恿
+悄
+悉
+悍
+悔
+悖
+悚
+悟
+悠
+患
+悦
+您
+悬
+悯
+悲
+悴
+悸
+悼
+情
+惊
+惋
+惑
+惕
+惚
+惜
+惟
+惠
+惦
+惧
+惨
+惩
+惫
+惬
+惮
+惯
+惰
+想
+惶
+惹
+惺
+愁
+愈
+愉
+意
+愕
+愚
+感
+愤
+愧
+愿
+慈
+慌
+慎
+慑
+慕
+慢
+慧
+慨
+慰
+慷
+憋
+憔
+憧
+憨
+憩
+憬
+憷
+憾
+懂
+懈
+懊
+懋
+懒
+懵
+懿
+戈
+戎
+戏
+成
+我
+戒
+或
+战
+戚
+戛
+戟
+截
+戬
+戮
+戳
+戴
+户
+房
+所
+扁
+扇
+扉
+手
+才
+扎
+扑
+扒
+打
+扔
+托
+扛
+扣
+执
+扩
+扫
+扬
+扭
+扮
+扯
+扰
+扳
+扶
+批
+扼
+找
+承
+技
+抄
+抉
+把
+抑
+抒
+抓
+投
+抖
+抗
+折
+抚
+抛
+抠
+抡
+抢
+护
+报
+抨
+披
+抬
+抱
+抵
+抹
+押
+抽
+抿
+拄
+担
+拆
+拇
+拈
+拉
+拌
+拍
+拎
+拐
+拒
+拓
+拔
+拖
+拗
+拘
+拙
+招
+拜
+拟
+拢
+拣
+拥
+拦
+拧
+拨
+择
+括
+拭
+拮
+拯
+拱
+拳
+拴
+拷
+拼
+拽
+拾
+拿
+持
+挂
+指
+按
+挎
+挑
+挖
+挚
+挛
+挝
+挟
+挠
+挡
+挣
+挤
+挥
+挨
+挪
+挫
+振
+挺
+挽
+捂
+捅
+捆
+捉
+捍
+捎
+捏
+捐
+捕
+捞
+损
+捡
+换
+捣
+捧
+据
+捷
+捺
+捻
+掀
+掂
+授
+掉
+掌
+掏
+掐
+排
+掖
+掘
+掠
+探
+掣
+接
+控
+推
+掩
+措
+掬
+掮
+掰
+掴
+掷
+掺
+揉
+揍
+描
+提
+插
+握
+揣
+揩
+揪
+揭
+援
+揽
+搀
+搁
+搂
+搅
+搏
+搜
+搞
+搡
+搪
+搬
+搭
+携
+搽
+摁
+摄
+摆
+摇
+摊
+摒
+摔
+摘
+摧
+摩
+摸
+摹
+撂
+撇
+撑
+撒
+撕
+撞
+撤
+撩
+撬
+播
+撮
+撰
+撵
+撸
+撼
+擂
+擅
+操
+擎
+擒
+擘
+擞
+擦
+攀
+攒
+攥
+支
+收
+改
+攻
+放
+政
+故
+效
+敌
+敏
+救
+敖
+教
+敛
+敝
+敞
+敢
+散
+敦
+敬
+数
+敲
+整
+敷
+文
+斌
+斐
+斑
+斓
+斗
+料
+斛
+斜
+斟
+斤
+斥
+斧
+斩
+断
+斯
+新
+方
+施
+旁
+旅
+旋
+族
+旗
+无
+既
+日
+旦
+旧
+旨
+早
+旬
+旭
+旱
+时
+旷
+旺
+昀
+昂
+昆
+昊
+昌
+明
+昏
+易
+昔
+昕
+昙
+星
+映
+春
+昧
+昨
+昭
+是
+昱
+昵
+昼
+显
+晃
+晋
+晏
+晒
+晓
+晔
+晕
+晖
+晗
+晚
+晟
+晤
+晦
+晨
+普
+景
+晰
+晴
+晶
+智
+晾
+暂
+暄
+暇
+暑
+暖
+暗
+暧
+暨
+暮
+暴
+曙
+曝
+曦
+曰
+曲
+更
+曹
+曼
+曾
+替
+最
+月
+有
+朋
+服
+朐
+朔
+朗
+望
+朝
+期
+朦
+木
+未
+末
+本
+札
+术
+朱
+朴
+朵
+机
+朽
+杀
+杂
+权
+杆
+杉
+李
+杏
+材
+村
+杖
+杜
+杞
+束
+杠
+条
+来
+杨
+杭
+杯
+杰
+杳
+松
+板
+极
+构
+枉
+析
+枕
+林
+枚
+果
+枝
+枞
+枢
+枣
+枪
+枫
+枭
+枯
+架
+枷
+柄
+柏
+某
+染
+柔
+柜
+柞
+柠
+查
+柬
+柯
+柱
+柳
+柴
+柿
+栅
+标
+栈
+栋
+栏
+树
+栓
+栖
+栗
+校
+株
+样
+核
+根
+格
+栽
+栾
+桂
+桃
+框
+案
+桉
+桌
+桎
+桐
+桑
+桓
+桔
+档
+桥
+桦
+桩
+桶
+梁
+梅
+梓
+梗
+梦
+梧
+梨
+梭
+梯
+械
+梳
+梵
+检
+棉
+棋
+棍
+棒
+棕
+棘
+棚
+棠
+森
+棱
+棵
+棺
+椅
+椋
+植
+椎
+椒
+椰
+椿
+楂
+楔
+楚
+楞
+楠
+楣
+楷
+楼
+概
+榄
+榆
+榈
+榉
+榔
+榕
+榜
+榨
+榭
+榴
+榷
+榻
+槌
+槎
+槐
+槛
+槟
+槽
+槿
+樊
+樟
+模
+横
+樱
+橄
+橘
+橙
+橡
+橱
+檀
+檐
+檬
+欠
+次
+欢
+欣
+欧
+欲
+欺
+款
+歆
+歇
+歉
+歌
+止
+正
+此
+步
+武
+歧
+歪
+歹
+死
+殃
+殆
+殉
+殊
+残
+殒
+殓
+殖
+殚
+殡
+殭
+殴
+段
+殷
+殿
+毁
+毂
+毅
+毋
+母
+每
+毒
+毓
+比
+毕
+毗
+毙
+毛
+毫
+毯
+毽
+氏
+民
+氓
+气
+氛
+氟
+氢
+氦
+氧
+氨
+氪
+氮
+氯
+氰
+水
+永
+汀
+汁
+求
+汇
+汉
+汕
+汗
+汛
+汝
+汞
+江
+池
+污
+汤
+汪
+汰
+汲
+汴
+汶
+汹
+汽
+汾
+沁
+沃
+沅
+沈
+沉
+沏
+沐
+沓
+沙
+沛
+沟
+没
+沣
+沥
+沦
+沧
+沪
+沫
+沮
+沱
+河
+沸
+油
+治
+沼
+沽
+沾
+沿
+泄
+泉
+泊
+泌
+泓
+泔
+法
+泗
+泛
+泞
+泠
+泡
+波
+泣
+泥
+注
+泪
+泯
+泰
+泱
+泳
+泵
+泷
+泸
+泻
+泼
+泽
+泾
+洁
+洋
+洒
+洗
+洙
+洛
+洞
+津
+洪
+洱
+洲
+洵
+活
+洼
+洽
+派
+流
+浅
+浆
+浇
+浈
+浊
+测
+济
+浏
+浑
+浓
+浙
+浚
+浦
+浩
+浪
+浮
+浴
+海
+浸
+涂
+涅
+消
+涉
+涌
+涎
+涓
+涕
+涛
+涝
+涞
+涠
+涡
+涤
+润
+涧
+涨
+涩
+涮
+涯
+液
+涵
+涿
+淀
+淄
+淆
+淇
+淋
+淌
+淑
+淖
+淘
+淝
+淞
+淡
+淤
+淫
+淮
+深
+淳
+混
+淹
+添
+淼
+渀
+清
+渊
+渍
+渎
+渐
+渔
+渗
+渚
+渝
+渠
+渡
+渣
+渤
+渥
+温
+渭
+港
+渲
+渴
+游
+渺
+湃
+湍
+湖
+湘
+湛
+湾
+湿
+溃
+溅
+溉
+源
+溜
+溢
+溥
+溧
+溪
+溯
+溶
+溺
+滁
+滇
+滋
+滑
+滔
+滕
+滚
+滞
+满
+滢
+滤
+滥
+滨
+滩
+滴
+漂
+漆
+漏
+漓
+演
+漕
+漠
+漩
+漫
+漭
+漯
+漱
+漳
+漾
+潇
+潘
+潜
+潞
+潢
+潭
+潮
+潼
+澄
+澈
+澎
+澜
+澡
+澳
+激
+濑
+濒
+濠
+濡
+濮
+瀑
+瀚
+瀛
+灌
+灞
+火
+灭
+灯
+灰
+灵
+灶
+灼
+灾
+灿
+炅
+炉
+炊
+炎
+炒
+炕
+炖
+炙
+炜
+炫
+炬
+炭
+炮
+炯
+炳
+炷
+炸
+点
+炼
+炽
+烁
+烂
+烃
+烈
+烊
+烘
+烙
+烟
+烤
+烦
+烧
+烨
+烫
+热
+烯
+烷
+烹
+烽
+焉
+焊
+焕
+焖
+焘
+焚
+焦
+焯
+焰
+焱
+然
+煊
+煌
+煎
+煜
+煞
+煤
+煦
+照
+煮
+煲
+熄
+熊
+熏
+熔
+熙
+熟
+熠
+熨
+熬
+熹
+燃
+燊
+燎
+燕
+燥
+爆
+爪
+爬
+爱
+爵
+父
+爷
+爸
+爹
+爽
+片
+版
+牌
+牙
+牛
+牟
+牡
+牢
+牧
+物
+牲
+牵
+特
+牺
+牾
+犀
+犊
+犒
+犬
+犯
+状
+犷
+犹
+狂
+狄
+狈
+狐
+狗
+狙
+狞
+狠
+狡
+狩
+独
+狭
+狮
+狰
+狱
+狸
+狼
+猎
+猖
+猛
+猜
+猝
+猥
+猩
+猪
+猫
+猬
+献
+猴
+猾
+猿
+獒
+獗
+獾
+玄
+率
+玉
+王
+玖
+玛
+玟
+玥
+玩
+玫
+玮
+环
+现
+玲
+玳
+玺
+玻
+珀
+珉
+珊
+珍
+珏
+珑
+珜
+珠
+班
+珮
+珲
+珺
+球
+琅
+理
+琉
+琊
+琏
+琐
+琛
+琢
+琥
+琦
+琪
+琬
+琰
+琳
+琴
+琵
+琶
+琼
+瑁
+瑄
+瑕
+瑙
+瑚
+瑛
+瑜
+瑞
+瑟
+瑰
+瑶
+瑾
+璀
+璃
+璇
+璋
+璐
+璞
+璧
+璨
+瓜
+瓢
+瓣
+瓦
+瓮
+瓯
+瓶
+瓷
+甄
+甘
+甚
+甜
+生
+甥
+用
+甩
+甫
+甬
+田
+由
+甲
+申
+电
+男
+甸
+町
+画
+畅
+畊
+界
+畏
+畔
+留
+畜
+略
+番
+畴
+畸
+畿
+疃
+疆
+疏
+疑
+疗
+疚
+疝
+疤
+疫
+疯
+疲
+疵
+疹
+疼
+疾
+病
+症
+痉
+痊
+痒
+痕
+痘
+痛
+痣
+痪
+痫
+痰
+痱
+痴
+痹
+痼
+瘀
+瘁
+瘟
+瘠
+瘤
+瘦
+瘩
+瘪
+瘫
+瘸
+瘾
+癌
+癖
+癣
+癫
+登
+白
+百
+皂
+的
+皆
+皇
+皋
+皎
+皓
+皖
+皙
+皮
+皱
+盆
+盈
+益
+盎
+盐
+监
+盒
+盔
+盖
+盗
+盘
+盛
+盟
+目
+盯
+盲
+直
+相
+盹
+盼
+盾
+省
+眈
+眉
+看
+真
+眠
+眨
+眬
+眯
+眶
+眷
+眺
+眼
+着
+睁
+睐
+睛
+睡
+督
+睦
+睫
+睬
+睹
+睿
+瞄
+瞅
+瞌
+瞎
+瞒
+瞟
+瞧
+瞩
+瞪
+瞬
+瞰
+瞳
+瞻
+瞿
+矗
+矛
+矜
+矢
+矣
+知
+矩
+矫
+短
+矮
+石
+矶
+矿
+码
+砂
+砌
+砍
+砒
+研
+砖
+砚
+砝
+砥
+砰
+砲
+破
+砷
+砸
+砺
+砾
+础
+硅
+硕
+硚
+硝
+硫
+硬
+确
+碉
+碌
+碍
+碎
+碑
+碗
+碘
+碚
+碟
+碧
+碰
+碱
+碳
+碴
+碾
+磁
+磅
+磊
+磋
+磐
+磕
+磡
+磨
+磴
+磷
+磺
+礁
+示
+礼
+社
+祁
+祈
+祉
+祖
+祛
+祝
+神
+祠
+祢
+祥
+票
+祭
+祯
+祷
+祸
+祺
+禀
+禁
+禄
+禅
+福
+禧
+禹
+禺
+离
+禽
+禾
+秀
+私
+秃
+秆
+秉
+秋
+种
+科
+秒
+秘
+租
+秣
+秤
+秦
+秧
+秩
+积
+称
+秸
+移
+秽
+稀
+程
+稍
+税
+稚
+稠
+稣
+稳
+稻
+稼
+稽
+稿
+穆
+穗
+穴
+究
+穷
+空
+穿
+突
+窃
+窄
+窈
+窍
+窑
+窒
+窕
+窖
+窗
+窘
+窜
+窝
+窟
+窥
+窦
+窨
+窿
+立
+竖
+站
+竞
+竟
+章
+竣
+童
+竭
+端
+竲
+竹
+竺
+竽
+竿
+笃
+笈
+笋
+笑
+笔
+笙
+笛
+符
+笨
+第
+笼
+等
+筋
+筐
+筑
+筒
+答
+策
+筛
+筱
+筵
+筷
+筹
+签
+简
+箍
+算
+管
+箫
+箭
+箱
+篇
+篡
+篪
+篮
+篷
+簇
+簧
+簸
+簿
+籁
+籍
+米
+类
+籽
+粉
+粒
+粕
+粗
+粘
+粟
+粤
+粥
+粪
+粮
+粱
+粹
+精
+糊
+糕
+糖
+糗
+糙
+糟
+糯
+系
+紊
+素
+索
+紧
+紫
+累
+絮
+綦
+繁
+纠
+红
+纣
+纤
+约
+级
+纪
+纬
+纯
+纰
+纱
+纲
+纳
+纵
+纶
+纷
+纸
+纹
+纺
+纽
+线
+练
+组
+绅
+细
+织
+终
+绊
+绌
+绍
+绎
+经
+绑
+绒
+结
+绕
+绘
+给
+绚
+络
+绝
+绞
+统
+绣
+继
+绩
+绪
+续
+绮
+绯
+绰
+绳
+维
+绵
+绷
+绸
+综
+绽
+绿
+缀
+缄
+缅
+缆
+缇
+缉
+缓
+缔
+缕
+编
+缘
+缙
+缚
+缜
+缝
+缠
+缤
+缨
+缩
+缪
+缭
+缮
+缰
+缴
+缸
+缺
+罂
+罄
+罐
+网
+罕
+罗
+罚
+罡
+罢
+罩
+罪
+置
+署
+罹
+羁
+羊
+美
+羚
+羞
+羡
+羣
+群
+羲
+羹
+羽
+羿
+翁
+翅
+翌
+翔
+翘
+翟
+翠
+翡
+翩
+翰
+翱
+翻
+翼
+耀
+老
+考
+耄
+者
+耋
+而
+耍
+耐
+耒
+耕
+耗
+耘
+耳
+耶
+耷
+耸
+耻
+耽
+耿
+聂
+聆
+聊
+聋
+职
+联
+聘
+聚
+聪
+肃
+肆
+肇
+肉
+肋
+肌
+肖
+肘
+肚
+肛
+肝
+肠
+股
+肢
+肤
+肥
+肩
+肪
+肮
+肯
+育
+肴
+肺
+肾
+肿
+胀
+胁
+胃
+胆
+背
+胎
+胖
+胚
+胛
+胜
+胞
+胡
+胤
+胧
+胫
+胯
+胰
+胱
+胳
+胶
+胸
+胺
+能
+脂
+脆
+脉
+脊
+脍
+脏
+脐
+脑
+脖
+脚
+脯
+脱
+脸
+脾
+腆
+腊
+腋
+腌
+腐
+腑
+腓
+腔
+腕
+腥
+腩
+腰
+腱
+腹
+腺
+腻
+腼
+腾
+腿
+膀
+膊
+膏
+膑
+膛
+膜
+膝
+膨
+膳
+膺
+臀
+臂
+臃
+臆
+臣
+自
+臭
+至
+致
+臻
+舀
+舅
+舆
+舌
+舍
+舒
+舛
+舜
+舞
+舟
+航
+般
+舰
+舱
+舵
+舶
+舸
+船
+艇
+艋
+艘
+良
+艰
+色
+艳
+艺
+艾
+节
+芊
+芋
+芒
+芙
+芜
+芝
+芦
+芬
+芭
+芮
+芯
+花
+芳
+芷
+芸
+芽
+苇
+苍
+苏
+苑
+苗
+苛
+苟
+苡
+苣
+若
+苦
+苯
+英
+苹
+茁
+茂
+范
+茄
+茅
+茆
+茎
+茗
+茜
+茨
+茫
+茵
+茶
+茸
+茹
+荃
+荆
+草
+荐
+荒
+荔
+荚
+荞
+荟
+荡
+荣
+荤
+荧
+荫
+药
+荷
+荼
+莅
+莆
+莉
+莎
+莓
+莘
+莞
+莠
+莫
+莱
+莲
+莴
+获
+莹
+莺
+莽
+菁
+菇
+菊
+菌
+菜
+菠
+菡
+菩
+菱
+菲
+萃
+萄
+萋
+萌
+萍
+萎
+萝
+萤
+营
+萦
+萧
+萨
+萱
+落
+葆
+著
+葛
+葡
+董
+葩
+葫
+葬
+葱
+葵
+蒂
+蒋
+蒙
+蒜
+蒲
+蒸
+蒿
+蓁
+蓄
+蓉
+蓝
+蓟
+蓬
+蔑
+蔓
+蔗
+蔚
+蔡
+蔫
+蔬
+蔷
+蔺
+蔽
+蕉
+蕊
+蕙
+蕲
+蕴
+蕾
+薄
+薇
+薛
+薪
+薯
+薰
+藏
+藜
+藤
+藩
+藻
+蘑
+虎
+虐
+虑
+虚
+虞
+虫
+虱
+虹
+虽
+虾
+蚀
+蚁
+蚂
+蚊
+蚌
+蚓
+蚕
+蚝
+蚣
+蚯
+蛀
+蛇
+蛋
+蛐
+蛙
+蛛
+蛟
+蛮
+蛰
+蜀
+蜂
+蜇
+蜈
+蜊
+蜒
+蜓
+蜕
+蜘
+蜚
+蜜
+蜡
+蜥
+蜴
+蜷
+蜿
+蝇
+蝉
+蝎
+蝗
+蝙
+蝠
+蝴
+蝶
+螂
+螃
+融
+螳
+螺
+蟑
+蟹
+蠢
+血
+衅
+行
+衍
+衔
+街
+衙
+衡
+衣
+补
+表
+衫
+衬
+衰
+衷
+袁
+袂
+袄
+袆
+袈
+袋
+袍
+袒
+袖
+袜
+被
+袭
+袱
+裁
+裂
+装
+裆
+裔
+裕
+裙
+裟
+裤
+裳
+裴
+裸
+裹
+褂
+褒
+褓
+褚
+褛
+褪
+褴
+褶
+襁
+襄
+襟
+西
+要
+覃
+覆
+见
+观
+规
+觅
+视
+览
+觉
+觊
+觎
+觐
+觑
+角
+解
+觥
+触
+言
+詹
+誉
+誓
+警
+譬
+计
+订
+认
+讧
+讨
+让
+讪
+训
+议
+讯
+记
+讲
+讳
+讶
+许
+讹
+论
+讼
+讽
+设
+访
+诀
+证
+评
+诅
+识
+诈
+诉
+诊
+词
+译
+诓
+试
+诗
+诙
+诚
+话
+诞
+诟
+诠
+诡
+询
+该
+详
+诧
+诩
+诫
+诬
+语
+误
+诱
+诲
+说
+诵
+诶
+请
+诸
+诺
+读
+诽
+课
+诿
+谀
+谁
+调
+谅
+谈
+谊
+谋
+谌
+谍
+谎
+谐
+谑
+谓
+谕
+谙
+谚
+谜
+谢
+谣
+谤
+谦
+谨
+谩
+谬
+谭
+谱
+谴
+谷
+豁
+豆
+豚
+象
+豪
+豫
+豹
+貅
+貉
+貌
+貔
+贝
+贞
+负
+贡
+财
+责
+贤
+败
+账
+货
+质
+贩
+贪
+贫
+贬
+购
+贮
+贯
+贱
+贴
+贵
+贷
+贸
+费
+贺
+贼
+贾
+贿
+赁
+赂
+赃
+资
+赋
+赌
+赎
+赏
+赐
+赔
+赖
+赘
+赚
+赛
+赝
+赞
+赠
+赡
+赢
+赣
+赤
+赦
+赫
+走
+赴
+赵
+赶
+起
+趁
+超
+越
+趋
+趟
+趣
+足
+趴
+趸
+趾
+跃
+跄
+跆
+跌
+跑
+跛
+距
+跟
+跤
+跨
+跪
+路
+跳
+践
+跷
+跺
+跻
+踉
+踊
+踏
+踝
+踞
+踢
+踩
+踪
+踵
+踹
+蹂
+蹄
+蹈
+蹊
+蹚
+蹦
+蹬
+蹭
+蹲
+蹴
+蹶
+蹼
+蹿
+躁
+躏
+身
+躬
+躯
+躲
+躺
+车
+轧
+轨
+轩
+转
+轮
+软
+轰
+轴
+轶
+轻
+载
+轿
+较
+辄
+辅
+辆
+辈
+辉
+辍
+辐
+辑
+输
+辖
+辗
+辘
+辙
+辛
+辜
+辞
+辟
+辣
+辨
+辩
+辫
+辰
+辱
+边
+辽
+达
+迁
+迂
+迄
+迅
+过
+迈
+迎
+运
+近
+返
+还
+这
+进
+远
+违
+连
+迟
+迢
+迥
+迪
+迫
+迭
+述
+迷
+迸
+迹
+追
+退
+送
+适
+逃
+逅
+逆
+选
+逊
+逍
+透
+逐
+递
+途
+逗
+通
+逛
+逝
+逞
+速
+造
+逡
+逢
+逮
+逵
+逸
+逻
+逼
+逾
+遁
+遂
+遇
+遍
+遏
+遐
+道
+遗
+遛
+遢
+遣
+遥
+遨
+遭
+遮
+遴
+遵
+避
+邀
+邂
+邃
+邋
+邑
+邓
+邛
+邝
+邢
+那
+邦
+邪
+邬
+邮
+邯
+邱
+邵
+邹
+邺
+邻
+郁
+郊
+郎
+郑
+郜
+郝
+郡
+部
+郫
+郭
+郸
+都
+鄂
+鄙
+鄞
+鄢
+酋
+酌
+配
+酒
+酗
+酝
+酣
+酪
+酬
+酯
+酱
+酵
+酶
+酷
+酸
+酿
+醇
+醉
+醋
+醍
+醐
+醒
+醛
+采
+釉
+释
+里
+重
+野
+量
+金
+釜
+鉴
+鏖
+鑫
+针
+钉
+钊
+钓
+钛
+钝
+钞
+钟
+钠
+钢
+钥
+钦
+钧
+钩
+钮
+钰
+钱
+钵
+钻
+钾
+铀
+铁
+铂
+铃
+铅
+铆
+铉
+铎
+铐
+铜
+铝
+铠
+铣
+铨
+铬
+铭
+铮
+铰
+铲
+银
+铸
+铺
+链
+铿
+销
+锁
+锂
+锄
+锅
+锆
+锈
+锋
+锌
+锏
+锐
+错
+锜
+锟
+锡
+锢
+锣
+锤
+锥
+锦
+锭
+键
+锯
+锰
+锵
+锷
+锹
+锻
+镀
+镁
+镇
+镉
+镊
+镍
+镑
+镖
+镜
+镯
+镳
+镶
+长
+门
+闪
+闫
+闭
+问
+闯
+闰
+闲
+闳
+间
+闵
+闷
+闸
+闹
+闺
+闻
+闽
+阀
+阁
+阂
+阅
+阎
+阐
+阔
+阙
+阚
+阜
+队
+阮
+阱
+防
+阳
+阴
+阵
+阶
+阻
+阿
+陀
+陂
+附
+际
+陆
+陈
+陋
+陌
+降
+限
+陕
+陡
+院
+除
+陨
+险
+陪
+陬
+陵
+陶
+陷
+隅
+隆
+隋
+隍
+随
+隐
+隔
+隘
+隙
+障
+隧
+隶
+隼
+隽
+难
+雀
+雁
+雄
+雅
+集
+雇
+雌
+雍
+雏
+雕
+雨
+雪
+雯
+雳
+零
+雷
+雾
+需
+霁
+霄
+霆
+震
+霈
+霉
+霍
+霎
+霏
+霖
+霜
+霞
+露
+霸
+霹
+霾
+靑
+青
+靓
+靖
+静
+靛
+非
+靠
+靡
+面
+革
+靳
+靴
+靶
+鞋
+鞍
+鞘
+鞠
+鞭
+韦
+韧
+韩
+韬
+音
+韵
+韶
+页
+顶
+顷
+项
+顺
+须
+顽
+顾
+顿
+颁
+颂
+预
+颅
+领
+颇
+颈
+颊
+颍
+颐
+频
+颓
+颖
+颗
+题
+颚
+颜
+额
+颠
+颤
+风
+飒
+飓
+飘
+飙
+飚
+飞
+食
+餐
+餮
+饕
+饥
+饪
+饭
+饮
+饰
+饱
+饲
+饵
+饶
+饺
+饼
+饽
+饿
+馀
+馅
+馆
+馈
+馊
+馋
+馑
+馒
+首
+馗
+香
+馥
+馨
+马
+驭
+驯
+驰
+驱
+驳
+驴
+驶
+驻
+驼
+驾
+驿
+骁
+骂
+骄
+骅
+骆
+骇
+骊
+骋
+验
+骏
+骐
+骑
+骗
+骚
+骜
+骤
+骥
+骨
+骷
+骸
+骼
+髅
+髋
+髓
+高
+髦
+鬼
+魁
+魂
+魄
+魅
+魇
+魏
+魔
+鱼
+鲁
+鲍
+鲜
+鲟
+鲨
+鲶
+鲷
+鲸
+鳄
+鳅
+鳌
+鳖
+鳝
+鳞
+鸟
+鸠
+鸡
+鸣
+鸥
+鸦
+鸭
+鸯
+鸳
+鸵
+鸽
+鸾
+鸿
+鹃
+鹅
+鹊
+鹏
+鹜
+鹞
+鹤
+鹭
+鹰
+鹿
+麋
+麒
+麓
+麟
+麦
+麻
+麾
+黄
+黍
+黎
+黏
+黑
+黔
+默
+黛
+黝
+黯
+鼎
+鼓
+鼠
+鼻
+鼾
+齐
+齿
+龄
+龙
+龚
+龟
+
diff --git a/modules/audio/asr/u2_conformer_aishell/module.py b/modules/audio/asr/u2_conformer_aishell/module.py
new file mode 100644
index 00000000..8ce72804
--- /dev/null
+++ b/modules/audio/asr/u2_conformer_aishell/module.py
@@ -0,0 +1,73 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from pathlib import Path
+import sys
+
+import numpy as np
+from paddlehub.env import MODULE_HOME
+from paddlehub.module.module import moduleinfo, serving
+from paddlehub.utils.log import logger
+
+import paddle
+import soundfile as sf
+
+# TODO: Remove system path when deepspeech can be installed via pip.
+sys.path.append(os.path.join(MODULE_HOME, 'u2_conformer_aishell'))
+from deepspeech.exps.u2.config import get_cfg_defaults
+from deepspeech.utils.utility import UpdateConfig
+from .u2_conformer_tester import U2ConformerTester
+
+
+@moduleinfo(name="u2_conformer_aishell", version="1.0.0", summary="", author="Baidu", author_email="", type="audio/asr")
+class U2Conformer(paddle.nn.Layer):
+ def __init__(self):
+ super(U2Conformer, self).__init__()
+
+ # resource
+ res_dir = os.path.join(MODULE_HOME, 'u2_conformer_aishell', 'assets')
+ conf_file = os.path.join(res_dir, 'conf/conformer.yaml')
+ checkpoint = os.path.join(res_dir, 'checkpoints/avg_20.pdparams')
+
+ # config
+ self.config = get_cfg_defaults()
+ self.config.merge_from_file(conf_file)
+
+ # TODO: Remove path updating snippet.
+ with UpdateConfig(self.config):
+ self.config.collator.vocab_filepath = os.path.join(res_dir, self.config.collator.vocab_filepath)
+ # self.config.collator.spm_model_prefix = os.path.join(res_dir, self.config.collator.spm_model_prefix)
+ self.config.collator.augmentation_config = os.path.join(res_dir, self.config.collator.augmentation_config)
+ self.config.model.cmvn_file = os.path.join(res_dir, self.config.model.cmvn_file)
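+        # Decode with two-pass attention rescoring, one utterance per request.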
+ self.config.decoding.decoding_method = 'attention_rescoring'
+ self.config.decoding.batch_size = 1
+
+ # model
+ self.tester = U2ConformerTester(self.config)
+ self.tester.setup_model()
+ self.tester.resume(checkpoint)
+
+ @staticmethod
+ def check_audio(audio_file):
+ sig, sample_rate = sf.read(audio_file)
+        assert sample_rate == 16000, 'Expecting sample rate of input audio to be 16000, but got {}'.format(sample_rate)
+
+ @serving
+ def speech_recognize(self, audio_file, device='cpu'):
+        assert os.path.isfile(audio_file), 'File does not exist: {}'.format(audio_file)
+ self.check_audio(audio_file)
+
+ paddle.set_device(device)
+ return self.tester.test(audio_file)[0][0]
diff --git a/modules/audio/asr/u2_conformer_aishell/requirements.txt b/modules/audio/asr/u2_conformer_aishell/requirements.txt
new file mode 100644
index 00000000..49fb307f
--- /dev/null
+++ b/modules/audio/asr/u2_conformer_aishell/requirements.txt
@@ -0,0 +1,12 @@
+loguru
+yacs
+jsonlines
+scipy==1.2.1
+sentencepiece
+resampy==0.2.2
+SoundFile==0.9.0.post1
+soxbindings
+kaldiio
+typeguard
+editdistance
+textgrid
diff --git a/modules/audio/asr/u2_conformer_aishell/u2_conformer_tester.py b/modules/audio/asr/u2_conformer_aishell/u2_conformer_tester.py
new file mode 100644
index 00000000..c4f8d470
--- /dev/null
+++ b/modules/audio/asr/u2_conformer_aishell/u2_conformer_tester.py
@@ -0,0 +1,80 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Evaluation for U2 model."""
+import os
+import sys
+
+import paddle
+
+from deepspeech.frontend.featurizer.text_featurizer import TextFeaturizer
+from deepspeech.io.collator import SpeechCollator
+from deepspeech.models.u2 import U2Model
+from deepspeech.utils import mp_tools
+from deepspeech.utils.utility import UpdateConfig
+
+
+class U2ConformerTester:
+ def __init__(self, config):
+ self.config = config
+ self.collate_fn_test = SpeechCollator.from_config(config)
+ self._text_featurizer = TextFeaturizer(
+ unit_type=config.collator.unit_type, vocab_filepath=None, spm_model_prefix=config.collator.spm_model_prefix)
+
+ @mp_tools.rank_zero_only
+ @paddle.no_grad()
+ def test(self, audio_file):
+ self.model.eval()
+ cfg = self.config.decoding
+ collate_fn_test = self.collate_fn_test
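+        # The transcript argument only satisfies the collator interface; its content is not used for inference.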
+ audio, _ = collate_fn_test.process_utterance(audio_file=audio_file, transcript="Hello")
+ audio_len = audio.shape[0]
+ audio = paddle.to_tensor(audio, dtype='float32')
+ audio_len = paddle.to_tensor(audio_len)
+ audio = paddle.unsqueeze(audio, axis=0)
+ vocab_list = collate_fn_test.vocab_list
+
+ text_feature = self.collate_fn_test.text_feature
+ result_transcripts = self.model.decode(
+ audio,
+ audio_len,
+ text_feature=text_feature,
+ decoding_method=cfg.decoding_method,
+ lang_model_path=cfg.lang_model_path,
+ beam_alpha=cfg.alpha,
+ beam_beta=cfg.beta,
+ beam_size=cfg.beam_size,
+ cutoff_prob=cfg.cutoff_prob,
+ cutoff_top_n=cfg.cutoff_top_n,
+ num_processes=cfg.num_proc_bsearch,
+ ctc_weight=cfg.ctc_weight,
+ decoding_chunk_size=cfg.decoding_chunk_size,
+ num_decoding_left_chunks=cfg.num_decoding_left_chunks,
+ simulate_streaming=cfg.simulate_streaming)
+
+ return result_transcripts
+
+ def setup_model(self):
+ config = self.config.clone()
+ with UpdateConfig(config):
+ config.model.input_dim = self.collate_fn_test.feature_size
+ config.model.output_dim = self.collate_fn_test.vocab_size
+
+ self.model = U2Model.from_config(config.model)
+
+ def resume(self, checkpoint):
+ """Resume from the checkpoint at checkpoints in the output
+ directory or load a specified checkpoint.
+ """
+ model_dict = paddle.load(checkpoint)
+ self.model.set_state_dict(model_dict)
diff --git a/modules/audio/asr/u2_conformer_librispeech/README.md b/modules/audio/asr/u2_conformer_librispeech/README.md
new file mode 100644
index 00000000..f16da3f5
--- /dev/null
+++ b/modules/audio/asr/u2_conformer_librispeech/README.md
@@ -0,0 +1,156 @@
+# u2_conformer_librispeech
+
+|Module Name|u2_conformer_librispeech|
+| :--- | :---: |
+|Category|Speech - Speech Recognition|
+|Network|Conformer|
+|Dataset|LibriSpeech|
+|Fine-tuning supported|No|
+|Module Size|191MB|
+|Last Updated|2021-11-01|
+|Data Metric|English WER 0.034|
+
+## I. Basic Information
+
+### Module Introduction
+
+U2 Conformer is an end-to-end speech recognition model that supports both English and Chinese. u2_conformer_librispeech combines a Conformer encoder with a Transformer decoder: decoding first scores hypotheses in a single pass with CTC prefix beam search, and then rescores them with the attention decoder to obtain the final result.
+
+u2_conformer_librispeech is pretrained on the English open-source corpus [LibriSpeech ASR corpus](http://www.openslr.org/12/) and reaches a WER of 0.034655 on its test set.
+
+
+
+
+
+
+
+
+
+For more details, please refer to:
+- [Unified Streaming and Non-streaming Two-pass End-to-end Model for Speech Recognition](https://arxiv.org/abs/2012.05481)
+- [Conformer: Convolution-augmented Transformer for Speech Recognition](https://arxiv.org/abs/2005.08100)
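+
+Unlike the AISHELL-1 module, which emits Chinese characters directly, the configuration shipped with this module (`assets/conf/conformer.yaml`) decodes into sentencepiece BPE subword units (`unit_type: 'spm'` with the bundled `bpe_unigram_5000` model). As a rough, optional illustration (not part of this module's code), the bundled BPE model can be inspected with the `sentencepiece` Python package; the path below assumes the module has already been installed into PaddleHub's module home:
+
+```python
+import os
+
+import sentencepiece as spm
+from paddlehub.env import MODULE_HOME
+
+# Hypothetical path to the BPE model bundled with this module's assets.
+model_path = os.path.join(MODULE_HOME, 'u2_conformer_librispeech',
+                          'assets', 'data', 'bpe_unigram_5000.model')
+sp = spm.SentencePieceProcessor(model_file=model_path)
+
+# Show how a sentence is split into the subword units the model predicts.
+print(sp.encode('SPEECH RECOGNITION IS FUN', out_type=str))
+```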
+
+## II. Installation
+
+- ### 1. System Dependencies
+
+  - libsndfile
+    - Linux
+      ```shell
+      $ sudo apt-get install libsndfile
+      # or
+      $ sudo yum install libsndfile
+      ```
+    - macOS
+      ```shell
+      $ brew install libsndfile
+      ```
+
+- ### 2. Environment Dependencies
+
+  - paddlepaddle >= 2.1.0
+
+  - paddlehub >= 2.1.0 | [How to install PaddleHub](../../../../docs/docs_ch/get_start/installation.rst)
+
+- ### 3. Installation
+
+  - ```shell
+    $ hub install u2_conformer_librispeech
+    ```
+  - If you run into problems during installation, please refer to: [Windows installation guide](../../../../docs/docs_ch/get_start/windows_quickstart.md) | [Linux installation guide](../../../../docs/docs_ch/get_start/linux_quickstart.md) | [macOS installation guide](../../../../docs/docs_ch/get_start/mac_quickstart.md)
+
+
+## III. Module API Prediction
+
+- ### 1. Prediction Code Example
+
+  - ```python
+    import paddlehub as hub
+
+    # An English speech audio file in wav format, sampled at 16 kHz
+    wav_file = '/PATH/TO/AUDIO'
+
+    model = hub.Module(
+        name='u2_conformer_librispeech',
+        version='1.0.0')
+    text = model.speech_recognize(wav_file)
+
+    print(text)
+    ```
+
+- ### 2. API
+  - ```python
+    def check_audio(audio_file)
+    ```
+    - Checks whether the format and sample rate of the input audio file meet the requirements (wav format, 16000 Hz sample rate).
+
+    - **Parameters**
+
+      - `audio_file`: path to a local audio file (*.wav), e.g. `/path/to/input.wav`
+
+  - ```python
+    def speech_recognize(
+        audio_file,
+        device='cpu',
+    )
+    ```
+    - Recognizes the input audio and returns its transcription.
+
+    - **Parameters**
+
+      - `audio_file`: path to a local audio file (*.wav), e.g. `/path/to/input.wav`
+      - `device`: device used for inference; defaults to `cpu`. Set it to `gpu` to predict on GPU.
+
+    - **Return**
+
+      - `text`: str, the recognized text of the input audio.
+
+
+## IV. Server Deployment
+
+- PaddleHub Serving can deploy an online speech recognition service.
+
+- ### Step 1: Start the PaddleHub Serving service
+
+  - ```shell
+    $ hub serving start -m u2_conformer_librispeech
+    ```
+
+  - This deploys the speech recognition API service; the default port is 8866.
+
+  - **NOTE:** To predict with GPU, set the CUDA_VISIBLE_DEVICES environment variable before starting the service; otherwise it can be left unset.
+
+- ### Step 2: Send a prediction request
+
+  - With the server running, the following lines of code send a prediction request and fetch the result:
+
+  - ```python
+    import requests
+    import json
+
+    # Path of the audio file to recognize; make sure it is accessible on the machine serving the model
+    file = '/path/to/input.wav'
+
+    # Pass the arguments of the prediction method as keys; here the key is "audio_file"
+    data = {"audio_file": file}
+
+    # Send a POST request; replace the IP address in the url with that of the serving machine
+    url = "http://127.0.0.1:8866/predict/u2_conformer_librispeech"
+
+    # Set the POST request headers to application/json
+    headers = {"Content-Type": "application/json"}
+
+    r = requests.post(url=url, headers=headers, data=json.dumps(data))
+    print(r.json())
+    ```
+
+## V. Release Note
+
+* 1.0.0
+
+  First release
+
+ ```shell
+ $ hub install u2_conformer_librispeech
+ ```
diff --git a/modules/audio/asr/u2_conformer_librispeech/__init__.py b/modules/audio/asr/u2_conformer_librispeech/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/modules/audio/asr/u2_conformer_librispeech/assets/conf/augmentation.json b/modules/audio/asr/u2_conformer_librispeech/assets/conf/augmentation.json
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/modules/audio/asr/u2_conformer_librispeech/assets/conf/augmentation.json
@@ -0,0 +1 @@
+{}
diff --git a/modules/audio/asr/u2_conformer_librispeech/assets/conf/conformer.yaml b/modules/audio/asr/u2_conformer_librispeech/assets/conf/conformer.yaml
new file mode 100644
index 00000000..72342e44
--- /dev/null
+++ b/modules/audio/asr/u2_conformer_librispeech/assets/conf/conformer.yaml
@@ -0,0 +1,116 @@
+# https://yaml.org/type/float.html
+data:
+ train_manifest: data/manifest.test-clean
+ dev_manifest: data/manifest.test-clean
+ test_manifest: data/manifest.test-clean
+ min_input_len: 0.5 # seconds
+ max_input_len: 30.0 # seconds
+ min_output_len: 0.0 # tokens
+ max_output_len: 400.0 # tokens
+ min_output_input_ratio: 0.05
+ max_output_input_ratio: 100.0
+
+collator:
+ vocab_filepath: data/vocab.txt
+ unit_type: 'spm'
+ spm_model_prefix: 'data/bpe_unigram_5000'
+ mean_std_filepath: ""
+ augmentation_config: conf/augmentation.json
+ batch_size: 16
+ raw_wav: True # use raw_wav or kaldi feature
+ spectrum_type: fbank #linear, mfcc, fbank
+ feat_dim: 80
+ delta_delta: False
+ dither: 1.0
+ target_sample_rate: 16000
+ max_freq: None
+ n_fft: None
+ stride_ms: 10.0
+ window_ms: 25.0
+ use_dB_normalization: True
+ target_dB: -20
+ random_seed: 0
+ keep_transcription_text: False
+ sortagrad: True
+ shuffle_method: batch_shuffle
+ num_workers: 2
+
+
+# network architecture
+model:
+ cmvn_file: "data/mean_std.json"
+ cmvn_file_type: "json"
+ # encoder related
+ encoder: conformer
+ encoder_conf:
+ output_size: 256 # dimension of attention
+ attention_heads: 4
+ linear_units: 2048 # the number of units of position-wise feed forward
+ num_blocks: 12 # the number of encoder blocks
+ dropout_rate: 0.1
+ positional_dropout_rate: 0.1
+ attention_dropout_rate: 0.0
+ input_layer: conv2d # encoder input type, you can chose conv2d, conv2d6 and conv2d8
+ normalize_before: True
+ use_cnn_module: True
+ cnn_module_kernel: 15
+ activation_type: 'swish'
+ pos_enc_layer_type: 'rel_pos'
+ selfattention_layer_type: 'rel_selfattn'
+
+ # decoder related
+ decoder: transformer
+ decoder_conf:
+ attention_heads: 4
+ linear_units: 2048
+ num_blocks: 6
+ dropout_rate: 0.1
+ positional_dropout_rate: 0.1
+ self_attention_dropout_rate: 0.0
+ src_attention_dropout_rate: 0.0
+
+ # hybrid CTC/attention
+ model_conf:
+ ctc_weight: 0.3
+ ctc_dropoutrate: 0.0
+ ctc_grad_norm_type: instance
+ lsm_weight: 0.1 # label smoothing option
+ length_normalized_loss: false
+
+
+training:
+ n_epoch: 120
+ accum_grad: 8
+ global_grad_clip: 3.0
+ optim: adam
+ optim_conf:
+ lr: 0.004
+ weight_decay: 1e-06
+ scheduler: warmuplr # pytorch v1.1.0+ required
+ scheduler_conf:
+ warmup_steps: 25000
+ lr_decay: 1.0
+ log_interval: 100
+ checkpoint:
+ kbest_n: 50
+ latest_n: 5
+
+
+decoding:
+ batch_size: 64
+ error_rate_type: wer
+ decoding_method: attention # 'attention', 'ctc_greedy_search', 'ctc_prefix_beam_search', 'attention_rescoring'
+ lang_model_path: data/lm/common_crawl_00.prune01111.trie.klm
+ alpha: 2.5
+ beta: 0.3
+ beam_size: 10
+ cutoff_prob: 1.0
+ cutoff_top_n: 0
+ num_proc_bsearch: 8
+ ctc_weight: 0.5 # ctc weight for attention rescoring decode mode.
+ decoding_chunk_size: -1 # decoding chunk size. Defaults to -1.
+ # <0: for decoding, use full chunk.
+ # >0: for decoding, use fixed chunk size as set.
+ # 0: used for training, it's prohibited here.
+ num_decoding_left_chunks: -1 # number of left chunks for decoding. Defaults to -1.
+ simulate_streaming: False # simulate streaming inference. Defaults to False.
diff --git a/modules/audio/asr/u2_conformer_librispeech/assets/data/bpe_unigram_5000.model b/modules/audio/asr/u2_conformer_librispeech/assets/data/bpe_unigram_5000.model
new file mode 100644
index 0000000000000000000000000000000000000000..ad6748af9e3f3ab9c36052b28d46084b7c8f315d
GIT binary patch
literal 325121
zcmY(McYxDH^Z%8m9ttXs1+bULuU)YV_FnN36ctPAUASBl@=5MEK?PBK>iqNuH-D`qrwYgi?e
zoT$hFQ?HuQuv$ZhOj@p6aCNQhwnQYomq1$Cxr*HN&ozO?|H$FJ#?_)VnGWT9A*HTvFF`x+sM@(4(Z
zPs^;Y@2+KnM0Q$uZ;dJzq-(={K|c9*S?{OGwe(H77U{9~gSFbxhV8QJk5g-(6ZyP!
zdg-Cs=cYt!Mt0le;aaxIb~&{V6jGxZBs$Q)L`9i64hpt1vdQa&YE4D$N{c*ROS@cs
z?jtiAx;Ly!4sA`jRFm926~Nl7B_mUNK2}Q{C30!m>td&~
z<4#%)
zu9l>6rP-mTEixfR$l@h4RH5G#Xe*O~~?WDI$ldpE{i@F!pcg`C6@vLa|K_yK$}wJCZvm5ww)8nR2^qxy)&XWLY*n
z;`LBkN>=%qkj0FO^q3ubzA>X=&4$(JQ2AV?lFvx5hd?YwB}a^Mk2h`fSr1L=p_CB
z@e?7f)uN`*pS~UiVzq5aq%$&Y479PI$W~x~<~kqMfW~r$QL^D-09Hae(Js%N0U2jd
zjVjX8@Ntb!X-BzlU1*EmL>>EX%O6%)jqfJd6%(}R&q91MhLr~@OdqMGLdSQSEfV9twIA{
z@*1QSn`n`jK7xux-k6{V{?uzhEp|lipC9F93sb@!r$E;FX90mXLa)`(z({1Fi8A@D
zWX5=uFwyS$HMFj}42fm`U05rv(v&MS%it3L&)N-dN%e*e`#S(%^!c_FfR&qR&qOl*Igb!kbYa?I=f$-M`2>s&ycXJuqhjRn#2k;%
zltI55@(#3B4!wvj(E
zrXu@)3R!DQX5n?ehek+=vTU-(x3waY87;H_#tvkQRl3;8TR4D9L%Ba413;h4$%@&8
zjDslACc{2-vXN3puYlB23UbgI-_>S>Ql?lUlfMoGF_yB4R7Uz`p-nuji10DuQU~CI
zIi&SXPf;w8V!L@xS9JZja*gk6InX&+F^=5Bfl^+}7~2oP$}AVrwweY(!(cuq4G?z_;C1_CZA8LpVSot~!LhNRFvxSf~
zZ$jIb8f`Wd$m(1U&AW7=19XXUMK1XPz}PA=7=`d+u
z{-D!s#d5Q3yy}ur;Y?mm*}#HqB0la*(6%fYnR^C=$tbTy%skWLF+P4t5j18BnKpU9
zU|({1d1xd-x?ZM?wtqEY%b=uH`1?Mmo3(#k_B^yN4iRh3dM|;*y$EG;!Ec_RB}f0;
z@pn(49I+)j{2vh7jkGtd`BSa5Jo<(l-3yve<%D$DpP)%M5oxkl4z9<|WGiy`D36d2
z;(zcbfS5IwROb964*J=$D@tP=q<3tQ0@aa5f$LvF*h)dfEd6gh1z?$kK^)1WFMyat
znslW;=u=N(!~CONmxfNl7~a!DP15fmLX?rM=4GR!2%CV~n%X5(gsia{I?}+497*Jv
zV!OF&sxkKU4?CD(qL
zmT3n8^k`U}wNSNGtHxjb0qN^)(rYjw;}F%Y1=(+MItWFe9rS#fpbdm%CY8{saOj6X
zR>2G{-sfjX6Ii8)NO#)m*V<>DYH#TaX`jjumNcdpmw*1dpva)Q`>9<<4wxaG2mURU%jv7%mU+WG@vSMDQZcb49I&=DyTgPF#
zO0SbG3d-Q!7g(^8krj^;M9z8TW%kPu#&}kH_Ge!^-K-<(%ypK9QrP+ZvJF9t8WmZr
zKHACYOadWpxrftH4wY)aAZXKciYR+`K*273=lP!r#?y`}J#c->VZYb%F6iX8#sf~H
zu0=BXVQ7=#&}xoKtT_w7gpq=mw?BZivRV>y#R^Z|mOu_8d;L*M-A-SL-`z3
zO^>>GFM?KwWTue@J@aq?E0&GFw#L%S9LW@dF41QKh%u5+q}yfE0?&kfDT~&;uRTJN
zDO^73`d2Lh{k$M2^n`}tPAF`PhwUTnF><@VL%e)Z&e?&W6;Y{{3&fk*+#*YtJDtts
zD>As@pIWY}Zb!1ongAx!miDj*c<~UBxc9P($f1WrTLGD2a_`YlCIoh6VNu@UG$3ur
zg#3AC{VQvyZYfqB0TkDRH9jp5PX@63SwGX#>4AD06r(b&dfy{JRwPq+a*5}FicE@o
zyRWG6oXEGRl6>HQKwyCyKb0jImzPtu>U{)^-)TdUZ0Su~pWo
zaQ{eMEnb3{>YT4|-3u@p*~a#Nxd$+N2i(Iz>{d7&F&iY!;9v^GIwhvgK3
z5A9*!T1xWi<^B4Dg
zy%vIsG085TU^wL`Wv?lQV@VnG5JAf^RaVAFJOU6io=8=s>2;@@GWn7`J`fg
zz70(oxr}`F5kV7!YDzjHkNOzM`fHZ#=?%jdc=~jav9-fOXp?H5-Pr+)J&$DAVO3T!agC&DJKOf>q
zOCqc{-AW+VH&GM+JJc!m0evooGX4|HwDYcpj2olMq%!e3=$H{x`ER(*V>A^@8M)$a
zr}Mf&Pu&X*qm69m;~=Jpf9~^GBr3@KC!rBxg>CD=>HaO7VdePc1!!X_$0&GaHY6Fc
zp*{XJOO<0`e03gS8>?yg^L>bzZB=BbXJ^Zqz(@m;C{&zcQ
zDy$mAs6PHJuRY?%I2~2#e_!qcO);oFXB=w5QcrAfVMib
ze6d*nmi7p#1lb&R4m1+WXusL)TnKB}LW0FIBMqZLtZK|K!!Lrej-1Re!64LKF9Wgy
z3%WkG-w18(q?2;PEuOwyl$n#9YC_At8_IZ#N|{8nY&jVqW?u7}`LNTe1gb%Y$2@_W
zG72*AX#nFFqpzN$oHE;!sE#fdz2-?0!IqYL9blb?HJE`IRBFI#W%#Gw07aP97P;mv
zf>y^8bGq(-{`m^XifO@!!BFh^4@f+~8Hc%5JJ!fH3jI${ND~@OGO{<6)vD6O+WY?h
z)#-9IBb^WR#I%UaRMMgc1>&87(D6*5CKDY_mu11Mo43h6EfDbDl9Q{d1Wlr75}f-C
zZntlwIZK1aR5~GJMtaN=D{@&5{~v(WF`2>al9mS*(auR{oO19z}V9XLr;i)qr<4HMFuBavS
zrB^L7Gr=m02*&+BFYhjeGI=-3kH0}!WlM=DqR@eVf>_Q8#@tsOJ6VGiW#=^sk}#)A
z*Q#qn#8s@Qa`#0K2efl$q`E$UagV{gP4?;yZCoT1lHJ_D7IaSUv4zulo!?g7-f6}<
zCaXTs##V{J_tAvCQuJ
zAVHH3YH41YXF(eaOqlfHXXbc>lq$amy$l^UBD(Q>C@U`_w|_o|)GnQnl@<}SjMRWs
zkS)Fhu#LU0ymspG@V9_gd^&*{=^dplRVuF?bhjToLbAxl>HgoKO~k>{70H-o9wSUF
zrGKEcp6r4_u
z)?>3W;VA#aerWcwP&RCaR#}zQZoTnIU{*Kgn6fN7#cAC$$%G=bm0t-~*VoSiu+h_)
zp?J=hejn~}n0Z-hem)<-1fHZ0cU%Uk^9jT2t*Z#b95LvFPCVXIC2SD)9spu}rh?XYl2cJaP8(E@UC`N8wodtXj;q&ehN3x>
ze3?1I!t^J(??S>lu5xn9yiHJQr$?d?D&
zS3R>VNRRtHPAX9fvv%cqAS;D42>EI*q{)U|mTJkn&GSq!YcY8Cc?ZB4iFE23_nzl~
zjZ|Lz)JbxXfnP!zTkHjbCVtc(K-962lkYpMQp<~;mzMpyI?YBUEkCUeZQLbujq>C=
zkk&5cOl*$0X+tn8fT=i9V0`So5s(&8l+*Sl7=q2NBJLF+Yi
z)f=-;VH=vNlyTdfDOF_UCg`~O6tnpdkDRQ^JxW>RiZ<>Vd1wX=>qDcQRe`WNM0wp{
z9Xtv^7YMl7`65r*q&t*T#zLD2ZG#ud_16KZ2rJTc0>M}ujJFm^D_}Czm4uac{zMRC
zh5b@VKEA_g7Ws;7e3z$Cb1hR)mwQ3TzgUv)Q~V=`gv$JX9t5#!>+W^@OpD7(diN{~
z;-qug%N{Ao95!?=q|T3Q4_En!a6B**a>7scCC4CcT1MFFk_>X1s{ld^dTIYZJRL(}
z_s*-<;_56CN#`yA##WgnaOxV6l&E9k9SvK@Oege^m5#MrZ%@y1MOz=cIRJG|wKF3T
zG=Zh`tnF&0|78ouCnJ(mcLua(p@Es}Bk8t>rz;n!^<_sv8%vGsL&qNrY1Olt@z68?
zMKVdgktGPbx-Xg1Xki`SJGKzEEL!MeUlt%^&eU#x-f*Y0MYafiM>w6yQ>i)UL0f0w
zlu+w^&S((Itfu8=4I*8(3(YcXJVe~_OY+2RP{wMdTxGrMe>;E`j4?VP8{7?Td_`y(
ze@=lk2`8%&6CT#V7as?Uhki9VBtu_$;yKS{NU^CPA1H0bsD-p5XU%q?jJ8sir{)3}
zTiA|9_(K!P5t1
zja62wEu`TU?UOjN}BNII9zqF6CFpHa~GL3xg3
z_ny!QA1}BcHX>*O)G>O{rXCYJH0~u^LQ`*!-WhTuxACu)M4nk(X)9h=8`8hJo5iOs
z^Hte%cc<8X+`K20RRGHXoH6FFInZ;6%z@*&LqOuehixe>%aQ=bJiZf={F#ELy9A@h
z*Fz!VLEI!4p6h9ws%mN8db9&}{`19!0Mr~GoS!fCZ|Zo|EQ_xKu!8csb$Ip~XcJ8e
z>(9U&A+55BRGAi#VRrdhUo;lk3%*p;S*rsIQ6ibLB;J`Tq
zE!RS|MHl)diqx{whQIwKfVlOv(a(Pcnyhu7L-d7`77!q^OjYI7P5ElQrOd%z5Y3D_
zmM!X>4GyvS+11KMR!
z1==`9^cWFjixD6y$&7q|EdH$bX@6VmA>|DxN4=O#LdBZ{nYJEUceiKfEpv;93FCabLK1Gi6w
zCM!HAJ3r!|+H}Y+ddxl*BboUGL2Gvw8#-lLR-FYD^T=UMMfQFfz?vF4qbd<;zGAh~
z*k|4{*J-r~$S5vt|Nb08&F&yvkS3kETJyf^K#dMH562aW3zw%T$u$B6iQV*txPQ)tYq!7sf%
zkTIbxOKT4917bP0<~Soiq@UAKQ?4YR?C&W|o0O3Udkh9}UIrfuZNeDB@!NvKon*5p
zSDxyznd@+D9Sm(u72>Vb_NM_E<9NEl)3tS=T41XrU!6{jSkno1ENVA10cOP_9vP7N
z7m0#XhD7q}^>;y)u;q{OG?SIfTOCjvURrJ#4iI;ngiIdkpIdVgUP@1%4`4-c+$`r_
z3~3^3Q;XLP*FwkKsUbpD<;`vcvm99*_8Sjn97O1Y8M&|y;CzolcJf5e6VnN{nwvd6
zj$gYz;9t>k^U^p4n%d>)5`Cr;jt3Z;s_sp{dkDyitAs({VVb8esL6AO*P!WU)s);Y
z&y%AV26OHtg{-f=yqrH&vb-jGIF=%^T3;!3qkXM)xm
zEUx&>ck$@DNQc>ZD$>O2!F8J=-ssKJ
zy0yoMO1Uz<p?P9SLgsxQvj$a^Q`Q98eyvgypTbgCpw77eHHv$xORiOZ)!MVBq4`Q
zhq74{6DfBVRO!D~!8i}LyNhN(q7H^ha-Z1{R_VN6i@AIrv@xtcgzeJvGk~}&P-i;F
zE}w%K)5R2LWNOPDpcrDr+Vr0V1Y|-Pyn6?;r#FI+?}|;Fo?L
zJso`DUO3nRLTpva<{nAdYNE@Tstpeh
z@C-0Qv`d#0p-t4;qHJ?IRNU-L+rK9uE%!vWtZn^q7Q`yomZ-3te%9(K*rZ7BvmuSY
z1Rey^cm*`vU_z8>HxQ2Hnkl!co77u30a@uoGP2Lj9)TIXT0wIkcMFh}r&=KHgm*eX
zg;>9(i+yKPuEoDL8REFm8&WrK%_o#CMFljl7Ts8b4^v8Qe$==Tit&fNg5;^|_eY}y^txDD1Io!@7!4@PmE9j&$jK_rZF
zA-y$hOa})86o~)p4HS=su)&h;>j;Js-L!Px)Ur?u;VoNw{76p{8>P!mAo1DGFboek
z_%H-c-x)OSAhdgNzdDH;Q#=qd9y669<)&rKkwC^s8D~i*nC#JxsJjN8;*e88tUbe$
zHF7X?t?HPwv_;oG6O8)OYVuXa<1>oeWNIU{mC8BT8kF-D4|TtUSfptwnrvR0l5
zD#>Pfa;WFlrtYr$wRu`~Y1M
zt?6|n7lqkv!|Oc}9TL&}_a*?Zcp@lJ-`xsk5{DOg?(^q&W5;*8ql
zoe!bn7Ob#ptp16A9a5D3^POtrT=-L{xRY?}P&s}}A*+K5LQM~!gTxu?G*SH8W9Z5N
zhaY?gVl1n7ep+7m*@0kpkj+^(wSZTQ$&mJvLjh!_X6c_SEJto`pD`n~$V4ULCc%h}=iYk%2NKISp;o*jx3~D=
zVD>yEtl8U|P*0V&&vpVcaYxmp9K4H1!Se&_FcNrbS1`-8E%+MTvIl@oD|)Vn7Rt_k
zl7cl!;_;N|Tc`EHVp~7YC7a74`lAj4u=df(sRi|a_d`8S3r6;oJaPnpO%9EE6|)e0
zrr$f-qp|w5rRA$rJu5u57+jI8c{-5QD_I;S=bZ^@ET>WSteKumcs`6S2K>rN2k`A>
z{Pk3TmeUz#WQwr$M9z%Q%y?9~5~81x1+ePi@fW(=O-&vri@~`pXS8}8HI3?J(rt$W
znb?wQCZDD>69xxt&F6S>cD%6HbtHhbw^dVC9t8qVtXI@|{un6Bt6YSoEiZzWhHiA0
zTnkHZjy7i+JVW8Q+i)qKLYG|%5_bZ&3Flq|X$_ucj&7EZCIQeX!&0`|0}ydTm)MFe
zdIZ|CORJ5iEZ08@Vv=gh$;VGZ#a*0QROE~4o*<&iaJ1PB5MADyr0;Y7v6Kimo<5!h
zLL1!DYsmR;`d7XB%2wp_d7c5Y967G^l||1*
zEpnHJYw`{LX3G_^{%7{XFd(k{8q~yvYA-)6hBSdRs&)U)@1U*vL#lGj3MlIr*z{7(
zRCn?!Yt_#^7kX
zzJKGin?u<3{l`Kvsr@ls--
zknd(i5M+oouUsN%MYPdHWS?`Pt$CP)*@vxqp2ba6MJ^#oQB536e{~t*Sgd7EDhjgq
zDDWNY#uTjsV+aMoXv3Ex6nRdMtaR@
z`5xMMFJ`m4{gNdfpLOh;rBEgs6j&6Yo0d5+q^c`l)1LtF#B9FhU;Zr&cA3*@?Z71F
zGim5T*!W0SrGIxQYf?6mGOG7PEJoCoof?e6EOPS+0`7sqIc<
z=;2|6kxoZV903m<4Q(PR6m>WI$0Yz(Vk6ETdtd7DOX>)|_Z1$WyJ6@Cn_uJcIWU)R
z$2u7`w@Y*_q)j{ca|
zUYiPLOmhtauI_lu0mceL>6d36z(Rzkar+E_xGN(nUBb7R>5#fqX&a4t9>~}Y4@(sE
z`e5UmfW}--ccZ^4Z4ye7^Q5;S;guIurfSe)W?H%6@}APhCu%a%U+F!In`o3l8Z?Gb
z3yY1+5c|LH$-@rypO2ueAoZuHe&U~b|0XZHF7h1o2#s+dn|ujmk`4Z|*kitSgnf=G
z-qB(Z29+M^FZ+ftykjIE_8mbhOy`W&Um;`ms)Zn&ek*{icHC#k%X3PbutHY|k?!f-
zqgLgnM3SlCitYfGFK>6qTI)hmVkse8_aqoMNnSnVco=D8Amg8N3KR(`Z0gBVF#NHS
zmIe!Nldh?iEr6_pnCCssf2m?|q4bx2|8bHd(c4&Fd36kBo`b4VY0#9SbJ;lqd;mkFD8Ti>6T+XRtBpE4hSb5?}(Z4
z~Z_c%pPh-5sRR2x82oid;33E=A9!O*P!HWlzFHpc34(
z0zywy-_vRwZbfQ84fCv!5_9|Hb^vQGv@Q8;IHd8HRI9&`Y7?k;A3HW0a)Ux-Krd%+vBh6=JyYrh3|NHTf3km}xAkXmc0b=Lq_e
zx?XKI#SzYk^Eh7(P=r2&XT}p#{UhB=h5713AXXucG!WnV4|^27Gl3K~eZs0N>d~&9tVkghwd-ec?#O9$1OeC>skMtuChL^^t@-l9Svp}S@Z&kF@RSEo#}x&
z0H%L5ztys2U#nvrB(wj(|KtrYsWht5FK-dFhQ$~a#=@|9
z_7zjYR~odhLpUVT3oS2v2xMYrzpW0p$A1bEbAe|;O82MZ{)OqyA{}G*f9~<<(&b?M
zoc;xvan`6loekeY#}&@0_io1@0OIL~F1F^6kj4-`R87n=OgIQT95_vg6X)6mKe46L)6%Odj9hj!OtP|vSxM|3Yc2za49$*qyUE7*k+Syyk7;IW%$yA(Dj(
z7~5Q~klVI_viVN)pG<(dOL
zr=&JR>tWFG_D=o0@QI@XeSZX~)fvO2UJYD+B#4!WFKswjf8-b-`Y|3plaD26{9@W{
zmeg_3Hc-$i8~gj$f^H9P83@h9q6g%sok-Y9;?PW6=iQS*OuF#G%6a9P4kVdb(sFFt
z0oz@oL#*jGr_*Kbj3n=XrZ~2=vg{thmYsS%
z!9b@eASRtOU09tL`ak56s)cZo@Zx7ctO(?lmV2J{7&zo;D;8#h)EHNPzT_MS3Pqjm
zetikRyaNG8I8hqs>UNJ}oArd1|>SoePbv7&P=2{vLGx#%|_%QEzJ8T=1`HC>V;8aaApsio0Hr;2iTYzH8V
z(yqEfsta^Hq0+R+bcZy)!)3L!bYIIO;P)fO*Id?B5fBR@Eh7z~=rjGYt92
zhg$-~jaABIP*z_0KS$Uss9@IE9t1gXta8cD7DQ?Ia90cFlJePZ1g%K+dW^dX`#7Kv
z7;s~0UjUO0(=e7{UJBSB$Y$2&gxr3Jr@(SdU%d8c2l8Py9dVpz!6lvjY
zC}t(9wl4pj0&NATTk=1TImwn&PJ7m4a}69<<=N1PE;ymR@*;%wR(7tc=-o93#Mn=U
z!~M=LS$uAvVuO0wT>_+a%d}Cb#`1j%JS$+>()9%&_igGUAh2S
zo_Ihp>eFiiSUxHBy1HsTXv;^Br?^j(kr^9+SG1}
zLE<$A3nEvu8CK^g7-y{8MD@rSubA~%b-Px14f>Bq(>uE1j?G5fI~F{Tb@f@aqbJp)
zMD^p_XjdSsN|V04w?OII6c^O4!Fzh5a8iIkZmUD7s%>c3+M8G=BU160#*8|Evdkhn
zzArK8`?fgkz8}IZ92#<$$Nzms$gS$r6zu{$Kenz8vY
z{u%E@*-}aSjJ8^ly^i(IDePO%^@pr=9=(hC`SAe%k|kaEvHt?uoWwzHZQp(4iJ&GH
z2AXQKubt!wgIA52`ws@8*YY~^(`OK@^+L7ht(F3a=Lz=MXS6_CW5=&8Fzs(q28=~-
z#-pt&AXc4{sz5PsX{hxG~u}0s`)?;y3peX|72S7
zWQB~8u*3z;=BO(?US=34^YHe6S9@M~Bp}%%t^qJ+*%RuCdB3rqDh;A1zq?=Wsr0@j
z4%g#7l^S7C)6Z8V?tgmvwZ|lSb7}CF30oq4j8vXJo!X_17NUPA#U-;P(tn>Ohz-&cK_}oR1
z4S$EU@;GdZr1>vsTbx-w*IxmpjgQyQ10C0^H9jL317ccN|4Tc8T6$)_j67PW!zuM8
zYw?{iS#6ZN*CS|MJ(0#pP|+E2@Wz0~GKWd3N*%Kah}BSC)48s+1%MTSs2ST^djxep
z=ehmu9nfthI{i+b0y}6)?%f4C9{gNYJZ)DG3*`OV9K-8}~!Md$^m?(IOi*iMWg
z3Q&*cob=e2pyif}cns+vCzDKH-4BDbF@la9&aL67&yk>3DeThZcf0~740EW^)%(f+
z0>+J@x6LjXP^a|`-K$T8wvv;a030+J(t5AmAdRF=p*RP1tGVYiXlqW6P7h8)F)XfJ{J#?8y)2TY0zSh2XjaP>vH{13zyUAC2&K@s4*i#=IXsnU=0mwF_#In#A+
zS1jgNpB8!Pa*v}AWvc6tbiT?nLJ4Fxx^yf+Jbf`G7C7;}^;(Bw+l)cRHWp~E;ygKy
zII1G0<=2}CTCw47)J2n^EiO|t7T|jnup%%B=~=$azZ1x6jYpz3{yKMoSgqM}q{I93
zm*3|J=}cKoxO_4Y<)TRUf55-0BBI0TSA{HFmV_M5oSzC}ZK5`V-5&7>sU}peiah&h
z9pQ+HN#n_BAdGsQAz#$6)s4H9xWGIGZCo<6FebC`ygD7sIvJM~Pkau_3e>ss##xY7
z_t?K(uAB`-{;C}gnqz704cHG~g0dc?H+A)Jm`nF_ExKAmM!re7)>*jOkd}4c0IXtk^jz2avetD|a3a?vl)COJ+HG0v#caD#%Dv0Vj_!ml94dWuJ&@-kF`S`_tV
zk;j*TSlMa%`NMyD3eM|M@Y5@=U&}&$=ps3(1AuX-fVM!PqeHr3Rgp}nLu$R#So!Y6
zvN~}43vcc9~`r+k50l@AC50#SS3x{2j)Tz8^HD
z51+3{MBgV#h(O-#>S=W+D0tCDPUJpdgN%i#+*S{R
zt%(G!JUzRjR_ojhV(c~Qqmn&tgSI}v$wCzd%5vcy9*4({Bk6Ld)2(V`x^gnKiA%j}
z+k>vyI0Z0nW8%!7>T#$!4?3&y_TfhyWSg78J%hn@!DE1N17g~4lH;BPKw?$3>%TD#
z0->u`JZidq2)CeT&xEk5wN%5?TK_0w9H=YzInP0wWYcJ`>cKVOc~7IRSIF^;IUpvE
zGB@I7-b>K2IKoZ|1O0NvEF;@arX~F{kQK#43G&=4kTA(wEWc>jK4sZ5^?ei4YR80!
zmbLX;0G1m{FT4MWbesod9f;9({ku?PrXF$kYuGBuxroNy?qd*VP>=WU+5Hp@frY!{
zt9=P!{}oysc#1zhd8@)xLEocw*89eU|`PIgz>~WaJ1%jBivQ^pbu*0cb;@RiCWk
zm@Nd$oO<#f9$gJXJlX!IM+s-&X*uREkAhPwm0PhKz+}&>{5*29(gw96nN(0|2Xz9l
z2<`eP{yR!r)i@WEt2%r9R6f^|l*DQtKdlZh=QcQ<#RM&ncJ%~m;48=`Yder-(y7SR
z>jU@k%0YKKM;rv~mIuL-|FzhhwkA$%GQ=Q_NpH$U6@hDKs7}tGHLgjr$=n1r|
z?0<}fSt|Iq3Bp$8bd~Bn^0EV9o7WYjx=F*SWI~B+pfVCM>
zsy_eyU_c7g*TGjlov`%*bQ|e+24viE+p+PsNS`x7j0+wzi)4!gv=tJ(Ov&$`tVb#6
z*6F1tXe$&+1&081#T=mJ%w}4Rp$`rLG3ikwR>AL@0c_kBsxqwL3DnP@$~;;01iD{s
zmS+_(-h%hU5fQYxu49A2>`*P}J4NfA1#B$ao->jc&heB<9iHo+3tbzY+&q(`6tJvQ
zYMQy~f6yjx&6;%miL%wLWT__L|@+^rnvVoaj+7BIVUb=vIYj&Spf^?;ekiHry@;PKJ)_6^7{D
z3dC&_?;$u9xZh(%JUm;G!~>oUf4$cvPbqEn(rUgw)k(eceAdH|Frd%yb$OIaL6LR~L79Dk4_MA!B_|v-_Ob*XGpy%A|yI%k>nW#-jW%9xtkCWhu__VyK
zKz;i_Cs$*}Rxf!ii&X_W#^;J)SAxR%SknVFG(ziCW_|fkG
zS+ncbn$GdS`yf_nURq}FM_W!&%xcEVc4#I6KuS4Q;c*}Eu{J{VI9&7H@k7r=Z_)5z
zCJzaG?767jsw_Kv4q!D1*6$4#L0b)YT|nduNRt*iE~W(j(CSMd6Ar_!_OSHbUwb~K
z1pa(qEQV(6r9-cq@*R{h&sho6?XVvKtQg&yZoUN4M3AtT+D={yVtlbzYnLUzLK`FO
zO1LhHTw-dmQxZlIoi&TON%E16rxd
zX<3FH0$^2V+Gvr#4}~^P)6A24X=nZs^(YzUp(~E|C~P!S`m)6-$9a@i9oVJgoyLCE
zBnJ(28kgWE+2KUb0{aM&j)LWT=Ks;!XAd#h;33ZyhH04<;B(
z0ekWeXIMJ*fIBX7~oo30ZbTODFH
z3Ny<TMn=vR$)WKg=Pn<%UZNCl3d*DHi>w+Nl2Z0gHPh3b5LzPtj;`
ze`O;Y<(THRgF`q
zaPeh>>pcqp;gywMN}INXsGq$7DwdBPx#<**iRwP=j)-EV;|0nFA2fPMQp6GDD@1
z#ScQpm1GCY68y*$ASN@
z4l(tH>E`R_fnuJ`QWeSVv%!p+WR<^jn*(XgpdRZTp&efJe45ob!Zvuj*MS&ftfJEY
zO;5m#Uq+>V>upEa1@PyI@~0xkE>8_JT<-l4z!(Ycl6oJ1i_aXhyPwR4MaAMlLrX?-
zuL4%jV4)<=T3-Me$NKkB7!BT6#3Jd|UFFmDOV23LrX6BS1uPEx_@LH(tB_@+w@|41
z?cX_)Me$*({9p+PgEU;^+~FtB17BWFHn4OkWYSbGvL}~9T9;`r%C^5kSpobzv96D6
zFY_Fj5qbS9FL(S6WK~vYoV;A{kLSUoAs??WieAjCs&L@?U9|k0a}9vW0uZv%h&d7!W1=b
zU8lJ;$Dw3zPow|M8BPMu+Sucib<4cXme95KG;`_^TYEAy?lRa$Z{x{Qs>Xk`Ei}!*
zwr_|$z8yr|W#J34PTd|v7hT@d+jvL9vE~;JBGnJ!sGU3selJ>?Mn!64vQ4TxL&gIP
zD^o!xDL{z{Or7$39|#j@r5%^Kjd!opdfRG`I-M-y$g};Pe79PvB}bRf1^WQOqCRP}
zsIMpIAJkf;xgWIg!lEzz_J=gt$4{z7@^&3#BOK*2^v%f!cswq+vKgf}Yv>9Dh4pPVpK_lYNyxN=&8pWz-c_zAIQJJ+Bp|u#avet0I
zszD@V&Ip1gHtgtQ&W5u3=;Z`8y-UM+U?!*xmgutVccCZ2&uR1jK^Z3zdj(l^IkeR#
z%fMZFm8aJ0gS_E69soYDbjsKXgsrN4W3`prZU(WcCbIfOBL`@=0L2C9GWW|Qk3!Sx
zi>uP-PN2A}hkIB@+yxL%0O8qB)jE4U;2Cj_nUVH7-K_uQcFh#em=`*zYFc)F&>_3B
z$)9#S3IxyG9LBdv^DRE^`O?>$W!Y1nC~Q&4y84VqY1F-P=2>VH68?yiRnn@M*L$Rw
zP4~}DX#CeGX<}0K5WafN1hM+*duD3==joE?0d2|UN~O+K%H2>!O``h8pE^+td7+-4
zo>AbQILq_n>3XgrsX}CuQ{V7py5jK1
zo$mlx?O3JSyCvs-I>kwW^VVZqWp?J#B7m3v<-u@Ao)er`cFRw%ML&Yeg&%9?fo-s?x>*
z&V<;V8#ky&&|TT{8$vS;sV>-M3;#x)_&Y?Ut?OlmWy&e_XuokAFcSq1**U%I6(Hk+
zKjdQ?mgzeY$=b2@ml=@^+S60E=IA2CJMbVNOUdw66WGy;7{A;BLkJ%n0$>$N>z45C
z!#$T^*hHx1BY^DEbNJVB*301F8F0%>kNi21KZ=OdRSn1Ejv-u|If8+U;XmtGKpR*3
zEGb(DwL{!`9H=&ux;cI_z|+N72ASN~J_#`HAKKM1IA;~Iszoh4quwOl2iFlUni6NN
zI>J<=`u6!vo>8QmD@OI0I>aoBpFb}vD-;hSj#}ST(nM6`bwAlG=LO*mqvNVc5lfKZ
z&mQ;7Lz-a1^eBxj045B3R7+=`VlS+3^mRg6tki@>28--OVBnG245Vw(~0$?
zp}+`QJJ`UAXT$k*u2JXWPE|0ISGAyL?=fWZ!bqV{i5@-N^Jf0=$Aljr0c>^Qg;800
z4rI)_y19l^l3hjuS_L^T;_Uy5Q4Zh$gw{D$0V|HiZ>Q~_KHsBY;9;a3c?kf`&h&=z
zbvZ;_aaDHjxXNSe=?DK;dW{2gHV&3s#{yU#!y-XXPOo=F@5sW&RyTlH
zLCCT!SB>)+>h6k0`ivs+2-LUe&?P2M0D{$|ZlZs`*>k|vyjccKg0^;~19AC*Qbye2
z7#-c_+4Nm;Ce?DIRFyC(Q$MNR3u$HYml;8C98rhzIo>*_$2@gEv8;aSOmKUDMiDC;
zpG$@YtLL+IEc}(0ZsGb(^^{SfRlBS_;w9=nQ#Jkd>JZ&l9Vn!A8nLXQ@uy~J{I3q!
zw(Oc0Jcj;lEh@uoFA8a@{{G`j9wm|1bu;@)JqqX9$n?h7K&<{;SqUCXw<~0oMSIm{
zWu_wWu*5`m>FXXr&tS-9`8-F$o{v9}dMbDPEU09DWv?{}uH11h1*<|nl^H)G|r?5vS+6{}lXV5ui
z_wPKR{yUrw?Ji3^A=(kwZ?OCgP%Q4V`YL5s=~y%zB%x1_R>U|+ar~|gyxA|F5#5qS
z7xge;CXR@A`Bk-yBbqoES=f8F)Rb|3S<;Z$f9i-nEsAT>v2}!Z-Bc!LD8ihS=Lq5Y
zWrVGq@CP;FS&|$61Pohejeet2R)}7D({7?j+}F6^UywiQKpLY#UZ(#=3~Lt-9OTp$
zo&}GI>wbJfhYf2hRs9VSrqQ)JgTtHJ*k04H71JK>)LycR1G*RCZ{`)SVv2ay&RrGK
zq_2O~EZ=m4CNqu>q(gWA7IskoUB|wmjmgIvR?Bi#c3zL5RW{fjSgh9vu?FMM*4d-*
zv`H_Ilu3uzfsfp*j_6S0gl%gOctJjST|@3s+PWFZ6zB
z6WduH7zBcm{L~$wjg|QNk@VipA$9rDe%VhUraXX)UG>#zm@D%#*qnhH@
zJ^vMng)%J<>;;143cM@xZeRae(tjII%OCw5!QQRNh6jSgJ%VE@mhU4x3Z?4PQCA)3
zh`LL%^4zKjh3F;tuKfvH&3Q;d2A=>)55x}%%_kZFW$ntVkrA2)w8^rZ!C69O`QeFR
z#sNAHjg%VFp=?ZH1;vbJc({NlQ_pSbS8l)A{R8>tO0d0lo<&SVBtfH7n
zQ(dl@Wq(n`G%h<o}|v3J&u;*dIG1{EnQVuHx>
z+)Muor12AUtc6P3XI|ZrW7<5&7M)_bDm$zmfi>flvpqd;7c@(^bD)h)nt|7Re^tN=
zDCzuHIL{++?}mE(rhth){@*MTjrK^)eW*!CoeyB`%dDx6wP=m~#sC_xdEOV@??On1
z0*7?`1(6a~Y&yeRTen~2G!0ylovwg3K6Mk?A}1s$?Bt)g$M
zGJ|!#0mPcGTA&&59#A#uC52;l&5eV$=y78`wJyNC_%seAo^2SKIy!e54`h6yP$RGQ
z2>@0ReOOPfzsVzTtin1maiV9!t6>>EQM}~lI;4gbhUoXVIuteq@N@d@4zWvP8E%*T
z?gX;7K-FNo!lH42V#ZNpRktSpceexV3UMN8t^?TC87Ge_VA+Jtv|coxejk|aG1&8P
z#uu*KR`2(iv4JI$s}zex(4>2cXCDMGY3h**#ySShtcL-uErKIryZoU@98I5!%V@ES
z9|g2Nf;CIVKLcszE^Y?gp%eqN#G|5XOebt@q4}%2eyu__+!HBWvZzZKoX5@tkG21B
z71!)Q0gi>&)Xt-ljGus6u4pjfH0y3n9ak@}mf#y+0I(Jgw;Y>iIi1uel6QN#o&c|{
z7WrBMOQ2&+PM-^Hg@ivWrrL`XvM8k-Cd5v!*Yn^dMy?4dP@CMs<(5zD2nT5T1m5HC
zcqSZ6Me>KzR>5#FNHy5$pL#xO51{C8KL;_1C(%(hS?Ce;*(1GjtcYb3_L{nQrxmld
zi=|Ia*Ddmh9G$6qJa6QE17_0WB^%Bb*7^}39z(%6-Y!EGs&yd!m$unU0j#o|2k4+#
zw#<pL8bs7gUmh8XyZd~hLCl$uaSqkbISG#oo
zn_#VBf;X2e{s+idLBCSV!r3c8tPp)kQ5sg-s1`&2iycMb){c&Z8b-3T6NoVzuIA|8
z=FzT>p;Q)mm?bTDtqvATqJkY*?ol9?y)NC_gmb%rS%Yy@C)2un8a2SS%Q0&?ki@5;
zNv`by5Q|e^KcgH)tei$pOtbP=9Z+)ycBA!)Q5&2|HH*;-qk00Ge3+(ky34yvAyeEN-3r3q7HJ}wt
zq2Y!4HifKU_2R)Szl{T!p(y#`9RQ3I+>n~(#2ulnY{sd+?Rc6ZmTi3VTOx&6l=L6t
zq;I0Nzk5M)m7o`V?7yO
zQCxqK$BqNBZZrfdZ@2zVW_cF(?}3mtiwS*(l|!f4vrhI{Y_Mg5l2$Y~iTERdQ=R60
zfk?I)>@?bGUS2%|+Jr!t;;`WQW(V}>u`C-E0OIP=u1qzzDH01VT;4ddRF4w;66AhG
zj19d9E0mt0;X0y0!vv={!!XZ@2CNxd;ec6sC9>I>|6Q8XovVipwLM7UH}wx
zXpaTlUq{RuFP~jVoVXLKkqCu4Exj)Sw2Cybjud3)OB~2@j=%#E3Ru>90g%(y4KDL2
zDvi9{bOk^>1PkUxzV+2WR(t)APx<^>=-SMP%c3ec$K2@2bmyr$`FG=i=xl6}
zjo)zJ8P~^lX}!hsiAN&NjEY%V9Jr=s*E^tNOJtHsj6WfuYmZda>Y?`}>68234HDmm
z)a0By;=I|2UwY@9Z)aumnYJ`Mo-c)~{H_p>m>>)4xAhV>KX?E*kUspbtdJ>K$ba?)!wEVhmjbcW
z_#af8l*VrEvGi#NtZWKcorCjJL0Y%*C_Gt2ltl_y6kc@Js}#Q|VxvT-ws_13)J7Qp
zg+PB`Z}g|eLzJA`pWh3zHfyTKUD&A2Rn*!_FTm-^(vSN9slg{0%a-m7W$l8C
zKFe0aegIZ-#s1jikOM%hr3&mRwmJ~fT0x)tk;Ml?$Magb(TaWiaL3S>m{C|D17;=h
zY9ABBLq`%xZGnw4@EFe%r_8cEp|ml9v$HxyZ+~1pM?H1a%g~?JF+CRJuMsdU^!JoJ
z&!>+pH5~5c^H*_^
z>~X4RrG^XLfA=|~9z`zz?60(mFU{XQ-JkH!d3{prgEX|2W9~FTF-&DZjYaLW`po3pleDU%VfFAJG=b1c^=rr%mWb^h%*XU)v#`}R!H9hh_#p=k_RjJeu`PF
zaP2oM2P+b1ME3kaSjhoZ(wpUfWdLK6SvwrUdl$gcBTIUlMuEg*(e9oD&N?3uL9k0oG-4r?FBCBOsktk-GykKQ
zRS8>fK~}lIbJF)lY*Y^z185w_*Z*XKLMCNS!)T3i>Qa7djFXO9KEZF7L
zCRV))EaqANv>1U*Rw&L@9b~29Y5>H}%f%cpU+-Vb#iW`S6fm~*FqVb-%kdyqPOcDJ
zl^0I%WE{Jq%+0z9z$D7yAdg>6^ay%wI+AIV04!g$Ni{6|_cjn~UwXB?bQh$tuBQnq
zxZmytvIdUtL{gt6_XApe!Xhf8rU1l(WSONu-u#dwS@k!FWKtbTq2BX4$f!p=8+8WY
z>1qWm8+0Sx?YBJ&Vs)YeaB)#-V@EHVhPh6ToaV`pLjmPIAha5jm6V_1t5r(>K2@9O
z<>?NgsNrXdwP+@gNuta$KmQd-D;L$7H&S2o6v0CSA$;~akO_gdr`fu^;c-~;^axiT
zod;B#tAf+3swFqQ=P9|G1+dZk0P*->!^A4VL(Cs|Zq$MWK5vkf$Si=ZV`GhD04EdJ
z$FKg-0sSk
zN5_cGLqvZ&jXOq3-dqlCtvQ5#rcL+P3QN!7XIeJsut`lMoKaJSn-s7rlvqvH>
zlsLDQJ6G}e+~t6q@m(FzRV0!{3Ro>w-RFLtu4}if?kJ;=K7#7VZI|wz7&ARBbiD$`
zE;Mfd@xRLTIO#Hu75@gfX~gaG5W^C2Az1J6%dMKo*ygVy8L4%ewJjlI)~tWm8$*42
z)XU{nKbbufl4-A^oe|z!4nh2LERgtG|0>XG=rIR-{C2ZW8
zyKqG=+5*V9V~4;)id#X)9hEl_OEP2|2bg>j!qM9T1Qoj=w`ka6^Y>xApWJgt02>a>
zvu3B^_-Y(ej}n`f5QV+%)E_(p>!;XVa(d7phq4JWI3y47kds*
zXDVhHVeZpe?_EVq|1o=?t+Dft^@!LBRCRw_5o3xu9}V_b1*{@X*u73T(Nl1RFVAM=
z1|D`8hrLd+~HnMw_8~{iZ-qRwABu_%azbe_Nf3Go8gaTv^9q*W)Za$@X7<=yjG8>7wp0_3vVkHcZ34}Dw~m`
z+CZ!#Jef}axp7z>2saDHoaJd$Ut!Q(GXlg~ONUiEH$G{cPtWmqd7fM1$m;Vt!q7!|
zl|_nJ=Rv)Z`$j>Ugp%qi_}Tf;aYJF6O))3`IL4t;5g%gK-vAlQECV$74;K-~Y876M
z=cdZ#AXZMArSMH9EnjpyT*FaY6f%)v|G=K9@H`D!qe26{cNahzUrllgj3E0O~#1jj0_kQGT+1yXfzvm5`v9Q9hT?0GYo4NiUJ
zC(UJDJqQi36&|&-_^6vsz)Tj6O?q>CFHLKOXc2mUB$6BN1hiV{i!y!g@;JJI(Q8F%
z8KPw}kuH3>7!k5bSW)ioomBbz=1W-Rch
zf%4R&o>de34P7YKdK`$YHEJ0wANv%PwQ2B^IuX7ELvmbeNTuVzgoSkbwsa`apro3
zB4|kJ!X}9iAguXmNmQbK^X;p?a4PRA5x1W{J9FG;9y<{fq)uNtpr^qpIrJ+469und
zb6kVrXzABrRvVU9?Vr6D|2q~ZA_#1SVpbN<-IV3*rO8x@7
zAApuSbB6kHU95=Z#Ee^#mVcbacdjfel(w98Rn*!1yOlSsLEPVG7E8-j9e_|O^iMka
ztU}ldE|~d)$?2Wd0PCkp6p{$JGOXnSn!kCg*_1Y_WBaYc-w?d&HuC9lAiD
zzB;H?N8ebbGfe0P5;swc`kY;`Hh^(smlAcTy`-R3O5bzR5pY}&AZv@7Rw+NP17@9B
zpKVyOu9N!jYVGSmTH)Fo$abCeL9Ek;M*-po;6CgLY^CX+9Z<0%TmpPQPf=0gP+I
zwPK77A8Se-q)GJyTcDK5K#vt@K6Tn&q^Q-H<41IaUlp)yxLBinZMzW&t;1uma`R^X
zHPR`oMfz^;h#q^=>DS*9giaqG*W6+&OU#LvWVaz)>t?#os>sdTI#ey_oA~Vp8ArIP
zGH0i^BTh|H9Gs+O)m=RmhNke4MpuO_65h`V~f8TQfW>(
z+7WwPS}r{n#M&LcSj#Fw>lrw}BkDzJhRKx!L0o=A)N=fRLe}q+xIHj`HOhzoiWW|B
zBI$IZr^LjR$BZI*h3Fi?QYg=z1ZDML1*k}25Ohrp+$NC5lRW|_!}#7PZ4v0bi01ZF
z0czc=82T48?w^A}jXzdAGmsBD9n56Hb1Nksh>FxoYp1l=6)<*~#WJ${8JAy)I>
zp$X3>N@Vpf)fOf}OaTl>E2>WiUzjV>e6ai}k#XKBQ%|eTG5jA$lSjedi3)aZ_9%38
ztfEpZI)eL^PGw_S9SJWd@c={{h)Ds39u+%GG(RgE_h9U@xK0JcnzCB3|9tuvq}FC_
z$yLrHo$>wus5%euDvPC$?;$``y1FVVVne`+y%+2q8&|xFm~cW8NRuRlSgr{z1hF7u
z!-j~Uf{2QOioKzU6%}mgMX!nt3o3f`e!u_Dd-l!etgw06xA
z;E6CQp=k;Ly)#%He>*j1Q)Q~e@z;S^Huh!#XO7ncS=FL6=4gCIjKnmdn^l8vi4?5L
zct@KZ6A3ptXx6oN1KHHZQtmIC7b%LymXsD!S+nks6vd>u$%06MB33esAM^;2Rf-8H
z{!Jmq8r!f6KXfT!>paZbW+t0i24+=K82lWxO9A<n5K?&R4a3(n%BXM18$Yz`G{o_Gi9=V4n{oqFpH??yuD#NxM|yV7=R?kwW?o#Dy_JlA2joghiMXc}1gxjGh*7h$lOp|XwrN9%dK1r=2+d#i0=2?v
zVEAR*5oUP_Hvs(wJH>ykDx;}Z?n2ngRl&(ImGhMl<55@lv`fFQFOby_?iBn5{Rqa6
z5p*S#?f(9tmM=4VnLlYa!j`hk%Ah!QFA%F&na6}Z*5>n=l=5p0Fo*~z}JjX
za8RTPW`Tb5!(u|gubEcStq%vY?ATaK^sEzNNr&SQ-zp%P3o46z*CS#KxKZDJ>nM<<
zZwH^Dy#D6j90*9ykr}EVeRPad$MFt@`sEl9%Oz+V{JqCUfU7!kxhKFXhV`q?zjabf
zgRZ6Ps%M=HVik%Ju8d$L6fY5KB??}Taf*;4dU1=1d5sV&Qd#YA|JNx56Ty{sL8B4Q
zv7S3MrX+!r4bq9Hff!F&`R>X;op91Lh|xh~xz8XlYYEm8?grqB_-r66EZ+Z+$=(!T
zJj(SH^XUgE2@im??fE?=k^Y9w%kAgHR4~>+2v~4lOr^TMY)mP))Gq+B8exNMvm#FL
zief}M5d3^Az>0wix6prG+J-=q=1!FWRtEOEFwe~{S+{(q~u^pqov2yF)fO#3;jo38{?w>
zu-lenrh^$**dWtxw+pc1VKB>PidS}KL@3-bVsfVjnb<{W6$Im61??0
zFsm|h3U86r`vcZRLfg61y^DSVvyzXb^AL5KklZR(&MXr)2zPFXaf1U#ZKG!iv$*&X
z)4+dPh}A)n-v97>{t*FWF7jKJ0zo|WxBVF@?BM%>{{dKW@TJTD@U+71lq*i?0YR|n
zln0pWao9j}F5Ov>ui!D3UN2-37!zqfL
z&(!P#Y8(a&dl^gA3A6k$L}sa2Ex=M|BH(|2*}foFY1ps&lMf+iB^WOA@Ii;g6cG1V
zRGSV5u!gex=$z^t31o?CT}Ohr>9`n64j6dBP=LkKQ)Q~rIf6FYBs0=qcM^XY=XE)i
z^G(8X{Q>D67Zke=0gXK>lSb8}FSNza*8y3v3M#mf`J2MDL}}-8w6UG!43Caj}W$kGVbJLFzfp*iAj}n1WA+Z|9C`n