From 888b3caf7aa597427a4ccc9e0c3ca955ec2a4697 Mon Sep 17 00:00:00 2001 From: ranchlai Date: Wed, 25 Aug 2021 14:23:30 +0800 Subject: [PATCH] Add Speaker model examples (#5337) * added speaker * update readme --- .../examples/speaker/.pre-commit-config.yaml | 36 + PaddleAudio/examples/speaker/.style.yapf | 3 + PaddleAudio/examples/speaker/README.md | 124 + .../speaker/data/speaker_set_vox12.txt | 7205 +++++++++++++++++ PaddleAudio/examples/speaker/data/stat.pd | Bin 0 -> 920 bytes PaddleAudio/examples/speaker/dataset.py | 214 + .../speaker/egs/ecapa-tdnn/config.yaml | 97 + .../examples/speaker/egs/resnet/config.yaml | 98 + PaddleAudio/examples/speaker/losses.py | 197 + PaddleAudio/examples/speaker/metrics.py | 93 + .../examples/speaker/models/__init__.py | 17 + .../examples/speaker/models/ecapa_tdnn.py | 417 + .../examples/speaker/models/resnet_blocks.py | 132 + .../examples/speaker/models/resnet_se34.py | 151 + .../examples/speaker/models/resnet_se34v2.py | 152 + PaddleAudio/examples/speaker/requirements.txt | 2 + PaddleAudio/examples/speaker/test.py | 135 + PaddleAudio/examples/speaker/train.py | 301 + PaddleAudio/examples/speaker/utils.py | 195 + 19 files changed, 9569 insertions(+) create mode 100644 PaddleAudio/examples/speaker/.pre-commit-config.yaml create mode 100644 PaddleAudio/examples/speaker/.style.yapf create mode 100644 PaddleAudio/examples/speaker/README.md create mode 100644 PaddleAudio/examples/speaker/data/speaker_set_vox12.txt create mode 100644 PaddleAudio/examples/speaker/data/stat.pd create mode 100644 PaddleAudio/examples/speaker/dataset.py create mode 100644 PaddleAudio/examples/speaker/egs/ecapa-tdnn/config.yaml create mode 100644 PaddleAudio/examples/speaker/egs/resnet/config.yaml create mode 100644 PaddleAudio/examples/speaker/losses.py create mode 100644 PaddleAudio/examples/speaker/metrics.py create mode 100644 PaddleAudio/examples/speaker/models/__init__.py create mode 100644 PaddleAudio/examples/speaker/models/ecapa_tdnn.py create mode 
100644 PaddleAudio/examples/speaker/models/resnet_blocks.py create mode 100644 PaddleAudio/examples/speaker/models/resnet_se34.py create mode 100644 PaddleAudio/examples/speaker/models/resnet_se34v2.py create mode 100644 PaddleAudio/examples/speaker/requirements.txt create mode 100644 PaddleAudio/examples/speaker/test.py create mode 100644 PaddleAudio/examples/speaker/train.py create mode 100644 PaddleAudio/examples/speaker/utils.py diff --git a/PaddleAudio/examples/speaker/.pre-commit-config.yaml b/PaddleAudio/examples/speaker/.pre-commit-config.yaml new file mode 100644 index 00000000..b6a299ba --- /dev/null +++ b/PaddleAudio/examples/speaker/.pre-commit-config.yaml @@ -0,0 +1,36 @@ +repos: +- repo: https://github.com/PaddlePaddle/mirrors-yapf.git + rev: 0d79c0c469bab64f7229c9aca2b1186ef47f0e37 + hooks: + - id: yapf + files: \.py$ +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: a11d9314b22d8f8c7556443875b731ef05965464 + hooks: + - id: check-merge-conflict + - id: check-symlinks + - id: detect-private-key + files: (?!.*paddle)^.*$ + - id: end-of-file-fixer + files: \.md$ + - id: trailing-whitespace + files: \.md$ +- repo: https://github.com/Lucas-C/pre-commit-hooks + rev: v1.0.1 + hooks: + - id: forbid-crlf + files: \.md$ + - id: remove-crlf + files: \.md$ + - id: forbid-tabs + files: \.md$ + - id: remove-tabs + files: \.md$ +- repo: local + hooks: + - id: clang-format + name: clang-format + description: Format files with ClangFormat + entry: bash .clang_format.hook -i + language: system + files: \.(c|cc|cxx|cpp|cu|h|hpp|hxx|cuh|proto)$ diff --git a/PaddleAudio/examples/speaker/.style.yapf b/PaddleAudio/examples/speaker/.style.yapf new file mode 100644 index 00000000..4741fb4f --- /dev/null +++ b/PaddleAudio/examples/speaker/.style.yapf @@ -0,0 +1,3 @@ +[style] +based_on_style = pep8 +column_limit = 80 diff --git a/PaddleAudio/examples/speaker/README.md b/PaddleAudio/examples/speaker/README.md new file mode 100644 index 00000000..2b044ce6 --- 
/dev/null +++ b/PaddleAudio/examples/speaker/README.md @@ -0,0 +1,124 @@ +# Speaker verification using ResnetSE and ECAPA-TDNN + +## Introduction +In this example, we demonstrate how to use PaddleAudio to train two types of networks for speaker verification. +The networks supported here are +- Resnet34 with Squeeze-and-excite block \[1\] to adaptively re-weight the feature maps. +- ECAPA-TDNN \[2\] + +## Requirements +Install the requirements via +``` +# install paddleaudio +git clone https://github.com/PaddlePaddle/models.git +cd models/PaddleAudio +pip install -e . +``` +Then install additional requirements by +``` +cd examples/speaker +pip install -r requirements.txt +``` + +## Training +### Training datasets +Following from this example and this example, we use the dev split of [VoxCeleb 1](https://www.robots.ox.ac.uk/~vgg/data/voxceleb/vox1.html), which consists of `1,211` speakers, and the dev split of [VoxCeleb 2](https://www.robots.ox.ac.uk/~vgg/data/voxceleb/vox2.html), consisting of `5,994` speakers, for training. Thus there are `7,502` speakers in total in our training set. + +Please download the two datasets from the [official website](https://www.robots.ox.ac.uk/~vgg/data/voxceleb) and unzip all audio into a folder, e.g., `./data/voxceleb/`. Make sure there are `7502` subfolders with prefix `id1****` under the folder. You don't need to further process the data because all data processing such as adding noise / reverberation / speed perturbation will be done on-the-fly. However, to speed up audio decoding, you can manually convert the m4a files in VoxCeleb 2 to wav file format, at the expense of using more storage. 
+ +Finally, create a txt file that contains the list of audios for training by +``` +cd ./data/voxceleb/ +find `pwd`/ -type f > vox_files.txt +``` +### Augmentation datasets +The following datasets are required for dataset augmentation +- [Room Impulse Response and Noise Database](https://openslr.org/28/) +- [MUSAN](https://openslr.org/17/) + +For the RIR dataset, you must list all audio files under the folder `RIRS_NOISES/simulated_rirs/` into a text file, e.g., data/rir.list, and configure it as rir_path in the `config.yaml` file. + +Likewise, you have to configure the following fields in the config file for noise augmentation +``` yaml +muse_speech: #replace with your actual path +muse_speech_srn_high: 15.0 +muse_speech_srn_low: 12.0 +muse_music: #replace with your actual path +muse_music_srn_high: 15.0 +muse_music_srn_low: 5.0 +muse_noise: #replace with your actual path +muse_noise_srn_high: 15 +muse_noise_srn_low: 5.0 +``` + +To train your model from scratch, first create a folder (workspace) by +``` bash +cd egs +mkdir <your_workspace> +cd <your_workspace> +cp ../resnet/config.yaml . #Copy an example config to your workspace +``` +Then change the config file accordingly to make sure all audio files can be correctly located (including the files used for data augmentation). Also you can change the training and model hyper-parameters to suit your need. + +Finally start your training by + +``` bash +python ../../train.py -c config.yaml -d gpu:0 +``` + +## Testing +### Testing datasets +The testing split of VoxCeleb 1 is used for measuring the performance of speaker verification during training and after the training completes. You will need to download the data and unzip it into a folder, e.g., `./data/voxceleb/test/`. + +Then download the text files which list utterance pairs to compare and the true labels indicating whether the utterances come from the same speaker. There are multiple trials and we will use [veri_test2](https://www.robots.ox.ac.uk/~vgg/data/voxceleb/meta/veri_test2.txt). 
+ +To start testing, first download the checkpoints for resnet or ecapa-tdnn, + +| checkpoint |size| eer | +| --------------- | --------------- | --------------- | +| [ResnetSE34 + SAP + CMSoftmax](https://bj.bcebos.com/paddleaudio/models/speaker/resnetse34_epoch92_eer0.00931.pdparams) |26MB | 0.93%| +| [ecapa-tdnn + AAMSoftmax ](https://bj.bcebos.com/paddleaudio/models/speaker/tdnn_amsoftmax_epoch51_eer0.011.pdparams)| 80MB |1.10%| + +Then prepare the test dataset as described in [Testing datasets](#test_dataset), and set the following paths in the config file, +``` yaml +mean_std_file: ../../data/stat.pd +test_list: ../../data/veri_test2.txt +test_folder: ../../data/voxceleb1/ +``` + +To compute the eer using resnet, run: + +``` bash +cd egs/resnet/ +python ../../test.py -w <checkpoint_path> -c config.yaml -d gpu:0 +``` +which will result in eer 0.00931. + +For ecapa-tdnn, run: +``` bash +cd egs/ecapa-tdnn/ +python ../../test.py -w <checkpoint_path> -c config.yaml -d gpu:0 +``` +which gives you eer 0.0105. + +## Results +We compare our results with [voxceleb_trainer](https://github.com/clovaai/voxceleb_trainer). 
+ +### Pretrained model of voxceleb_trainer +The test list is veri_test2.txt, which can be downloaded here: [VoxCeleb1 (cleaned)](https://www.robots.ox.ac.uk/~vgg/data/voxceleb/meta/veri_test2.txt) + +| model |config|checkpoint |eval frames| eer | +| --------------- | --------------- | --------------- |--------------- |--------------- | +| ResnetSE34 + ASP + softmaxproto| - | [baseline_v2_ap](http://www.robots.ox.ac.uk/~joon/data/baseline_v2_ap.model)|400|1.06%| +| ResnetSE34 + ASP + softmaxproto| - | [baseline_v2_ap](http://www.robots.ox.ac.uk/~joon/data/baseline_v2_ap.model)|all|1.18%| + +### This example +| model |config|checkpoint |eval frames| eer | +| --------------- | --------------- | --------------- |--------------- |--------------- | +| ResnetSE34 + SAP + CMSoftmax| [config.yaml](./egs/resnet/config.yaml) |[checkpoint](https://bj.bcebos.com/paddleaudio/models/speaker/resnetse34_epoch92_eer0.00931.pdparams) | all|0.93%| +| ECAPA-TDNN + AAMSoftmax | [config.yaml](./egs/ecapa-tdnn/config.yaml) | [checkpoint](https://bj.bcebos.com/paddleaudio/models/speaker/tdnn_amsoftmax_epoch51_eer0.011.pdparams) | all|1.10%| + +## References +[1] Hu J, Shen L, Sun G. Squeeze-and-excitation networks[C]//Proceedings of the IEEE conference on computer vision and pattern recognition. 2018: 7132-7141 + +[2] Desplanques B, Thienpondt J, Demuynck K. Ecapa-tdnn: Emphasized channel attention, propagation and aggregation in tdnn based speaker verification[J]. arXiv preprint arXiv:2005.07143, 2020. 
diff --git a/PaddleAudio/examples/speaker/data/speaker_set_vox12.txt b/PaddleAudio/examples/speaker/data/speaker_set_vox12.txt new file mode 100644 index 00000000..13446713 --- /dev/null +++ b/PaddleAudio/examples/speaker/data/speaker_set_vox12.txt @@ -0,0 +1,7205 @@ +id00012 +id00015 +id00016 +id00018 +id00019 +id00020 +id00021 +id00022 +id00024 +id00025 +id00026 +id00027 +id00028 +id00029 +id00032 +id00033 +id00035 +id00036 +id00039 +id00040 +id00042 +id00043 +id00044 +id00046 +id00047 +id00049 +id00050 +id00051 +id00052 +id00053 +id00055 +id00056 +id00058 +id00059 +id00060 +id00062 +id00063 +id00064 +id00066 +id00067 +id00068 +id00069 +id00070 +id00071 +id00073 +id00074 +id00075 +id00076 +id00078 +id00079 +id00080 +id00082 +id00083 +id00084 +id00085 +id00086 +id00087 +id00088 +id00089 +id00090 +id00091 +id00092 +id00096 +id00097 +id00098 +id00099 +id00100 +id00103 +id00104 +id00105 +id00109 +id00111 +id00114 +id00116 +id00117 +id00126 +id00127 +id00128 +id00129 +id00134 +id00137 +id00144 +id00145 +id00146 +id00149 +id00151 +id00155 +id00159 +id00161 +id00163 +id00164 +id00165 +id00166 +id00167 +id00168 +id00169 +id00170 +id00171 +id00172 +id00173 +id00175 +id00176 +id00177 +id00179 +id00180 +id00181 +id00183 +id00184 +id00185 +id00186 +id00187 +id00188 +id00189 +id00190 +id00191 +id00192 +id00195 +id00197 +id00198 +id00201 +id00202 +id00203 +id00206 +id00220 +id00221 +id00222 +id00223 +id00225 +id00231 +id00232 +id00234 +id00235 +id00236 +id00238 +id00239 +id00241 +id00242 +id00243 +id00244 +id00245 +id00246 +id00249 +id00251 +id00253 +id00254 +id00255 +id00257 +id00258 +id00259 +id00261 +id00262 +id00264 +id00265 +id00266 +id00268 +id00269 +id00270 +id00271 +id00272 +id00274 +id00275 +id00276 +id00277 +id00279 +id00281 +id00282 +id00283 +id00285 +id00286 +id00287 +id00288 +id00289 +id00290 +id00291 +id00292 +id00296 +id00297 +id00298 +id00299 +id00300 +id00301 +id00302 +id00303 +id00304 +id00305 +id00306 +id00309 +id00311 +id00312 +id00313 +id00314 +id00316 
+id00317 +id00318 +id00319 +id00320 +id00322 +id00323 +id00324 +id00325 +id00328 +id00329 +id00330 +id00331 +id00332 +id00336 +id00341 +id00342 +id00343 +id00344 +id00345 +id00346 +id00347 +id00348 +id00350 +id00351 +id00352 +id00353 +id00354 +id00355 +id00356 +id00357 +id00358 +id00359 +id00361 +id00363 +id00365 +id00366 +id00368 +id00371 +id00372 +id00373 +id00374 +id00376 +id00377 +id00378 +id00379 +id00380 +id00381 +id00383 +id00385 +id00386 +id00387 +id00388 +id00389 +id00390 +id00391 +id00393 +id00395 +id00397 +id00398 +id00399 +id00401 +id00402 +id00403 +id00405 +id00406 +id00407 +id00409 +id00411 +id00412 +id00413 +id00414 +id00415 +id00416 +id00417 +id00418 +id00420 +id00421 +id00422 +id00423 +id00425 +id00426 +id00427 +id00428 +id00429 +id00430 +id00431 +id00432 +id00434 +id00435 +id00436 +id00437 +id00438 +id00444 +id00445 +id00448 +id00449 +id00450 +id00456 +id00457 +id00458 +id00459 +id00460 +id00461 +id00462 +id00463 +id00464 +id00465 +id00467 +id00468 +id00469 +id00472 +id00473 +id00475 +id00476 +id00478 +id00479 +id00481 +id00482 +id00483 +id00484 +id00486 +id00488 +id00489 +id00490 +id00491 +id00492 +id00494 +id00495 +id00496 +id00498 +id00500 +id00501 +id00502 +id00503 +id00504 +id00505 +id00506 +id00507 +id00508 +id00509 +id00510 +id00512 +id00513 +id00514 +id00515 +id00516 +id00517 +id00518 +id00519 +id00520 +id00521 +id00522 +id00524 +id00525 +id00526 +id00527 +id00528 +id00529 +id00530 +id00531 +id00534 +id00535 +id00539 +id00542 +id00543 +id00544 +id00545 +id00546 +id00548 +id00550 +id00552 +id00553 +id00554 +id00555 +id00557 +id00558 +id00559 +id00560 +id00561 +id00566 +id00567 +id00568 +id00569 +id00570 +id00571 +id00573 +id00574 +id00575 +id00577 +id00578 +id00579 +id00581 +id00582 +id00584 +id00585 +id00586 +id00587 +id00588 +id00589 +id00591 +id00592 +id00593 +id00594 +id00596 +id00597 +id00607 +id00608 +id00610 +id00611 +id00612 +id00613 +id00614 +id00615 +id00616 +id00617 +id00618 +id00620 +id00621 +id00622 +id00623 +id00624 +id00628 
+id00629 +id00631 +id00632 +id00633 +id00634 +id00636 +id00637 +id00638 +id00641 +id00642 +id00643 +id00644 +id00645 +id00647 +id00648 +id00649 +id00650 +id00651 +id00652 +id00653 +id00654 +id00655 +id00656 +id00657 +id00659 +id00663 +id00664 +id00665 +id00666 +id00667 +id00668 +id00669 +id00670 +id00672 +id00673 +id00674 +id00675 +id00676 +id00677 +id00678 +id00680 +id00681 +id00685 +id00686 +id00687 +id00690 +id00691 +id00692 +id00693 +id00694 +id00695 +id00696 +id00699 +id00700 +id00701 +id00707 +id00708 +id00709 +id00710 +id00711 +id00712 +id00714 +id00715 +id00717 +id00721 +id00723 +id00724 +id00725 +id00726 +id00727 +id00729 +id00730 +id00731 +id00732 +id00735 +id00736 +id00739 +id00740 +id00741 +id00742 +id00745 +id00746 +id00747 +id00750 +id00752 +id00753 +id00754 +id00755 +id00758 +id00759 +id00760 +id00761 +id00762 +id00763 +id00765 +id00766 +id00769 +id00770 +id00773 +id00774 +id00775 +id00776 +id00777 +id00778 +id00779 +id00780 +id00781 +id00782 +id00783 +id00784 +id00785 +id00786 +id00787 +id00788 +id00792 +id00794 +id00796 +id00797 +id00799 +id00801 +id00802 +id00803 +id00804 +id00805 +id00806 +id00809 +id00810 +id00811 +id00813 +id00814 +id00815 +id00816 +id00818 +id00820 +id00823 +id00824 +id00825 +id00826 +id00827 +id00828 +id00829 +id00830 +id00831 +id00832 +id00833 +id00835 +id00841 +id00842 +id00843 +id00844 +id00846 +id00848 +id00851 +id00852 +id00857 +id00859 +id00860 +id00863 +id00865 +id00867 +id00868 +id00869 +id00875 +id00876 +id00879 +id00880 +id00881 +id00882 +id00883 +id00884 +id00885 +id00886 +id00888 +id00890 +id00891 +id00892 +id00893 +id00894 +id00895 +id00897 +id00898 +id00899 +id00901 +id00902 +id00903 +id00904 +id00905 +id00906 +id00907 +id00908 +id00909 +id00910 +id00911 +id00917 +id00918 +id00919 +id00920 +id00922 +id00923 +id00924 +id00925 +id00927 +id00928 +id00929 +id00930 +id00931 +id00933 +id00934 +id00935 +id00937 +id00938 +id00939 +id00941 +id00942 +id00943 +id00944 +id00945 +id00946 +id00947 +id00948 +id00959 +id00960 
+id00963 +id00964 +id00965 +id00966 +id00967 +id00969 +id00971 +id00974 +id00975 +id00976 +id00978 +id00979 +id00980 +id00981 +id00982 +id00983 +id00984 +id00985 +id00987 +id00989 +id00990 +id00991 +id00992 +id00993 +id00994 +id00995 +id00996 +id00997 +id00999 +id01001 +id01002 +id01003 +id01004 +id01005 +id01006 +id01007 +id01008 +id01010 +id01011 +id01014 +id01015 +id01016 +id01017 +id01018 +id01021 +id01023 +id01024 +id01025 +id01026 +id01028 +id01029 +id01030 +id01031 +id01035 +id01036 +id01037 +id01038 +id01039 +id01040 +id01042 +id01043 +id01044 +id01045 +id01047 +id01048 +id01049 +id01050 +id01051 +id01052 +id01053 +id01054 +id01056 +id01057 +id01058 +id01060 +id01061 +id01062 +id01063 +id01065 +id01067 +id01068 +id01069 +id01070 +id01071 +id01073 +id01074 +id01075 +id01076 +id01078 +id01080 +id01081 +id01083 +id01084 +id01085 +id01090 +id01091 +id01092 +id01093 +id01094 +id01095 +id01096 +id01097 +id01098 +id01099 +id01100 +id01101 +id01102 +id01103 +id01104 +id01105 +id01108 +id01109 +id01110 +id01111 +id01112 +id01113 +id01115 +id01116 +id01117 +id01119 +id01120 +id01121 +id01122 +id01123 +id01124 +id01125 +id01126 +id01129 +id01130 +id01132 +id01139 +id01140 +id01141 +id01142 +id01149 +id01150 +id01154 +id01155 +id01156 +id01157 +id01158 +id01159 +id01160 +id01161 +id01162 +id01163 +id01164 +id01165 +id01166 +id01167 +id01168 +id01169 +id01170 +id01171 +id01172 +id01173 +id01174 +id01175 +id01178 +id01179 +id01182 +id01183 +id01184 +id01185 +id01187 +id01188 +id01189 +id01190 +id01191 +id01192 +id01193 +id01196 +id01199 +id01200 +id01201 +id01203 +id01204 +id01205 +id01206 +id01207 +id01209 +id01210 +id01211 +id01212 +id01213 +id01214 +id01215 +id01216 +id01217 +id01218 +id01220 +id01221 +id01222 +id01223 +id01225 +id01227 +id01229 +id01230 +id01231 +id01232 +id01234 +id01235 +id01236 +id01237 +id01238 +id01239 +id01240 +id01241 +id01243 +id01244 +id01245 +id01246 +id01248 +id01249 +id01251 +id01252 +id01253 +id01254 +id01255 +id01256 +id01257 +id01258 
+id01261 +id01262 +id01263 +id01264 +id01265 +id01266 +id01268 +id01269 +id01270 +id01271 +id01272 +id01273 +id01274 +id01275 +id01276 +id01279 +id01280 +id01281 +id01284 +id01285 +id01286 +id01288 +id01289 +id01290 +id01291 +id01292 +id01293 +id01294 +id01295 +id01296 +id01300 +id01301 +id01303 +id01304 +id01305 +id01306 +id01309 +id01313 +id01314 +id01315 +id01316 +id01317 +id01323 +id01325 +id01326 +id01327 +id01328 +id01329 +id01330 +id01331 +id01332 +id01334 +id01335 +id01337 +id01338 +id01339 +id01340 +id01341 +id01342 +id01344 +id01346 +id01347 +id01348 +id01349 +id01350 +id01351 +id01352 +id01353 +id01354 +id01355 +id01356 +id01358 +id01359 +id01361 +id01362 +id01363 +id01364 +id01365 +id01367 +id01368 +id01369 +id01370 +id01371 +id01372 +id01373 +id01374 +id01376 +id01379 +id01380 +id01384 +id01385 +id01386 +id01387 +id01388 +id01389 +id01391 +id01392 +id01394 +id01395 +id01396 +id01397 +id01399 +id01402 +id01403 +id01404 +id01405 +id01406 +id01407 +id01408 +id01409 +id01410 +id01411 +id01413 +id01418 +id01420 +id01422 +id01423 +id01424 +id01425 +id01426 +id01429 +id01430 +id01431 +id01432 +id01433 +id01434 +id01436 +id01438 +id01439 +id01445 +id01447 +id01448 +id01449 +id01450 +id01451 +id01452 +id01455 +id01456 +id01457 +id01458 +id01459 +id01461 +id01464 +id01466 +id01468 +id01470 +id01471 +id01476 +id01487 +id01488 +id01489 +id01490 +id01491 +id01492 +id01493 +id01497 +id01498 +id01499 +id01500 +id01501 +id01502 +id01503 +id01504 +id01505 +id01508 +id01510 +id01511 +id01512 +id01513 +id01514 +id01515 +id01516 +id01521 +id01522 +id01523 +id01524 +id01527 +id01528 +id01529 +id01530 +id01531 +id01532 +id01533 +id01534 +id01535 +id01536 +id01539 +id01540 +id01542 +id01543 +id01544 +id01545 +id01546 +id01547 +id01548 +id01549 +id01550 +id01551 +id01552 +id01554 +id01555 +id01556 +id01557 +id01558 +id01560 +id01561 +id01564 +id01566 +id01568 +id01569 +id01570 +id01571 +id01572 +id01574 +id01578 +id01579 +id01580 +id01581 +id01582 +id01583 +id01584 +id01585 
+id01589 +id01590 +id01591 +id01592 +id01594 +id01595 +id01596 +id01597 +id01598 +id01599 +id01605 +id01608 +id01609 +id01610 +id01611 +id01613 +id01614 +id01615 +id01617 +id01619 +id01620 +id01624 +id01625 +id01626 +id01627 +id01628 +id01629 +id01630 +id01633 +id01634 +id01637 +id01639 +id01640 +id01641 +id01643 +id01646 +id01648 +id01649 +id01650 +id01652 +id01653 +id01654 +id01655 +id01656 +id01657 +id01658 +id01659 +id01660 +id01661 +id01662 +id01663 +id01664 +id01666 +id01667 +id01668 +id01669 +id01671 +id01672 +id01674 +id01676 +id01677 +id01678 +id01680 +id01681 +id01682 +id01683 +id01684 +id01685 +id01686 +id01687 +id01688 +id01689 +id01690 +id01691 +id01692 +id01693 +id01694 +id01695 +id01696 +id01697 +id01699 +id01708 +id01710 +id01711 +id01713 +id01716 +id01717 +id01718 +id01719 +id01720 +id01721 +id01722 +id01723 +id01725 +id01726 +id01727 +id01728 +id01729 +id01733 +id01734 +id01735 +id01736 +id01738 +id01739 +id01741 +id01742 +id01743 +id01745 +id01746 +id01748 +id01749 +id01750 +id01751 +id01753 +id01754 +id01755 +id01756 +id01758 +id01759 +id01760 +id01761 +id01762 +id01763 +id01764 +id01765 +id01766 +id01767 +id01768 +id01769 +id01770 +id01772 +id01773 +id01774 +id01775 +id01776 +id01777 +id01778 +id01779 +id01780 +id01781 +id01782 +id01783 +id01785 +id01786 +id01787 +id01788 +id01792 +id01793 +id01794 +id01795 +id01796 +id01797 +id01798 +id01799 +id01800 +id01801 +id01802 +id01803 +id01805 +id01806 +id01808 +id01809 +id01821 +id01823 +id01826 +id01827 +id01829 +id01830 +id01832 +id01833 +id01835 +id01836 +id01837 +id01838 +id01840 +id01841 +id01842 +id01848 +id01849 +id01852 +id01853 +id01854 +id01856 +id01857 +id01858 +id01859 +id01861 +id01862 +id01864 +id01866 +id01868 +id01869 +id01870 +id01873 +id01874 +id01875 +id01876 +id01877 +id01878 +id01879 +id01880 +id01881 +id01883 +id01885 +id01886 +id01887 +id01888 +id01889 +id01890 +id01891 +id01893 +id01894 +id01895 +id01898 +id01899 +id01900 +id01904 +id01905 +id01906 +id01907 +id01908 +id01909 
+id01910 +id01912 +id01913 +id01915 +id01916 +id01918 +id01919 +id01920 +id01921 +id01925 +id01927 +id01928 +id01931 +id01932 +id01933 +id01934 +id01937 +id01939 +id01941 +id01945 +id01946 +id01947 +id01948 +id01949 +id01951 +id01953 +id01955 +id01959 +id01960 +id01961 +id01962 +id01963 +id01964 +id01965 +id01966 +id01967 +id01969 +id01971 +id01972 +id01973 +id01974 +id01975 +id01976 +id01979 +id01981 +id01982 +id01985 +id01986 +id01987 +id01988 +id01990 +id01992 +id01993 +id01994 +id01995 +id01996 +id01997 +id01999 +id02000 +id02001 +id02004 +id02005 +id02006 +id02007 +id02008 +id02009 +id02010 +id02011 +id02013 +id02014 +id02015 +id02016 +id02017 +id02018 +id02020 +id02021 +id02022 +id02023 +id02024 +id02030 +id02032 +id02033 +id02034 +id02035 +id02036 +id02037 +id02038 +id02039 +id02040 +id02041 +id02043 +id02045 +id02047 +id02048 +id02049 +id02051 +id02052 +id02053 +id02055 +id02058 +id02059 +id02060 +id02061 +id02062 +id02067 +id02068 +id02070 +id02071 +id02072 +id02073 +id02074 +id02075 +id02077 +id02078 +id02079 +id02080 +id02081 +id02083 +id02084 +id02085 +id02087 +id02088 +id02089 +id02090 +id02091 +id02092 +id02093 +id02094 +id02095 +id02096 +id02097 +id02098 +id02099 +id02100 +id02101 +id02102 +id02103 +id02104 +id02105 +id02106 +id02108 +id02109 +id02110 +id02111 +id02112 +id02113 +id02114 +id02115 +id02116 +id02119 +id02120 +id02121 +id02122 +id02123 +id02124 +id02133 +id02134 +id02139 +id02140 +id02141 +id02147 +id02148 +id02149 +id02150 +id02151 +id02152 +id02153 +id02155 +id02156 +id02157 +id02158 +id02159 +id02160 +id02162 +id02163 +id02164 +id02166 +id02168 +id02172 +id02173 +id02174 +id02175 +id02176 +id02177 +id02178 +id02179 +id02180 +id02183 +id02185 +id02186 +id02187 +id02188 +id02189 +id02191 +id02193 +id02194 +id02196 +id02197 +id02201 +id02202 +id02203 +id02204 +id02205 +id02206 +id02208 +id02209 +id02210 +id02211 +id02212 +id02213 +id02214 +id02215 +id02216 +id02219 +id02220 +id02222 +id02223 +id02226 +id02228 +id02229 +id02230 +id02231 
+id02233 +id02234 +id02235 +id02236 +id02239 +id02240 +id02242 +id02243 +id02245 +id02247 +id02249 +id02252 +id02253 +id02254 +id02255 +id02257 +id02258 +id02260 +id02262 +id02263 +id02264 +id02266 +id02267 +id02268 +id02270 +id02271 +id02273 +id02275 +id02276 +id02281 +id02282 +id02287 +id02288 +id02289 +id02291 +id02292 +id02293 +id02294 +id02295 +id02296 +id02297 +id02298 +id02299 +id02300 +id02301 +id02302 +id02303 +id02304 +id02306 +id02310 +id02311 +id02312 +id02314 +id02315 +id02316 +id02318 +id02322 +id02323 +id02325 +id02327 +id02329 +id02331 +id02332 +id02338 +id02339 +id02340 +id02341 +id02342 +id02343 +id02344 +id02346 +id02348 +id02349 +id02350 +id02351 +id02353 +id02355 +id02356 +id02357 +id02358 +id02359 +id02361 +id02362 +id02363 +id02364 +id02365 +id02367 +id02368 +id02381 +id02384 +id02388 +id02389 +id02390 +id02391 +id02392 +id02395 +id02409 +id02410 +id02411 +id02413 +id02414 +id02416 +id02418 +id02420 +id02421 +id02426 +id02432 +id02436 +id02437 +id02438 +id02439 +id02440 +id02441 +id02444 +id02446 +id02448 +id02449 +id02450 +id02452 +id02453 +id02455 +id02457 +id02458 +id02459 +id02460 +id02461 +id02462 +id02463 +id02464 +id02466 +id02467 +id02469 +id02471 +id02472 +id02473 +id02475 +id02477 +id02479 +id02482 +id02484 +id02485 +id02486 +id02488 +id02489 +id02490 +id02493 +id02494 +id02495 +id02496 +id02497 +id02499 +id02501 +id02502 +id02505 +id02508 +id02509 +id02511 +id02513 +id02515 +id02530 +id02531 +id02532 +id02534 +id02535 +id02537 +id02538 +id02539 +id02540 +id02544 +id02545 +id02546 +id02547 +id02553 +id02554 +id02555 +id02558 +id02561 +id02562 +id02563 +id02565 +id02566 +id02567 +id02568 +id02569 +id02570 +id02572 +id02573 +id02574 +id02575 +id02578 +id02579 +id02580 +id02583 +id02584 +id02585 +id02586 +id02587 +id02590 +id02591 +id02592 +id02593 +id02596 +id02597 +id02598 +id02599 +id02607 +id02608 +id02609 +id02613 +id02615 +id02617 +id02619 +id02622 +id02623 +id02625 +id02627 +id02628 +id02629 +id02631 +id02633 +id02634 +id02636 
+id02638 +id02639 +id02640 +id02641 +id02642 +id02643 +id02647 +id02650 +id02653 +id02654 +id02655 +id02656 +id02658 +id02659 +id02660 +id02661 +id02662 +id02665 +id02666 +id02667 +id02668 +id02669 +id02672 +id02673 +id02680 +id02683 +id02684 +id02687 +id02693 +id02695 +id02696 +id02698 +id02700 +id02702 +id02703 +id02705 +id02706 +id02707 +id02710 +id02711 +id02713 +id02714 +id02715 +id02719 +id02720 +id02721 +id02722 +id02724 +id02728 +id02729 +id02730 +id02731 +id02733 +id02734 +id02735 +id02736 +id02738 +id02739 +id02740 +id02746 +id02747 +id02751 +id02759 +id02760 +id02761 +id02764 +id02765 +id02766 +id02780 +id02781 +id02783 +id02784 +id02786 +id02787 +id02790 +id02791 +id02793 +id02795 +id02805 +id02806 +id02807 +id02808 +id02809 +id02810 +id02812 +id02815 +id02816 +id02817 +id02818 +id02820 +id02821 +id02823 +id02824 +id02825 +id02827 +id02829 +id02830 +id02832 +id02833 +id02836 +id02837 +id02838 +id02839 +id02840 +id02844 +id02845 +id02846 +id02847 +id02848 +id02851 +id02853 +id02854 +id02857 +id02858 +id02863 +id02864 +id02865 +id02866 +id02869 +id02871 +id02872 +id02873 +id02874 +id02875 +id02876 +id02877 +id02878 +id02879 +id02880 +id02881 +id02882 +id02883 +id02884 +id02886 +id02887 +id02888 +id02889 +id02890 +id02891 +id02892 +id02893 +id02896 +id02897 +id02898 +id02899 +id02900 +id02901 +id02902 +id02903 +id02904 +id02905 +id02908 +id02909 +id02911 +id02912 +id02914 +id02916 +id02917 +id02918 +id02920 +id02924 +id02926 +id02928 +id02935 +id02936 +id02939 +id02940 +id02942 +id02943 +id02944 +id02945 +id02946 +id02948 +id02950 +id02952 +id02953 +id02954 +id02955 +id02956 +id02957 +id02958 +id02959 +id02961 +id02963 +id02964 +id02965 +id02978 +id02979 +id02980 +id02981 +id02982 +id02983 +id02984 +id02986 +id02987 +id02988 +id02989 +id02991 +id02993 +id02995 +id02996 +id02998 +id02999 +id03000 +id03001 +id03002 +id03003 +id03004 +id03005 +id03006 +id03007 +id03009 +id03013 +id03014 +id03015 +id03016 +id03017 +id03018 +id03020 +id03026 +id03027 +id03028 
+id03029 +id03031 +id03037 +id03038 +id03039 +id03040 +id03042 +id03043 +id03056 +id03057 +id03059 +id03060 +id03064 +id03066 +id03067 +id03070 +id03071 +id03073 +id03074 +id03075 +id03077 +id03078 +id03079 +id03082 +id03084 +id03087 +id03088 +id03089 +id03090 +id03091 +id03092 +id03098 +id03099 +id03100 +id03101 +id03102 +id03103 +id03107 +id03109 +id03110 +id03111 +id03112 +id03115 +id03118 +id03119 +id03123 +id03124 +id03125 +id03129 +id03133 +id03134 +id03136 +id03137 +id03145 +id03146 +id03147 +id03148 +id03149 +id03150 +id03158 +id03168 +id03174 +id03175 +id03176 +id03177 +id03179 +id03180 +id03182 +id03183 +id03184 +id03187 +id03188 +id03189 +id03190 +id03192 +id03193 +id03194 +id03195 +id03196 +id03198 +id03199 +id03200 +id03201 +id03202 +id03203 +id03205 +id03207 +id03209 +id03210 +id03211 +id03212 +id03213 +id03214 +id03215 +id03216 +id03220 +id03221 +id03223 +id03226 +id03227 +id03228 +id03229 +id03230 +id03232 +id03236 +id03238 +id03240 +id03241 +id03242 +id03243 +id03244 +id03245 +id03246 +id03247 +id03248 +id03249 +id03250 +id03253 +id03258 +id03260 +id03261 +id03262 +id03263 +id03264 +id03265 +id03266 +id03267 +id03269 +id03272 +id03273 +id03274 +id03279 +id03281 +id03283 +id03284 +id03285 +id03286 +id03287 +id03288 +id03289 +id03290 +id03291 +id03292 +id03293 +id03295 +id03298 +id03299 +id03300 +id03303 +id03304 +id03306 +id03307 +id03312 +id03313 +id03314 +id03315 +id03316 +id03317 +id03318 +id03319 +id03320 +id03323 +id03324 +id03325 +id03326 +id03327 +id03328 +id03329 +id03331 +id03332 +id03333 +id03334 +id03335 +id03336 +id03337 +id03338 +id03339 +id03340 +id03341 +id03342 +id03343 +id03344 +id03345 +id03346 +id03348 +id03349 +id03350 +id03351 +id03352 +id03353 +id03354 +id03360 +id03361 +id03363 +id03364 +id03366 +id03371 +id03372 +id03373 +id03374 +id03375 +id03377 +id03379 +id03381 +id03383 +id03384 +id03386 +id03387 +id03388 +id03389 +id03390 +id03398 +id03399 +id03400 +id03401 +id03402 +id03405 +id03409 +id03410 +id03412 +id03413 +id03414 
+id03417 +id03422 +id03424 +id03425 +id03426 +id03427 +id03428 +id03429 +id03430 +id03432 +id03433 +id03434 +id03435 +id03436 +id03439 +id03443 +id03444 +id03445 +id03446 +id03448 +id03449 +id03452 +id03453 +id03454 +id03455 +id03457 +id03458 +id03459 +id03460 +id03461 +id03462 +id03463 +id03467 +id03471 +id03477 +id03479 +id03480 +id03483 +id03485 +id03486 +id03487 +id03488 +id03489 +id03493 +id03495 +id03497 +id03499 +id03501 +id03502 +id03503 +id03506 +id03508 +id03509 +id03511 +id03512 +id03514 +id03515 +id03517 +id03518 +id03519 +id03520 +id03521 +id03522 +id03523 +id03525 +id03526 +id03527 +id03528 +id03529 +id03530 +id03531 +id03532 +id03534 +id03535 +id03537 +id03538 +id03540 +id03543 +id03546 +id03548 +id03550 +id03556 +id03557 +id03559 +id03560 +id03569 +id03570 +id03572 +id03573 +id03575 +id03577 +id03578 +id03580 +id03581 +id03582 +id03584 +id03587 +id03588 +id03589 +id03592 +id03597 +id03599 +id03605 +id03606 +id03609 +id03613 +id03614 +id03615 +id03617 +id03618 +id03619 +id03620 +id03621 +id03622 +id03623 +id03624 +id03625 +id03628 +id03631 +id03632 +id03633 +id03634 +id03639 +id03640 +id03643 +id03644 +id03645 +id03647 +id03649 +id03650 +id03651 +id03652 +id03654 +id03656 +id03658 +id03659 +id03661 +id03663 +id03664 +id03665 +id03667 +id03668 +id03669 +id03670 +id03671 +id03672 +id03673 +id03675 +id03678 +id03679 +id03680 +id03681 +id03682 +id03683 +id03684 +id03685 +id03686 +id03687 +id03688 +id03689 +id03690 +id03692 +id03693 +id03694 +id03696 +id03699 +id03701 +id03702 +id03703 +id03704 +id03705 +id03706 +id03707 +id03709 +id03710 +id03711 +id03713 +id03714 +id03715 +id03716 +id03717 +id03719 +id03720 +id03721 +id03722 +id03723 +id03724 +id03725 +id03727 +id03728 +id03729 +id03730 +id03731 +id03732 +id03733 +id03735 +id03736 +id03740 +id03741 +id03743 +id03744 +id03745 +id03746 +id03747 +id03748 +id03749 +id03750 +id03751 +id03752 +id03753 +id03754 +id03755 +id03756 +id03757 +id03758 +id03759 +id03760 +id03761 +id03762 +id03763 +id03764 +id03765 
+id03767 +id03768 +id03769 +id03770 +id03772 +id03773 +id03774 +id03775 +id03777 +id03778 +id03779 +id03780 +id03781 +id03782 +id03784 +id03785 +id03786 +id03788 +id03790 +id03791 +id03795 +id03796 +id03797 +id03800 +id03803 +id03804 +id03810 +id03811 +id03812 +id03813 +id03815 +id03816 +id03817 +id03818 +id03819 +id03820 +id03821 +id03822 +id03823 +id03824 +id03825 +id03827 +id03828 +id03829 +id03830 +id03837 +id03838 +id03840 +id03841 +id03842 +id03843 +id03844 +id03845 +id03846 +id03847 +id03848 +id03852 +id03853 +id03854 +id03855 +id03856 +id03858 +id03859 +id03860 +id03861 +id03863 +id03864 +id03865 +id03866 +id03867 +id03868 +id03869 +id03870 +id03871 +id03873 +id03876 +id03877 +id03878 +id03884 +id03885 +id03887 +id03888 +id03889 +id03890 +id03891 +id03892 +id03893 +id03894 +id03895 +id03896 +id03897 +id03898 +id03899 +id03900 +id03902 +id03903 +id03904 +id03905 +id03906 +id03907 +id03909 +id03910 +id03912 +id03913 +id03914 +id03915 +id03916 +id03917 +id03918 +id03919 +id03920 +id03921 +id03922 +id03923 +id03924 +id03927 +id03928 +id03929 +id03930 +id03931 +id03932 +id03934 +id03935 +id03936 +id03938 +id03939 +id03940 +id03941 +id03942 +id03943 +id03945 +id03946 +id03947 +id03949 +id03950 +id03951 +id03952 +id03953 +id03954 +id03955 +id03956 +id03958 +id03961 +id03962 +id03963 +id03965 +id03966 +id03967 +id03968 +id03970 +id03971 +id03972 +id03974 +id03976 +id03977 +id03983 +id03984 +id03985 +id03986 +id03987 +id03989 +id03991 +id03992 +id03993 +id03994 +id03996 +id03998 +id03999 +id04005 +id04007 +id04009 +id04010 +id04012 +id04013 +id04014 +id04015 +id04016 +id04017 +id04018 +id04019 +id04021 +id04022 +id04024 +id04025 +id04026 +id04027 +id04028 +id04029 +id04031 +id04032 +id04033 +id04034 +id04035 +id04036 +id04037 +id04038 +id04039 +id04042 +id04044 +id04045 +id04046 +id04047 +id04048 +id04049 +id04050 +id04051 +id04052 +id04053 +id04054 +id04055 +id04057 +id04058 +id04062 +id04063 +id04064 +id04065 +id04066 +id04070 +id04073 +id04074 +id04076 +id04077 
+id04079 +id04080 +id04081 +id04082 +id04084 +id04085 +id04086 +id04089 +id04091 +id04092 +id04093 +id04095 +id04096 +id04097 +id04098 +id04100 +id04102 +id04103 +id04105 +id04106 +id04107 +id04108 +id04109 +id04111 +id04114 +id04115 +id04116 +id04117 +id04118 +id04121 +id04122 +id04125 +id04126 +id04127 +id04128 +id04129 +id04130 +id04132 +id04133 +id04134 +id04136 +id04137 +id04140 +id04144 +id04145 +id04146 +id04147 +id04148 +id04149 +id04150 +id04151 +id04152 +id04153 +id04154 +id04155 +id04157 +id04159 +id04160 +id04161 +id04162 +id04163 +id04164 +id04165 +id04166 +id04167 +id04168 +id04169 +id04171 +id04173 +id04174 +id04175 +id04177 +id04178 +id04179 +id04180 +id04181 +id04182 +id04183 +id04184 +id04186 +id04187 +id04188 +id04189 +id04190 +id04191 +id04192 +id04193 +id04194 +id04195 +id04196 +id04197 +id04198 +id04199 +id04202 +id04203 +id04204 +id04205 +id04206 +id04207 +id04215 +id04216 +id04219 +id04221 +id04222 +id04223 +id04224 +id04225 +id04227 +id04228 +id04229 +id04230 +id04231 +id04234 +id04235 +id04236 +id04237 +id04238 +id04239 +id04240 +id04243 +id04244 +id04245 +id04246 +id04247 +id04249 +id04250 +id04251 +id04254 +id04255 +id04256 +id04257 +id04258 +id04259 +id04260 +id04261 +id04262 +id04263 +id04264 +id04265 +id04266 +id04267 +id04268 +id04269 +id04270 +id04271 +id04272 +id04273 +id04274 +id04275 +id04278 +id04279 +id04280 +id04282 +id04284 +id04285 +id04287 +id04288 +id04289 +id04292 +id04293 +id04294 +id04296 +id04297 +id04298 +id04300 +id04301 +id04302 +id04303 +id04304 +id04305 +id04309 +id04311 +id04312 +id04313 +id04314 +id04316 +id04318 +id04319 +id04320 +id04321 +id04322 +id04325 +id04326 +id04332 +id04333 +id04334 +id04335 +id04336 +id04340 +id04341 +id04342 +id04343 +id04344 +id04345 +id04346 +id04349 +id04350 +id04352 +id04353 +id04355 +id04356 +id04357 +id04358 +id04365 +id04367 +id04368 +id04370 +id04372 +id04373 +id04374 +id04375 +id04376 +id04377 +id04378 +id04379 +id04380 +id04383 +id04386 +id04387 +id04388 +id04392 +id04393 
+id04396 +id04397 +id04398 +id04399 +id04402 +id04404 +id04405 +id04406 +id04407 +id04408 +id04409 +id04413 +id04414 +id04415 +id04416 +id04417 +id04418 +id04419 +id04420 +id04421 +id04422 +id04423 +id04424 +id04425 +id04427 +id04428 +id04429 +id04430 +id04432 +id04433 +id04434 +id04436 +id04437 +id04441 +id04442 +id04443 +id04444 +id04445 +id04446 +id04447 +id04449 +id04450 +id04451 +id04452 +id04453 +id04455 +id04456 +id04458 +id04459 +id04460 +id04462 +id04463 +id04474 +id04475 +id04476 +id04477 +id04479 +id04481 +id04482 +id04483 +id04484 +id04485 +id04486 +id04487 +id04488 +id04489 +id04490 +id04491 +id04493 +id04494 +id04495 +id04496 +id04497 +id04498 +id04499 +id04501 +id04502 +id04503 +id04504 +id04505 +id04506 +id04508 +id04509 +id04510 +id04511 +id04512 +id04513 +id04514 +id04516 +id04517 +id04518 +id04519 +id04520 +id04522 +id04523 +id04524 +id04525 +id04526 +id04527 +id04528 +id04529 +id04530 +id04531 +id04534 +id04537 +id04538 +id04540 +id04542 +id04543 +id04545 +id04547 +id04549 +id04551 +id04552 +id04554 +id04555 +id04556 +id04557 +id04559 +id04560 +id04561 +id04562 +id04563 +id04564 +id04565 +id04566 +id04567 +id04569 +id04571 +id04573 +id04576 +id04578 +id04579 +id04580 +id04582 +id04583 +id04584 +id04585 +id04586 +id04588 +id04589 +id04590 +id04593 +id04597 +id04599 +id04600 +id04601 +id04602 +id04606 +id04608 +id04609 +id04610 +id04613 +id04614 +id04615 +id04616 +id04617 +id04618 +id04619 +id04621 +id04623 +id04624 +id04625 +id04626 +id04628 +id04629 +id04635 +id04637 +id04638 +id04639 +id04640 +id04641 +id04642 +id04643 +id04644 +id04645 +id04646 +id04647 +id04648 +id04649 +id04650 +id04651 +id04652 +id04653 +id04654 +id04655 +id04658 +id04660 +id04662 +id04665 +id04666 +id04667 +id04668 +id04675 +id04676 +id04677 +id04684 +id04686 +id04687 +id04689 +id04690 +id04691 +id04692 +id04693 +id04694 +id04695 +id04696 +id04697 +id04698 +id04699 +id04701 +id04703 +id04704 +id04705 +id04706 +id04708 +id04709 +id04710 +id04712 +id04713 +id04714 +id04715 
+id04716 +id04717 +id04718 +id04720 +id04721 +id04723 +id04725 +id04726 +id04727 +id04728 +id04730 +id04731 +id04733 +id04734 +id04736 +id04737 +id04738 +id04739 +id04740 +id04741 +id04742 +id04743 +id04744 +id04745 +id04746 +id04747 +id04748 +id04750 +id04751 +id04752 +id04753 +id04754 +id04755 +id04756 +id04757 +id04758 +id04759 +id04761 +id04762 +id04763 +id04770 +id04774 +id04776 +id04778 +id04780 +id04781 +id04782 +id04783 +id04784 +id04787 +id04788 +id04789 +id04800 +id04801 +id04802 +id04803 +id04814 +id04818 +id04820 +id04821 +id04822 +id04823 +id04825 +id04826 +id04827 +id04830 +id04834 +id04835 +id04836 +id04837 +id04838 +id04839 +id04841 +id04842 +id04844 +id04845 +id04846 +id04847 +id04848 +id04851 +id04852 +id04854 +id04855 +id04856 +id04857 +id04858 +id04863 +id04866 +id04869 +id04872 +id04873 +id04875 +id04876 +id04877 +id04884 +id04885 +id04886 +id04887 +id04888 +id04889 +id04890 +id04893 +id04894 +id04895 +id04896 +id04897 +id04898 +id04899 +id04900 +id04901 +id04902 +id04903 +id04904 +id04905 +id04906 +id04907 +id04908 +id04909 +id04910 +id04911 +id04914 +id04915 +id04917 +id04918 +id04926 +id04927 +id04928 +id04930 +id04933 +id04934 +id04935 +id04938 +id04939 +id04940 +id04941 +id04943 +id04945 +id04946 +id04948 +id04951 +id04953 +id04956 +id04957 +id04958 +id04959 +id04961 +id04962 +id04963 +id04964 +id04967 +id04968 +id04969 +id04970 +id04971 +id04972 +id04973 +id04974 +id04976 +id04977 +id04978 +id04979 +id04980 +id04981 +id04982 +id04983 +id04984 +id04985 +id04986 +id04987 +id04988 +id04990 +id04991 +id04993 +id04994 +id04995 +id04996 +id04997 +id04998 +id04999 +id05000 +id05003 +id05004 +id05008 +id05009 +id05010 +id05011 +id05012 +id05013 +id05014 +id05016 +id05018 +id05019 +id05020 +id05021 +id05022 +id05023 +id05025 +id05027 +id05028 +id05029 +id05030 +id05031 +id05032 +id05034 +id05035 +id05038 +id05039 +id05040 +id05041 +id05042 +id05044 +id05048 +id05049 +id05052 +id05054 +id05056 +id05057 +id05058 +id05060 +id05061 +id05062 +id05063 
+id05066 +id05067 +id05075 +id05076 +id05079 +id05082 +id05087 +id05088 +id05090 +id05091 +id05092 +id05093 +id05094 +id05095 +id05096 +id05099 +id05100 +id05101 +id05102 +id05103 +id05104 +id05105 +id05106 +id05107 +id05108 +id05121 +id05123 +id05125 +id05129 +id05130 +id05131 +id05132 +id05133 +id05134 +id05135 +id05136 +id05137 +id05139 +id05141 +id05145 +id05147 +id05148 +id05149 +id05150 +id05152 +id05153 +id05154 +id05159 +id05160 +id05161 +id05162 +id05165 +id05166 +id05167 +id05168 +id05169 +id05170 +id05171 +id05172 +id05173 +id05174 +id05178 +id05179 +id05181 +id05182 +id05186 +id05187 +id05188 +id05189 +id05190 +id05191 +id05194 +id05196 +id05197 +id05198 +id05199 +id05200 +id05201 +id05203 +id05204 +id05205 +id05206 +id05207 +id05208 +id05209 +id05210 +id05211 +id05212 +id05213 +id05218 +id05220 +id05221 +id05223 +id05224 +id05226 +id05227 +id05228 +id05229 +id05231 +id05235 +id05236 +id05237 +id05239 +id05240 +id05241 +id05242 +id05244 +id05245 +id05246 +id05248 +id05249 +id05250 +id05251 +id05252 +id05253 +id05255 +id05256 +id05257 +id05258 +id05260 +id05261 +id05262 +id05263 +id05264 +id05265 +id05266 +id05268 +id05269 +id05270 +id05271 +id05272 +id05273 +id05274 +id05275 +id05276 +id05277 +id05278 +id05280 +id05282 +id05283 +id05284 +id05286 +id05287 +id05288 +id05289 +id05290 +id05291 +id05292 +id05293 +id05295 +id05296 +id05297 +id05300 +id05301 +id05303 +id05306 +id05308 +id05310 +id05312 +id05313 +id05314 +id05315 +id05316 +id05317 +id05319 +id05321 +id05323 +id05324 +id05328 +id05332 +id05333 +id05334 +id05335 +id05339 +id05341 +id05342 +id05343 +id05344 +id05345 +id05346 +id05349 +id05350 +id05351 +id05352 +id05353 +id05354 +id05357 +id05358 +id05369 +id05370 +id05371 +id05372 +id05373 +id05375 +id05376 +id05378 +id05379 +id05380 +id05381 +id05383 +id05384 +id05387 +id05388 +id05392 +id05398 +id05399 +id05401 +id05402 +id05403 +id05404 +id05405 +id05408 +id05421 +id05422 +id05423 +id05424 +id05426 +id05427 +id05428 +id05429 +id05430 +id05431 
+id05432 +id05433 +id05434 +id05435 +id05438 +id05441 +id05442 +id05443 +id05444 +id05445 +id05448 +id05449 +id05450 +id05451 +id05453 +id05454 +id05456 +id05457 +id05460 +id05465 +id05466 +id05467 +id05468 +id05469 +id05470 +id05471 +id05472 +id05474 +id05476 +id05477 +id05478 +id05479 +id05480 +id05481 +id05483 +id05486 +id05487 +id05490 +id05491 +id05492 +id05493 +id05494 +id05496 +id05502 +id05503 +id05504 +id05505 +id05506 +id05507 +id05508 +id05509 +id05510 +id05512 +id05513 +id05515 +id05516 +id05517 +id05518 +id05519 +id05520 +id05521 +id05522 +id05524 +id05526 +id05527 +id05528 +id05529 +id05534 +id05535 +id05536 +id05538 +id05541 +id05545 +id05546 +id05547 +id05549 +id05551 +id05552 +id05553 +id05554 +id05555 +id05556 +id05557 +id05558 +id05559 +id05561 +id05562 +id05563 +id05564 +id05567 +id05568 +id05570 +id05573 +id05574 +id05575 +id05576 +id05577 +id05579 +id05581 +id05583 +id05584 +id05585 +id05586 +id05588 +id05591 +id05607 +id05609 +id05610 +id05612 +id05613 +id05614 +id05615 +id05616 +id05619 +id05620 +id05621 +id05622 +id05623 +id05624 +id05625 +id05627 +id05628 +id05631 +id05632 +id05633 +id05634 +id05639 +id05640 +id05641 +id05642 +id05643 +id05644 +id05645 +id05647 +id05648 +id05649 +id05657 +id05658 +id05660 +id05663 +id05664 +id05665 +id05666 +id05667 +id05668 +id05670 +id05671 +id05672 +id05677 +id05678 +id05681 +id05684 +id05685 +id05686 +id05687 +id05688 +id05689 +id05690 +id05691 +id05692 +id05693 +id05694 +id05695 +id05696 +id05697 +id05698 +id05699 +id05700 +id05703 +id05704 +id05706 +id05707 +id05708 +id05710 +id05715 +id05716 +id05718 +id05719 +id05721 +id05722 +id05723 +id05724 +id05725 +id05726 +id05727 +id05728 +id05729 +id05731 +id05733 +id05734 +id05735 +id05737 +id05738 +id05739 +id05740 +id05741 +id05742 +id05743 +id05745 +id05746 +id05747 +id05748 +id05749 +id05750 +id05751 +id05752 +id05754 +id05756 +id05757 +id05758 +id05759 +id05760 +id05762 +id05764 +id05765 +id05766 +id05767 +id05768 +id05769 +id05770 +id05771 +id05772 
+id05774 +id05776 +id05777 +id05779 +id05783 +id05784 +id05788 +id05792 +id05793 +id05796 +id05797 +id05798 +id05799 +id05801 +id05804 +id05805 +id05806 +id05808 +id05810 +id05811 +id05812 +id05813 +id05814 +id05815 +id05827 +id05828 +id05829 +id05830 +id05831 +id05832 +id05833 +id05834 +id05835 +id05836 +id05837 +id05838 +id05841 +id05843 +id05844 +id05845 +id05847 +id05849 +id05851 +id05852 +id05853 +id05856 +id05858 +id05860 +id05861 +id05864 +id05868 +id05869 +id05870 +id05871 +id05873 +id05874 +id05875 +id05876 +id05877 +id05878 +id05879 +id05880 +id05881 +id05882 +id05887 +id05890 +id05891 +id05892 +id05893 +id05894 +id05895 +id05904 +id05905 +id05906 +id05907 +id05908 +id05913 +id05914 +id05915 +id05916 +id05917 +id05919 +id05920 +id05921 +id05922 +id05923 +id05926 +id05927 +id05930 +id05931 +id05932 +id05934 +id05935 +id05936 +id05938 +id05939 +id05940 +id05942 +id05943 +id05944 +id05945 +id05946 +id05951 +id05952 +id05953 +id05954 +id05956 +id05957 +id05958 +id05959 +id05960 +id05961 +id05962 +id05963 +id05964 +id05966 +id05969 +id05970 +id05976 +id05977 +id05978 +id05980 +id05981 +id05982 +id05983 +id05984 +id05985 +id05986 +id05987 +id05988 +id05989 +id05990 +id05991 +id05993 +id05994 +id05996 +id05997 +id05998 +id06000 +id06002 +id06003 +id06004 +id06005 +id06006 +id06007 +id06008 +id06010 +id06011 +id06012 +id06013 +id06014 +id06015 +id06016 +id06017 +id06018 +id06019 +id06020 +id06021 +id06022 +id06023 +id06024 +id06025 +id06026 +id06027 +id06028 +id06029 +id06038 +id06040 +id06042 +id06044 +id06045 +id06046 +id06049 +id06050 +id06051 +id06052 +id06054 +id06055 +id06056 +id06058 +id06059 +id06060 +id06061 +id06062 +id06064 +id06065 +id06066 +id06067 +id06068 +id06070 +id06071 +id06073 +id06081 +id06084 +id06086 +id06087 +id06092 +id06095 +id06096 +id06097 +id06103 +id06105 +id06106 +id06107 +id06108 +id06109 +id06110 +id06111 +id06112 +id06113 +id06114 +id06115 +id06116 +id06117 +id06118 +id06119 +id06120 +id06121 +id06122 +id06124 +id06128 +id06130 
+id06131 +id06133 +id06134 +id06135 +id06138 +id06139 +id06140 +id06142 +id06143 +id06145 +id06146 +id06148 +id06152 +id06154 +id06155 +id06156 +id06158 +id06159 +id06161 +id06162 +id06163 +id06164 +id06165 +id06166 +id06168 +id06172 +id06173 +id06174 +id06176 +id06178 +id06183 +id06184 +id06185 +id06188 +id06189 +id06191 +id06194 +id06195 +id06196 +id06197 +id06199 +id06200 +id06202 +id06203 +id06206 +id06207 +id06210 +id06211 +id06212 +id06213 +id06214 +id06215 +id06216 +id06217 +id06218 +id06220 +id06221 +id06222 +id06225 +id06226 +id06227 +id06228 +id06229 +id06231 +id06232 +id06233 +id06234 +id06235 +id06236 +id06237 +id06238 +id06239 +id06242 +id06244 +id06245 +id06246 +id06247 +id06248 +id06249 +id06250 +id06251 +id06252 +id06254 +id06255 +id06256 +id06258 +id06261 +id06262 +id06263 +id06264 +id06265 +id06267 +id06268 +id06269 +id06270 +id06274 +id06275 +id06276 +id06277 +id06278 +id06279 +id06280 +id06288 +id06289 +id06292 +id06293 +id06295 +id06297 +id06298 +id06299 +id06301 +id06302 +id06303 +id06304 +id06307 +id06311 +id06319 +id06320 +id06321 +id06322 +id06323 +id06324 +id06328 +id06331 +id06332 +id06333 +id06334 +id06335 +id06336 +id06338 +id06339 +id06340 +id06341 +id06342 +id06343 +id06344 +id06346 +id06347 +id06349 +id06350 +id06351 +id06353 +id06354 +id06355 +id06356 +id06357 +id06358 +id06360 +id06361 +id06362 +id06364 +id06368 +id06370 +id06371 +id06375 +id06376 +id06377 +id06378 +id06379 +id06380 +id06381 +id06382 +id06383 +id06384 +id06385 +id06386 +id06388 +id06389 +id06390 +id06392 +id06393 +id06394 +id06395 +id06396 +id06397 +id06398 +id06399 +id06400 +id06403 +id06404 +id06405 +id06406 +id06408 +id06409 +id06411 +id06412 +id06413 +id06414 +id06415 +id06416 +id06417 +id06418 +id06422 +id06423 +id06424 +id06425 +id06426 +id06427 +id06428 +id06429 +id06430 +id06431 +id06432 +id06434 +id06435 +id06436 +id06437 +id06438 +id06439 +id06440 +id06443 +id06445 +id06446 +id06447 +id06448 +id06449 +id06450 +id06453 +id06454 +id06456 +id06457 +id06459 
+id06462 +id06465 +id06467 +id06468 +id06469 +id06470 +id06471 +id06472 +id06474 +id06476 +id06477 +id06478 +id06479 +id06480 +id06481 +id06482 +id06483 +id06485 +id06486 +id06487 +id06489 +id06490 +id06491 +id06493 +id06494 +id06495 +id06496 +id06497 +id06498 +id06499 +id06500 +id06502 +id06503 +id06504 +id06505 +id06506 +id06507 +id06509 +id06511 +id06516 +id06517 +id06518 +id06519 +id06520 +id06524 +id06526 +id06528 +id06529 +id06530 +id06531 +id06535 +id06536 +id06537 +id06538 +id06539 +id06541 +id06544 +id06547 +id06548 +id06550 +id06551 +id06552 +id06553 +id06554 +id06555 +id06556 +id06557 +id06559 +id06560 +id06574 +id06575 +id06576 +id06577 +id06578 +id06581 +id06582 +id06583 +id06584 +id06586 +id06588 +id06589 +id06591 +id06593 +id06594 +id06595 +id06596 +id06597 +id06598 +id06599 +id06607 +id06608 +id06609 +id06612 +id06613 +id06614 +id06616 +id06617 +id06619 +id06621 +id06622 +id06624 +id06625 +id06631 +id06633 +id06637 +id06640 +id06642 +id06643 +id06645 +id06648 +id06649 +id06650 +id06651 +id06661 +id06662 +id06663 +id06672 +id06673 +id06674 +id06676 +id06683 +id06684 +id06685 +id06686 +id06687 +id06688 +id06689 +id06691 +id06694 +id06695 +id06696 +id06697 +id06698 +id06701 +id06702 +id06703 +id06704 +id06706 +id06708 +id06709 +id06713 +id06714 +id06715 +id06716 +id06717 +id06721 +id06722 +id06723 +id06724 +id06725 +id06726 +id06730 +id06732 +id06741 +id06743 +id06744 +id06745 +id06747 +id06748 +id06749 +id06750 +id06752 +id06753 +id06754 +id06757 +id06758 +id06759 +id06760 +id06761 +id06763 +id06765 +id06767 +id06768 +id06771 +id06772 +id06775 +id06776 +id06779 +id06782 +id06784 +id06791 +id06792 +id06793 +id06794 +id06795 +id06797 +id06798 +id06799 +id06807 +id06809 +id06810 +id06812 +id06813 +id06815 +id06817 +id06818 +id06819 +id06820 +id06821 +id06822 +id06823 +id06828 +id06829 +id06830 +id06833 +id06835 +id06836 +id06837 +id06838 +id06839 +id06840 +id06841 +id06842 +id06843 +id06844 +id06845 +id06846 +id06847 +id06848 +id06849 +id06850 +id06851 
+id06852 +id06853 +id06854 +id06855 +id06856 +id06857 +id06858 +id06860 +id06861 +id06862 +id06863 +id06864 +id06866 +id06867 +id06869 +id06870 +id06874 +id06875 +id06876 +id06878 +id06879 +id06881 +id06883 +id06887 +id06888 +id06889 +id06890 +id06892 +id06893 +id06896 +id06900 +id06905 +id06906 +id06907 +id06908 +id06909 +id06910 +id06911 +id06912 +id06914 +id06915 +id06916 +id06918 +id06920 +id06922 +id06926 +id06927 +id06928 +id06929 +id06930 +id06931 +id06932 +id06934 +id06935 +id06936 +id06937 +id06938 +id06939 +id06940 +id06941 +id06942 +id06944 +id06945 +id06946 +id06948 +id06949 +id06950 +id06951 +id06952 +id06953 +id06954 +id06957 +id06958 +id06960 +id06961 +id06962 +id06963 +id06964 +id06966 +id06967 +id06968 +id06969 +id06970 +id06971 +id06972 +id06973 +id06974 +id06976 +id06982 +id06983 +id06984 +id06985 +id06986 +id06987 +id06988 +id06989 +id06990 +id06991 +id06994 +id06995 +id06996 +id06997 +id06998 +id06999 +id07001 +id07002 +id07003 +id07004 +id07005 +id07007 +id07008 +id07009 +id07010 +id07011 +id07012 +id07015 +id07017 +id07019 +id07020 +id07022 +id07026 +id07031 +id07032 +id07034 +id07036 +id07037 +id07039 +id07042 +id07043 +id07044 +id07045 +id07046 +id07048 +id07049 +id07050 +id07051 +id07052 +id07053 +id07054 +id07055 +id07056 +id07057 +id07058 +id07059 +id07060 +id07062 +id07063 +id07064 +id07065 +id07066 +id07071 +id07072 +id07074 +id07076 +id07077 +id07078 +id07080 +id07084 +id07085 +id07086 +id07087 +id07088 +id07089 +id07092 +id07093 +id07094 +id07095 +id07097 +id07098 +id07099 +id07100 +id07102 +id07103 +id07104 +id07105 +id07107 +id07108 +id07109 +id07112 +id07113 +id07114 +id07115 +id07116 +id07117 +id07118 +id07120 +id07121 +id07122 +id07123 +id07124 +id07128 +id07130 +id07131 +id07132 +id07133 +id07134 +id07135 +id07136 +id07137 +id07140 +id07145 +id07146 +id07148 +id07151 +id07153 +id07154 +id07155 +id07158 +id07160 +id07161 +id07162 +id07163 +id07164 +id07165 +id07166 +id07167 +id07168 +id07169 +id07170 +id07171 +id07173 +id07175 
+id07176 +id07177 +id07178 +id07179 +id07180 +id07181 +id07182 +id07183 +id07185 +id07186 +id07187 +id07188 +id07189 +id07191 +id07192 +id07194 +id07195 +id07196 +id07197 +id07198 +id07199 +id07200 +id07202 +id07204 +id07205 +id07206 +id07207 +id07208 +id07209 +id07210 +id07212 +id07213 +id07214 +id07215 +id07217 +id07218 +id07219 +id07220 +id07221 +id07223 +id07227 +id07228 +id07229 +id07230 +id07232 +id07233 +id07234 +id07235 +id07236 +id07238 +id07240 +id07241 +id07242 +id07243 +id07244 +id07246 +id07247 +id07250 +id07251 +id07253 +id07254 +id07255 +id07256 +id07258 +id07259 +id07262 +id07263 +id07264 +id07265 +id07268 +id07269 +id07272 +id07273 +id07275 +id07276 +id07277 +id07278 +id07279 +id07283 +id07284 +id07285 +id07288 +id07290 +id07292 +id07293 +id07294 +id07295 +id07296 +id07297 +id07299 +id07303 +id07305 +id07306 +id07308 +id07311 +id07314 +id07316 +id07320 +id07321 +id07322 +id07332 +id07333 +id07334 +id07335 +id07337 +id07338 +id07341 +id07342 +id07343 +id07344 +id07345 +id07346 +id07349 +id07350 +id07351 +id07352 +id07353 +id07355 +id07356 +id07357 +id07358 +id07359 +id07360 +id07361 +id07362 +id07363 +id07366 +id07367 +id07368 +id07369 +id07370 +id07371 +id07374 +id07375 +id07376 +id07377 +id07383 +id07384 +id07385 +id07388 +id07389 +id07392 +id07393 +id07394 +id07395 +id07397 +id07398 +id07399 +id07400 +id07401 +id07402 +id07403 +id07404 +id07405 +id07407 +id07408 +id07409 +id07411 +id07412 +id07413 +id07415 +id07416 +id07417 +id07418 +id07419 +id07421 +id07422 +id07423 +id07427 +id07428 +id07429 +id07430 +id07431 +id07432 +id07433 +id07434 +id07437 +id07438 +id07439 +id07441 +id07442 +id07445 +id07446 +id07447 +id07448 +id07452 +id07453 +id07454 +id07455 +id07456 +id07457 +id07459 +id07460 +id07461 +id07462 +id07463 +id07465 +id07466 +id07468 +id07469 +id07470 +id07471 +id07473 +id07474 +id07477 +id07478 +id07483 +id07484 +id07489 +id07491 +id07492 +id07493 +id07495 +id07496 +id07497 +id07498 +id07500 +id07502 +id07506 +id07507 +id07508 +id07510 
+id07511 +id07512 +id07517 +id07519 +id07520 +id07522 +id07524 +id07525 +id07526 +id07527 +id07528 +id07529 +id07531 +id07532 +id07533 +id07534 +id07535 +id07536 +id07537 +id07538 +id07539 +id07540 +id07541 +id07543 +id07545 +id07546 +id07547 +id07548 +id07549 +id07550 +id07551 +id07552 +id07553 +id07554 +id07555 +id07556 +id07558 +id07560 +id07561 +id07562 +id07564 +id07565 +id07566 +id07567 +id07568 +id07569 +id07570 +id07571 +id07572 +id07574 +id07575 +id07576 +id07577 +id07578 +id07579 +id07580 +id07581 +id07582 +id07583 +id07585 +id07586 +id07587 +id07588 +id07589 +id07590 +id07591 +id07592 +id07593 +id07598 +id07599 +id07600 +id07611 +id07612 +id07613 +id07614 +id07615 +id07616 +id07617 +id07618 +id07622 +id07623 +id07624 +id07625 +id07626 +id07627 +id07628 +id07630 +id07631 +id07635 +id07636 +id07637 +id07638 +id07639 +id07640 +id07641 +id07642 +id07643 +id07644 +id07646 +id07647 +id07648 +id07649 +id07652 +id07656 +id07658 +id07660 +id07661 +id07662 +id07664 +id07666 +id07669 +id07670 +id07671 +id07672 +id07673 +id07675 +id07678 +id07679 +id07680 +id07681 +id07682 +id07683 +id07685 +id07686 +id07688 +id07689 +id07691 +id07692 +id07693 +id07694 +id07695 +id07696 +id07697 +id07700 +id07701 +id07702 +id07703 +id07704 +id07705 +id07706 +id07707 +id07708 +id07711 +id07713 +id07714 +id07715 +id07716 +id07717 +id07719 +id07720 +id07721 +id07723 +id07724 +id07725 +id07726 +id07727 +id07728 +id07730 +id07732 +id07733 +id07734 +id07735 +id07736 +id07737 +id07738 +id07739 +id07740 +id07741 +id07742 +id07744 +id07745 +id07747 +id07748 +id07749 +id07750 +id07751 +id07752 +id07753 +id07755 +id07756 +id07758 +id07759 +id07760 +id07761 +id07764 +id07768 +id07769 +id07770 +id07771 +id07773 +id07774 +id07775 +id07776 +id07780 +id07783 +id07785 +id07786 +id07798 +id07799 +id07800 +id07801 +id07803 +id07805 +id07807 +id07809 +id07811 +id07812 +id07814 +id07816 +id07818 +id07819 +id07820 +id07824 +id07825 +id07826 +id07827 +id07828 +id07829 +id07831 +id07832 +id07833 +id07834 
+id07835 +id07837 +id07838 +id07839 +id07840 +id07841 +id07844 +id07845 +id07846 +id07847 +id07848 +id07849 +id07850 +id07851 +id07852 +id07853 +id07856 +id07857 +id07858 +id07860 +id07861 +id07862 +id07863 +id07864 +id07866 +id07869 +id07870 +id07875 +id07876 +id07882 +id07883 +id07884 +id07885 +id07886 +id07887 +id07894 +id07895 +id07898 +id07901 +id07902 +id07910 +id07911 +id07912 +id07914 +id07916 +id07917 +id07926 +id07927 +id07928 +id07941 +id07943 +id07944 +id07947 +id07948 +id07949 +id07950 +id07951 +id07953 +id07954 +id07955 +id07956 +id07957 +id07958 +id07959 +id07960 +id07962 +id07963 +id07964 +id07967 +id07968 +id07970 +id07971 +id07972 +id07973 +id07974 +id07975 +id07976 +id07977 +id07978 +id07979 +id07980 +id07981 +id07982 +id07983 +id07984 +id07985 +id07987 +id07988 +id07989 +id07990 +id07991 +id07992 +id07993 +id07995 +id07996 +id07997 +id07998 +id07999 +id08000 +id08001 +id08002 +id08003 +id08012 +id08013 +id08014 +id08015 +id08016 +id08018 +id08019 +id08020 +id08022 +id08023 +id08024 +id08025 +id08026 +id08027 +id08028 +id08029 +id08030 +id08037 +id08039 +id08040 +id08041 +id08042 +id08043 +id08044 +id08046 +id08047 +id08048 +id08049 +id08052 +id08053 +id08054 +id08064 +id08065 +id08066 +id08067 +id08068 +id08071 +id08073 +id08074 +id08078 +id08079 +id08080 +id08081 +id08086 +id08088 +id08089 +id08093 +id08094 +id08095 +id08096 +id08097 +id08098 +id08099 +id08106 +id08108 +id08110 +id08113 +id08114 +id08115 +id08119 +id08120 +id08121 +id08122 +id08123 +id08124 +id08125 +id08126 +id08127 +id08128 +id08129 +id08130 +id08132 +id08134 +id08137 +id08138 +id08139 +id08142 +id08143 +id08144 +id08145 +id08146 +id08147 +id08148 +id08154 +id08155 +id08158 +id08159 +id08160 +id08162 +id08163 +id08165 +id08166 +id08167 +id08169 +id08170 +id08171 +id08172 +id08173 +id08175 +id08176 +id08178 +id08179 +id08180 +id08183 +id08184 +id08185 +id08186 +id08187 +id08188 +id08190 +id08191 +id08192 +id08193 +id08194 +id08195 +id08198 +id08200 +id08201 +id08202 +id08203 
+id08204 +id08206 +id08209 +id08210 +id08211 +id08212 +id08213 +id08215 +id08217 +id08218 +id08219 +id08220 +id08221 +id08222 +id08230 +id08231 +id08232 +id08234 +id08235 +id08236 +id08238 +id08239 +id08240 +id08241 +id08242 +id08243 +id08244 +id08245 +id08246 +id08247 +id08249 +id08250 +id08251 +id08253 +id08258 +id08259 +id08260 +id08261 +id08263 +id08264 +id08266 +id08267 +id08271 +id08273 +id08274 +id08277 +id08278 +id08279 +id08280 +id08281 +id08282 +id08283 +id08284 +id08285 +id08286 +id08287 +id08288 +id08289 +id08290 +id08291 +id08292 +id08293 +id08294 +id08295 +id08297 +id08298 +id08299 +id08300 +id08302 +id08303 +id08305 +id08306 +id08307 +id08309 +id08311 +id08312 +id08313 +id08314 +id08315 +id08317 +id08318 +id08320 +id08321 +id08323 +id08324 +id08327 +id08329 +id08330 +id08331 +id08333 +id08334 +id08335 +id08337 +id08338 +id08339 +id08340 +id08343 +id08344 +id08347 +id08348 +id08349 +id08350 +id08351 +id08352 +id08353 +id08354 +id08355 +id08356 +id08358 +id08360 +id08362 +id08364 +id08365 +id08368 +id08370 +id08372 +id08373 +id08375 +id08376 +id08377 +id08380 +id08381 +id08382 +id08394 +id08395 +id08396 +id08397 +id08398 +id08399 +id08400 +id08402 +id08403 +id08404 +id08405 +id08406 +id08408 +id08411 +id08414 +id08415 +id08416 +id08417 +id08418 +id08419 +id08420 +id08421 +id08422 +id08423 +id08424 +id08426 +id08432 +id08433 +id08434 +id08437 +id08438 +id08440 +id08441 +id08442 +id08443 +id08445 +id08446 +id08451 +id08453 +id08454 +id08455 +id08457 +id08458 +id08459 +id08462 +id08463 +id08464 +id08465 +id08467 +id08468 +id08470 +id08471 +id08472 +id08474 +id08475 +id08476 +id08477 +id08478 +id08480 +id08481 +id08482 +id08483 +id08489 +id08490 +id08492 +id08494 +id08496 +id08497 +id08498 +id08499 +id08500 +id08501 +id08502 +id08510 +id08511 +id08512 +id08513 +id08515 +id08516 +id08517 +id08518 +id08520 +id08521 +id08522 +id08523 +id08524 +id08525 +id08527 +id08528 +id08529 +id08530 +id08531 +id08532 +id08533 +id08534 +id08535 +id08536 +id08538 +id08540 
+id08543 +id08544 +id08545 +id08546 +id08547 +id08549 +id08551 +id08553 +id08554 +id08555 +id08556 +id08563 +id08564 +id08565 +id08566 +id08567 +id08568 +id08569 +id08570 +id08572 +id08573 +id08574 +id08577 +id08578 +id08580 +id08581 +id08583 +id08584 +id08585 +id08586 +id08587 +id08588 +id08589 +id08590 +id08592 +id08593 +id08595 +id08597 +id08598 +id08599 +id08600 +id08601 +id08602 +id08603 +id08604 +id08605 +id08606 +id08607 +id08608 +id08609 +id08610 +id08611 +id08612 +id08613 +id08614 +id08615 +id08616 +id08617 +id08618 +id08622 +id08623 +id08624 +id08625 +id08626 +id08627 +id08628 +id08629 +id08633 +id08634 +id08635 +id08637 +id08638 +id08639 +id08640 +id08641 +id08642 +id08643 +id08645 +id08646 +id08647 +id08648 +id08649 +id08650 +id08651 +id08652 +id08653 +id08655 +id08656 +id08657 +id08658 +id08659 +id08660 +id08661 +id08663 +id08665 +id08666 +id08667 +id08668 +id08669 +id08670 +id08671 +id08672 +id08674 +id08676 +id08677 +id08678 +id08679 +id08680 +id08681 +id08683 +id08684 +id08685 +id08686 +id08687 +id08688 +id08689 +id08691 +id08692 +id08693 +id08694 +id08695 +id08698 +id08699 +id08700 +id08702 +id08703 +id08704 +id08705 +id08706 +id08707 +id08708 +id08709 +id08710 +id08711 +id08713 +id08714 +id08715 +id08716 +id08719 +id08721 +id08722 +id08723 +id08724 +id08725 +id08726 +id08727 +id08728 +id08730 +id08731 +id08732 +id08733 +id08736 +id08737 +id08738 +id08739 +id08740 +id08741 +id08742 +id08743 +id08745 +id08746 +id08747 +id08748 +id08749 +id08750 +id08751 +id08754 +id08755 +id08756 +id08757 +id08758 +id08760 +id08761 +id08762 +id08764 +id08765 +id08766 +id08768 +id08769 +id08771 +id08773 +id08774 +id08775 +id08776 +id08781 +id08782 +id08783 +id08784 +id08785 +id08786 +id08788 +id08789 +id08790 +id08792 +id08794 +id08795 +id08797 +id08799 +id08800 +id08801 +id08802 +id08803 +id08804 +id08805 +id08806 +id08808 +id08809 +id08810 +id08811 +id08819 +id08820 +id08821 +id08822 +id08823 +id08826 +id08830 +id08831 +id08832 +id08834 +id08837 +id08839 +id08840 
+id08850 +id08853 +id08860 +id08862 +id08863 +id08864 +id08866 +id08868 +id08870 +id08871 +id08872 +id08873 +id08875 +id08876 +id08877 +id08878 +id08879 +id08881 +id08882 +id08883 +id08884 +id08885 +id08887 +id08890 +id08891 +id08892 +id08896 +id08898 +id08900 +id08901 +id08902 +id08903 +id08904 +id08906 +id08907 +id08909 +id08910 +id08912 +id08913 +id08915 +id08916 +id08917 +id08919 +id08920 +id08923 +id08925 +id08926 +id08927 +id08928 +id08929 +id08930 +id08931 +id08933 +id08936 +id08937 +id08938 +id08940 +id08941 +id08942 +id08944 +id08945 +id08950 +id08953 +id08954 +id08955 +id08967 +id08968 +id08969 +id08971 +id08972 +id08973 +id08974 +id08975 +id08976 +id08977 +id08980 +id08981 +id08982 +id08983 +id08984 +id08992 +id08993 +id08994 +id08996 +id08997 +id08998 +id08999 +id09000 +id09003 +id09004 +id09005 +id09006 +id09007 +id09008 +id09009 +id09010 +id09012 +id09013 +id09015 +id09016 +id09018 +id09020 +id09021 +id09022 +id09023 +id09024 +id09025 +id09027 +id09028 +id09029 +id09030 +id09031 +id09032 +id09033 +id09034 +id09035 +id09036 +id09038 +id09039 +id09040 +id09044 +id09046 +id09047 +id09048 +id09049 +id09051 +id09052 +id09053 +id09054 +id09055 +id09056 +id09057 +id09059 +id09061 +id09062 +id09063 +id09064 +id09065 +id09066 +id09067 +id09068 +id09069 +id09070 +id09071 +id09074 +id09075 +id09076 +id09078 +id09079 +id09081 +id09082 +id09083 +id09087 +id09088 +id09091 +id09092 +id09094 +id09095 +id09098 +id09099 +id09100 +id09108 +id09109 +id09110 +id09111 +id09112 +id09116 +id09119 +id09121 +id09123 +id09124 +id09125 +id09138 +id09139 +id09143 +id09144 +id09145 +id09146 +id09147 +id09149 +id09152 +id09153 +id09154 +id09155 +id09156 +id09159 +id09160 +id09161 +id09162 +id09163 +id09164 +id09171 +id09172 +id09174 +id09175 +id09179 +id09181 +id09185 +id09186 +id09187 +id09188 +id09189 +id09190 +id09192 +id09193 +id09194 +id09197 +id09198 +id09199 +id09200 +id09201 +id09202 +id09203 +id09204 +id09205 +id09207 +id09210 +id09211 +id09212 +id09213 +id09214 +id09215 
+id09216 +id09217 +id09218 +id09220 +id09222 +id09224 +id09225 +id09226 +id09227 +id09229 +id09230 +id09231 +id09232 +id09234 +id09235 +id09236 +id09237 +id09238 +id09240 +id09242 +id09243 +id09245 +id09252 +id09255 +id09262 +id09263 +id09268 +id09269 +id09271 +id09272 +id10001 +id10002 +id10003 +id10004 +id10005 +id10006 +id10007 +id10008 +id10009 +id10010 +id10011 +id10012 +id10013 +id10014 +id10015 +id10016 +id10017 +id10018 +id10019 +id10020 +id10021 +id10022 +id10023 +id10024 +id10025 +id10026 +id10027 +id10028 +id10029 +id10030 +id10031 +id10032 +id10033 +id10034 +id10035 +id10036 +id10037 +id10038 +id10039 +id10040 +id10041 +id10042 +id10043 +id10044 +id10045 +id10046 +id10047 +id10048 +id10049 +id10050 +id10051 +id10052 +id10053 +id10054 +id10055 +id10056 +id10057 +id10058 +id10059 +id10060 +id10061 +id10062 +id10063 +id10064 +id10065 +id10066 +id10067 +id10068 +id10069 +id10070 +id10071 +id10072 +id10073 +id10074 +id10075 +id10076 +id10077 +id10078 +id10079 +id10080 +id10081 +id10082 +id10083 +id10084 +id10085 +id10086 +id10087 +id10088 +id10089 +id10090 +id10091 +id10092 +id10093 +id10094 +id10095 +id10096 +id10097 +id10098 +id10099 +id10100 +id10101 +id10102 +id10103 +id10104 +id10105 +id10106 +id10107 +id10108 +id10109 +id10110 +id10111 +id10112 +id10113 +id10114 +id10115 +id10116 +id10117 +id10118 +id10119 +id10120 +id10121 +id10122 +id10123 +id10124 +id10125 +id10126 +id10127 +id10128 +id10129 +id10130 +id10131 +id10132 +id10133 +id10134 +id10135 +id10136 +id10137 +id10138 +id10139 +id10140 +id10141 +id10142 +id10143 +id10144 +id10145 +id10146 +id10147 +id10148 +id10149 +id10150 +id10151 +id10152 +id10153 +id10154 +id10155 +id10156 +id10157 +id10158 +id10159 +id10160 +id10161 +id10162 +id10163 +id10164 +id10165 +id10166 +id10167 +id10168 +id10169 +id10170 +id10171 +id10172 +id10173 +id10174 +id10175 +id10176 +id10177 +id10178 +id10179 +id10180 +id10181 +id10182 +id10183 +id10184 +id10185 +id10186 +id10187 +id10188 +id10189 +id10190 +id10191 +id10192 
+id10193 +id10194 +id10195 +id10196 +id10197 +id10198 +id10199 +id10200 +id10201 +id10202 +id10203 +id10204 +id10205 +id10206 +id10207 +id10208 +id10209 +id10210 +id10211 +id10212 +id10213 +id10214 +id10215 +id10216 +id10217 +id10218 +id10219 +id10220 +id10221 +id10222 +id10223 +id10224 +id10225 +id10226 +id10227 +id10228 +id10229 +id10230 +id10231 +id10232 +id10233 +id10234 +id10235 +id10236 +id10237 +id10238 +id10239 +id10240 +id10241 +id10242 +id10243 +id10244 +id10245 +id10246 +id10247 +id10248 +id10249 +id10250 +id10251 +id10252 +id10253 +id10254 +id10255 +id10256 +id10257 +id10258 +id10259 +id10260 +id10261 +id10262 +id10263 +id10264 +id10265 +id10266 +id10267 +id10268 +id10269 +id10310 +id10311 +id10312 +id10313 +id10314 +id10315 +id10316 +id10317 +id10318 +id10319 +id10320 +id10321 +id10322 +id10323 +id10324 +id10325 +id10326 +id10327 +id10328 +id10329 +id10330 +id10331 +id10332 +id10333 +id10334 +id10335 +id10336 +id10337 +id10338 +id10339 +id10340 +id10341 +id10342 +id10343 +id10344 +id10345 +id10346 +id10347 +id10348 +id10349 +id10350 +id10351 +id10352 +id10353 +id10354 +id10355 +id10356 +id10357 +id10358 +id10359 +id10360 +id10361 +id10362 +id10363 +id10364 +id10365 +id10366 +id10367 +id10368 +id10369 +id10370 +id10371 +id10372 +id10373 +id10374 +id10375 +id10376 +id10377 +id10378 +id10379 +id10380 +id10381 +id10382 +id10383 +id10384 +id10385 +id10386 +id10387 +id10388 +id10389 +id10390 +id10391 +id10392 +id10393 +id10394 +id10395 +id10396 +id10397 +id10398 +id10399 +id10400 +id10401 +id10402 +id10403 +id10404 +id10405 +id10406 +id10407 +id10408 +id10409 +id10410 +id10411 +id10412 +id10413 +id10414 +id10415 +id10416 +id10417 +id10418 +id10419 +id10420 +id10421 +id10422 +id10423 +id10424 +id10425 +id10426 +id10427 +id10428 +id10429 +id10430 +id10431 +id10432 +id10433 +id10434 +id10435 +id10436 +id10437 +id10438 +id10439 +id10440 +id10441 +id10442 +id10443 +id10444 +id10445 +id10446 +id10447 +id10448 +id10449 +id10450 +id10451 +id10452 +id10453 +id10454 
+id10455 +id10456 +id10457 +id10458 +id10459 +id10460 +id10461 +id10462 +id10463 +id10464 +id10465 +id10466 +id10467 +id10468 +id10469 +id10470 +id10471 +id10472 +id10473 +id10474 +id10475 +id10476 +id10477 +id10478 +id10479 +id10480 +id10481 +id10482 +id10483 +id10484 +id10485 +id10486 +id10487 +id10488 +id10489 +id10490 +id10491 +id10492 +id10493 +id10494 +id10495 +id10496 +id10497 +id10498 +id10499 +id10500 +id10501 +id10502 +id10503 +id10504 +id10505 +id10506 +id10507 +id10508 +id10509 +id10510 +id10511 +id10512 +id10513 +id10514 +id10515 +id10516 +id10517 +id10518 +id10519 +id10520 +id10521 +id10522 +id10523 +id10524 +id10525 +id10526 +id10527 +id10528 +id10529 +id10530 +id10531 +id10532 +id10533 +id10534 +id10535 +id10536 +id10537 +id10538 +id10539 +id10540 +id10541 +id10542 +id10543 +id10544 +id10545 +id10546 +id10547 +id10548 +id10549 +id10550 +id10551 +id10552 +id10553 +id10554 +id10555 +id10556 +id10557 +id10558 +id10559 +id10560 +id10561 +id10562 +id10563 +id10564 +id10565 +id10566 +id10567 +id10568 +id10569 +id10570 +id10571 +id10572 +id10573 +id10574 +id10575 +id10576 +id10577 +id10578 +id10579 +id10580 +id10581 +id10582 +id10583 +id10584 +id10585 +id10586 +id10587 +id10588 +id10589 +id10590 +id10591 +id10592 +id10593 +id10594 +id10595 +id10596 +id10597 +id10598 +id10599 +id10600 +id10601 +id10602 +id10603 +id10604 +id10605 +id10606 +id10607 +id10608 +id10609 +id10610 +id10611 +id10612 +id10613 +id10614 +id10615 +id10616 +id10617 +id10618 +id10619 +id10620 +id10621 +id10622 +id10623 +id10624 +id10625 +id10626 +id10627 +id10628 +id10629 +id10630 +id10631 +id10632 +id10633 +id10634 +id10635 +id10636 +id10637 +id10638 +id10639 +id10640 +id10641 +id10642 +id10643 +id10644 +id10645 +id10646 +id10647 +id10648 +id10649 +id10650 +id10651 +id10652 +id10653 +id10654 +id10655 +id10656 +id10657 +id10658 +id10659 +id10660 +id10661 +id10662 +id10663 +id10664 +id10665 +id10666 +id10667 +id10668 +id10669 +id10670 +id10671 +id10672 +id10673 +id10674 +id10675 +id10676 
+id10677 +id10678 +id10679 +id10680 +id10681 +id10682 +id10683 +id10684 +id10685 +id10686 +id10687 +id10688 +id10689 +id10690 +id10691 +id10692 +id10693 +id10694 +id10695 +id10696 +id10697 +id10698 +id10699 +id10700 +id10701 +id10702 +id10703 +id10704 +id10705 +id10706 +id10707 +id10708 +id10709 +id10710 +id10711 +id10712 +id10713 +id10714 +id10715 +id10716 +id10717 +id10718 +id10719 +id10720 +id10721 +id10722 +id10723 +id10724 +id10725 +id10726 +id10727 +id10728 +id10729 +id10730 +id10731 +id10732 +id10733 +id10734 +id10735 +id10736 +id10737 +id10738 +id10739 +id10740 +id10741 +id10742 +id10743 +id10744 +id10745 +id10746 +id10747 +id10748 +id10749 +id10750 +id10751 +id10752 +id10753 +id10754 +id10755 +id10756 +id10757 +id10758 +id10759 +id10760 +id10761 +id10762 +id10763 +id10764 +id10765 +id10766 +id10767 +id10768 +id10769 +id10770 +id10771 +id10772 +id10773 +id10774 +id10775 +id10776 +id10777 +id10778 +id10779 +id10780 +id10781 +id10782 +id10783 +id10784 +id10785 +id10786 +id10787 +id10788 +id10789 +id10790 +id10791 +id10792 +id10793 +id10794 +id10795 +id10796 +id10797 +id10798 +id10799 +id10800 +id10801 +id10802 +id10803 +id10804 +id10805 +id10806 +id10807 +id10808 +id10809 +id10810 +id10811 +id10812 +id10813 +id10814 +id10815 +id10816 +id10817 +id10818 +id10819 +id10820 +id10821 +id10822 +id10823 +id10824 +id10825 +id10826 +id10827 +id10828 +id10829 +id10830 +id10831 +id10832 +id10833 +id10834 +id10835 +id10836 +id10837 +id10838 +id10839 +id10840 +id10841 +id10842 +id10843 +id10844 +id10845 +id10846 +id10847 +id10848 +id10849 +id10850 +id10851 +id10852 +id10853 +id10854 +id10855 +id10856 +id10857 +id10858 +id10859 +id10860 +id10861 +id10862 +id10863 +id10864 +id10865 +id10866 +id10867 +id10868 +id10869 +id10870 +id10871 +id10872 +id10873 +id10874 +id10875 +id10876 +id10877 +id10878 +id10879 +id10880 +id10881 +id10882 +id10883 +id10884 +id10885 +id10886 +id10887 +id10888 +id10889 +id10890 +id10891 +id10892 +id10893 +id10894 +id10895 +id10896 +id10897 +id10898 
+id10899 +id10900 +id10901 +id10902 +id10903 +id10904 +id10905 +id10906 +id10907 +id10908 +id10909 +id10910 +id10911 +id10912 +id10913 +id10914 +id10915 +id10916 +id10917 +id10918 +id10919 +id10920 +id10921 +id10922 +id10923 +id10924 +id10925 +id10926 +id10927 +id10928 +id10929 +id10930 +id10931 +id10932 +id10933 +id10934 +id10935 +id10936 +id10937 +id10938 +id10939 +id10940 +id10941 +id10942 +id10943 +id10944 +id10945 +id10946 +id10947 +id10948 +id10949 +id10950 +id10951 +id10952 +id10953 +id10954 +id10955 +id10956 +id10957 +id10958 +id10959 +id10960 +id10961 +id10962 +id10963 +id10964 +id10965 +id10966 +id10967 +id10968 +id10969 +id10970 +id10971 +id10972 +id10973 +id10974 +id10975 +id10976 +id10977 +id10978 +id10979 +id10980 +id10981 +id10982 +id10983 +id10984 +id10985 +id10986 +id10987 +id10988 +id10989 +id10990 +id10991 +id10992 +id10993 +id10994 +id10995 +id10996 +id10997 +id10998 +id10999 +id11000 +id11001 +id11002 +id11003 +id11004 +id11005 +id11006 +id11007 +id11008 +id11009 +id11010 +id11011 +id11012 +id11013 +id11014 +id11015 +id11016 +id11017 +id11018 +id11019 +id11020 +id11021 +id11022 +id11023 +id11024 +id11025 +id11026 +id11027 +id11028 +id11029 +id11030 +id11031 +id11032 +id11033 +id11034 +id11035 +id11036 +id11037 +id11038 +id11039 +id11040 +id11041 +id11042 +id11043 +id11044 +id11045 +id11046 +id11047 +id11048 +id11049 +id11050 +id11051 +id11052 +id11053 +id11054 +id11055 +id11056 +id11057 +id11058 +id11059 +id11060 +id11061 +id11062 +id11063 +id11064 +id11065 +id11066 +id11067 +id11068 +id11069 +id11070 +id11071 +id11072 +id11073 +id11074 +id11075 +id11076 +id11077 +id11078 +id11079 +id11080 +id11081 +id11082 +id11083 +id11084 +id11085 +id11086 +id11087 +id11088 +id11089 +id11090 +id11091 +id11092 +id11093 +id11094 +id11095 +id11096 +id11097 +id11098 +id11099 +id11100 +id11101 +id11102 +id11103 +id11104 +id11105 +id11106 +id11107 +id11108 +id11109 +id11110 +id11111 +id11112 +id11113 +id11114 +id11115 +id11116 +id11117 +id11118 +id11119 +id11120 
+id11121 +id11122 +id11123 +id11124 +id11125 +id11126 +id11127 +id11128 +id11129 +id11130 +id11131 +id11132 +id11133 +id11134 +id11135 +id11136 +id11137 +id11138 +id11139 +id11140 +id11141 +id11142 +id11143 +id11144 +id11145 +id11146 +id11147 +id11148 +id11149 +id11150 +id11151 +id11152 +id11153 +id11154 +id11155 +id11156 +id11157 +id11158 +id11159 +id11160 +id11161 +id11162 +id11163 +id11164 +id11165 +id11166 +id11167 +id11168 +id11169 +id11170 +id11171 +id11172 +id11173 +id11174 +id11175 +id11176 +id11177 +id11178 +id11179 +id11180 +id11181 +id11182 +id11183 +id11184 +id11185 +id11186 +id11187 +id11188 +id11189 +id11190 +id11191 +id11192 +id11193 +id11194 +id11195 +id11196 +id11197 +id11198 +id11199 +id11200 +id11201 +id11202 +id11203 +id11204 +id11205 +id11206 +id11207 +id11208 +id11209 +id11210 +id11211 +id11212 +id11213 +id11214 +id11215 +id11216 +id11217 +id11218 +id11219 +id11220 +id11221 +id11222 +id11223 +id11224 +id11225 +id11226 +id11227 +id11228 +id11229 +id11230 +id11231 +id11232 +id11233 +id11234 +id11235 +id11236 +id11237 +id11238 +id11239 +id11240 +id11241 +id11242 +id11243 +id11244 +id11245 +id11246 +id11247 +id11248 +id11249 +id11250 +id11251 diff --git a/PaddleAudio/examples/speaker/data/stat.pd b/PaddleAudio/examples/speaker/data/stat.pd new file mode 100644 index 0000000000000000000000000000000000000000..bb1c48fbaa49212cb90bdfe06f484075176aadee GIT binary patch literal 920 zcmZo*ncBgAT^lw>9r6(v?q>EVelN=?qs zD=sN2O)i--c}fo}SmBf&_Pi8`lF3uN8Cs_}GbT-GpAs}h!<*4NpmjVkKmaDZ8A_%kbvijPGB6}o7~5}PthL|6IKe)YVW#~S z)7kdz$EVu=b?C95u(ZoQ@j7}%}dPIvd*@gUvSkX701@{y5Xd&ewO3?o!)5i_Rb4}{31yLWX2*S=Y2Irp)pne981kUB7)**CO_@>@VHty}D%|gL?nI1$vA2 z-7ws^uWZNuefL+N-8Z%2_P*Fj&-YF8{JifE&)}Pe+n=pdzgz$Ql@0EWUzyxC4&<=QUphXnQFl-9SqJArA4VJA^8D`MTxno wC8 0, phi, cosine) + else: + phi = paddle.where(cosine > self.th, phi, cosine - self.mm) + target_one_hot = F.one_hot(targets, self.n_classes) + outputs = (target_one_hot * phi) + ((1.0 - target_one_hot) * cosine) + outputs = 
self.scale * outputs + pred = F.log_softmax(outputs, axis=-1) + + return self.nll_loss(pred, targets), pred + + +class CMSoftmax(AngularMargin): + def __init__(self, + margin=0.0, + margin2=0.0, + scale=1.0, + feature_dim=256, + n_classes=1000, + easy_margin=False): + super(CMSoftmax, self).__init__(margin, scale) + self.easy_margin = easy_margin + self.w = paddle.create_parameter((feature_dim, n_classes), 'float32') + self.cos_m = math.cos(self.margin) + self.sin_m = math.sin(self.margin) + self.th = math.cos(math.pi - self.margin) + self.mm = math.sin(math.pi - self.margin) * self.margin + self.nll_loss = nn.NLLLoss() + self.n_classes = n_classes + self.margin2 = margin2 + + def forward(self, logits, targets): + logits = F.normalize(logits, p=2, axis=1, epsilon=1e-8) + wn = F.normalize(self.w, p=2, axis=0, epsilon=1e-8) + cosine = logits @ wn + + sine = paddle.sqrt(1.0 - paddle.square(cosine)) + phi = cosine * self.cos_m - sine * self.sin_m # cos(theta + m) + if self.easy_margin: + phi = paddle.where(cosine > 0, phi, cosine) + else: + phi = paddle.where(cosine > self.th, phi, cosine - self.mm) + target_one_hot = F.one_hot(targets, self.n_classes) + outputs = (target_one_hot * phi) + ( + (1.0 - target_one_hot) * cosine) - target_one_hot * self.margin2 + outputs = self.scale * outputs + pred = F.log_softmax(outputs, axis=-1) + + return self.nll_loss(pred, targets), pred diff --git a/PaddleAudio/examples/speaker/metrics.py b/PaddleAudio/examples/speaker/metrics.py new file mode 100644 index 00000000..3725e75f --- /dev/null +++ b/PaddleAudio/examples/speaker/metrics.py @@ -0,0 +1,93 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import namedtuple +from typing import List, Tuple, Union + +import numpy as np + + +def compute_eer( + scores: Union[np.ndarray, List[float]], labels: Union[np.ndarray, List[int]] +) -> Tuple[float, float, np.ndarray, np.ndarray]: + """Compute equal error rate(EER) given matching scores and corresponding labels + + Parameters: + scores(np.ndarray,list): the cosine similarity between two speaker embeddings. + labels(np.ndarray,list): the labels of the speaker pairs, with value 1 indicates same speaker and 0 otherwise. + + Returns: + eer(float): the equal error rate. + thresh_for_eer(float): the thresh value at which false acceptance rate equals to false rejection rate. + fr_rate(np.ndarray): the false rejection rate as a function of increasing thresholds. + fa_rate(np.ndarray): the false acceptance rate as a function of increasing thresholds. + + """ + if isinstance(labels, list): + labels = np.array(labels) + if isinstance(scores, list): + scores = np.array(scores) + label_set = list(np.unique(labels)) + assert len( + label_set + ) == 2, f'the input labels must contains both two labels, but recieved set(labels) = {label_set}' + label_set.sort() + assert label_set == [ + 0, 1 + ], 'the input labels must contain 0 and 1 for distinct and identical id. 
' + eps = 1e-8 + #assert np.min(scores) >= -1.0 - eps and np.max( + # scores + # ) < 1.0 + eps, 'the score must be in the range between -1.0 and 1.0' + same_id_scores = scores[labels == 1] + diff_id_scores = scores[labels == 0] + thresh = np.linspace(np.min(diff_id_scores), np.max(same_id_scores), 1000) + thresh = np.expand_dims(thresh, 1) + fr_matrix = same_id_scores < thresh + fa_matrix = diff_id_scores >= thresh + fr_rate = np.mean(fr_matrix, 1) + fa_rate = np.mean(fa_matrix, 1) + + thresh_idx = np.argmin(np.abs(fa_rate - fr_rate)) + result = namedtuple('speaker', ('eer', 'thresh', 'fa', 'fr')) + result.eer = (fr_rate[thresh_idx] + fa_rate[thresh_idx]) / 2 + result.thresh = thresh[thresh_idx, 0] + result.fr = fr_rate + result.fa = fa_rate + + return result + + +def compute_min_dcf(fr_rate, fa_rate, p_target=0.05, c_miss=1.0, c_fa=1.0): + """ Compute normalized minimum detection cost function (minDCF) given + the costs for false accepts and false rejects as well as a priori + probability for target speakers + + Parameters: + fr_rate(np.ndarray): the false rejection rate as a function of increasing thresholds. + fa_rate(np.ndarray): the false acceptance rate as a function of increasing thresholds. + p_target(float): the prior probability of being a target. + c_miss(float): cost of miss detection(false rejects). + c_fa(float): cost of miss detection(false accepts). + + Returns: + min_cdf(float): the normalized minimum detection cost function (minDCF) + + """ + + dcf = c_miss * fr_rate * p_target + c_fa * fa_rate * (1 - p_target) + c_det = np.min(dcf) + c_def = min(c_miss * p_target, c_fa * (1 - p_target)) + min_cdf = c_det / c_def + return min_cdf diff --git a/PaddleAudio/examples/speaker/models/__init__.py b/PaddleAudio/examples/speaker/models/__init__.py new file mode 100644 index 00000000..6f4f3794 --- /dev/null +++ b/PaddleAudio/examples/speaker/models/__init__.py @@ -0,0 +1,17 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .ecapa_tdnn import EcapaTDNN +from .resnet_se34 import ResNetSE34 +from .resnet_se34v2 import ResNetSE34V2 diff --git a/PaddleAudio/examples/speaker/models/ecapa_tdnn.py b/PaddleAudio/examples/speaker/models/ecapa_tdnn.py new file mode 100644 index 00000000..f6171be8 --- /dev/null +++ b/PaddleAudio/examples/speaker/models/ecapa_tdnn.py @@ -0,0 +1,417 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import math +import os + +import numpy as np +import paddle +import paddle.nn as nn +import paddle.nn.functional as F + + +def length_to_mask(length, max_len=None, dtype=None): + assert len(length.shape) == 1 + + if max_len is None: + max_len = length.max().astype( + 'int').item() # using arange to generate mask + mask = paddle.arange(max_len, dtype=length.dtype).expand( + (len(length), max_len)) < length.unsqueeze(1) + + if dtype is None: + dtype = length.dtype + + mask = paddle.to_tensor(mask, dtype=dtype) + return mask + + +class Conv1d(nn.Layer): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding="same", + dilation=1, + groups=1, + bias=True, + padding_mode="reflect", + ): + super(Conv1d, self).__init__() + + self.kernel_size = kernel_size + self.stride = stride + self.dilation = dilation + self.padding = padding + self.padding_mode = padding_mode + + self.conv = nn.Conv1D( + in_channels, + out_channels, + self.kernel_size, + stride=self.stride, + padding=0, + dilation=self.dilation, + groups=groups, + bias_attr=bias, + ) + + def forward(self, x): + if self.padding == "same": + x = self._manage_padding(x, self.kernel_size, self.dilation, + self.stride) + else: + raise ValueError("Padding must be 'same'. 
Got {self.padding}") + + return self.conv(x) + + def _manage_padding(self, x, kernel_size: int, dilation: int, stride: int): + L_in = x.shape[-1] # Detecting input shape + padding = self._get_padding_elem(L_in, stride, kernel_size, + dilation) # Time padding + x = F.pad(x, padding, mode=self.padding_mode, + data_format="NCL") # Applying padding + return x + + def _get_padding_elem(self, L_in: int, stride: int, kernel_size: int, + dilation: int): + if stride > 1: + n_steps = math.ceil(((L_in - kernel_size * dilation) / stride) + 1) + L_out = stride * (n_steps - 1) + kernel_size * dilation + padding = [kernel_size // 2, kernel_size // 2] + else: + L_out = (L_in - dilation * (kernel_size - 1) - 1) // stride + 1 + + padding = [(L_in - L_out) // 2, (L_in - L_out) // 2] + + return padding + + +class BatchNorm1d(nn.Layer): + def __init__( + self, + input_size, + eps=1e-05, + momentum=0.9, + weight_attr=None, + bias_attr=None, + data_format='NCL', + use_global_stats=None, + ): + super(BatchNorm1d, self).__init__() + + self.norm = nn.BatchNorm1D( + input_size, + epsilon=eps, + momentum=momentum, + weight_attr=weight_attr, + bias_attr=bias_attr, + data_format=data_format, + use_global_stats=use_global_stats, + ) + + def forward(self, x): + x_n = self.norm(x) + return x_n + + +class TDNNBlock(nn.Layer): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + dilation, + activation=nn.ReLU, + ): + super(TDNNBlock, self).__init__() + self.conv = Conv1d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + dilation=dilation, + ) + self.activation = activation() + self.norm = BatchNorm1d(input_size=out_channels) + + def forward(self, x): + return self.norm(self.activation(self.conv(x))) + + +class Res2NetBlock(nn.Layer): + def __init__(self, in_channels, out_channels, scale=8, dilation=1): + super(Res2NetBlock, self).__init__() + assert in_channels % scale == 0 + assert out_channels % scale == 0 + + in_channel = in_channels // scale 
+ hidden_channel = out_channels // scale + + self.blocks = nn.LayerList([ + TDNNBlock(in_channel, + hidden_channel, + kernel_size=3, + dilation=dilation) for i in range(scale - 1) + ]) + self.scale = scale + + def forward(self, x): + y = [] + for i, x_i in enumerate(paddle.chunk(x, self.scale, axis=1)): + if i == 0: + y_i = x_i + elif i == 1: + y_i = self.blocks[i - 1](x_i) + else: + y_i = self.blocks[i - 1](x_i + y_i) + y.append(y_i) + y = paddle.concat(y, axis=1) + return y + + +class SEBlock(nn.Layer): + def __init__(self, in_channels, se_channels, out_channels): + super(SEBlock, self).__init__() + + self.conv1 = Conv1d(in_channels=in_channels, + out_channels=se_channels, + kernel_size=1) + self.relu = paddle.nn.ReLU() + self.conv2 = Conv1d(in_channels=se_channels, + out_channels=out_channels, + kernel_size=1) + self.sigmoid = paddle.nn.Sigmoid() + + def forward(self, x, lengths=None): + L = x.shape[-1] + if lengths is not None: + mask = length_to_mask(lengths * L, max_len=L) + mask = mask.unsqueeze(1) + total = mask.sum(axis=2, keepdim=True) + s = (x * mask).sum(axis=2, keepdim=True) / total + else: + s = x.mean(axis=2, keepdim=True) + + s = self.relu(self.conv1(s)) + s = self.sigmoid(self.conv2(s)) + + return s * x + + +class AttentiveStatisticsPooling(nn.Layer): + def __init__(self, channels, attention_channels=128, global_context=True): + super().__init__() + + self.eps = 1e-12 + self.global_context = global_context + if global_context: + self.tdnn = TDNNBlock(channels * 3, attention_channels, 1, 1) + else: + self.tdnn = TDNNBlock(channels, attention_channels, 1, 1) + self.tanh = nn.Tanh() + self.conv = Conv1d(in_channels=attention_channels, + out_channels=channels, + kernel_size=1) + + def forward(self, x, lengths=None): + C, L = x.shape[1], x.shape[2] # KP: (N, C, L) + + def _compute_statistics(x, m, axis=2, eps=self.eps): + mean = (m * x).sum(axis) + std = paddle.sqrt( + (m * (x - mean.unsqueeze(axis)).pow(2)).sum(axis).clip(eps)) + return mean, std + + 
if lengths is None: + lengths = paddle.ones([x.shape[0]]) + + # Make binary mask of shape [N, 1, L] + mask = length_to_mask(lengths * L, max_len=L) + mask = mask.unsqueeze(1) + + # Expand the temporal context of the pooling layer by allowing the + # self-attention to look at global properties of the utterance. + if self.global_context: + total = mask.sum(axis=2, keepdim=True).astype('float32') + mean, std = _compute_statistics(x, mask / total) + mean = mean.unsqueeze(2).tile((1, 1, L)) + std = std.unsqueeze(2).tile((1, 1, L)) + attn = paddle.concat([x, mean, std], axis=1) + else: + attn = x + + # Apply layers + attn = self.conv(self.tanh(self.tdnn(attn))) + + # Filter out zero-paddings + attn = paddle.where( + mask.tile((1, C, 1)) == 0, + paddle.ones_like(attn) * float("-inf"), attn) + + attn = F.softmax(attn, axis=2) + mean, std = _compute_statistics(x, attn) + + # Append mean and std of the batch + pooled_stats = paddle.concat((mean, std), axis=1) + pooled_stats = pooled_stats.unsqueeze(2) + + return pooled_stats + + +class SERes2NetBlock(nn.Layer): + def __init__( + self, + in_channels, + out_channels, + res2net_scale=8, + se_channels=128, + kernel_size=1, + dilation=1, + activation=nn.ReLU, + ): + super(SERes2NetBlock, self).__init__() + self.out_channels = out_channels + self.tdnn1 = TDNNBlock( + in_channels, + out_channels, + kernel_size=1, + dilation=1, + activation=activation, + ) + self.res2net_block = Res2NetBlock(out_channels, out_channels, + res2net_scale, dilation) + self.tdnn2 = TDNNBlock( + out_channels, + out_channels, + kernel_size=1, + dilation=1, + activation=activation, + ) + self.se_block = SEBlock(out_channels, se_channels, out_channels) + + self.shortcut = None + if in_channels != out_channels: + self.shortcut = Conv1d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=1, + ) + + def forward(self, x, lengths=None): + residual = x + if self.shortcut: + residual = self.shortcut(x) + + x = self.tdnn1(x) + x = 
self.res2net_block(x) + x = self.tdnn2(x) + x = self.se_block(x, lengths) + + return x + residual + + +class EcapaTDNN(nn.Layer): + def __init__(self, + input_size, + lin_neurons=192, + activation=nn.ReLU, + channels=[1024, 1024, 1024, 1024, 3072], + kernel_sizes=[5, 3, 3, 3, 1], + dilations=[1, 2, 3, 4, 1], + attention_channels=128, + res2net_scale=8, + se_channels=128, + global_context=True): + + super(EcapaTDNN, self).__init__() + + assert len(channels) == len(kernel_sizes) + assert len(channels) == len(dilations) + self.channels = channels + self.blocks = nn.LayerList() + self.emb_size = lin_neurons + + # The initial TDNN layer + self.blocks.append( + TDNNBlock( + input_size, + channels[0], + kernel_sizes[0], + dilations[0], + activation, + )) + + # SE-Res2Net layers + for i in range(1, len(channels) - 1): + self.blocks.append( + SERes2NetBlock( + channels[i - 1], + channels[i], + res2net_scale=res2net_scale, + se_channels=se_channels, + kernel_size=kernel_sizes[i], + dilation=dilations[i], + activation=activation, + )) + + # Multi-layer feature aggregation + self.mfa = TDNNBlock( + channels[-1], + channels[-1], + kernel_sizes[-1], + dilations[-1], + activation, + ) + + # Attentive Statistical Pooling + self.asp = AttentiveStatisticsPooling( + channels[-1], + attention_channels=attention_channels, + global_context=global_context, + ) + self.asp_bn = BatchNorm1d(input_size=channels[-1] * 2) + + # Final linear transformation + self.fc = Conv1d( + in_channels=channels[-1] * 2, + out_channels=self.emb_size, + kernel_size=1, + ) + self.drop = nn.Dropout(0.25) + + def forward(self, x, lengths=None): + + xl = [] + for layer in self.blocks: + try: + x = layer(x, lengths=lengths) + except TypeError: + x = layer(x) + xl.append(x) + + # Multi-layer feature aggregation + x = paddle.concat(xl[1:], axis=1) + x = self.mfa(x) + + # Attentive Statistical Pooling + x = self.asp(x, lengths=lengths) + x = self.asp_bn(x) + + # Final linear transformation + x = self.drop(x) + x = 
self.fc(x) + x = x[:, :, 0] + return x diff --git a/PaddleAudio/examples/speaker/models/resnet_blocks.py b/PaddleAudio/examples/speaker/models/resnet_blocks.py new file mode 100644 index 00000000..70a5bed6 --- /dev/null +++ b/PaddleAudio/examples/speaker/models/resnet_blocks.py @@ -0,0 +1,132 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import paddle +import paddle.nn as nn + + +class SEBasicBlock(nn.Layer): + expansion = 1 + + def __init__(self, + inplanes, + planes, + stride=1, + downsample=None, + reduction=8): + super(SEBasicBlock, self).__init__() + self.conv1 = nn.Conv2D(inplanes, + planes, + kernel_size=3, + stride=stride, + padding=1, + bias_attr=False) + self.bn1 = nn.BatchNorm2D(planes) + self.conv2 = nn.Conv2D(planes, + planes, + kernel_size=3, + padding=1, + bias_attr=False) + self.bn2 = nn.BatchNorm2D(planes) + self.relu = nn.ReLU() + self.se = SELayer(planes, reduction) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.relu(out) + out = self.bn1(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.se(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + return out + + +class SEBottleneck(nn.Layer): + expansion = 4 + + def __init__(self, + inplanes, + planes, + stride=1, + downsample=None, + reduction=8): + super(SEBottleneck, 
self).__init__() + self.conv1 = nn.Conv2D(inplanes, planes, kernel_size=1, bias_attr=False) + self.bn1 = nn.BatchNorm2D(planes) + self.conv2 = nn.Conv2D(planes, + planes, + kernel_size=3, + stride=stride, + padding=1, + bias_attr=False) + self.bn2 = nn.BatchNorm2D(planes) + self.conv3 = nn.Conv2D(planes, + planes * 4, + kernel_size=1, + bias_attr=False) + self.bn3 = nn.BatchNorm2D(planes * 4) + self.relu = nn.ReLU() + self.se = SELayer(planes * 4, reduction) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + out = self.se(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class SELayer(nn.Layer): + def __init__(self, channel, reduction=8): + super(SELayer, self).__init__() + self.avg_pool = nn.AdaptiveAvgPool2D(1) + self.fc = nn.Sequential(nn.Linear(channel, channel // reduction), + nn.ReLU(), + nn.Linear(channel // reduction, channel), + nn.Sigmoid()) + + def forward(self, x): + b, c, _, _ = x.shape + y = self.avg_pool(x).reshape((b, c)) + y = self.fc(y).reshape((b, c, 1, 1)) + return x * y diff --git a/PaddleAudio/examples/speaker/models/resnet_se34.py b/PaddleAudio/examples/speaker/models/resnet_se34.py new file mode 100644 index 00000000..8261aeaa --- /dev/null +++ b/PaddleAudio/examples/speaker/models/resnet_se34.py @@ -0,0 +1,151 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numpy as np +import paddle +import paddle.nn as nn +import paddle.nn.functional as F +from paddleaudio.transforms import LogMelSpectrogram, MelSpectrogram + +from .resnet_blocks import SEBasicBlock, SEBottleneck + + +class ResNetSE(nn.Layer): + def __init__(self, + block, + layers, + num_filters, + feature_dim, + feature_config, + encoder_type='SAP', + n_mels=80, + log_input=True, + **kwargs): + super(ResNetSE, self).__init__() + + print('Embedding size is %d, encoder %s.' % (feature_dim, encoder_type)) + + self.inplanes = num_filters[0] + self.encoder_type = encoder_type + self.n_mels = n_mels + self.log_input = log_input + + self.conv1 = nn.Conv2D(1, + num_filters[0], + kernel_size=3, + stride=1, + padding=1) + self.relu = nn.ReLU() + self.bn1 = nn.BatchNorm2D(num_filters[0]) + + self.layer1 = self._make_layer(block, num_filters[0], layers[0]) + self.layer2 = self._make_layer(block, + num_filters[1], + layers[1], + stride=(2, 2)) + self.layer3 = self._make_layer(block, + num_filters[2], + layers[2], + stride=(2, 2)) + self.layer4 = self._make_layer(block, + num_filters[3], + layers[3], + stride=(2, 2)) + + # self.instancenorm = nn.InstanceNorm1D(n_mels) + + outmap_size = int(self.n_mels / 8) + + self.attention = nn.Sequential( + nn.Conv1D(num_filters[3] * outmap_size, 128, kernel_size=1), + nn.ReLU(), + nn.BatchNorm1D(128), + nn.Conv1D(128, num_filters[3] * outmap_size, kernel_size=1), + nn.Softmax(axis=2), + ) + + if self.encoder_type == "SAP": + out_dim = num_filters[3] * outmap_size + elif self.encoder_type == "ASP": + out_dim = 
num_filters[3] * outmap_size * 2 + else: + raise ValueError('Undefined encoder') + + self.fc = nn.Linear(out_dim, feature_dim) + self.melspectrogram = LogMelSpectrogram(**feature_config) + + def _make_layer(self, block, planes, blocks, stride=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2D(self.inplanes, + planes * block.expansion, + kernel_size=1, + stride=stride, + bias_attr=False), + nn.BatchNorm2D(planes * block.expansion), + ) + + layers = [] + layers.append(block(self.inplanes, planes, stride, downsample)) + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes)) + + return nn.Sequential(*layers) + + def new_parameter(self, size): + out = paddle.create_parameter(size, 'float32') + nn.initializer.XavierNormal(out) + return out + + def forward(self, x): + + x = x.unsqueeze(1) + x = self.conv1(x) + x = self.relu(x) + x = self.bn1(x) + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + + x = x.reshape((x.shape[0], -1, x.shape[-1])) + w = self.attention(x) + if self.encoder_type == "SAP": + x = paddle.sum(x * w, axis=2) + elif self.encoder_type == "ASP": + mu = paddle.sum(x * w, axis=2) + sg = paddle.sum((x**2) * w, axis=2) - mu**2 + sg = paddle.clip(sg, min=1e-5) + sg = paddle.sqrt(sg) + x = paddle.concat((mu, sg), 1) + x = x.reshape((x.shape[0], -1)) + x = self.fc(x) + return x + + +def ResNetSE34(feature_dim=256, scale_factor=1, **kwargs): + # Number of filters + num_filters = [ + 32 * scale_factor, 64 * scale_factor, 128 * scale_factor, + 256 * scale_factor + ] + model = ResNetSE(SEBasicBlock, [3, 4, 6, 3], num_filters, feature_dim, + **kwargs) + return model + + +if __name__ == '__main__': + print(ResNetSE34()) diff --git a/PaddleAudio/examples/speaker/models/resnet_se34v2.py b/PaddleAudio/examples/speaker/models/resnet_se34v2.py new file mode 100644 index 00000000..2592e06f --- /dev/null 
+++ b/PaddleAudio/examples/speaker/models/resnet_se34v2.py @@ -0,0 +1,152 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numpy as np +import paddle +import paddle.nn as nn +import paddle.nn.functional as F +import paddleaudio +from paddleaudio.transforms import LogMelSpectrogram, MelSpectrogram + +from .resnet_blocks import SEBasicBlock, SEBottleneck + + +class ResNetSE(nn.Layer): + def __init__(self, + block, + layers, + num_filters, + feature_dim, + encoder_type='SAP', + n_mels=40, + log_input=True, + **kwargs): + super(ResNetSE, self).__init__() + + print('Embedding size is %d, encoder %s.' 
% (feature_dim, encoder_type)) + + self.inplanes = num_filters[0] + self.encoder_type = encoder_type + self.n_mels = n_mels + self.log_input = log_input + + self.conv1 = nn.Conv2D(1, + num_filters[0], + kernel_size=3, + stride=1, + padding=1) + self.relu = nn.ReLU() + self.bn1 = nn.BatchNorm2D(num_filters[0]) + + self.layer1 = self._make_layer(block, num_filters[0], layers[0]) + self.layer2 = self._make_layer(block, + num_filters[1], + layers[1], + stride=(2, 2)) + self.layer3 = self._make_layer(block, + num_filters[2], + layers[2], + stride=(2, 2)) + self.layer4 = self._make_layer(block, + num_filters[3], + layers[3], + stride=(2, 2)) + + outmap_size = int(self.n_mels / 8) + + self.attention = nn.Sequential( + nn.Conv1D(num_filters[3] * outmap_size, 128, kernel_size=1), + nn.ReLU(), + nn.BatchNorm1D(128), + nn.Conv1D(128, num_filters[3] * outmap_size, kernel_size=1), + nn.Softmax(axis=2), + ) + + if self.encoder_type == "SAP": + out_dim = num_filters[3] * outmap_size + elif self.encoder_type == "ASP": + out_dim = num_filters[3] * outmap_size * 2 + else: + raise ValueError('Undefined encoder') + + self.fc = nn.Linear(out_dim, feature_dim) + + def _make_layer(self, block, planes, blocks, stride=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2D(self.inplanes, + planes * block.expansion, + kernel_size=1, + stride=stride, + bias_attr=False), + nn.BatchNorm2D(planes * block.expansion), + ) + + layers = [] + layers.append(block(self.inplanes, planes, stride, downsample)) + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes)) + + return nn.Sequential(*layers) + + def new_parameter(self, *size): + + out = paddle.create_parameter(size, 'float32') + nn.initializer.XavierNormal(out) + return out + + def forward(self, x): + + x = x.unsqueeze(1) + x = self.conv1(x) + x = self.relu(x) + x = self.bn1(x) + + x = self.layer1(x) + x = 
self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + + x = x.reshape((x.shape[0], -1, x.shape[-1])) + w = self.attention(x) + if self.encoder_type == "SAP": + x = paddle.sum(x * w, axis=2) + elif self.encoder_type == "ASP": + mu = paddle.sum(x * w, axis=2) + sg = paddle.sum((x**2) * w, axis=2) - mu**2 + sg = paddle.clip(sg, min=1e-5) + sg = paddle.sqrt(sg) + x = paddle.concat((mu, sg), 1) + + x = x.reshape((x.shape[0], -1)) + x = self.fc(x) + + return x + + +def ResNetSE34V2(feature_dim=256, scale_factor=1, **kwargs): + # Number of filters + num_filters = [ + 32 * scale_factor, 64 * scale_factor, 128 * scale_factor, + 256 * scale_factor + ] + model = ResNetSE(SEBasicBlock, [3, 4, 6, 3], num_filters, feature_dim, + **kwargs) + return model + + +if __name__ == '__main__': + print(ResNetSE34V2()) diff --git a/PaddleAudio/examples/speaker/requirements.txt b/PaddleAudio/examples/speaker/requirements.txt new file mode 100644 index 00000000..7861f0db --- /dev/null +++ b/PaddleAudio/examples/speaker/requirements.txt @@ -0,0 +1,2 @@ +paddlepaddle-gpu==2.1.1 +PyYAML==5.4.1 diff --git a/PaddleAudio/examples/speaker/test.py b/PaddleAudio/examples/speaker/test.py new file mode 100644 index 00000000..a80590af --- /dev/null +++ b/PaddleAudio/examples/speaker/test.py @@ -0,0 +1,135 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse +import os + +import metrics +import numpy as np +import paddle +import paddle.nn as nn +import paddle.nn.functional as F +import paddleaudio +import yaml +from dataset import get_val_loader +from paddleaudio.transforms import * +from paddleaudio.utils import get_logger + +from models import EcapaTDNN, ResNetSE34, ResNetSE34V2 + +logger = get_logger() + +file2feature = {} + + +def get_feature(file, model, melspectrogram, random_sampling=False): + global file2feature + if file in file2feature: + return file2feature[file] + s0, _ = paddleaudio.load(file, sr=16000) #, norm_type='gaussian') + s = paddle.to_tensor(s0[None, :]) + s = melspectrogram(s).astype('float32') + with paddle.no_grad(): + feature = model(s) #.squeeze() + feature = feature / paddle.sqrt(paddle.sum(feature**2)) + + file2feature.update({file: feature}) + return feature + + +class Normalize: + def __init__(self, mean_file, eps=1e-5): + self.eps = eps + mean = paddle.load(mean_file)['mean'] + self.mean = mean.unsqueeze((0, 2)) + + def __call__(self, x): + assert x.ndim == 3 + return x - self.mean + + +def get_score(features1, features2): # feature mean + score = float(paddle.dot(features1.squeeze(), features2.squeeze())) + return score + + +def compute_eer(config, model): + transforms = [] + melspectrogram = LogMelSpectrogram(**config['fbank']) + transforms += [melspectrogram] + if config['normalize']: + transforms += [Normalize(config['mean_std_file'])] + transforms = Compose(transforms) + global file2feature # to avoid repeated computation + file2feature = {} + test_list = config['test_list'] + test_folder = config['test_folder'] + model.eval() + with open(test_list) as f: + lines = f.read().split('\n') + label_wav_pairs = [l.split() for l in lines if len(l) > 0] + logger.info(f'{len(label_wav_pairs)} test pairs listed') + labels = [] + scores = [] + for i, (label, f1, f2) in enumerate(label_wav_pairs): + full_path1 = os.path.join(test_folder, f1) + full_path2 = 
os.path.join(test_folder, f2) + feature1 = get_feature(full_path1, model, transforms) + feature2 = get_feature(full_path2, model, transforms) + score = get_score(feature1, feature2) + labels.append(label) + scores.append(score) + if i % (len(label_wav_pairs) // 10) == 0: + logger.info(f'processed {i}|{len(label_wav_pairs)}') + + scores = np.array(scores) + labels = np.array([int(l) for l in labels]) + result = metrics.compute_eer(scores, labels) + min_dcf = metrics.compute_min_dcf(result.fr, result.fa) + logger.info(f'eer={result.eer}, thresh={result.thresh}, minDCF={min_dcf}') + return result, min_dcf + + +if __name__ == '__main__': + + parser = argparse.ArgumentParser() + parser.add_argument('-c', + '--config', + type=str, + required=False, + default='config.yaml') + parser.add_argument( + '-d', + '--device', + default="gpu", + help="Select which device to train model, defaults to gpu.") + parser.add_argument('-w', '--weight', type=str, required=True) + args = parser.parse_args() + + with open(args.config) as f: + config = yaml.safe_load(f) + paddle.set_device(args.device) + logger.info('model:' + config['model']['name']) + logger.info('device: ' + args.device) + + logger.info(f'using ' + config['model']['name']) + ModelClass = eval(config['model']['name']) + model = ModelClass(**config['model']['params']) + state_dict = paddle.load(args.weight) + if 'model' in state_dict.keys(): + state_dict = state_dict['model'] + + model.load_dict(state_dict) + result, min_dcf = compute_eer(config, model) + logger.info(f'eer={result.eer}, thresh={result.thresh}, minDCF={min_dcf}') diff --git a/PaddleAudio/examples/speaker/train.py b/PaddleAudio/examples/speaker/train.py new file mode 100644 index 00000000..1f4b7905 --- /dev/null +++ b/PaddleAudio/examples/speaker/train.py @@ -0,0 +1,301 @@ +# Copyright (c) 2021 PaddlePaddle Authors. 
All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import glob +import os +import random +import time +from test import compute_eer + +import numpy as np +import paddle +import paddle.distributed as dist +import paddle.nn as nn +import paddle.nn.functional as F +import yaml +from dataset import get_train_loader +from losses import AdditiveAngularMargin, AMSoftmaxLoss, CMSoftmax +from paddle.optimizer import SGD, Adam +from paddle.utils import download +from paddleaudio.transforms import * +from paddleaudio.utils import get_logger +from utils import NoiseSource, Normalize, RIRSource + +from models import * + + +def get_lr(step, base_lr, max_lr, half_cycle=5000, reverse=False): + if int(step / half_cycle) % 2 == 0: + lr = (step % half_cycle) / half_cycle * (max_lr - base_lr) + lr = base_lr + lr + else: + lr = (step % half_cycle / half_cycle) * (max_lr - base_lr) + lr = max_lr - lr + lr = max_lr - lr + + return lr + + +def freeze_bn(layer): + if isinstance(layer, paddle.nn.BatchNorm1D): + layer._momentum = 0.8 + print(layer._momentum) + if isinstance(layer, paddle.nn.BatchNorm2D): + layer._momentum = 0.8 + print(layer._momentum) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('-c', '--config', type=str, required=True) + parser.add_argument( + '-d', + '--device', + default="gpu", + help='Select which device to train model, defaults to gpu.') + parser.add_argument( + '-r', + '--restore', + 
type=int, + required=False, + default=-1, + help= + 'the epoch number to restore from(the checkpoint contains weights for model/loss/optimizer)' + ) + parser.add_argument('-w', + '--weight', + type=str, + required=False, + default='', + help='the model wieght to restore form') + parser.add_argument('-e', + '--eval_at_begin', + type=bool, + choices=[True, False], + required=False, + default=False) + parser.add_argument('--distributed', + type=bool, + choices=[True, False], + required=False, + default=False) + args = parser.parse_args() + with open(args.config) as f: + config = yaml.safe_load(f) + + os.makedirs(config['log_dir'], exist_ok=True) + logger = get_logger(__file__, + log_dir=config['log_dir'], + log_file_name=config['log_file']) + + prefix = config['model_prefix'] + + if args.distributed: + dist.init_parallel_env() + local_rank = dist.get_rank() + print(local_rank) + else: + paddle.set_device(args.device) + local_rank = 0 + + logger.info(f'using ' + config['model']['name']) + ModelClass = eval(config['model']['name']) + model = ModelClass(**config['model']['params']) + #define loss and lr + LossClass = eval(config['loss']['name']) + loss_fn = LossClass(**config['loss']['params']) + loss_fn.train() + params = model.parameters() + loss_fn.parameters() + + transforms = [] + if config['augment_wav']: + noise_source1 = NoiseSource(open( + config['muse_speech']).read().split('\n')[:-1], + sample_rate=16000, + duration=config['duration'], + batch_size=config['batch_size']) + noisify1 = Noisify(noise_source1, + snr_high=config['muse_speech_srn_high'], + snr_low=config['muse_speech_srn_low'], + random=True) + + noise_source2 = NoiseSource(open( + config['muse_music']).read().split('\n')[:-1], + sample_rate=16000, + duration=config['duration'], + batch_size=config['batch_size']) + noisify2 = Noisify(noise_source2, + snr_high=config['muse_music_srn_high'], + snr_low=config['muse_music_srn_low'], + random=True) + noise_source3 = NoiseSource(open( + 
config['muse_noise']).read().split('\n')[:-1], + sample_rate=16000, + duration=config['duration'], + batch_size=config['batch_size']) + noisify3 = Noisify(noise_source3, + snr_high=config['muse_noise_srn_high'], + snr_low=config['muse_noise_srn_low'], + random=True) + rir_files = open(config['rir_path']).read().split('\n')[:-1] + random_rir_reader = RIRSource(rir_files, random=True, sample_rate=16000) + reverb = Reverberate(rir_source=random_rir_reader) + muse_augment = RandomChoice([noisify1, noisify2, noisify3]) + wav_augments = RandomApply([muse_augment, reverb], 0.25) + transforms += [wav_augments] + melspectrogram = LogMelSpectrogram(**config['fbank']) + transforms += [melspectrogram] + if config['normalize']: + transforms += [Normalize(config['mean_std_file'])] + + if config['augment_mel']: + #define spectrogram masking + time_masking = RandomMasking( + max_mask_count=config['max_time_mask'], + max_mask_width=config['max_time_mask_width'], + axis=-1) + freq_masking = RandomMasking( + max_mask_count=config['max_freq_mask'], + max_mask_width=config['max_freq_mask_width'], + axis=-2) + + mel_augments = RandomApply([freq_masking, time_masking], p=0.25) + transforms += [mel_augments] + transforms = Compose(transforms) + + if args.restore != -1: + logger.info(f'restoring from checkpoint {args.restore}') + fn = os.path.join(config['model_dir'], + f'{prefix}_checkpoint_epoch{args.restore}.tar') + ckpt = paddle.load(fn) + model.load_dict(ckpt['model']) + optimizer = Adam(learning_rate=config['max_lr'], parameters=params) + opti_state_dict = ckpt['opti'] + try: + optimizer.set_state_dict(opti_state_dict) + except: + logger.error('failed to load state dict for optimizers') + try: + loss_fn.load_dict(ckpt['loss']) + except: + logger.error('failed to load state dict for loss') + + start_epoch = args.restore + 1 + else: + start_epoch = 0 + optimizer = Adam(learning_rate=config['max_lr'], parameters=params) + + if args.weight != '': + logger.info(f'loading weight from 
{args.weight}') + sd = paddle.load(args.weight) + model.load_dict(sd) + + os.makedirs(config['model_dir'], exist_ok=True) + + if args.distributed: + model = paddle.DataParallel(model) + train_loader = get_train_loader(config) + epoch_num = config['epoch_num'] + if args.restore != -1 and local_rank == 0 and args.eval_at_begin: + result, min_dcf = compute_eer(config, model) + best_eer = result.eer #0.022#result.eer + logger.info(f'eer: {best_eer}') + else: + best_eer = 1.0 + step = start_epoch * len(train_loader) + + if config.get('freeze_param', None): + for p in list(model.parameters())[:config['freezed_layers']]: + if not isinstance(p, nn.BatchNorm1D): + p.stop_gradient = True + if not isinstance(p, nn.BatchNorm1D): + p.stop_gradient = True + + for epoch in range(start_epoch, epoch_num): + + avg_loss = 0.0 + avg_acc = 0.0 + model.train() + model.clear_gradients() + t0 = time.time() + if config['max_lr'] > config['base_lr']: + lr = get_lr(epoch - start_epoch, config['base_lr'], + config['max_lr'], config['half_cycle'], + config['reverse_lr']) + optimizer.set_lr(lr) + logger.info(f'Setting lr to {lr}') + + for batch_id, (x, y) in enumerate(train_loader()): + + x_mel = transforms(x) + logits = model(x_mel) + loss, pred = loss_fn(logits, y) + loss.backward() + optimizer.step() + model.clear_gradients() + + acc = np.mean(np.argmax(pred.numpy(), axis=1) == y.numpy()) + if batch_id < 100: + avg_acc = acc + avg_loss = loss.numpy()[0] + else: + factor = 0.999 + avg_acc = avg_acc * factor + acc * (1 - factor) + avg_loss = avg_loss * factor + loss.numpy()[0] * (1 - factor) + + elapsed = (time.time() - t0) / 3600 + remain = elapsed / (1 + batch_id) * (len(train_loader) - batch_id) + + msg = f'epoch:{epoch}, batch:{batch_id}' + msg += f'|{len(train_loader)}' + msg += f', loss:{avg_loss:.3}' + msg += f', acc:{avg_acc:.3}' + msg += f', lr:{optimizer.get_lr():.2}' + msg += f', elapsed:{elapsed:.3}h' + msg += f', remained:{remain:.3}h' + + if batch_id % config['log_step'] == 0 and 
local_rank == 0: + logger.info(msg) + + if step % config['checkpoint_step'] == 0 and local_rank == 0: + fn = os.path.join(config['model_dir'], + f'{prefix}_checkpoint_epoch{epoch}.tar') + + obj = { + 'model': model.state_dict(), + 'loss': loss_fn.state_dict(), + 'opti': optimizer.state_dict(), + 'lr': optimizer.get_lr() + } + paddle.save(obj, fn) + + if step != 0 and step % config['eval_step'] == 0 and local_rank == 0: + + result, min_dcf = compute_eer(config, model) + eer = result.eer + model.train() + model.clear_gradients() + + if eer < best_eer: + logger.info('eer improved from {} to {}'.format( + best_eer, eer)) + best_eer = eer + fn = os.path.join(config['model_dir'], + f'{prefix}_epoch{epoch}_eer{eer:.3}') + paddle.save(model.state_dict(), fn + '.pdparams') + else: + logger.info(f'eer {eer} did not improve from {best_eer}') + + step += 1 diff --git a/PaddleAudio/examples/speaker/utils.py b/PaddleAudio/examples/speaker/utils.py new file mode 100644 index 00000000..42cae114 --- /dev/null +++ b/PaddleAudio/examples/speaker/utils.py @@ -0,0 +1,195 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import glob +import json +import os +import pickle +import random +from typing import Any, List, Optional, Union + +import numpy as np +import paddle +import paddle.nn as nn +import paddle.nn.functional as F +import paddleaudio +import paddleaudio.functional as F +from paddle import Tensor +from paddle.utils import download + +__all__ = [ + 'NoiseSource', + 'RIRSource', + 'Normalize', +] + + +class NoiseSource: + """Read audio files randomly or sequentially from disk and pack them as a tensor. + Parameters: + audio_path_or_files(os.PathLike|List[os.PathLike]]): the audio folder or the audio file list. + sample_rate(int): the target audio sample rate. If it is different from the native sample rate, + resampling method will be invoked. + duration(float): the duration after the audio is loaded. Padding or random cropping will take place + depending on whether actual audio length is shorter or longer than int(sample_rate*duration). + The audio tensor will have shape [batch_size, int(sample_rate*duration)] + batch_size(int): the number of audio files contained in the returned tensor. + random(bool): whether to read audio file randomly. If False, will read them sequentially. + Default: True. + Notes: + In sequential mode, once the end of audio list is reached, the reader will start over again. + The AudioSource object can be called endlessly. + Shapes: + - output: 2-D tensor with shape [batch_size, int(sample_rate*duration)] + Examples: + .. 
code-block:: python + import paddle + import paddleaudio.transforms as T + reader = AudioSource(, sample_rate=16000, duration=3.0, batch_size=2) + audio = reader(x) + print(audio.shape) + >> [2,48000] + """ + def __init__(self, + audio_path_or_files: Union[os.PathLike, List[os.PathLike]], + sample_rate: int, + duration: float, + batch_size: int, + random: bool = True): + if isinstance(audio_path_or_files, list): + self.audio_files = audio_path_or_files + elif os.path.isdir(audio_path_or_files): + self.audio_files = glob.glob(audio_path_or_files + '/*.wav', + recursive=True) + if len(self.audio_files) == 0: + raise FileNotFoundError( + f'no files were found in {audio_path_or_files}') + elif os.path.isfile(audio_path_or_files): + self.audio_files = [audio_path_or_files] + else: + raise ValueError( + f'rir_path_or_files={audio_path_or_files} is invalid') + + self.n_files = len(self.audio_files) + self.idx = 0 + self.random = random + self.batch_size = batch_size + self.sample_rate = sample_rate + self.duration = int(duration * sample_rate) + self._data = paddle.zeros((self.batch_size, self.duration), + dtype='float32') + + def load_wav(self, file: os.PathLike): + s, _ = paddleaudio.load(file, sr=self.sample_rate) + s = paddle.to_tensor(s) + s = F.random_cropping(s, target_size=self.duration) + s = F.center_padding(s, target_size=self.duration) + + return s + + def __call__(self) -> Tensor: + + if self.random: + files = [ + random.choice(self.audio_files) for _ in range(self.batch_size) + ] + else: + files = [] + for _ in range(self.batch_size): + file = self.audio_files[self.idx] + self.idx += 1 + if self.idx >= self.n_files: + self.idx = 0 + files += [file] + for i, f in enumerate(files): + self._data[i, :] = self.load_wav(f) + + return self._data + + def __repr__(self): + return ( + self.__class__.__name__ + + f'(n_files={self.n_files}, random={self.random}, sample_rate={self.sample_rate})' + ) + + +class RIRSource(nn.Layer): + """Gererate RIR filter coefficients 
from local file sources. + Parameters: + rir_path_or_files(os.PathLike|List[os.PathLike]): the directory that contains rir files directly + (without subfolders) or the list of rir files. + Examples: + .. code-block:: python + import paddle + import paddleaudio.transforms as T + reader = T.RIRSource(, sample_rate=16000, random=True) + weight = reader() + """ + def __init__(self, + rir_path_or_files: Union[os.PathLike, List[os.PathLike]], + sample_rate: int, + random: bool = True): + super(RIRSource, self).__init__() + if isinstance(rir_path_or_files, list): + self.rir_files = rir_path_or_files + elif os.path.isdir(rir_path_or_files): + self.rir_files = glob.glob(rir_path_or_files + '/*.wav', + recursive=True) + if len(self.rir_files) == 0: + raise FileNotFoundError( + f'no files were found in {rir_path_or_files}') + elif os.path.isfile(rir_path_or_files): + self.rir_files = [rir_path_or_files] + else: + raise ValueError( + f'rir_path_or_files={rir_path_or_files} is invalid') + + self.n_files = len(self.rir_files) + self.idx = 0 + self.random = random + self.sample_rate = sample_rate + + def forward(self) -> Tensor: + if self.random: + file = random.choice(self.rir_files) + else: + i = self.idx % self.n_files + file = self.rir_files[i] + self.idx += 1 + if self.idx >= self.n_files: + self.idx = 0 + + rir, _ = paddleaudio.load(file, sr=self.sample_rate, mono=True) + rir_weight = paddle.to_tensor(rir[None, None, ::-1]) + rir_weight = paddle.nn.functional.normalize(rir_weight, p=2, axis=-1) + return rir_weight + + def __repr__(self): + return ( + self.__class__.__name__ + + f'(n_files={self.n_files}, random={self.random}, sample_rate={self.sample_rate})' + ) + + +class Normalize: + def __init__(self, mean_file, eps=1e-5): + self.eps = eps + mean = paddle.load(mean_file)['mean'] + std = paddle.load(mean_file)['std'] + + self.mean = mean.unsqueeze((0, 2)) + + def __call__(self, x): + assert x.ndim == 3 + return x - self.mean -- GitLab