From 79658a5f20df8948afc1431c815637c691c40b6e Mon Sep 17 00:00:00 2001 From: TianYuan Date: Fri, 17 Jun 2022 11:10:58 +0000 Subject: [PATCH] add ernie sat inference, test=tts --- examples/aishell3/ernie_sat/README.md | 1 + examples/aishell3_vctk/README.md | 1 + examples/aishell3_vctk/ernie_sat/README.md | 1 + examples/ernie_sat/.meta/framework.png | Bin 0 -> 143263 bytes examples/ernie_sat/README.md | 137 ++++ examples/ernie_sat/local/align.py | 441 +++++++++++++ examples/ernie_sat/local/inference.py | 601 ++++++++++++++++++ examples/ernie_sat/local/sedit_arg_parser.py | 84 +++ examples/ernie_sat/local/utils.py | 162 +++++ examples/ernie_sat/path.sh | 13 + examples/ernie_sat/prompt/dev/text | 3 + examples/ernie_sat/prompt/dev/wav.scp | 3 + examples/ernie_sat/run_clone_en_to_zh.sh | 27 + examples/ernie_sat/run_gen_en.sh | 26 + examples/ernie_sat/run_sedit_en.sh | 27 + examples/ernie_sat/test_run.sh | 6 + examples/ernie_sat/tools/.gitkeep | 0 examples/vctk/ernie_sat/README.md | 1 + paddlespeech/t2s/datasets/am_batch_fn.py | 190 ++++++ paddlespeech/t2s/exps/syn_utils.py | 21 +- paddlespeech/t2s/models/__init__.py | 1 + paddlespeech/t2s/models/ernie_sat/__init__.py | 14 + paddlespeech/t2s/models/ernie_sat/mlm.py | 601 ++++++++++++++++++ paddlespeech/t2s/modules/losses.py | 52 ++ paddlespeech/t2s/modules/nets_utils.py | 434 ++++++++++++- .../t2s/modules/transformer/attention.py | 96 +++ .../t2s/modules/transformer/embedding.py | 58 ++ 27 files changed, 2981 insertions(+), 20 deletions(-) create mode 100644 examples/aishell3/ernie_sat/README.md create mode 100644 examples/aishell3_vctk/README.md create mode 100644 examples/aishell3_vctk/ernie_sat/README.md create mode 100644 examples/ernie_sat/.meta/framework.png create mode 100644 examples/ernie_sat/README.md create mode 100755 examples/ernie_sat/local/align.py create mode 100644 examples/ernie_sat/local/inference.py create mode 100644 examples/ernie_sat/local/sedit_arg_parser.py create mode 100644 
examples/ernie_sat/local/utils.py create mode 100755 examples/ernie_sat/path.sh create mode 100644 examples/ernie_sat/prompt/dev/text create mode 100644 examples/ernie_sat/prompt/dev/wav.scp create mode 100755 examples/ernie_sat/run_clone_en_to_zh.sh create mode 100755 examples/ernie_sat/run_gen_en.sh create mode 100755 examples/ernie_sat/run_sedit_en.sh create mode 100755 examples/ernie_sat/test_run.sh create mode 100644 examples/ernie_sat/tools/.gitkeep create mode 100644 examples/vctk/ernie_sat/README.md create mode 100644 paddlespeech/t2s/models/ernie_sat/__init__.py create mode 100644 paddlespeech/t2s/models/ernie_sat/mlm.py diff --git a/examples/aishell3/ernie_sat/README.md b/examples/aishell3/ernie_sat/README.md new file mode 100644 index 00000000..8086d007 --- /dev/null +++ b/examples/aishell3/ernie_sat/README.md @@ -0,0 +1 @@ +# ERNIE SAT with AISHELL3 dataset diff --git a/examples/aishell3_vctk/README.md b/examples/aishell3_vctk/README.md new file mode 100644 index 00000000..330b2593 --- /dev/null +++ b/examples/aishell3_vctk/README.md @@ -0,0 +1 @@ +# Mixed Chinese and English TTS with AISHELL3 and VCTK datasets diff --git a/examples/aishell3_vctk/ernie_sat/README.md b/examples/aishell3_vctk/ernie_sat/README.md new file mode 100644 index 00000000..1c6bbe23 --- /dev/null +++ b/examples/aishell3_vctk/ernie_sat/README.md @@ -0,0 +1 @@ +# ERNIE SAT with AISHELL3 and VCTK dataset diff --git a/examples/ernie_sat/.meta/framework.png b/examples/ernie_sat/.meta/framework.png new file mode 100644 index 0000000000000000000000000000000000000000..c68f62467952aaca91f290e5dead723078343d64 GIT binary patch literal 143263 zcmeFZcR*9Ynm2rC(m|xxC`CX7q<4Y^5Ks^lkRnk*0qFt)5{QC;bU^_@rB^8tX;LFy zL5%d?drzo=wD)-1-rc?5-F@%g@7?`l2L?_~nVDzinWz3v$Rp$#;H06RfgV6X0RUIQ zAAmdyT+{V;eh2{9uLI`*0H6aXBAfwg@CdvG-~%fFP-Rj8RN#M#-{&$Z|9q9|c_!6g zj?I4GNZtfgZ#a4Qcz8Q`c%D{}xd5nMGPq9tyEU-<;aKbshbIRFV@BhF&(y2W=-+PO zE(Mdn0&H|tW>n9qD1-q@HVP^>3UV6&0sEw(_(S=_W8fbON-Aoa6SQ>n42<9nRVM*T 
z3Mwi}YAPC<-`!9If!6_QHkwmso-ZsDQ{ELvU76t@(T)!K7OvKtg5d0Qd`&3+ScCD`R#jGU;n`1(D2CU z*zDZ=!s62M%IX?+XLoP^;1G9o{F^RtKL1D-`1g;L{TsU2K)NWYsi~-Gf73-l>Gzv( zHfowPawkq*G^M@g#V#!WoQ~sCLRNV*y@-MthV%a8UIs2v#aS`zZ_@ri*?*3(kbj7> zzYz8>bd3X-0m?rVDoSvbsHnh|q6P~MEzR#jOGo>MLieY_@Q1?qyDF}k?aQ&=F%~w@;za0%RphM?DiB%w%CE@)4I@UT814$4JvB7rRy(m{GasL`^q~yV ze5rvZTaJ0w&CXN(P4Wmr1=%%CGtog3t(~AGL^vVD_ubs?Cn#MlF=p18@^Cpk37#(f@CM`>A`%f z)h*{h!vsVik+rG?>*{=0NC6_w4?RmV5IOK}g<)!v=#v$^JL~r`r+;&S{d{Dg%Ip{+v!fEqvuZ$Isz8uWCAapy1xG^LfQ^LQga9!Y> zX|#_BWz+q1DhkcM+veQgyV8bHKbM!$Z9jakjgOiZHpVpZ7fx^QLa$XqagvbMMi0wS z0~7l73yRwnKN4NsinqMZy7a*>;@$iqI&o^Ezp5k&KhterF$KFzVeqYI+%!!bCCq6Y zG@qJs5yWZ=m$NxZn-)+_B^1%s+BQ{v62 zK=IPa!yV|AZPZK-l=VE_E?m0gWMHHl3b$wu!%6D5EPaqB1G9WXHCH{gUEq%EOO^|p zu@(M_4-GDhy1crewpSzzKkKu4CW*>0T6jU|%W>2p;*)&gchkbOjA9LE?7*tQJ-Mn< z>v1vu0QZvM=26o06lH@|8kG6beAidC#Oe1jWB_g0F@TuKCIhUMD-FY#{Lf@S?oP2- zwf{EhGfJsJOTdkj^zAB@hj^kYXSaV;?>l2yQh;BlRxMJN@y*PGs3hL@QKjL!+MOf6 zNVSp*u?VEnci%qAwX$PKWgYis#4NL)VW>sK#X*rcUnY*DYkGkXZbua%NuwmDU#Hp8 zvqWn%W97m~W3AwaoJTpckW*8uQ+zcclk}7qXf;_roa>BI8gDc*w6PLS82Qwe9`WT6 z*>f+VN94V_@A(z)Dwo`&;PusY@1MxIGFK$sE!*#8;M`WwxGaG~AS}OMk8^lWUja4d>n-O~tf?Z9rCnt-3#5^PUw%D!qu3APbmi6bl?Mlg- z!ir`-!#`KeyL64;NhcZ1=_#=H80TGCc67+D!IuPA*^q%|J#C>0yJ)%DJ9kfAoqAZ+ z2{L5eRj3D)68%7I4d-!-k)LyXX?$V6e;L`ye2$;l+OqMg6`NpucS8G9t_=4KOk9^e zZMUUoQ{y^QzgV;B;(>F4PX>C)zD@h|63$A(G~;~AxkRtH*r{V;=PD!E<5g|Jc8$xj zVM+;=>s*w%>)boYV^3Y72)WPo}jz8gpLIO~uMwCfh`#yp9N!HYHD`xIA7ef0Vc%qAlb zGZkRNf2XfVV@z^0Z~6%0Z>r0QSstU|FxC0*sI-dSx*QbbU;hPZ+(rh*U7T?HACu3~ zj+C4TBm+g(#81d}=(vksLj!KNXhJXUzOpi~$t!S)cGhm2T4ebv#2 zCGYT5L1vG2)1c6_nN6Vx9sYvX;Zu@t|D!PzaJ?vwR#fx9qnP4jEo%Hikg zKWCixTZ30g_Wb08j8+^;L1cAL)(oH7snvL{gqjyLzL+<^#KbSsmp`ykI`iK6Sqk+% zR0LMIcGFJH{fwHpyZ`d=ui-V!0@Cb*>u6XMg3%>kg{Un#zsFB3@G+Z0x?#w9BhMeRdW`l0EdENJ z{-t?WjO^)bPfZ3rs!p1da^7Bu3+l4Pd_`yqC8f@dzuKXWjih<{`a8Adue*=e-e3>Y zCdG%dYrKmkyW|urp8BkJjEN7I7Zk^t)OTsr^^Bo!W{SM!c+sjCgvBh& z*gxt?oh4!r?O0whkZ|ifqr_J-@Tj4?y?XPYKuABiRpSc}TB~@_2gYr!D_L{(L<_(? 
z-CypSHm_-=^&{a*6v|Hwmd6$&GpY6E$Au}#12yP(J zpVqP_-crw=^!+@%gjhbJ_Olv$P?&r+!CS^!D2T0j2RaRB0Cw+fK z&oPdoeZ|U&7r_I%95Bj)+lEP`=>gX6{#Dz<@ss|5Z zdlOl=^e4UgvAGyVUWq=pQje@7M6??jxUVaFbOhLAw> zb6f>A49%G&;!aYSsbnE>Dl4ZZ$7@h6^jz zA|H_S=a01Y8T?0vd}T@zT1oLbl~IoA&1DK--X1f>Jeh0MZq*NP7ftu4h&0f6@#IK# zdsl!AST9t*78ol_M!x870alNW@xzsm>>xf?3(ChF&k`?kiz*<#UiJ(shoxYHg9MZw z-40OVR>UKeD(mByceLF<@VyOR+gat`TqC3|kS>h%>r0mya-6+jdQj9br!@ITaxF4T2DBCOjRG>#T7SptdhNouGM#EdvD6NWk4g#Rhwetsiin`$=MZtM|Sn+-|v)5pl!W+ z+}DMis>8(6Xur+0#z4q`IXqnWMR2X2=WC0mw6csLGJu(WKk!0w?(S+Me&qn)b<9jq zwn*{1^3reXytZCPP<+o3UspwKh*52#PQ9mz>Uzf0;#{jbF?0`m#i>>VGcHJqGo(3C zdx=&}%(C?90Or;Q>jtEy{a856u<9u$rflN_Y2LAwa$xOnl1{mAzMv2}3LCLgik=BD zr*^bz)Qou&hhnR&IIkkDXj4o5>;#Jfq0!}#|3Jiwd*+3n`B+cUWchO0C2u<@XX|M3 z^jytM>M@kI*9VuRBvo)7S=TxI##9%shJR(M%K`j%RBwGnQymDo?{7;SJ9q@2!QG#*9asK7yX)w)D{`MjP@&eCg^q(_6wX1+X_B^s`wv1NPLDCRs%t0A z6;_(g2`ZRk7AI1y-4)O>%m*^bgQrwdmJFuL z-;|4ZD8+O=F0iK$P#ouBqYqx;D#Saz@mj$@>k#3x4Ht890S&q(leo04+ipqL7X`RH z&ol9$&zv*Rl(_dExE`dBY3-<6en}F<1-3y>?%_R{{Ub~(rV;tl3zlrnUR+m-IX-TE zK0W^BWLuc#4s);xTmfe}!)$V}kFBmKa<1mf&vL{R8)0u*zo0aa-S7T3W3^8g%6Ydx zQh?B<>VAuFH$K9S@t3w3PU_$S#xh1Yc~@`H7s4mK{~3K=cuBQqApKS>ztr1_LYDeZ z-AS6KB&ZI{I?ej`jdW4{?)U1d;hSmnTvx|EXqj@0nhy*2i1vnqFA9trYU)p_#oeSX zlIavQuVxPyibx~KWA~9SgP946L?lU&z~fRX-Z~|SWetn1$}8EOH|SS#x*ud(T3unp zC;uAdc!rU!uK=et+XHQGx?sW)jBZlE1=$x(r(*$GiM^bMCaAB{J1TNKD;J_IwV=MY*E(E<~4<_n8MV&9o!r^Cgtx{Ld}-E*up zJ;B=30+?>`w;$suXF8t&YRlrss_{f?BJEBlZSTL?m6nUNn_hW}(_B^jDTzyp$z8vg zt~i8Mx6z7HRW6|{*LP6|aRaVy!J?C`+6BH2n{FQXERSW-)T-w}ca7|6NiE}|mD?Wd zIS;osf3FpDc-PlMeA}}}vfP;<9Jc)s)HF8EGP5l*(CS;ke?hhpst3+f2Bfl-bW$VX zkRKLG?TL}k-(`nH6*U~CJ!nb|2O8eog&uBJO@bM6pIzCgXdki<~-L5v+cQSqBmEgAP{II&_ zuxJ40+vF%Vhh{Lj7gTM+{4OYpr{uLlIqkHc;m<*WX3^~at8pHbUz4J*TlW`ic^w;W z*lPZ0W1CBjH2P6$gZ}f1{Fmk0(@d6`Yr|s*f-q#XZ}MfT>zhjU%Z;I&v_2jyPxp~B z8C(zjZG~*qn!C(CTo&X#`z8&g*r!R%%X!19$c{=u=v@tfUdiYmWq?C^RFg#Gz3&P>ClaK z@QYL-uHNW*beWYeQp?(6xOi9XliHMUEKiqgXCqk9fp6 z2hp=iUo38m*3^Do(3yb}cfJOjPZ4;MfA;LtHl~(l^IuIC-BtUFZ>5b1wEQ?Z>XeqH 
zZCsRVyX7A5?k2F@`_-0@5D`VtZ(lsxx0Nc8b>!qX&~tlKwlyGq*dvtW%6ZM(f5hC- zZ#+$MwfcP*>_|#m4#Co;#FNS2{O~j-<@{b*oCo94<8$&CBx*Ind-t_F#~PF1(k^cE z{N`ykelKc{b7bF*?AB{ZRSoaC(KUGRMsW(b%O7Ev2WUj&vyhkNu*Pg{_g4C^7qzCX zK3#2`nq*gfSi!gVL0_>}Yp9wiJzN@}xGyNg6{qKWI*k8G=tEow`t!KYY2AtZelSeRY=5`&GifbDThm^r^E&iT`%=dt7!L7t+pdnzaQ+$d=ighe zKW&cWUDNF7v~z)Uq-ZM*l-}bbB>}$F9--amP=}Hly)S82 zV9gn{mU8{hW_sw;6iR=cdV?~MVDB!=Ns^z3fWU`X`|AvKA_$cM^h;xnQC!JyW zi%g$$B@c5QE;ZBEvpwH(*{M13#Kq%p1^+roMsUK_uzs!f8zhknE})Qu=s)Mb)_;) zIQQkW0srF?HQKxYHexa2;U3m?iaKRZKYVt0rXi4C@u-R}J5{E)n&;}{OjSe*?|Qp0 z893FX0vE+;xHtx4FSpm3gx?>yotHJ>p2+q7`Rm0pqYG3;R8+MKH)xIKA%MTg9A|73 zD4}VMhd141%yy;(^~N;bgfGd~p1-g2oyPjy5%*I2w9|2N{D~}&yyX5Zt^n(;h1l|!JN%269G-Lx z##%*w&1B7#CP8p~!)+A z1oN(RXUe>ubrb&KZ&e1la?(ndr{8+pFWt>E=ezut+oD{(xRy(3{6Q}9!$NO~<|Wk0 z25rS2GpVDJt+9PB-uZsHgU@jzwBNt6PMo3@eQx1M%ttwndr*xM48P#QW=9(23a=du z)xBFgS;B6@*4p->dR`cDYW|$sX{H;rd1E;7<^#*Ir99{Ck;{H_H+hF18acJDosb~p z;tj^}o8^SiV9y4hGRtNdwKfYuWnjw}89}1|rKwhUC){eQvPXSt$eA*_{S~SAMNrz1 zPME{v+e{F-6BMFcK+Jm!bQmanVIkVenBa=}ZF=0e?jRgGyXZIQvFYA-)ir@Tz!|%% zr@X_+#P;6t#51kLsH3^@gDPUGhpZAKBP?=?E748n$Mj_3zzn6v1M`RQWQGwYd5&b> zRl9i1{2a11BYKP-_ii@jWhKbxs|i1bc5GMsOTN86ORwK~?(0cUote3=ZljrKD$sxYw~D&rKI@YsztoCQ<1C^;zui6c#fTB z(oAl>Vj97OvW}S2L~U0ah-9$&W`I_hh+mZWC?EViqhg>y-d}wr0kqbx)_8kAyUWFpRS@`B_TL zG%^4gNKfHrK0c}~62-A%vRx3Ldop!JFJ7VvC~%e7e0|nsxUhtaiXn&RoN1-6QIZp( z9v&IMVm{`Aw{YQY3h}x_ z&^_NEcHO7Zye|#AJhmM=BX+(N>oc2>0RSA2U~ceF2#u!ZXM_Xp5x=RpY;|A=iQrjl=8I zk8x%#bIy8?B-=}6k|&!;AJ7q{Eu-#2#d(_2N}^;S5xU?XoP}3`vkzLOfm->XF9ko{ znCeXi;NQ+MYaBepoyj$JuS7BFx=7v$o2#ugvn-Q8dw}%It~WD`9~^G3ku|Be7kl-< zdGcNs0;5le7koDZX?gcNveMCO#lsy-268^WBmG32Scv$BV3pD*_WSwhlj4x>*`Er+ zbFN#zKecDT5^%ywCE#8r8QAV9PH;}vc?}0~<&avsT~h{U3l8l``sjXAH;h8&fpY;SsiJ<_ zOJTx09knym=+R=widlYM>3{)UE7#;!lgQ{?hMzNB9RW}5NBqjtqP9L5To)7fsovRB zYFShjP^`u5+0#}Hm?1*I7-{C-Uv}kR@6i7T$5d%*ph-a<_;-q(GQ7x7Dei-JfCts2 z5w-l)SNeww4ra~VlP|v>D&d^5+#M)J?b8H1jCh;JXmo43G{ IG+9XP-8YtjoQcE*NGKAE^(* zsA)f2qLt6;Y*=iR^AzyE+|u)uBf+A6Sku5hD68tt?&@y1u)k0HE$M=bj~A1n 
z=OrhCuQ6E`2lr|#?iv|jKsY0#wdwlZ3H%tu%{t6M)@-BvL3`zTkiB)^ zQcdF0)e^@YRIiK`uc)s11iz=*jl!{Cdqmkem9LO^GQhX#i=vqH4{f)j{nQWU*g0^4 z{BJ%*eJk8`KjXtqxpWE7|H$ntm3T~WIjJ8}u|xW$&D!e|ef^Hlb(>bc$%2=P#gQBH z1FqL6-#)liUy-tq&c*ULf>yPea}h#K1{{#5a0>G`2?4m*b8h0du+)4aE6PuTd4A^m7W;)dTt>Aw!4c*PmzJ{Yw5K}A7W~Q26q_Z z(e#+n^**1ae#Pim{nnIMx&qA1#XU0V`HsmE(P_DFToDOuGICf1DG)|`F)H9g!viRII zJY9~T*M)E6)q?von8<)Lir#1rN`Y3YI>-c*Q zXqP1~o+`d~oM~o9T{O=f_#>p=AM^wb!)QPhgMl{pBhc9<0}mP<@szE2OH)kVenw<= z{{gS|u+aOqi7V#84^Pm(de!j$?hP8~==>aP;{jfaAkwSNhqIi6g=XP7KPnNw4Z1aq zj~|`nkB+qQJimR`D?N%b2B3i(fzSpbZWZGKnuTm6HsL#}yyY6VsZg;QF{oY=#jQ9imwJaQG57IOjoryDz`|*!YzhQ7^*8xwzDx zfG=CdPvOIp!A|>+w}K7dCy$b@?IB;nP6jjLSPy1!Fl@y^Fg7J(I|mnC$V%_OY}kv; zetU=Uvo<sclhNsB>$4f&Go?EIH66#{sr(OrGAh(D8aT*&PZ2XZeH5x0q(-%0+y-Ud+`hB5 z7a4O??!5h*WIgR4k6ZUSyEE=9oKtnw{G9flFuhPmTwgk$AIDJoK)`3hegUP@zlD!C zsHnB+zUn?`SkhbaOeph0k5w5T`fHl{n2t=MhpJx4^@=7HKp{rJUdfSk(v9}h7c%fQ z5{mmMG3TvsEOSdFP_}xQG|%wn0)v}gNhyhw415<1&>HfLQeo`8!d(SoXr_ER$gJxm6kfx(VvwWUA3Fq@2Y*qW2MR(i0StsRvhGxN19#bY)BBH4<&uFH+MQL4I5JR^Qm|jr zN`lK0Sjj*Y>%A9X14z(a05M0tZ!AT?VFe*wHgyo=-DVKl=5f;jT4IBeZ0QnYj|GKM zB3(XiVkaS>1X>dPaIdW&;SBa*Pzj{i)>=Ap8!c8K4M)P zF*zcFc5lV!>@O(Xp1aw262K0 zbc@p%u}lbF1?NN@rW;U= z@|e?Ny%wbFTbnP}U~jL>L#;ux#y!=@CvU}(o=astv&(H)xnlh8MfU6A6vsfg^nt^i zr(u%nV9da4H|LD)3&bcB0tu=v=rS1|5=vP0#WzUVoaOz)|D3WMFlr*rkbM=d$Cu z%O`G5q2T#s;1(I!%2@N=8gxCPz2Ka5@)sqJf@Ez*@TSVu^?1}PhbgxPYP zt9;iWl9v&u`KzdCwWB^=|O$A?!oUQ`S_VOn%yb>R^3JS1+P7-M1pRrwz)q^Vp{XSl~RHqSqH=a01y3`N_ z+fghg(IHmNSq%&DFQDzRUDN}nx=)wW{*liA-ZOvNb^j68uyE~ZY<_(H;BOi;qKG?rP8U;JGuj| zREQXsU6Xn;Owt08|6Yh!$g>%Qm^DNQZwF_qdoec4XFH2fhDP8 zaG=pB3I?7s4j3B9K(91R1$Ty>bdt!C1i>z{-Xo43L+7A`*I@1eF>-d73|y&#FV4gt zu~)*5v7k|S5t%!SoHzM(WG6*p(n4I$-|5E}A2BG4A6o8k97}C%z_mI8oQZ>kFn;#m z7lJr$%P4f4Y#|pUMlvR0-)^pCH+!X9BYY8O^x8;kq)QpPT#PaQsu2k=-JAUTUej`D z9qLs+$fpDaVjP?we|mkI zfpDiN_*ZDlyhTKt$~ynr@VZNIwF}?L`>_uVQZI9jnHt-Oq$e%S%b80K#;QLV!XF8x zGcp>NN)8qUN!NTpPq=a4{?%ZRZ?kR(X{Z{g#OJZn4CW@FnW6%Vvrdx&IE(&360$R6 zgtn8!iF3TnTrboe6{Y!gS?~{yoPX>UB 
zlQceP?o09uZ<2mG-7RMid4G58Da7&UuULYBC%y$;FhSb+{GvR6sO2vuBUUySOUo&v z=7*g|QFPZ5uarKvU3?)yrF;RFDPXyxWdUI`ZU|Vd&z4ht^d$hV{_)4$9vXt9wZhuv z&D;^c1BNSgb0pj@8;1Lj?F<@EIYe(u91YiCS*k!i-mAB z(M_ljL?JBs)kC@Eu~t*0Z=Ej8ZD+V6^*Xyd0v^zM{GbVL4DKNVmHIDGPGlhBf7}zUK^%9d{3BC;6`TI5g5I*U?i)6|X7(|c&H67#E8y~aM3sG zviE@%+~`DFPoBuKvm5?J7{wNQ!Z)5$N&8j2#6U@(I~hpj{JhpQc56jkv=mpIkty_7 zd$5U%ScGsx(-G)<6(tDL5Q3=sa3t5ytzYf@-K6C9mnrEiy zI`|1t=lNmDqAcMf%5j3Zh#`_89PET;664?UFjxvK!59k zNzkDwU;bMW#sA8=KRGo2A28P6(M7QS4PBs$gOILla+4aez!AHwOuqzaF(DX`fxGX} z#8ZR2DIw4W<6gvi2ZVT{>2)|N7J7NnABnrLPh`mlr>0Vy=kGyUdWN72YzQ)7mjXQ& zTgReF^k5uFg>VX2h1qUvI){s&)zAvOS%tm6S#t9ZFWqfZVb6D*gI6cob~J`Je)Q^l zF*Zwd8OFqDnKxTNUY>23pY>~DjhH^+V}oKPsP&P7&n2=*(E%CZU59r;ZJ8(F-+t&_ zdj8B)uJ4P~46)SzR-&<_)=z@6Qg4>JmzzP=$DKwr+5{(I5(3wnfrLSseHs}rM;qdt z^ZZ+eOxT={Zwd3z)X&rOU0`$QIB>Fe^ugl(u%&M!b zi!ovuUJy6dwS|!+=D(xKs4>K;w>oNCJy+`iayx} zubFKYSf}@#Ap5uEY4hs1sjE5Dio8~WygLkpT=te~-JtJS$AduJMQlr_Nk2)<|EZv< zVrENwpMi8XmTxv}))F?5hmb+OA)ie^X5s*>WwySiShJJb4&4W+d^;skau zOfbb#K}>?n&JJR^5?to!-F-ErY^0*UaV<=JS1e@+plRq&G;f-v2`aXn?RZWGX!vTd zOQF!V=~FoM^_KGKD`O4Hre9?oDrpa+d2OzodAS-+JJtBQ6;Y1Z7@vg*UBrsCPMwTG z+f>!5ByXjt2wmZ(e%++1iI%=9I%Ds&AqO)X`U2;)yV_Aw+FkW-c%YL#E^kv@-4vzV zv}0)(u@tefk0x+%AG{Q7YJULcx6ezxBxVPn!l4gLaiH$2%Gfw`f?ceI`wzF?pp>4X zhsggdKMTZ0P8fnO&7~ZME=b4sB3a>Qa6z*kFSUhnIa$sj(`{2ySK=)icO5(`&p(vU zv6s_rj?`We)UH}dd6HanP;q%)g>VU2a((AS>v z8l5kG-oA4Cr25Rz^4DVi3<#;)BApJ*jUUajGU4ve8>e@gW43Z~oD=R?D`@auiPs*@ zef3`br;42Fna?MnR@z)}7Tg_-b-#H(iO(rvWvFfvJ%+qzAeyZ5iZX{$MAe+ZljE*^ zmH!k9PaE9TB#LG3jI5HSvMHUMDAinb^YZd=-%;yUdCd_O{bk`*ChJ98FxKh&=3a%f z;=YStaD;tNp`mwg?wIqaN_I7RSDS;FrHEp|sT>&LIPc})vSm3}ti@x`&qdhX9^-jX z9b%KN6MV%h>|8;X`L{Wg9YJN@g9evw<*F%LwwhMeo_nF&ODMySrvFOf^YV=EevI|i zSCzW_LuVXYU>tCjh*_@k*=Sz4m}^3(N4_vmPR&l7X|}_e-e%El<}Znljg22Sv5`J?Ju*1Q9% zt|?H)+nVf^x+yN3R73YVphRj5%jf{i6liSP@>TBTe31V|2Kwnxdu(=BpbO5><3kAX zoPM6{A-MRlBvaX5UC#A%r+@Za8h?5N<8R>;|K)1}s1mRQ_DO&KHszt-%Fo^Tce3@O ztWwj?0)RYd9&zqihWoC6kZK)sf9IFwlI>#SH1|p-)l+X#U}j^fX+FE&^Y+?XD>bvK 
zY3~R~N?&{XrP7UhhybF+h78=04g>{;AB3PQOAsp{J+nR}F>FC`QFy+nJ;cy?Sjr@r z0^1=2gE5c{(1QSQ^Q2SL$Io4+aZ+FoVNMb;D-GSF97bU*y+AL^a%8}iRDPe(w2eDM zD!EG_kpUcSCj^rWM&lUmK&$=^xbN7&^a{N*8OY>n1oIm8@g(7P}w;u&fV=fbKH~&s~U|8dzv!Lrx_?LmCkWTf3 z`HH{HNl>gVWSvX4dkY*WWUYf0y$y!g;2@>+9z>l#jfDe+NzOQ?`16cNZ9D?ar`zSh zFh9acGGK8b7=a*A&Bx=65Nm%K447hg0pi_${mY!d>`nEL+oN~zsF{f4^3Yv8m;m{Y zmk9dj6@vbcTb~Da)~)Y^ZB}Rac_vKUNff>m;EFiE$GhfVn8AMkG*X2Fg;X$vi-#8; z-OZg&NhY$s23hGpSPR&nW+;^m{O_Uz)c^Aq{eR4+iCtql*f+%#o=DiI^3JV9TKg1DE+`LRLu5MZ1eX>Z+9Ub>qv-x z8c+0m^sD!$&pVNI4{p*kDQG)lN9GZ9ACw4PNT&#Ot;qfA2vB5j&UcdZK7+{(7tr5H zSVbLiBeAIOd_JbSfwXV`UUuN$UgMmf7PNkV3-J8ZbN+&-r>jpb*`WqWl! zklBB`6LusH%AtGG3Sd`rC2RE;D^<6=F3+IDuhQc6ZCAmll~FwDTs-ckHZLe;!@($V zDCpDRKJMq?b54T<5a~>$7Il=oG99yA# zCx@XJXDE2WcDY41?f}818HknrI&^%k4xYiH^wKBNUC2DagU@O7@MGK!<}c0r$IrL- zOX1rpIogIv3+eH}YwMW%u?+H-ruvD9mQ)OyNbQRz@Yy1;=3V{;U(@_TbAS1|iQE{B zL6@x#ZHNz?O;(Px^88xlDlpd+j+Dg}fDuO_2pK4U4cdymRVxd;TlXyGvW!ceRcod4 z4;9*O|~9EU~WSr ztXX4xWm(}$813?TEtM;2R~iSi67D|F4y$hHm0+2=;reXHl}T+yH)tC#8$1YO2GL1W zzmfHo2Ps?%jBX}|jbk-WZnD+-xjwYi7sx3(Q!8*%2RObT1x7~WGk6qH&tP;7`dB~f z{^h~*4QkXkyPY4cyfzd|R|PCyg;DcTEbIwY9f7)+wF(E$^Of~BD2^92o7;cUZYFVJ z<2Rn;9dVWisB)~>VYsG7;R@qyZRx!?ux_R%uR6WEmRzN2^MdBFD>1oQ|L&v&tGJY|JfkknT*k$FXdC6*VR)#x{*nR-vMG+KKV zu36LPb-N>tYP`2`;(`%*X19_wdh#cby)F(VzED<8SKqhX`hy1Cy)tYg_CIw&6Sxy$ z*HpEZWPaXt-b>U|ucRS_?II-Z++dI~>{1mULsG!5($G-ePlzZ1=osOL@W@3r}KO&Y4 za5vnOCTL4wuqRizx&8e7xKOVCj!_0_7IO5ORYysWEuYZ;)cuWZqInhO0d+=GjCPr% z$VRxmiq|>pr-~xQH)x%@oVAMSxKQLmStxLqZh!$8MGE3TOI;K+n>NJq5v#W58|L#U zLBv7^iuj@uO_YkNC?%*pWzqN`Pwd^}$SG9{UO8s)MO;u=MYn@VA~L`dtOCD?!{d$G zF{rs)8LY17;Wu8}@@w#HdC1D@HiS=F8{B%iIY8AQ`=e8^cyoHhejXAVtVE!$!m_p* zWY=R^UVnVVC%CzBRfIeC-f8m>2|DI}7W^U4=y|{Pc)(BL*ydQnA#B0&{+;Hi%DQS? 
zLTP)^2)HxkrzB?myLxJMCX-30^dyK0+SItvgQqxq#0=up^w=X6zu?mY`@YHDh-BeL zUx%AZ=RSO8eHY`Hg)HjvX3`dZF`wfyy0e1BSX+;6+j#7#Ip2KakbztZGO~kVO-Pz> zP(Iq9O#=O1mjS76k|gM`M}R6}!=Vqg=jMqSCd7Jzma^X4j9H^!$1+V$#LvF(z|BwU zn&$KklGh7gn{G)fND)58Yt-`Xsz^dB-?3do#XO>Q35UcWI$+F+x=|*lR#L`XAFH+G zrkwoTVH3I&(c!-e>!tmfkpf28pb=ACwZwOt=Ez5UK~)0!&$29JoY`YfwR}k+?1{`5wd_r$EJ+Q*Js^5=| z*bqNXUr#qVVRjG)lanPLMb%cuK0&m0;Da&;a&gj1R+;J-xZ>Iyid9Y6V# z&$bljl>H1!{&(<5JyA+sK$;U55QH}*4GVGO0zubgOgC8A-{l5oIu!2qaRr76UmLUa zoVa5(x(0n{9i##z-HN8;Fa4Q zw%;Y($~PczHM2&n?cuW~4LCn8$FFFFpn*+~p5u?@;cAL%*-=ZQz8SJXK?W>e$IeTp zE9eN7>jtERlN1T3v2l&EVFdAc80)%v8+GLQ^GZf8?-D8>p!@`4Iz6y=1q(t$ZAmD+ z0h|(3U3@ld4*F7Cu)m7Omvy7a&0UGdK22(hHA$Fj;)Of!dCHiW1RbGq-2jvLU^Rju z?v8S*85+J9Yo>SXCu!>v}tQ$NFT%+ zCre)xc_EnG1ba5_JBi%2IsKiDg?`>c^!c6GjJ6D5O?iyC81YhZ{iWu)pLG-E6C!8R z%{ae!XokLMJ1@G7yQ(EYRh`{{Y(ofuFUaXP4?$1Dug?qeU&kyJ@b%ai*-q)bx+4(?=8!I^xxJ`obE{cR`=isx0jK}LQA4z3j0-lS5p!F z#Zzq%#TUT)ng7Gydq*|-ZtJ2b0#XF2(xcLpDk@S0A|hSHMi-*ega}BJ77_sgsgW*S zrAZAv^w5#sL3$VI2_*zlJl}7xwa;pM?X~Y1_l$eS9qYdklCQj(^Zn+0<};s3y*?%J zon(yLUBLKPNJ+Fc%HWDav(}TBv_zBKJbXFqz4@!JU<0Pp&MHctdZ;LDtX>RzNN`(3 z(dD)Y__j^yBZIRdZIMRFdz@v_{5DK(PLrazqq}V8B{xsa4nfXR`XLp5^Bn_Ix0mTc zDp=S|y+WGO1uL)zQ)d$N_%w@r8B;RmzA>%TE(C+GaxXRR{G}f&OrjI8gqOpO3|_YL zZ6}Qx9(n36ZxaF}?)tYXl{?-3&VB5#J`%5UMa`m|$w)jwFs@)C{wvz9WACs11lA-Q2S@CCL}#XVp?TIn-^7H zBX@bELO}Y*#o4V%v2UM37AbZpJ=xYV$GgU8;7WuJ8pB9)CBR|gV7Er4W5#6|p{LT~ zU2UqCT{__8x4f9`-39%NS7Td!3jKp!%hZ}kmNoe+glGNF>QuWOM;esArDAzprY7qb$*mL_DV9*7Ymgoi07v1{B@` z5KbA?gjs39f!5?o%;b+?=D|{h-M!M{mLq%()-S$3@jB4fe($l@(=~w_mCe?Um0A(U>AoeSeoa?2(sGYPUJi^|IpU z4?v=ye7^j5kSOp!TCq@CcY!;Ik@}48oWXu~DEn`!2ROv9%SlV8(wIqqynjNUJ;>~? zNBze?01n*cdc<+^Z>kwEx%vR{lbSM5KKBPhBe4H}7Niz~JLgofAuRy$cSH3y?q@C+ z^7FcEAv&=<_$phuxb$5KS&>gvRKl@$K>fKS&ZS=v%7i7&uvA<_n-Xo(UX;#_`GE?! 
z0%{$k@wiq$xuls38%Vf3u@BAF100_QNJ9fYniA%l6O2Jvf3L@D1_|ct%Pt_iW$@BH znZ1ulUzZH&Tdj_8A6d}wuTz>-P5e|>PC>iiwA?71HSuq>UUqfF8-!GMljbJ!w30rT*T3Q67m^7rF zUey&L7Fd-Xbu`)m)+fZomA|8M0YyiM`N=nM9ymy@O3gVYbi$h%<=3|}wIiOA&pC&i zyEx+wq?FU@lp8W_=k=>QR7#Dmd}c^U;4)(9vN`jRPVohs_-U=;!5fd-bL_D*Dc_5} z=u8WLFa7LRS8T$xKc9^G3(feyrU>VtXTjhF_E`iMlH3b7=xdySrhxR>HmFe78Sk{0ceo>7gs%O7*)M}8*OozBxaE0T zc>9#(EzEf#?0c|d#ZZN!dzv0md~RcO?nf6mn(KI$Gx3F|=f&-QZ*pjPS;fc#*Z9NE zGk0Rxgr7dju%Uv-TMlRsAFNv^cMLOnix9Utbok`qy5GcbpIKTmSU2a(k!{;Ti*)`7 zYlE|Txg_%;W%VyJJcaIx2h$AA&}D`bm#0hWL}7EFz}u z+Ehib<7s(Nt`kb7KsI?S&duUp$+}B-0FMP@&ZO(|3p!5~7f5NGINDO?oHQ*HManK*b*{P>Bec@-2O*X|QWy^B3g_XNt~zl~`Dfi?eo#j!_i*L6LOGps$xAs>5gr z6S%sjgmf+}9F3*y%|8u!IHa+~HI?MAUzFWa$d-`I9d8xlC7I8`^xU5)ry8DRahB_6 z_uQk`RPU9qv?#kz$KQ5|`jNL=F@5fVT*yOo*eWYpypQ)gHS@x?iy$8=QYAS}l>;U~ z=pt&uhXiGCkoN~A5yb} zzTVSuW^$>h(~Hx1JgrHqO|8Jsg#RXev*^~&g2l{Kvt)O$tWaCTBKLV?mop11D|Q^G z2;jxzdd39{bv%@iMAUrs*a5+Kk)ThTXz$^X)0JnRZ1+Jp)+J`+6n_}6HuI>hIR6wNHbLQ%2{{hzWK>(1Tz%e!45#rg$y_xenWEQ|dMdXhnc+WYG1*jb zc9`3EJqN(^NkHPP+RWNixd{48$WtbhUN=lel{u2}v8P)lk_y}7N13V~tMZI~h$}l! 
z*KO|zySm`a=%v0OA5obwq3nyj&_WTzNlvAr8$s!FbM(&2<0Z9@60alN3E3}&L>Ph; zrlVMP#`df&CI{;niN>kBY_;#<$<)E3j3VhT-;wo(dnPNOZzY(;4c$m+ZFGF5&-^VJ# zs>l>u{9#`iT@)Xhb=$2&)7N3XIx#r7ZQu4<-+E)d!B12^{YTjC$_dN zZ`x`_##|!c<}ixAZcZ>K!Ma%IIY`1UQV1yhrHjXlaFqPj+`9QL(ACXb%}(@QE&LR@ z1E(YuHw+~3SwjNm;74`XU@1Bpo;sk*LtrV zD2{Cl57aN2KJ8rnq^RyYHJ8V~auPUi;TI-6-4ONQwjJjew}mhK1~;YKN`!;?tyx}} z!0D6C8=M?}Qz@akseRTe{Uq>uYa@3%ovs_!TB%5+sP@|=80J1mi}9VE-dC(nx!G>* z_2W6#ay!OpYVKsytrI5W%>U#^%X6#C<)>L59AXNmofCE$t_i(HswZO{L6dTP>MqOz z5Meu>1)Sc}Rbg#uF`|OnMTzGA8-$4zV;=EOzq(b75xYJa0v|dD*rS3x>Vas!zIE|I zwrV7PK!o1*V+;pbX`%=mj-Bb29;eL$w zfm{bbuZ)@D5w7;@Otlg^K-rDF_TV0bZ)tlid>l($B6l7cXTZneQ|U8-MX~>OPCwx%BRqM)iDM zdU||IyQ8OI1Jg&Xj=O1@-*!`t3)XL)KKEdI#*xi@O|fY}v@t}rP28QIeNKLN3}oDH z@=Y`1)kw-}oQxB6vloRJp`2Q^6sh#~xR&|e^yzt@Wx2zNt*g%b;EKiRcg?zh{ zlF_5IYT{qP%XT zg8s491b9(qV=4VUIXP1q{`C-q%`4L;$aN_##$3yE+R3lBthyIgUsO}UpCNQANkfQc zvxStq-2Lm|T5s@b@B}GoeB(FK`6hOq{OmoOjQ+(JT02z<~G*5ea{Vc(G1ynLWA=mA?emC%d%vj~LkL6o58$=0h0lf;O_=u4Yz#Yg6 zHDKFhr<{P6A<3X0W(dS>0I>wZ`K3)E%Mq!oU;bb z*_sA1)dmZerI~inSGRO^>x$#Q%@!XHr4lgAzPr&ooPsU!}DG4hSqNEaFjK|}=U7EbH~3FwHV2l*GmSn{Xl zXXn>@e6uyYMl5d1cH3xu(A&RyN7MVvLt6Q5d7>dn38ymAqC&Tj(bk|!eA-AzTl^u% zXuSzn`xi|&Kluj|<3+!oLs$1I^9D1Mj^4jts~|IN!4a;|5Q_^Hr+ z^TR!R`{ys!-ms>jd$VG0pPJY~fnf5ux7j!}-eK%!0~bX?hGku8HofTE)bye|s^S;w zj3soL6{PU)9j>1F3iZw`)NB~0m}R+na=4-dPsvpi>6;yE{jeCo9ammg_a;A|UU=fW zB|EthVJE}yHLr#h3px~dxwqRkv7NInAEMC^@}NFVo_*CqkL})OkeF+X?11jRK`5pa z-Bvdrn85~<&4!ubJfl2h*k4_rG_i+^)}o0F{fXyiuRLLrEvpD%r`4I_QcqU|-Pp`v z_dv3fjL5Od=TI;i9JsYXt7T5)R-%1@{kmh0^573|8c8SCNjrmBuZv7q1%6QPwjua<@mpTzE$E|Ur)=<~$bQUH$1l@fHh$+fq6%l2So_|jE0b$5b^{@TQovW# z+olq7Vy7dOH{`2M3}-tsvH{g`VOO*8>8yx$M(vm{&593O6;S=VJoXTNk{nS9W=l?3 zAb^+1P_uE^lY))px1~8|!YB7mY%}w+H}0bapUYf4yr&zY*B<^ zSI~gmDP0IM_<#;OqslcbHP`yAUR#Ry#KIOA*E?+fDZ=GWVc~rTDg_>@lO7 zQ=y=jx6L~fGNcn>Rm%I>5X#Wbt2RO$C$d8ll z>bf#$k7s4l!pplgVRv}c{cBtmAG4@KqJR*b&R?2xn=+WeZ~&px$R`N3fq0Zvr5Z00 zTz6>nzSYa-SV&EE!Cp$|$zJ<(r{Bv^jE9qG`t{Y6U5LA{0JR2MPamvb6+ApXcd7Dn 
zz!}z#u-oGK@946r9R8X^XWR;Oy%)u|AGN3`Njz`wC})J#r+T{@&v5P`w_#YZ))aPy z$KZ

1zLh1hV}38-y_}9!bfB%M3e8KaSI{=ZSlZin`3+Bi;1lh5Gp#L=W|gSWMSZ z`JMA|4oeu;d)a+AReE_^XkxB{WbBgLm4ELAV}qe5n=g@RoT~<5PY!eaTzhS{!lM~b z7_n#ac;;SGourmq*8mEkXstA$UNv)`)nMHU_O zxCyJ3wQY9___)jU{PNE#tKN(Iz?4KCvC}!;psqC{EhD*=a8`Njhdg}evX&Udhm{P? z{2===Q}B;Hg!g#H4AVCtXI_`^b= zljB2mwU8f=25N`IEzx=dYdXr+d&K zJ68-vWA(C=E=H%k5iqz5?R6H`-nmy=!ib6S?sL}jN>p;wTnT7nIIw)8o02W=Fq3V( z=)++XT%>bb6m^7GT#Zip@VZ1M)!OZ#&)vtdE&5q2L&C#;dFFaMKm)$qsvK=@a4ofp z#1!NEbZDmRZ18q|jLft2!*!!d?dmK)+80+|UR)oEJTjyB8GHrMD>nNRXxpWT@0#+? z%E0shWOpF}{UnAme*2S^M`#EA+4k!J&5 z^rz+0rY=IembdpSD4fUAGMBytw?FRKLrY8^&Yn!Gz`9 z)%rOz2{*F0)YxAkWRzQy_W-v#UrZF-*1nrHM*pNCv$1#aFipEJFLyH0Y5n}WCudS8 zX2B)vk&P;f$pVQ&di67t-<8Dpee*uaDiSvkwu|!Ioh^sAVXG{gO;uu1a^fL&D zy(q>6k%Wr8k`?Z4XW35}_pq~lRd;09=7|1&4$V_6wqVzmN0EU^4pJ`9{K!d@M)CKL z57k6l?PYsa+petHdLI@c!V!DAFdJb00|i*stTN*@^i~h>%?)AlN; z%u&k6PFs&_#}QXuyC1!YtuQ=%phTV^4pgu&%Z*Tm`mk5lqr*M0NBDU>^3$B{mjCK8 z!ju1SQwPhqtTEBy2X4PGp=9mz0(ENadb|zA`>n5%DV~RYZ!qPr$TFPd^ z_P8L9Y_N`xuP^pn#Az){&9}~nf|yD4v`L&RaCuBjzj;V}bWuFBym%CMhPq|l67rbJ z;x|=o1|W&Bf1?wE9Ry*44C@R)LGWZ@rlHIrN)XR0B1!7K`>=slCy9fBb{l}qNt}pM?EBm+I^EZyY)i?$_8hTgW|?1eUOeFp>{X1_71t3h9L9T+ z4#C--+|2tZ;QXCZS{;ym2BaK7{)hi7+tIsO4SDEfoy z7$q)aKE=&j^f=LP$iHiX1C;Q`!>`2zAa2R*UC)n}5?`8&9LfnJk zcl;DlqXs_$@t9!*G$S}s@KQpz3aD*Lc+-^P4L4KBLn` zhRHiZm}>3P=}*KH1}btXVH)S$ia6cKNkA@P!sV;d7B_FzcmW%SsdC|6bq2f0;qLBt z>9nq1oCN3v5{Ej4fpKYo#Q`D!W5-VDI|nOS?+ka5({AEKDwve}CP1%DAaEBVOu-X8EUY(?L>3N`EwAxv+2{1GPdx0U3~OHH1*$)nQEv#6;M4g(_tfc zB*6;niUY zs>&D2*l#MjZHmx(f=o7K`dL@b#*}Z>A-W;RFr^2v96B}@jZ{!j4um|JSouvwVxu!6 zK;*Y(5nl+r=0C@`D&J_ucB~w z+fQ;1VHaMlI?Sw`A*bB?o2sVHbN#{C<;D#C+K~J4&+EKwOoU&=>*$`3(E9wEmn26{ zgVABx29}Icu=DuDA&&tX%HRni)Ork6HbR7GAe6RiHivyYb-FN8ahSQn7zEr0ROhq+Hw)}t$8TJFT&AK$!~(Q~JJyOY5W zI@)-N^20|u97XVK#?UKE1z(FL`XsJ~z?y{xvm#e5F^@p~ord3fN@eJ!I!#*+$ zQH26hl~W|CI-KEr(=O06P4dAbGh6NRXUw`@4Q-T(7q)oWPHpMz8FHkRU*El?{*Z2+ z#w)R%#sB0&JD3lqgljDLHaj^(k{=fxfBgc@`l;9^86=!FJ(Ti_KQib6!XS-gL3|Ch zJk@{|3np1$k!@9tk~qRxqCWPpp)YN0Aw_?BRfu~3-->8X)nzuKcl10F0(WW4Bt1kymKmuDeKI|Gy`VJ)JF?~ 
z)>3-*GK2~WXTyP89A)Jkja$+C=3SEb*d{yw#p=*x_MhU>T?@e^hveimguIkNI7sOS(9 zFn6;ee2FtfZ=q_|c5hSO;RC>Llch(S1r%X=Q>e5;&oX$Ou;5|juSFCEgQa;Ehen=J zWuB!0YKN=2P#?i;;U`^;2A~wC(3?y{-($os=8?||Q?CO;>wlSA1+?DH--dJ9Z>N2B zz*%q2QSGg4C@nSiqGB+wq4Ma zb6t{JQOLr?XM`map7?TAKP}a1^38aNqbdA@RNjX0|5# zAvN6(E5)P7baJhIHnm)m!`w$PTCg?$s20rAZrsz2!CbjjvOB3!YrkJVGHZSK@tliDBYi9?FUI5`1&}liSEtW7uGYknbY-me=C&8bmDCTR!p13ZCh@ibQjTg!zli z`*ok#ubtBBWfAD})5`L?yGLv_liEgyY*%LA`o^VMEvxJYe@x9x?t`l>Re5UBFu0{b z?D`%Rpt({0hjHsDQksuSl&*rQRXM+UE}e9hPGdJd!umHAt+!L%A~87Xal(mTy~DhZ zM8G5D=N{e#tBO6+VVk%whj1SFQ7Z2diqlJUX?&nBvE}hp!fU&2Nt|`m4~OLnXBOdv zBjNRJXoa^EA1qp)*qPSYQB&<4QBrw*pindSrc**p;6`mn5x~4Pck)V#I^D=y-r6sv zvuT@EPZ@m)(4N-(UN%Q;i;m64rh8w6%k|ARozYc4_lcWoq7dryFzl!M-lvOi4Vx7@ z(UmT-_NN^S7_8n*UGwhulQ?2RUk}oKly;!nDKxOit@bp&22s3rgR{0y()XGC9TPjT zsoEP^y`_$t%(=U6BiNf{nY+G`mJ;`vt?d;Id4vSuISo#`EiZ)k?&G&FHp%wGLNVv> zY);kOaJtyK=+Kh7R$zRLu=Gz~+sl2-5MDcf0_X&&!fVR>!Ea5fpQKgyvlN-%I-9V# zH}H<-zN?S1cQN6m`K{+h+QdEDv*2y!%y~JV`IbF|U8nGe?VmKA{8mx&?gOR8AsH{o zapHNu`owXLU@Q9lhC>U4u=i4n#urbq-8ZsPE^~Rxl6&3?w_RCA1rOH~m3TY2E?Ayg z<4;e+&DDIk886>*cE2>6ip^D1TVV6Gs-14uvypu!%!hAdKa+im^nP}aU5>h;@b-I4 zOSRaKRQJQ;Vvd>+-B`Ej%l=0f1YZ)9#~ZM5__EKn>Em;!@61KzXo7YkQYnVT6cF~) zh?q&{sG!$iNwvpidGa+dF9GI!e$kG>NviKwBlo@t%8!j?imQ&$stPj{sj7MlK;kDS z>0p5ZodPXcukRhQVzVbW!^2Pajic*09&<-fH`9%r82e?tI&b-)baL8zx9-E7p`pT- ztJ?bg36dms-Yu#T25PZ(&4{>ocm-MYOYG z&||UV)aTeVpp5bfpG)|TA?Yi4K^}rUEioir5ILcaS0aQ+O*0IXK&UeSR=GP+y?^Ob z2{hMrlA^#A)`q8kvw*JOSp?n;vUTn?@W&c1>ITrVI|0!EZ%G20!6aq`=_966RuB*r zM0{UClq%m<-KmpVX0khZ!l#bAstnyj5?_D_##S1jA6`!T0Kq37e0AjQYp7*MnkQ!av0rLULBCqRux)fcS?Pc)hC_o*W7&0Joh3p#u`^ZxuQwt*+$&Mj}5qR@5>&TNTusMR#;Bf$)R4*HdRu_QGW$!L!IB%R4xD+9Z7XNFKSj@<}Q7(l}W}mlrbvzY}<)mnxcdYlD+I{ew3ge0hQczoN z9$(f6W9h*yb!Os5dZ1(S6JU|8C?km^VhH7Q6z|eV=Y?B)(>kK5Mo+DU=})zuR#Dl^ z94r(C#COf&xYepGmt2&k^63`N0 zg6n$frGLRs`_KHE#9Qd4< zr_L}+M4dm8>((`M-Cx=wrb`+x>8YYqt&JfXRGj~q7hfW*Wo}HZwHC~RHOdnx+qsn< z0jFwyXkEYY-1pS8AK!J|#m?@GkRq>iiAOuvV0Xh0xNrvVD^W~YEt8dwN*itWpH@fA 
zv<>ZwpOr$tb%{=1YNUhROj$T;*=AYnoeJ1MWp-ZYtN74Vc3f7UwAUm1%(-(;MdAmb zJxK_o1urs~csV9z!w|BMD(x5x?&L`BDvkEdi0CQX}hv!@|mX^^-tIEy4cQ0?PN-%3;I2 zQSa3?(rt$P3Ws877rAH^^PmbK#;(nWfoP>wFT*qle#_OB)+Gki^Icx}KBwW7k`;k3 zt=}RdJT4WC*FOSsPDacTU~-yL9$;(~NduUfe8U7VPAE{>Gjb|gWC`U>NRtcm0voAc zdB~IbjOm(F-?n8glb2OzX}@cdE%#M%1&5DS733ej5PPY#xuOQA6X_3YyJWG3tp>D>V!WqKG02$&ST$>5`K{~*}`q1-@Fazwycm|pg?AM_t) zS9&gk#Ge6qaKT3jA6%#0{4Wi8=E#?ZFcsK3DO4QfJ9&$Lw z1`&}_HnG)=6YGrqQ4i)MJ<8Gd$XMu;? zvX2Oc0~MBLEmd9^YX<(IL11nCvPfh{piR-O6c6R`ae1}OSL0r(lZ|a}O|q`#DM8>2fDvaN2EfYRG~RieGGS>}`j z!ww(a#0b5`$N$_lRcX zPpUWOU{|pQAC%b_Giba(ODw=6gU?{8Pog#X^@z@w88v12{gP+Tt|vf~ViV$M_-Z*f zEgTKG7)0@O5%Xr%xZrQmvzCC3|PgK|g~Q;ovPgA1r^i|o-J7lnwkfuEX?)L)K~ zK4dO{c&RMe$O|9Bu&aV$Ai@YvtL5ZEuod<(A?)1U?oLCZZzm_Y-ep>;G}5!K+OHYD zv>e;#OtL2E;3it;D^zl~rLfFBS~$6}wGg^sxdG-)V>^R=sl}w+U|;hud}rRQ&>Ag7 zbt?NUYEZ!iVSa=cIOAmTlaK49j#nm4t3e2Bwj;S zN#;2@X!gE%+f3$K7bIo@IG$dYt1yo$l}3pw;Hb&%>kQs zv^qQ8^GIGnmdCV+mlRRbRe~Q5*(wD@3!zFkdFSG6qgF4+r^^Q~NglhYOWu!Z7q{Ma zw|NoU_hy~}kOcz=AZQNjQ0y=;mT0yq|IshOQ5S!*|D@p-G0=5mj^_ z6~Kyb0|HerpEWtUsF4>Yj0w@ft>y%NU2l0>OT2b@NNTwunD29LxBX=uHpM$MI!p|y ztT>rqgbjF#xeqS4{+2>BRIic?Gr-<7Gc1NZ5-m_xDk`eeN(s%4=(;h}y}I(TYE*#H z9X082g?e#-okUCU&xdA|i;%O7f@OkYFW~5*?-;1FiZXU+!M?;nAFC)RnxKP4cBnFw zzfx|(bjLHREZ95QUp=;$;%NT);)QjI{?AVTRnl@qY;>QW>0hY1e{>3S?k8@`yV0QD zZRb?iCU1ivrVYyg+Y- z6IE)roEbFL6v6AB5Ry-0NKnB@&ovILM+z)|>dG|sA;kR=A%t`{b;@wf%<#qT?O$sr zITSqEpe>xQv!Vj}3SWFojZ{Z=hMlO=N8gO(x14mCbC~R)Ho4!!p7%?ej^`&v=K}r? 
zIc+dTSp)<=IPwJeIt9g5>_%R84E7`Do6*iQsRO^E6>t3gX}J0yZnaL9+q*lJUc*NUbrOm zbHyO38kou9m$Q&(HBYM!Nqwnsxe}INWuYiCAg8iZF>h?(NnVv1-N{|taW>#%dgky1 zRtoh->0>mJ9T)+r_sQ+cQp554Chwx-@1t{7B>aaitL1CPUjP0DeI5=;z&Pz9Z07xw zBO;~Nf*(!Rh;7RY9qURcz`q0BXKZYrAg9HT4)~ zvK~oCxg_dD&WtyMChvNykgeQsoh=0ngyAWZcY7o}59o&Yek#jDi4FZbzmx)B>suMT zWvnPV486`$5X5;DkKD?{UeK>ICxVzHpL~%lmfk6&Kk2j3gRu}E7nXKqYMSm>DDTDm zSXq{dZHIvbtvPQLI`UhxWg^Rw()H+u-&73@8G+#{^_W)J2n+p_xV*lbXb&YsNFuK0 zSYnxl-aPziH9EbIqohYm(d(02OD3BdIQy;M=U1(*B^M*v3`FRyQ<>h}sJ-qA|7?rV zMS<;P0x$fss>O~n7frDBVosO>fM;E>H$fRzHYX62%*ha8`{^Uy>*J(iNlo<>qn3wX zl#MD@Rz3WDr4EnRR|l6uf6zRyGMSeRPAJQ*SOR8Q)-%4&I&P@#8hLpA&v?vdA+9-_X z#8&k~Na3V`0pDr;OL9WeX$#&vyQ6_W^{!r{J#4<@&|i=byPKk2UD1d=%x?H{U-IFx z`NRaD4mb@Si)k_lX0GGYubv(OIoE-RW_ifTfyJ}7E?;|Us)grgV-Si3dp^o^A6#>l z^>QDzjC%<9M4BHz%g<(sGd-wOV3fnVeAyU*b3? zBSrZvK|e`SY^-M8@~9z=tXT5|9a{_Lg1HIgLnDbvR>#Jxu>lI}eSR9m^WS%Imb=fR zzi{1nr>1K5kv%{Kx-0brZ$%I)(p%er^O>$J|CBernCd6OIqU^mK(>NHcmp>YnTCSX z+{DZ33iowBHc_|5T-#oUSZHFTjSc8k>^M0NYljLZcSrP99*{y>`8%nThC9ac=(xM_ z56mr3q|Kil0=+Xe$eFwU6OQ2jfzSLoDEse~@BW|a0Gv)E9~LLsNgZwlD7<*w+c}7M zGRSZYX#<8}9!7e|LG5{Gr66R^Q^a?IS?(g3@ad@dgJR5L4-4C!gxc`lOPaG~R~cwk z!mWyL8~b~qT9O|imx|=I%hqj^qbKM*^qA2vO2eN)M6c_D}RZ#?a_4M6)UY z-)ARwxZ?A=3k-6?BoHY8o-%`4o&tDW}T=nunmR^!a#?!}eE=rv@t+ zS4VGl)I=H5Xy0Hf%q?erZw!SzN%RATJa?FyzzhZ@7bQ}&q5dN1U4#Hhs`=;dqA}k$$EJ)hzh43yVN68+<-zkOQLZQZA}WH zXb6#!=o~$vPA?wqdYeI<(R+AF_A*?kT294Jj^SYQiJ|@N_Kl>TE7f&?PKm@L;BNkg z2K#3*#Q$DV3NVTGQDvRQZBZA;{EfE>Zr4Lt2b4714b+1#E{9P~eXVgOJWvKG>1wlo z5Gua{gvxIKqieZ2<3L~pgB6#dh`&=xmH@gm|K>g9KYWQg@85^|e|xR4NDzQuk-z#m z0+Tm9d^|~Ff~q zrRj6_{N1}+fB&shqyPF)cLe@@pntX8|6Zei+tL4JzQ&O-s5_l?r*m}Q zaKoV7EU&`SnU0uhW+9XKOy~0MTz7|!1=pv8besBr1QW0zoV?D6Tt;N{)W&$J<=sX> zW;Pq= zot!d3%m)EU*n5^{$iZoVH-=%fu;DzpF!l;)RwE+&RMC**`MG{Q%6TviVvwPgLXwgp zU4#P=&u7jMGQn)~O6EodBHFL{u=Y1qKLzg!!TJ*9p&9sl zrc4tj`~P}*|JH&apJYjI$@$>d;0sV*Ao_D*Fc6leU0NeZR>*&kNH|I8oqW`!(< z&u5N)q*MJFJuAeQ^*r;U%?IjqI*8ey9OXYvLkmd^#oLI|t@O zk-3D#0zSu{yZOE8&*`iRtV9er*OnX>e~rFzN7;Bo 
z|IDBkS*95A5xC0EE%{86H&$S#k!Ql=OXhvc8M-!OXMedrLo62aBARhxPUuY8c9oO1;AtVJJx z{BJgtOBYWYsEsDC|Hnq@()XVW_)m;vT&%s%`Qad(-K(?BjI#^fIC3DMxih?^EF1O8 zadv3pn~2G2rLaBFkGf7(NkS}9jsWg}D~wu?RTgQ;==ZHOtM{zV_VKPU?ogjW=B>s{9WO>2g#kFj8aFfXo)Zhi*e0vwC1o4#6jE z6Jx3XZV|0@?>AM6M=m8)m#}jwm)kfjpemz4bv{`9ftR;p-R6l<_8)S3_0;Pu|0t;U zr^y~z1bzvP3yB2A?8s1idEyO&_j;C9{WIwgRUiXVjAysFm7^tF?sRuY>)xw5qokcz znZo_Ek=~qSighM&AX=eB=Ze~a$Vxvp6J?%nllqMIZ)!{pb+y<&GdywZCm1 ze!R9L#UpDjaN4!$T12+lQ2HPH>umrOLvgbLDOZ3fywk#ko@>$R2#1pMfT1JlBPbSi1-h z#CZba5_8tgNUeM0JLh_uC2Dh%SevMd7rL9MKN_NGQEv%7L~+ui@TE3T#f7c0r4$Rg zj>jggxf^)165En7A%CuG{dcub+1#5cyk@F?)o5jz0K%IQ)^LnV_&{7Dz67V4l&`ZW zw(kyRd8ON9#j&9-S;qFE9^89pruXh@Rb*w45~qOHjw&3Q2hoE}R3#`ahGylV{Cri- z6{gLnM;g&Ji?#ACdNN2b8q$F}OX#tjkg8YAYPItaP0E??u@{)GF!~mLNxen0{~1k( z2@|fl4$HE*I96{=yhrKs!8a7m5N@=NOwHNbZTHJag`^4_>Ku!6i-&Sh6$M6jp->bq z*l8Rx0O0@$L-5H#iyIK*0If!k)C{Nc)vNPhy z<&2ZIs6PTL0_=xX^#5Y-&EuhN`~G3AT2Yb^Q`z?GkRy6@lb^*qn-_59Jx9H#Hrv3!ot z{&|0>dv+kyHA*5Wt7S1wLfx_Suu-9qtsAwr<1>GhV+kEE!`fN7!O#5c+NR?kMiX~M zGdd{Qvl-7xyGORA1`;Kq%C>R`q`QYCZ7QR?!embIBypaRF)-ip2-xSl(tsWkTe(Sp zjl2fp*v6HmRc8L1I%(?bN#-h{QdY(fV)$-NK&>3YnmPFSPlu8HAmgA^hWS(4Nzfw& zRO*DJalL6Et3Jth2klGD47ytC%|c%rzj}B4gmnnxq`6GW`9ij5Ia-ygi`B4&oU+?? 
zISNtnDqOA(+ih8;z8*?1Ku=4XX6X<(NgEUD@k>#xBFjH5d*ip?%z%qau;!N3_z?fC zAWi6n{3f1H`-;_tNB#FP+FYpUt5h>`MGKm(RvQ)uDrC!y6U2)X>>qwif7Zapq;v9o z*ivPp;--y<8a#*5yhcS)uHmpYnBeXBx|xu&t#NGqK$)7CMRgr}i$GzEuG-CmeEnvb zH;aL2kdMPc83k?H}Cwk^UevN6hKg-l)cEC+ZcA zO#hFV$C&yRvYM1Q6Y#Itf)7Z zIhu}+98EPJs~#L2n?0YKJ@skw^nl|KV*Dt6iG3W)^%!-M=1lavjw3ARp$G;02l02k zEXuqp)F}x&o#Oj1>$_af{&Kk$x=D|N?xJ>#-$xzeY}r(B2Yrje9Ln;f7q_eut!syd zi??Fb%I|pZaoWs5S;GVB&r{D@l37Mt5*7S9s3k4(!=s7c1!J!!;P?*|uS6*qvy40J z=^woOq|PK!$#Z&|E@dAIs(!*3XK1z90<7M6^j`mODE41&lf88E`wZLs`(}uGZn8ky zi9Yu$-AS(d3Yv>PJXne~zvy`^F1G5_P6nT%e4x8BD`D{I1nN0Ab89fQ%x+L2V}aAh zezN*exaDG(&+}4)r`Jv#9Xf(oXC`NMVApR6VDBUxtzWu*84bVU;uNpSwF+Zm-&fhb^Ygh59 zN^|YJ*3Dcm$5Pp$0Q}@xD60J}b!Gv0PUafN>e(EJ-cWj|=p6;BHG~L_%cDxJAC=cB zA(hYNC!2~C^ggz?j+eANVsM^w`c>gLi)$bpRDhN_HaZ@oIa67Q+ApBT)4E3V2RhL) zk~_5?3l!fE@9h^u`H$qXozG73LriE%kHcsu%tsoyQH@l8elK}d(neY;OH_%G>XJ9d zE18h1#?a+;|Tnjy7` z07wi=x^p;sQTdSck#>R>;ueWM7`LbNy^c7uv0 zi;r(ECJ`XVeo!@U;%Y@L^B0DL0=Ep~t|u(*yvCoIu!6Zc89#jzbgl+=%)eoCTWvAY zdk2P88YqyWUCTwBOzuk0wTqyT?%!8Ack%V=M9hFbn}ppp<@R>DF{C@lnlzmFPTNO5 zOo$FWN5VF*1aG^SMP{un`zpRBgdaWeTJ?c^LKBCWPLlB*ui)V3GH*$5OJ7@GdG}53 zd;<=3k&ZjI!`s7i#@M7a);3rmp9np8u~a(>zrWTKV{U9>=i$LU7FuY|ljl1tWorBB z(A#DHy~nCts}7v2W)0^w7o-;eN3gOy1;ux+A1O1g%-VF)T(IAMQRw`Ayz*2n=UL4o z%V*5gPW?Fa*5HBsF*Ey>#i|`W#y9AhZv=WS&ElQf_|ooW#in;uTjM+F7u5nqS7QvF z#)~Y2gza*!VASd9C~IwqS0K?NlDtWABXeXwr@3but7U8L8;-H`vppaZvZH%d<|Mz^ zynehPk4E6lj~l10<{EN6%~uTZnCuS|VehjzbZz@f>y|@nOqR#0M>G?<7rP1FLz@KY zmN^CRPHEpy5w8OO0zM=F^Rj)a~8C@wlXr;xJs zj)#6w!xSrUwbwjM`Yt(oyNh45S}Of)A~)IAfn9z@pQm3Sp#o5d?h{nKwq&(76r0yW z3k=m|;Z{#6o%>SM0cj~t$6?{b_M^`-bC~LPedd@+fPIWG!7pM7+CgdOYfAeVXQ&s= zv@@u2>Yk3p_v`vAj)}a=bg31n9Lu_9uoZV?msBBl<}l@$vw@GjpFS?WJtHi8pI~V6 ztEz;Wkx^TiBE*Syf$9$WLz1A0W*zC!u3`S{{o zp}zG<(b~Yu5=6FU1^*Sa9Q^+Izv?^q-}#Q{&ksV%;RPsM4(J0iSf4|(p@k_3(*p}& z>l%h%@SrSVQe_Yn2KR%Cd`s#z;OGdiDt;bE#H{x42t|@T8Yov5v-uCYVS;XJzt4mOYm6>7 zMRf~+xR@FhB+VN;!#W^sv?jViBkE%P21BhC)Swh0!P&s{P~(E#pz0)paZ>s@YG&S% 
zwzmqXO8OMFAxZ2^EDFDeCi0J2K^CNhw=6+f^lMb^ckSuKG3;k5{?AW>zPx`6m;IY3VzmJEiMTF=Q-F0)F5pQ+Gd} zdJtH(tsj6Y8BTy~ZG-%uT~a9OfL{zn({XDOy$Ph_A!xh(PbQ@}#ukL2{ok6~&_9;P zO3L`h+)(+I=&9%lh6N+f0+tN-0d)@(sJ|@izalUW?i-e8-Bm&(Y!FN*mM(X(rnd+; ze~&Eq?L_q(K}8XkbrlM%-aju6@|P3F6#spNR0!ekr^``EvHHtWC@+6d+3ew>TmY#s4#bDGKedLrnqMDgGD1 z^!F>?_7}$W|53s8KVj^w8wBd;ce&M10f z%9OYdQ8iTz_8!YN+0D8aBB+dT{SWJq<1!NyGm~L_RGT)3oI9>tyT6G!=ORCVY z!!|${JL{d1EKC#KJGn!eezk)FP?4mhKPtQIFY4aPk&&XU0K+|aZbPv+4>xPE`LeLY*q8)iha{5;lAPOPSyqm4b$)QMe5GH4N;;M z60e&sJo9S1Y5y7FK_)7lPx|x>`;wruo;`c7N_a&+uK_+d=9oYIR#4#G7Nul#@%tN%K0IAaC-gL`LlCwjL*M5kdnAV`CZ^w=W6EEYzQzx!ZlkIzM?2my?=Tk}#~+;YSx%K4n0uvg`|cC_45mGF#E`vx zOF^v%VeA@r(2*I5s~gG-(ScKuULyHgDU!=I3l81u3e@+PRl?(%Ml~3#<%!E!-4=in zZi4&-z(#kqdSP2+W)139s*d`eZ_y@snfdb))gth91I{c&lJN-jJG`!Ei#+G*^<3wihkTX$qx&?OcTG3RZU+~PNob_FW6(IT*nuD zp`MGFj@}(wW0*+6?PpaOcCRRW6GccbxH9hCDY`cB-B@|j^0Erm?vWnF=bP0nt;f|y ze%gIo?CG~%QTx1yvIoa_+jb`^&X0}xXZxFOz#Xz89An(fkkRK!GZ8Cpbgjw$OxuEG zw5&~R1S(Xm!b#Dv7No2;mt1e?`DFdT<$!Vva*e=yyxVjjtGI5A|JLY>roi|I`+Un| zG8-~X=wSnsUQY#YM%Fm^?wMPHi#Ko-^}QXn>AbVtwVjQd?9L)?8(khWZ#T(`A1Fm+ zcXGedQSlohTNpvl;+EVFv%6jy<+oDd^Y#FS|Bu0qhjsu}y39DJHT{Y3%cE~gs`=Ov#7+~uiM@5E)avP5&uJ0p(Y6PQ zKHQs2!uq9yPhz(_iwiart1I03i#R4vR!b!cM+^6wk7~Vh965PsKR~dTo2sAlqSbRZG}BnzE&Xa)O$cNVRqv--2PoGvQt{d z;x2V@xby;Wjqs2Yzm@Awn_nS2evc{jlLI)8ryts$02l;%v@<`MycmIFb)x;%7xs%+ z9;@<+*yNakijf@RvTZqi@+B3!f=n-{R~}#9B^>30>vC2q%G(Fa8G8I9bwgd!ej2nH zmi7&{eC}0Aw_CXvTOL`@xHj&{$vO0H-Z5d$QJfFED5FegC773(i;j06oJ$@{*{pb> zA1@^KUVbEiiJ#n#dGtbB=tQ`$deQY|*M!T<+O9^V_}foD%4`p$*#CB+zKw(uo=07P z8}{JG40`Ff97%AWbdleG-KS3Kn`Q$Br07C9AMtjGSbKq z2;YjYLzC&S=8SR$%ig*h`lLsReW1ieDmP=;w9Xq;2CnZ#&=EVf!n?BBVN|+ z-6piY3wdv9ms1|}dN|0`r&NzI9ENH6{(*^oUj}t_^|T7Eq?Tbdy~WcO51cvn`C^&2 zcCi+;SMl zt9^3@?N;TQQaD`x(O?rYD))jf3Gw}+Jqzx7P(l7={5#8=p{D&{91?q+DdGtpCZ?)s_=`_^R5&(mT7LBAA$<99I|jU*?z^J4ISX z=Peixa{AlEJ7OOVHf&i9%Tf(zjCVT6tW|pLM9be|Mm(kUskYO8)3=Jt0&;WznZEY_ ziM~u{%dwVav4$bX@7)?HpZUNSI9!HUKXm-VT(-W6nZY*tnDI5OTnF;pH*h@tz*V0l 
z<{eXN5$3Sh!TdnC^NRY?+rgRFUNbG&M}|FaIQqg7?Q`f#R#M=+@Ae_br^x#b92d>* z-2RfmcB^gWDQ)j|)L5NZcDNF6maU0$Yqmq*#re6f(KUiUc59zbvqL|liN8mf_>PU1 z=1I85>+vkL?tbZCCAXECXCu?CYws%OZ@N=kF=D?yHbQTkmC1@%N~(e0Tk5p;u30Is z@lY-dixQA&ElRK%X8Q3XE^1~h6Vp35j7BT31DM{|5d}7er9Benl%^gi9{>yTNm$#zzcl$ml5n?=1D@>Sj(vhG5#f;dFQD2wYC>E zFOj1uTzHCO=|#Bj%>ZaOPQ=(=({<&Q?7zM%JTwHY3e!Sg|x-3@*_k132 zqbki7i#D~ZS8IKkp`suAb|hMJ8L(ov6oLwe$!fp(h`L=DIX|zo)CTcjqhU$!Qom_f zjhN`AI1jZLFMP|AGymi5d(TyP9@^fePeU3HEV9D&%;cZd8dcZsxl{i@`r&Ezw&V=# zP7`+KK8RI{zJGj)?Z@{d&-P2LVw_VmXwFUre^a}Z2F=o-La#VasV|dNe{|-K8zEpSD(}fjHcorJ{ z>puKm(fRG~p_Em06b@Hy)1b{aZ}MbycyyTRsGoe0;Ci~~RzmTGYK50yzFa-?>;Py= zI`9EA!>me;u^_+$(TDuc0_>0(BAy-TIIwcjXa&hBg@u;cdkcPjYu4awEuA!Z?!AlV z5sxdA{-s{o6goYX#!pn%Yyy9*g{Hvqt!U7_K>Aj7eUDqm+v~NS@bPC#@N_(ZC$I5epyw)9lBWu)3)Q!inRXn) zXyxIcFDSm%acHM?Osba?Jbn~ba;`EBa=huCU;Qabi~fwV&~$I-y|@eFy9CalrCnV# z4EtehAaW}G9M8NXuI7zXLi47G2gujo13TDcUskm+ z`RS-2<4`*e{@vv;X3mMXgHnaa^U0;$yx*U7-SD$S$u6EBH92qm&Z}GMP+Zi#OR8q$ zDctdyv(;N)mx7p%q%nad#H(-+@ue*3s4SYq zfV5jjQi0TQO|#+#ewB06R?J5-kHs`h2PnS}qxG3~v&HYF2AfQ>ZiRp_Q%f%$(+pcb zzW}G62LNs1n8Wmkjox%TNxKIZRMoct0J}iu<~D!9tbqx=+7!;UTXm-`VdRc;^bHe_ zLGrMv3gV>4^>FT)I53qRGW-n+L1v~OQNS^d*nnE%*&TQYYSv;9OR|80b}~k4S2`y2 z2P5FJJvEMwNu{+HyR)=(T%yH>T%lP0^};Y(KX?H4+XI%r9xxx-QJVK@X$yBB;k}_( zSN}p>+*75dD%LN*yPc=KjbI+deknaPXK`+9CNRzEj^!&teOcJlRJDm>4K{Ys&rgM8 zYsh;78(Lvf_~8NBo?fbB$N94Dc>YZ>71`VXjhO(Z!difr!FPJYEQV;!bxB$lvw=MA zorPFVKwqjszjIg9>l+_NSJpgDr zx->*`>?E9^cn93j(g&KK^{pM+CADT|zkeK&Al}-MhygCi_Z; zzb741OoUoGTsgyfZO&9~3mnOxU!+D8f}Oz5y7j|We!yv3T1Q5tGPHCvmpF<`K*DXF zmtV8~qxpMT9-GK-Aaonu6DGdar%Ik_&O@=!vYAnHx0lCO?v+&}a2gF53piJvmhwJr zIBVupiPTacQC_9?ta=yfg>@9M9u1F{7)UKlV--(C&r@HT_)hz&5_VYGkBAq2Un7$JZ46H(vt@4wRYIid8adFD|70*hL z{(P77CB+*JOQyqyM)I`X{KGVlckPY3m203nKK7nMDM5A*Shs0eITc>dxHOK;4PRc5pL4cw6h<|`tqCRicohnM## zrEBQt>XbfwQS_Xwr1 z_g}62pI%&en-3=5s7EpKe7vo>q2P=>*j$;LHdrT)CEuWR|01GznhjZ9=M95h`9^^D zkMIiG{`v;0ZT@^UuF*{1Xuu(}sy?mNGuHEr;)T)x_R?J`yL#)8QVLN*jFz#n?5{IQ 
zSE9dELF-t|5hM2~WfaG%g^PyMERlJ?I*8s0TSl=hK&{@CRNPHrITiJ0L3_#nEC7%T zHUU^?&jFgXnUj9^$I@wn_^55sx)hN62nFRpcX=bf~5V%#s&KO z+mk1^uH3DYnN>yr(W_lX4P?CFMF4n4S&$dt=JzM|R%pW21y_;NkTfe_9|bu{evU_F z=Q|Z*!;6bI(>kn+whdMM?Z6|D-#DVoFOFCk{mBO#@_?E|&{UVJ(&5rhqz5an)stwa_0D@;J0~7<;Hnp0(U{lFm zI;HW>Ud|?4Z4Ek{_~OdcmE4z3T8bFoiq8O7Z#j@NI6tjR)Z{@gm;8CARi-wG2LAtU z7e6%37#%v1p&Dx^ZtX}FG$i#w3v>qZM6M{)SE$T$W*l2@qzZ_pqV~e!&D>3pa+7pc zl%l9X(yqOnlWd6fFLn8R7t)0IdecTATxpY+Q?l7Uey?$r^W$CjG)sK&u3mtO&shj3 zE0{t14EiMs`n1Ylsv+DZMC+2CD(VEZZyP55S!fmBHgFDZ>UYaEX9SnTISJV$5iG-- zTvltA7uP)&{o`?>=~M$=7RL0pV(_aUGh$zu?w2p07hn6ywB!#=^uLi^#4oagAjqtL zlU~BVT8niRyqOxXyIydsLS$^ra4T&}`e@0vQ~YI(gPMMx74&NV_#SvN`cD#XzHU05 zcvK=QiFb3O8}_<#wR}Be(PSjI+xzA)uZ)R z7phol0xV*p#siJvq-q8S>RYG9PbL&AkU(%@nn784CAEyG4HbxU{7$aBVM*szu;M1d zN-nN!Z3Hst(p1C1h;EDE{MkzLF`Kz7oWUGxN=IZq7QV~!lG)C%h4} z%>_z*31Ig@fucK2Oo^2pR2Me^8H)pn6@PfSIKTPFqVQW)UQ0yPgAMoYUi&@I9 zLeO=77C12C`n8p67}8`h|B&x2^xjzayQ=MYMf3c30O10=(4zC_q2Y8ijei?P+r~`eC)9rXxFZcd z!j&1tV6pKG$h3b35V?tfX`z86Z~xU#kLPwdiSnjk456O)X~h#Olk{+x=SPPFS0*_6 zQI3z%>ulhnV?l}yS$_*Wdw>!BlL?x)1)9wMeb500I4F-|g9VYG`bJ}kHb&PX^Km}f zn9Me)V*CYwp~qpFy55tb5*9IAd@`)oip&z}}Ei7vg{-Pn26L;mCg z%6L}*t_4ks23pkGBK8d`()RTuH&T??I|q7{KNsPMfhTutQG#3VZwn+NY>dRp z&KhK0`r)yP{kpvk<(xS|5Oc!p&8pu9YF74~CxsFUKywbmcL3f2Kub~k%NU*v``eu7 zf6W>4hxxnxO7B5Up@O$wqnZUliuKGoY}Ju*lYY6ZU&2xIuwqEI_~vsL@d;>+dW(tY zt)9%7XSvUeLq9RO3*g$YB`UR-(uBy$b=8XzrbBFRV=rjh6!Z1%FI$EADk*)lcc!mk zI^l!>XXyc646tC6B|{erWzj}Izon}w&bwzW)TAkxT09ac5~ z-_q{!bMf1Azt5`AYCw``eCYVXMXShso^{gU@n}Ysfv#5~lCBddq0_JABVbZCCE`aTo^kdzRM#7-ivq zIsmjV*g+ehw^C#;h~?lM;? z9qMF^^}uG&0yP6Q0eDqjDE0F#su@w2h4s4tBYul+n{Io-h}RH5v2y$gg$MA_Di$-ZD0)sh{hJ1Q%BUK%irV0t5*?) 
zGb4mrnXbpKzTBkyU%LGa8N2-x|I9e<{dxFb@)5)Bj$no%6_wQaS3Wg7qSuEJkTEg4 zA^SA8=yRd-LV$YS0BL1$$Hv~03?+)+TkR>eH40GQyVVmliTlC2$uo=i0m|;pWAk{E zw#FP2`a8&R4Fk4cq&f>~l7h{8uARXTKLf#cIE32!*C|HYfW}9*5}W6lW0s{{n#=D9 zIWHIhZT~k0_51x@a6Wp-T~MoKR1sV^*<~l_sYjRd<&@RZ-#&_7*i zCpGy`YHVbUH#2sfTYi*TpIAVy@%vu56F#T*;!w1`bg4g+OYW_*R3Yz^ryT7kW4bsD ztR4w}-xCKxNsGt^)3LaLXWLKWXoWpry+M`c||uSC_o}4kjjqsA3i@> z8r&8y2!bPfavn}7y_|uTk=p{@x`(C5wHq*}7#-he?EOjz_YYR zBtf^TeiLt9t5_NNm0j2Gd}6+hTRf_V?U!)N6D{MtLloCEwYs3Cv|OaZEiIVzJ?O4t z?A5odZP8)ql9fo>F_IcVyQ!4K8IIJRfbH?}O66~Z?(df*)b*5A#J?U^=WYI&^yGE% zGpDA9>&(d;AU&A{`#4g2qPtN$6vDZ#s-@aZxRajg6@K@5qaRB<=897TJ_^!#ao}5WV>}P|1L8jA&|5{3F$*U zjOJ#vZNd&JU3Kr^&(U@)5E!(758p~|G`pUVy|i1+mj97bZqOkY*A_pBI{`pNmuxO^ z`jN4>sIECQwQoqv@y^4lmEo$%-|eaeDe>I=H_uhYp0S@EsVbM*iqal!phNRJ#Z7wU zK3iAzVC*sPUf5K`EWKBc7YZTtgvm0^ds!<*5d&b{?&zV8iMB8r*90oW2mQ=cZNUE= z&5(o*A(G;2KWbe{&9c>d*Wv7@PeUu*)DuWIt`bPA#ANzkqA5~8qwKu>MQN8wI7*5K zY=2q7xklpn%_r8lA2D48d-}3-gOOt!Ms0}1U6Rh+ix>W5sQ~-I61Zx)GNmcu_ofy2)sRxg{v!nbz z+gV)0zk6TZCr!lOPeucG6_2wF3Y7T`>>TxCZ^0i$J%TWMNw@jjAHGzP4KV48D@Clr zzt$$->}s{1I>mQz0dCTBb)Oe}=`83Z5Dl(;VL5tRS$tF+=xesDTLL~6boWxw18eRo z|D@GU_&SH0{(yv=meh*zfRL%L;wizZ%J}+ul|zXh(1vSr?oc9qA982M5I<^*EkQ>Y z=6}z`4oRL8@0aLpV`n^Gq8d)Uvfa@r3ihA+T&E6ut5 zt>?hyPv^Cir{-tICe~QxR~UHC`YoBa>?oo>uMr{JB%nO@#81R8qDm@g(NM6bn8R>7 zmnmsx2j9*JenUjlLJg@CTe6GEw6E~31M@$bC>&T2-3%39@50J*GOlxD=|PSFneLs& z=H+PmfDfjhw$q5&I^98IT?>6!TSLUQLUz-*NnaU)K zEazoAk!GwmU3CRZlm45pYjxi;vK>$>PiogeSOn|(nx4pzLqPbTd}CfC#vd#C(v%-A ze>o+&`SMJNYR`()S4{$``N8qayX&JcL8x)`9>_kqtFT#{e$@H_#B%P)`Rr2Vi5iR0p@MBd{)1ktoebKutdIF-nFAr8u~EEq(%|5z7+I@e5dcNz?J-3vo@&2ihybPM^IG zX^%1OiuGn9E3hsJlJy!;$JSxN*iys5#ad{ZXpH@Oq^WwP{7HM4&Qi6rwgOxd`d(*p z9^4Xhf#%ncvY%e;X8VD>jqS^qbPtgjKDd#iulsnh$KTr9D1iUam2Rd(RCZIGG(g{p z_UA4p-M)ZotfnG9L|$Jru@XtF=&7#%Y!vC0& zP+5{?)Gh&f)Wv;9l?u~d(fJvlx&d%iGWIG^vq!~zWVhm;!zSH>fC-~iT+2)epw1vj z+D*gA^5iGUGa<-p9v7E5Z$~WcsC1AHf1ErQD;uSL=7>Arj*$;0h$de+W@N`bm}ukY zp7lkzIGMON!PHhwk?5iO*>zUM-(B@?MBj(P8c2WTw)_0uSLget-V6;>mY2pxywM!$ 
zJGW@AREvUHUH@w*#ot^+yM8F|5NWNE|ai4u@iFICwwd2t$*PVcnxoOTv z9Rc4To!%4So5JZv7tcs@>8~}htV>lES@5t+Sj{3Ta(_;2pbHl zuK*z-yU8Mq2@5*p)F^7Mr(P*AnE}cc%ge@+)A~qgqWlt!2;(-jg|jnwhCtVVl_M*c z()abm_ujm2c^CUQZl!{G3KrxChwn!!QrpNCpCEh^^?Q&$X-)Ux$4uo~gA2Z2FQ1|u zJirq2;ry81*?pekma#|uX7r-pPv|HLB=_P<*7|Y5GBSC=vHHTIs#;KAQI)xjV$Dv+ zdf+nY?PFl`!JYRpoSX8uuv3XF|x;Y*?$&ESk?${< zmHIN*lcbSwX8ib}7DMUVp5|Z5k z@6=iwhkGnFH&l}6Gc&rik6L$#jLufr*5LhEA)E?@QNnUFiKW?w2?{kLIjTyv6(w4i z#*%;RP>O!pwPz66P1uJ5bb+N;Fykr?zKqf_zr#u=qH}58W!DuXI@C0$VS@fb^$`4GOf8y}W3=acm z&JXaeu6i*yp;!pDeMn~S+;)T{jpS`(Y<2y<6v?9zEydJiHC6NplBG{dZ1a=91Dt7( z;Q($xv)A6DxTG9g7scc2H7JsCHRI5S0I}!?;>*M9GK}tolpNJ|%)l}d$D*WL5SAUD z(=8}h*lvEzL-QSO!zbK)YX|Vu&o^4I8)JQd!-SE(6-Z2`mdCiu#Q zpG>Ut(TQYBhRQ`y+erjkhLtA(mD8rhjsStWRs2h9hM&dZ(Es!+ZFbr{1}c4udV{89 zm=B{&1XGhHE>b{Its zfLV5f%xqv#7GLm@vVBu`cdWW>#JkU7KJp!Wp3nn_kWY?e{$%pJBU)cDAG@_Q?}tCu zn{}e-RBrEGjW28tQhL6|E${I+%jdgQ=!yQafW2}vc*?7Yh+Fv^okH98;yR|NjSE^# zdedjxhfkgVG8yn`B4XFNpBH>_iDUXDbphYZRyl?s~1cDz`}#DjLziaq0zu5Ga0N?KWx6%nPT zO*wYSDXwLo2lnRY3O;g;7ZbSt_6S?^OCAruE0+zL?r4Z>8cX)py>ZlJ!1yO^8|#_n z(>2_M`nO(X343*xxn=N_ovh=3htONURE-aUtQsr9fQE>}Z%g3#%{&{UwS*VAsgm5e_F;^y_?!F6MAI=bx+e!cE;}YW#5e&^BMa7(K97i!jn1zy~397G~RC z!dO%09G>V}AzoYh=3FXiWtn2;-y7iyw;ME?b$KX!BEJwQ2^GOIEcJ%Ld7`fJ2S&%X zhs+s%-Fj3Yumm+6cXk6AnlXVX!!OH!!?i$m0Wp>X?4$?AsC5^^;gw(Rz~$BGO$Gnp zwdm7;pKY=OG_W;R4dB8D`aR|intpmC%edN*<_n`E?*lPCQy@mYjoRtdHrzt@`~Xt* zT#7v1H<%F{l=&`fQaK%=_A7>SxArWM1ztd|GUpHXlGz9oQc5M z;mb_6g+ejUof_B#_6QVTcsChvu>Fh`?koN<)?OTag1U+)bT>!u{y`Ns!d(m>b!|#7 zGv(ElR2^>FTim>4d-28Cq^jqs6gTE%3TzlVewCi-zn22Zzp+HnU!zCi>YJ_)I-+&v^_)F=o7R4!X*QR!Nb8r^_?s= zLT{(6M>@P&4xE~*EW}YF=t)q1nh`adh+(JNuTx8xTnQ|g@i*R5*&-de=0Ea_9Q9Q# zPWE&JIQiaXPGxjr%P?U|`~=592rE>aCNn@2Bw#pfxi38hKeRj@-R=E+%GD=B(?dH( z!e2gbsgbz0H$XNjqXj~`OP5s}M5^S^Lib_^>rZc^?!l=S&BEcaxRZWU<@7N$yc zxH}(_8kfzDSiN`bKKdETrh7Pp$V9z1t{s^G+KXYI*0YV;CI=`ePgZP4m3U^DhDh#` zI&_U|XVpz$a#tUp2foO#6%uSNMt&4Djtzz%W}Kz^HY*HFW3muq(rS#JKF+&-EBn4a 
zMq{Hm1>WAD^qgVZvj_`?m$h`IPK?SEv_Ez4g(L5h^{2y`Lzdr_RKI9!ZPTb4wn#wa zd^TGX7mZ}Pa^wmZCK6^XJ$y;}8M>rA?*p7_sW(Won1F@z3G}YR56yjzHS5mN7Kta= zY95Hae16O-bBW2yMs6Jy;=h~bO|>T5;U-X@LX@QQ$L8yg_bJH`VXX7om)<;vXx?M{ zVcUB%yF=wW`xn=MG`f=KCv4Ho(w?_SnQyh42&t!;G+od<+RwFh+d95^)ohOQ()O3v zGo=YuK!mh@<@h9ci)-QKu1Ri8Gm70DX+h6K+LALGe6j-zpD@^Mg*I>YZM^PWd6@{2 zNZ1^l%zrXYa*la+U}NWo5k0R~li+y5%XGZ0api?ccT`!b+9wmw&?Tn+CE{wq4~c7z zQc}iu?(M3EC#8P3xTDU;i(z!4%UE0dv?mh7Yf z-hlIxFo(K5x>P-aBbP1T;lT~olT=KzVcM}$QUt3*MRN4E1~j9s*VxQ!DjHhA~- z{VsJ!eLS0H9cVhFg;sb8x(Oa+;_6j#b+C7b$oiuGtQ~%Y|9<$r=kc}S#^n;+GoA^b zo6}Y9?ql*#qj8h*VT^NBA;>YWzD7gn;ep$|H-z3eyYE{qet$^oXiG`0ZqvDPou^zg zVV~%ku*sNt{Kg>AE7h{RQHyFM8zSlx{<)|5F>0#co{mc;-oBZ0Cvm#?Yw5Oe_N!0$ ztKV$K=3B|`R^Q{XOKu)g8cd5-Hf2-~qh{CN9$UTO*Rp+xz!0J;z(QcGNVV}KF1;Ou zR#l0bbG*91jj1Av@g;O)jTQgHweNbX%81nu68s@Asc^O*$)o>LAZHw5KYOs(B12w_!V!l5$)@chU!EkF-m%K6U*^ z*9v_8Gs$J~g^^GHyg~kSPD|^dc4I|5yrEV6`g*?!MS5t2p@s&gB`yd9%n_c?8yG8k zY82A-KOC&1P1qHb@HRxA_y&aPtQnUXw|9hobDt{whx?Q+i~0IVjq(BmbPpNRpGL^9hGc(T(_9@MvV6`fCCA9A;r^b$s4~hZ7jGvQUs<_ONr8dBNm_V17iB#u2v*N^gew=dCFt>6pc@+En46OqnI?m?qe&v@LpL z255}$66$Z#(_tVf07BW1qMshv0FIhT^FG)@7jS~9NFsxGuA|zudtr-5fUR{UZ~aI& zy*E1pmgIGVp=JapXLN{9p@uE>sC@(h60VswNOeJ{&rKXJ+ib#{%ir#6ncDI!UTWmt zh1C25pD$k)@YDL+vCp`P`CiG9LkXdDADx2de~agk-%qifPxge94E58uBFPmKFLHJ? 
zsh6Y!AqQw&81)a-7YMx%KjstmkKwgnh@MoF$HpoE$c(?7C|Q=jnz*-x|~0lZRImKKo)blm($u~ve{4QSk38a*5(S$ z>(*6QrA%pvEw`I(*jKn*!4AN7vbPMsqYt1aL+2nH1ZS|jnsDB)YOjDIe(;~Xhu=u8 z15kL*mS^|~0R`N+^ipa#*yVi{pxF)3_*Bb4U(VXHqtif~$3rg8X(7)Ot$3}MF0Pbu`iE@vTgrX zM6^hQfPB7`LSGA&9-Dr6foQubvmA?uVi>)3b3E=!bcvK#wu#yXgp-qZa& z_jCX5`}e-j=l!G4XReQl>pHLFJkDeJe!oYM(QFF_;61TH*ku`Kax)?lH`|gAV8GOC zAZGnEirZk@lKV}wpHO2oi;%--kWr5oM8AdBo**)`Fb6>nfd)tiQNGofDB2kED!@r$ zMf&GUorDoFpJ*x7*3KNx2(^LxYIbYKQKpRdufI}HgX{3*JRDbiiQY6<@%^{b$>=4Bm>2*23)iwqc(xV=Iv|t7?ud z3x#*^2I>l-LYt8EP_hPJIQr`* zXx3qtpcI~{JvZ1V(*VxDu03F|M&v4{pL&NZ)9kQ&UX^@oBb?CPGQKOM|C{F5WgLJx z+7gidmXbxF6p{O+WzQ$~`&KRL1y0;-Fe6sc>y&hI1pON~J}-KW38O0VOOWZ6yhiW+ zyTM0BI zq2b&<zyD=M;mYbViYPfFd zhDMOVK@j1H$L!OCV%y=W_6aHnvQ3UKPI)ha-8^J5gZPYu9I#j*hsfZMMifoL_8oym zJLM@?>ITL6B>FX@StxmWkWa$f(>$+VCV7`bNp~cXySuP9Y(p)xY{Nj3_?zH!rcR#W z6yCbQTKZ58l|tX(9GnzzClATB*&QFn`O zMQfFSvh85}ke6Lwu7pz`S?MaOu5|NsRb`uBp&Q{8FRkY?T& z1-t$L>>tJY$_CQms?oxRmwORM?nY$_=~GXw1;RG@7a=?AV8R=Uq*{9g`6K2la{L>X zlm*=LKI=I$IuNAwdkz%jH|RVFM)=Vn%Z9%vqVc$3P;>@>z#yk{w9aX+wtK4<_a`*Y z204YR^%#7Tu+C^_#AIdv_rEcaZh%U7!l8>d9Cg7u`WZdE>xP;9O?H{Y+96mHzX8IAEw~0fx#njBN1cK^rk5G9t}E4cob$U+oxv zKke+B<_f9)79&JyAne@|tO!L8L4X~Gv4hO^Z(>8R(>gfl@;Tsxk}W}37R2j8P8JR@ zFt#wo+~$hQi7XX?t1h29Ue+5bNx)P{g#(lOjc02TQ=gg4F2*td@)bIe_VKg$qLjRjd38L;}9o*AZ?L@nI&ji60{+otVyW(K7xx=T8{IsceI;B((Shw zJ)tRl7d)L!^V%|dUz*HDob!bnO@geRBfX4Aj2L^2KPrXZx4lnXHaP6MxtneI5if)3 z2yIUg7W~(xcQ$Bv#u^+`sL}B@*M&5^S}^@cB9H5fA?x0B+F6%ZLHB1uHhV|32Ni_M z!0s|5q)@d4{6IfoXdlF>am~QzDF8kXqq`r49CQGkox`hplRW@T{mwuM_T=b7v|;`z z?{_h^dse|d`#HJ-p;2yXCIKmdA)5JxMvGNYiTDswm0+vY*7aYeBfaoa!-ye`Ee(hU z`M(reW16$Ive$gaE*2H~Y6Z*`I)@5|RaKpTmGwCS;az*WB%gRj`sXbw&p_Fws|hw6 zTObc&n}1DzHuw3>`$2DO$9ZSTV&7l6p6#Cr`uaI8ByYvDvzvwFqDFk_p7ykYb5jIqd4F zb1LD*)&sT88J>}VcOq&BBV7Zo)<1P$0B}1j0d>v-3S=zQc#nhtl0~A-Tg4(JJozL$ zrzz*BH_iN&xbMT+i|#RX$la^@$Y1O*3WVDeJk{%3-&%1JbNZ4VSuKgWqlqmnJ9{_W zE#_@uU0UbY8%O$%PbC-)Vh&2LE8>-JeA!RM72P4z1`#C-xSHMj_|n)n?W~ib*Ecbw 
zZRFl=1#rOK<}&`Z*Wq#mbjT*s&y-rkmf`O_k>1joQV^!VcDFF|!C1S>D(~f*)597> zk+A4781vHcj@Pf=M6(u#^;FPtClKty2M}!Yh>Jv>*eQF*IvHH$YOvTNq;!meHccC^ z&G4@XBe@}++wf@KYq`SO8kz}l_Cw=--zZhWlffs1vrlM7Uyh3lX1(vh7bD(_aS+dF zLIqmVlQh1lPwGMz?b3c}y^1dQrZ*!h;jQ8mVuE7QkgYphmRD_t?+|5boAQXC?^O+K z-Pgd#iBV!YHsJN|BBULItxF{gs;voAI;4QqYd%vbTHCYz-kp&-9PZ|;L07Hqm_MGn z!S~9I?tOi(9qO32^B1h|OZdgiw$zRnFUH!6D}w2NWyH+97!0YBZA}9)&oiSLSXI>C zo*Pg69Bqx$%6=~$FeEPK!V`MzFtd9YZDG*R8h3jVmTBZ z)o;ua=I(SMa1#w;fHoC^AH`=1WHvnI$fF09E&2csUozK3u9kZ5MVD}qT&K35wBO!z zBJOLk_C;vB3%uXNoEmZ6Z|Ic{$Qo|6&ukSg)O{-XI@$hFMDukyrKK{_IzwM^iD_Gz zrM)Bu*4LM5S;G216Hi4Mu{m?3xEK|VP)-81gq7~Ou!CV7UeMqF@A$GlF?h*k>m ztkIpd_Cq$xlZ5~;z_Xi5KY}E-L-C0SHn6VBb0F<~2_?-Sbg84DN(^PPb$=T_0rPc1 zT>Bx_eS$RGXImy_ru&GkIXzLhz`P-n7bRu&lv5{Ee$QwDxjedWNS3Fh;RkpL-03AR zs1PmPl0?hq<(>GjVR&KF+RRB$*{zfarT4>q)Msy@gf1)-NM<3xNZyq}$Zx8c8Q`J> z7q8Vk2Sq76zSE=0twI`#kxw5?GbhHM$9?{(eOn|f&G3z3BoomL1H_d*XGV647It{- zJ28?z4g&Wq)r4CQ5yUkxw#mMcBcHxEj|u<(tPI8sLG16T|1hv9gGiP%5Kx;qWjD?JANW4a_nW1p3F9 zK!NJr4wQ86HDJC7ZNR9E!~2dTYh#kzgswM2v4W^mGxhyGb#HLXtjZ>DCE>fHWd3eU zUQ<~3BOS!5*bw&F^J&z>?Ja0*l0*jiFzIPJYzRy$W(^9GiMu8_`H$rUdoI*&8s6;|9s&)X9ZvD zIz6!cE_B(|<)5aK@{h+dYJ!%HuM|y;`%+Jkdr?_d{Xsp{4!aI{^hvcLy&16Kkj;NVuP7w%(cC{k$o-GRz4~L{j9va=pEhKcG;6z zj+d&C8m$N=zk|h8E&*ffok>Pu4&gqUe4-qP{C@P?b|l?zJdt6}GVk#6=s~o<`arLP zr*5gww(Bm&&hA81v0?Poi+jl}@8udkGtukj=P|TEn5sFZYE-!Oo1!DS9UYj%&$TLs zKUQik`0$h=!tUqSED7xlr~i+KF2{X?6psUzI&S+_M0X=qq6GJ>7U#5|xjfzTf@gtu zy@`lYFC|@8OQ_?9N60b7gJkk+ikvNiN34xZ*0Vi1p^qFojX= z3x*f+-WG+q3Y56doawti$YhY-AOFZwE|YwOWRtyUrWtrqo3Yhi&kKgw3pL>&t98}#-*A1D3 z(xd#qXujXWa1 zej#JTAzA#C}kwC+6i+$|jG=J*!@i<|dA4 zo%$79*KnPC36$mkaeq;(lJ_Q)=D+qQ7(^;6{KGZ;Z;v-CI&8L`2VvY4Wg}k;!UJ*g zlFTg!MwlYv!jEOTN{h&`;SrT|vv*e|pB<))I3=F#ebTNDIgtP$i8Wm*geiP$9Yw1S zAy-pf&Jm_Dxv)$gJ%tu`Yb+UVH~&i8(KU{3)dZ^vbLG%&j~P zc9!nJnNrgpDEH)-5;*pry^Ty8eD3}<50!B-bfAmh_l&S{)`dDQIRTXE&*Z?A14$%W zWlsUE%8%WS)^vr^;%lcR&cANsm15pL39{lF`>=f?gk)H9=1_!_$f@J;fbMp1tsFeY zKs+O1{+vOxFS#ssCEY`}Xhhgiy|JZ5sop}@0I(CP6}F>DX~YhEVv;W~90bRA;*~A- 
zY733WHCoVoJ}s_*&~=&Jv`?>lt;;d(s9(%e)+<8zeNmQqWfqbs)L&A>dQybEH;_{q z{3_(KN5vPj^1Ck_1qStJ`8*eCC==9rK)-_60h%cqV2697rRWTmj`CC6zYT$IYyH<=AgQPv3m>`f`-@+gTxxpnjD{p+?7j z^w5Vw@$W|N(aHNYL1}H2MSJ0gGM6enF#%HkmXnQ|(fHHrvz$8)uE_$9S<5fN6d;<* z*}FZY_$kz|``k93WRy#a$B+BiEAtJ_a}ejxDsrq2d5)%>!%Az+?3aHH}@ANkgrHS(1zZ5-_G>P(6du>uY-AHo^WL$Sslo3X(R@VK4p%D z(zw>ZiK6h$_;U-#4RIW*=S~|ja~UX3uflQ>%%h~G?OBK6u152CeD{amNq5)N&oBMN z7V}ZRd-8RfWJq}Cpk-y+IXWS$b8$3O-+7E2Nh1@k(NE=`uS7MkTy9k*!U7}dsR>oa2{{ckoY-CR>bQ$EdVe! zG1Iw1^?w$_&MZZrH2^tJ9-=jM*rF?3%kvdI{41NkxH9w2?{C@21ZrQB0;NJqGzda_ ztyU1QR?;K$j7e$fgp!ibFMRpq5Lk@^;z6w$oFgI-zH8fgfV}f|HDiIX@t(15l_CyXWyzN zo=mYxH&YB^`tGLqmKY21xzSUY`D|Gta;Vgfpg?8PA_i^yr(@H zBKD~CknU<{=Eh4!CW=2Lp;iEHPl`#i<3OH8aCH!d7Q1F;mvd)ov>IJlFNj-)g|3bH`k^_I=xtnV&xAh9z1-d zUh`47 z>ARiR->$m6yfJMgmV&HTb>>D4<2U6Y4NAxd{nX25wq>R#Yq|Ax2EwzeA0s@;UHyWA!+1uIK-u;T84p~>f3k-+aO7`iHB|ccuZ-f&+u2( zGl8>m;Zg5Yj*g6Y^qXK`LV4<^6vlld$VZ7zmdcd{XItJSOARD+c%((#EHAGxOF5CL zk}A&rr0S^hOF`OAoj}qX$Z&kUz=T0e^Dz3P9m^8O*d-)=Gye9eM2%|oGr7kGS8do& zWwjf(cbyRsULvxS`GcwkdD9mAsXi zXoh<|g0GC(US5AryU7zsQmBWP)_%z)t=%B_g&^2S@eM4bxjPm8aBijPQhkk_a|+1f zHiwzpuf}fmT%h!PGkR-;_}-9>0R5~I-R5yrEkBVP6F}v(d~SNgwbLUPq3{B1wVkR)Hcx>jF2)ygxkj zt3P4O(L3!x_!$n{o50TDsM=tF9-5n4irL))Ra9i1V)k}H5y@o#4hhqSc*F7e5# zY?jP*M|^VJ}((Aj9r*&)@}7H8K3fwL27+EhX2j@m8#lp^UzX zg`I7r**D^y^eb;L=hzqq_5wF0%v%>9cov0h zm2OkG#$#t8`Y9`J4!LDzyGg%kfO6L{LJ;*mc-;t437iy)Cn8&5AC#{UY76lANzO-} z`nr_`CUHYXER=MCpx5WbZ;VDL;J9W~!T68fgYh5gksLd$)Lj?>lL!!+Mr6RJXv6O7 zgWP?fA7~z6*BwXspuCK(GxTgR~{8> zltp$)z_k8p0jSH@h#8cFIW3n(SO3trR^OZa^X2>P4gLEg&9;)l6sBRtkUD-he;~pv zKJz|j*lod)^;22m`sX7pt6)qd9~Ng6Bb$LfXmYdp-C!Wfe8yB}FNk*{ae`OdrPmME z+>_lbbZ-=5-{AnB=iB?d3+IfmZvN`w`3*zNHp&mKbxn=<(O_xyl)1%?BC3l zc;Y4t^_uIFyKmFB(hcZ(z31&0ZeqyS|C*+-F2TcRY<}yjyh}+*O(m>IF#*XUEaG-= z+ojBGIBlN`T?`GRvXWnsM5dU143mUQdtSnI(`y1@+SZZ_v=Tz}RtCrG?B`F|!#POr z3GsOK1|$Pnrkr@|Y%rDMr?68@b?CtJ=(oiIPp)OPo2arRu!i4ry9YS@09g)(+vrV$ zIx=``V5EsTY$*`*(_p(GXtf2!f!u7WCvtk6zzkB}|I3bq^D0SAL$_OiaaX6b`~7<} 
zl#lzfs41SyV!KUutL6;z?(^FI+>^ovwk_zwk|7v}%e=j6Ae(7H#&Yi4XbM$*^eyiC zm|3&)%i-i$_FGEgd50L#_v~VbE}>+R31g8}xSqRkO6A+o%Yxk%A7Ap$Fy!Y#0W9NN zO*#^nhy~3y0;piulHL75$hwtx0=re+$(YJ9{A z`L``uCVokj0_1JSDDUmgbamyw|b z{)dN|+tzUKK^Ga-59|7#jR_HvS_-n)(!=h*V3%Q^zrDP2^|QvAe8H29-AdUD#?V2RQ{ubu*6! zWeZyToMGzl4SxB^mxiE}J&XG536_Kj$i=BOIv1 zKm8TD{<9{#Z2d8H@<4f=ZSJvb?)~hmmhId24#U%Kq)@>JS?%b;ns~GA-BWOPqI$h9 z3Uahsi-6K6dMrrS@|9|tIEbuRnml?CE%)Z@Lh)Qjs}d^L33*(ZpL~||gs{{IJGoqa z-(i9<7Gsmv>MT-~8p)q($NBbwc9!wAJLj3|oJ&JozFoS_{p%{K)9XrutcmK?iws|w zQju3%DX(A~O8`O+RDh>?|O@ zE!Dj~-GmEawzk`<*@G2q&$wO71X-1D$k&kt`RKy@QB%zy-?;)+K|R%1CG3@_&_Fgm zk)NenZ%U;-2Z_H}xp;_rW{bWip&PO#e-bOQVlV0duuQGzwlq(ciBJKZQeqs5|AJsa z{&0h?bz;f4T}4-fJP6S3NRnzcoOeLc=30oBL88_cX{7=J-zcy(VOiK)c?%dfA`H>-;|+bfr>Z< zSLY@hejwJNDk?lJ2yv{CT8;vNNlJ*5KU4Di?R}m=26DCf3d{xfW5y(+<)`xl``j{M zxJaQ+LgPP^N3hheX`U$R-Up!R1nDFFM`P$!a)EwFp zWk~YFU$0}?)>ieFpmdwo{l|>!cM10@8Ox7#MC`#l5W$OZB=O0_ z9IE=*q$sV0D|-1A$xpMk$ErMZOx@LRkHPC(R$Flta~LySsGScbsbtL?RI1FFaWasrCu|IfGr z_4fy})cflc*354%hX?c_L0yswievlN!AtFf{*do9A0cUbka~M zoc=P@c>K`Bz?Q4b@+csZ76Qti19sB`SB)FuUA}jqGmW7#&uQJ%wyDBq9eIf7596q+vq7%LcLd zHfw&kl#^St?~cA0OQh#L5fH_B+r$Oypr4BjjqClQM$n$+JcaDF-=|7VLFkNbk&dJ+ zRYKlXbLX}>Ome^7Y2vWH*DvzywF~^gH{Pq^KEd|q9(b$2(Up=3t-VBI#Ycx6NU}6> zziqwI8{{ADF?h>%ok!)uOk%!-4;@a7Fg4X$KOR&xVhWcbzKqA14`swk)l?Pg1SGr^ z)6BeRo?vx_t))Dk2fxB5)iKsJ?%8F~^l6Uyd1kgtQQ52~Skr$!=H)*hGYf|J5f>eA zBeJ#j-htQq$S5zRkhbIv{i~B6I2m&k3OcO~BA}&l0Mj$gkvTJ&W#}WrIapk|g-B^6 zN?%Sh?tN`&f2{_Vxjh@}E+rz5yZ1mW=+l!=SLB3d1z%X#N|?tqLCu)Sr zwoCXs82Enxnt;zr9`Z*d=;XfxXdIw(C2H?T{gB;FK2Zkf1k-{TK*ckAjXW5A*Ztu) zjcEbw;A5A*Wn?{-pOkx0JFDz&G}|RV!h2}sNLCTEuL`qo?E3UI6dpw@+pV|X>E=G- zj{EzsU9Yigu{=YV%}jkBx1DYG}8leHx+Of3G zCjmnhBQZBhkV4JWYfiNt2icrj=d)MuXJ0a0u8ya2l!ES+ioqO}ei%Hu6Renn9I1ub zqvQkJ)TgBmbhEie?eCg>=l1*8D#ZVLan~yx6t6&= ze<9@C2ty=N&k6ow*)8T3114eavQK1`M4Nz%Lg34tyy$8>6H2l&ovVv^WTX5Vimdb` zOZ7sanMujf58>Zluh(*BmGOGpHDok%E>x++AwU^0c2YHYpNy$1e8l1 zI#g}g-S2-zXO 
zC*Hyxsvk>w?}CR0@?0Xs1XkM#9;U8kl*8<4(b_l$vUG!H4ib9_@Z>Aklqo95IdL)Z3DV#G_R=!c@}XHvEX z7=|kGb!f5DerU~mwaY4ri=*Fy5#pz3Tr1e}HM7#TI~TqCsrEAXEGy&=-B9BF$D1+k zRhA#MSlUzb`AV`}th3#*u{poyd^|9xE+NW1(n|=G+m--&ofi4)ttjPCHb!Lft{N{Z zlSE+A76=P4*Cx;ZkIQAqX}fJp_8%Cy>erkQ`$sHsdZ&)h80G!ALr0aG%-+A_db>~l zdF{og50CQd@||*{!^kpYxGggbCk6e}Av&XOC~t&}bZ} z4S;-c54*AA336%XW0V(};0VJ3*GaPm{6I>z0H}EDvLGj_4|GG3jq0wtRIelu+0_Go zHo4R(sg1lvl#XXF!jGy9KgdtW4_)jsNa7HDTZ&V8FcEy7=Y*Z3HqrUP@`5OqN~0|F zH8D|Y2Q;UZV(MX>iV9PSw%QCjg+Q7irOYl_G`HGEEcaNxhXG2|Z3VW?f!x#McOJoYOqQpoUF}WH%y>3Hsa5~}*fq&1 zTQefP*xun6URlN*&Q2mL3Mx9Ly0(i=th`}9Wivjw-Eac;cH+b7Rpv@%H@#xC>5eYx z`5GhWpR+c>K)D|1wGO7lHBw&vXGL58$Lg;BuePp0oFH$>G?j z$*$ho!#f=un+)mNZl!i7=b(f*R{?5c0kDQxyCTs^o5|NKpC_WoUDXUM-7Nkg9CVmj z&jK)n1BW&||MNTG(*N%NmvlNN9Qgw614J-ExT*BBmW6cUk)Jy8GWHR35XLI@lAY&a z`i(p~TVu%(q2$YbZS_h;Fbe=&O@wknCla7VrA34?%N{B#iC3F29^BK>^~2fDgg?+# z>YTaO%ex^LX%zk0+RoDpV5DZEkwuslfyq(zNldV!WAfmz9YT%-&2ZH-a29gAHAKll znAa7G9XtQQVGIF_=g!_r&mJ8$`!&?z;c}^&?r2XN@A<3AhdZ10Xc#3Akz|N6b+t!DT4@|P;c&jH+C-a2FZocc$cW|BjpLzD-VX*#6=H2zFxS7EH3J}yb~?`l)sJd z{JSk&0D>0L3Gza5n{ke_{Zf84B_3wTQ_tn2#^R-BQgw)u;<`_TMHDp`J4#jRo^T6| z)$)+#2*SSxU>Auge& z#1~90QvF(>Y;dv3L|*4j#@PjT8~>3zgASvGE3#^h(y|P}lqXqaUSbVyrQkE%)^~sv zuf*G1GDXE8~J3j z+6^1eqlT{h)a5`f4W7+IyH6)Ye??BB0&0&fRa!;ME4X6rkZSVCH-0#c8X}=u{Z9LB zj9PXnjFj;qUcNUsRr~VZh+V@z9Y^tq0nH|}8_4%q=34G09$vz-yjtu@ANOZ{q8S{~ zWz2IH)hN2RUmm~tYAj8u=r*O8SCJwo6qq@oUZT#!oQk)3KJE1`^mLwf?oF{ry-|I5 zG^$+q4sDI|c9|h=;HTwadt|-5aJUvLgStAGq2Ybi!zXXjc?;6w(P!buNFh07Ey|~A ztw+|h#$<6b{G^PGSoRI;p5zi;5sl8ls5`q7@47V`De=nOh+YUCRnRrN&J^Y6bPnqq zw-)5Qb_T_I`7>g4W-;|UTjAj9aXt;rY>8_l^!3mym>*e!K*;6qMN*Hnf68hJ(tc;9 znEs}zgoa_Dq>`ymwOnK|qHYtTffco`Cl3>T9&c&o*<<0|y)I8%32jwT(2R^v} z#Ae4mW4VfWwX0dH(@8z->edyypLao9Zc+*%KM9OeZ<$#1Z))A4=^J)-BMZ z$fqY@+x{eNNDDSlS(Ye_kS8YS+nT=Ej#)X9=q+)*c$n$^enIi+EVX|8D>b_X*e{n! 
zTp;x_{A!=1gFlCe+a!|CPR5d_?;5Btnn4E%Pa<@5U^c(ZNP(x_+LTfIe2eVv=;aPV zyT=u_21J`S`Lek@%s zsbvN=N?js^^8P>}s@DfO4kN+lZ5knKXut;3H)>Qq^P5IL3;FBGeGu&VVJfomXeMN4 zKXO?t6S<&;`}rES56CyC$+2rh_5C>TiK3HUt4v8pK6l`P%VSGO7KEUNm@xt3DmJ6d z<=FzG-3jG!r||e7zi(Xn=T4<&1s`rb56RD#F$je&FD_1*Nxl=`Np*rs>`-Z zJ)3$_PxJMx1?od%*QvOVQ=L^1CS<)V`Szs488}ma36jHBMm{}5G_V9J?~!S0EqtH; z;W5^CToXp)CA%P8r~3i0(+t^wk};fxoIhnW@98|dAZ&lDD$yiMi$m|)oWLY> z_qeMTwMB?|wtwd~Ty#?O&ua=}_G;jM5(mI4TITFqp8ZYp#xjSLyie5}55+Fkps4qd$$ziBL%k((+5{ct1;@>@ej$GUnOkzt>r3sh0!kk7|aJJ=(|D?%ECGG8fX z4lo4lgj8)nhDlR5OZ4?a5%`ZYPt+cT$tiz+>emg;X9!g0Bc<<5e(!F!8(f8}G6$R% z`eqdae_JH{$dWTk|^tnHDP(%}nPbst;e%B!n^FV`SUi*k=JR_6BMH zH_emp-5XMzZoRHo3-tO;*8u3!TjGnXBDe9^#axU9SRBL$BW~g`>aFia)Cy5*Fz6Ns zlfbET=^0osbEDrhY9g0O0Qd_6$Y=2>||+5N_YV*g% zplmKj_UXzsZ^$|?5X)$k$E@dRR(2m;y8i1Dhzj&1$IH$}A4XC9>404PY>7XVpRGX3* z`<(;^K9}}imN7c}v^8dSLZfA5xt{Y-R9|>iBnT;sGgkC$2+^}RJhpLEsoI1HwbbK@ zNgm4lXcl{uQE8wtN^i&8`Jwo*zK8+zDQ2+Dlw{|U4L2uMAf(0vl||{b?=R;^C~#U` zq;?!?k*zZlT7RS>(4L}<84%dA(PDYZa86Ab`F(w@KM7qH*QjZ*oZO!RfzAT9en0w| z;b~->4uOO6ArHEFGk39GclXK|fIR1ibW{DMx*NyWuV-wKV|`Vr05&qWE3uMAa!?0m zm(qgWzCMbazXE$>)aVW+hww=KrYZ0Rnet7|{cD$h)4YlS5Oy|PV#(paV z1iOeI5FY0?Wf9xB^!J2p)v{11^RMT7fj5ReOar#X?(O!n&gnHSnybObar11hIRCs_ zZnEP~z+uk5OEgcOZMkE=g<3w>YoBRcsd@fl*x{eiRztrQQ!(yjfF7}QN}XZW8rsG= z!Ff&>FKDzwJTVSZvvB==IZHbzBi0VZM(yI^BuecBk&p3&P;xn|%lDtmmwBnRu*Zin zip=D5(iIZR&r=g|(}20j*u8%NF&HOMk70r#UyzI@eS%|N;)u5|XV?oPpAT2guvvW6 zc&VejBxIGh7Kb&>#>i^)2T!gh=wlHvb}5zej#i0xy5uv?tc|A)t-c&YnVEtF`)}cv^UhOG7htCqSR%9yV$v|aiX!W-t9SgS0sDCR3rUMw zBL$&E#`^obc@XWg}Kt-SzzcJ3tT$M^w#DQ!L1 z_f*ZYAZjUOPbW$QVb%m)zn-}cgkzGBz%=xMIN^FnbLGj{{o5cDx-m;&4*hZ#<)eId zp7Cwv-9hOxjyFfgoiY<-#mzGxx@R3xd}TxZl|)Ox)&XYlF;I}b-YY_uaAxp-*i-T@ zT}=dEQ5+M&7eJ$S`NF&_7iZTUIwvHvCXnxAdO|$i{ooRpq8{GgI-j{UA(J~kF?MOa zZVt7I)~o_%z@MaK)_^@x%X4#MoP=s&4`QO-un*=8dgvI^^Da#2XPTU_1(U{p=rNsW zoAE7$1syFmwFtzNcW+u8ZVRgMIQ&__W!y;2^EL^cDkZTY&E*kJGl{ z2)8>@v2;eS1vzmvn<5lToFfNRVksWKX_l8@(09LS+z(WwQX1-!)d1Fan%rFk7yWPL 
zF-r^{xy!zZ*;@z2Fp%=6;kwN0`ToM0Wylq;dt#ZGLC z|GT=iG{qbk115TwmrbT^P?(iTn9roLX6T7(4LtfVtTD}QC@)bdDr9qcKDtLonoCA4 zXCjMz=my{Uv%4}4z$>AacL>81FNToK77S|r&O-LrCYNAD?lEw3KL54Gmp{^0cqi)I zVMTiZ+6xhm+pAd2bI`&RC2LF``?~D({o0UORaLCcY0gLYyE;8-AopF*hr?ptjFL6L zN$yR0w_*lv&^Zeu zjC_HicEYFj#S%Azz`_^fBnc3DGB+c)W-^eMG3n2K(|~|t6+Hu!fCK*$;WvrhTkpk2 z8udybvyeNw>zKYm^v+cn9u~g$%!oRtMHX6DK1u?C$faQD)&_{RQsSznB4n3_m&}

BV~O_~uin2Ni56C`UqcDYbe`~(!9&iGk5e!JA9$BlEdQU;OB6btrMmxyl^WVe}~~6IrY_$-$5Kyj3qo zhqZXS@?{D&bcXfy!yLZwl%)jj3|Y=CHq9tO!!(mxr)U&jL#l9opXa0PgG`kY`P_}v z#H2PM`@wCfzR+P%G(-JaH2eJb8d}grz`e^@+LMUk1z?AR5A(lig0bsba4q0xP9rsU zfj9Nnz3z za1g4hV5N}uut3Wz#HP56WgNj5ZrPYTx|Af*H`NxT)3b8XjoRMjC&b^?Y-n!fXF$E& zI*L8Bg=Hf-;=4gtm)0gX$JMt(yFBJ@fx$KF%47b0f;0K8vTMOcQ{!IMTA;D0-k=K^ zBn3^N{I?*dNL@I?3Zg(D#yC5_-1qi~nt1;37hw+BE^SvX<+CfepT+%=Hg@BdFpeDCX)2f9_{t``F*Wh(=Akg;C~!&FV6 z3{b}pQl<}>EcAh#R6F_vb~+z9au`MtG~LUkPI;a{1c0`Ex&9_a_lLOd&RGOHlTZfS z_q3(%9tj*IK*1Q3rwY90y69hpv8~!IvhD^{p%Xclg$$rRzW?*~tAxjf{H^5B zB+&HAUqINdkzmMl%f);1|f)&4fWbWBpdqy}CRk zq1z-Bm=;&3%xmN++9dDU75|p^BCM@4B;|4Hd*4Ha82}hPd4Rw#l3C#;^xh|Q`#RKK zTd0ijA`3Y(46dmAuJoE#_O6&+AJRn(9#T1?GXJGLnEuS64KlinVMW7%5MxgHV%A`J z0$oQvL$vSqD>)`RSEFsZeiY-A5l?UR{Nk0_3;e%U9~ujqGi@tChFGU^%bE%Zj#KGA z_-sK%j|tBwO_n_IjlCls{9lTF;tE};lS7_ zLS7lZEr;6moxjm`fO;dAZ?uHYQ!~u5SMtzq$G$Aw9jHWFgHbX@_?;$Cvqm1SKE#PZ zTjumpshZ|)GN0a{rEO%BgTJtdmsx=Ha=pThZ(JivYm+-G<5NM*0hUsPvIfVAcncx? z<|#^UwTYWE+iI2Tk7F@qy(y)#ch&0IW;n%(y@9Y}i289Zbp2VN@aYd@d$Gm*!IVBV z2BgYE&w{p<)b?awj%|tLT^k+Wv2=V|8XVrs$+l=IL5Zvtnn$vcwTQgiE<|cPUI=!U zl4S3AX8`nb;ti_H0v>Tl$6kHxcvkT08^$izIvUP0WGP1zI?%nf5lMsSLY(ixHBqGq z$)t`4BGW(YqCYCEd=u$hdh%7-61w7hz(4oHoG-y#UR32yhnl8xt%b;zA$A zw3=o#9U-AoNhtN+xE>`LlTg7QEOx>X!r_U*TpIO;b<@whl%EjOGl@twN+#Jkn~dnK z7RXv0K+=>9qw}9*?Wr9VB^p7Wt|`uG6q>z}3yBf(PUW#1bz&Y;n7So1POHpBK1Rs+ z@B&o!rAc-i{wj+O9^BE9(}J9+j%W=lpELGHUG8+=6!miKh5VYD@(WgUo9y<7uv2;G zE636>WQBf3h8IH!pKD3^{JSHO-)cRwKR#~uUKvdi3*3VGjTgSnp1=l-Ggf=b_832WGsBcapbYS80pek%@k z_?f<6tD)-}Ge~KLCmj4*^dfbT8dkd!mlyF9}NI0%{6*kBY^rZQ4+Z0u*~vx zbWm+5UTwuwGLtXqY0fDLuG8ZD>X7TSQghIfwFV0#ytOx6n~-Gz)3gG8zANTSZMjP{ z3wzSJ|Oz}Z^AgJoe&0HFGZFd*ZbTu#zYkaH;Iv?p5pJ+R!OS#lpJNJb`vMcz_1F_+29je2T!GNr=E$AtmWKhEw1ta{*r;%KXgrDu+wl41 zAjTLlu#KifDaWK?Brz0VV5d-7V8Pf;;X%M@Y=Kg*R*7+0+M6mCd9PC0Tt`T_YZA3X zuc~rjO-d@duKh8Kx$vV0yFXioeH)X+y6JFz1`Om&ST5weNg;^SgTcrh2qEx*r~c(x zHy9l#b}&>c7ak-IDAx&62s`q3yJ@!&)6i6W_6BU$(Go;=VXYsq%a8M^_An=-79;_K 
z5L&Dp@VS=u#dHV=d*fHM&xnt3dIZ%`T zpJM=;l))?uvwGwY1tc30kQ(Voi}a>+iGUD7DAE&32#{o-<$m^l-go>b#u;av^CJTjxw-GGHP>8oUURPN8owYE>dkagIy2;LeJnveT*rD^l1Q5me=yyc zZc>-#;*@gtnR_DdbMIC^kFVR`y1nxwGsNmlLzur$FLXbWJhVCldouT^@}I3_r*F6V z6+&_v^O?Fwa|AOeIgNb11$okVqMlZyf<_MDM&;vf;iD_m%k@0R1p@@n3*UyyIQJ^2 zPOQMphyQJMz{3A9J4?jhU6EC#w`_Nwf{L2eg!^{d*Uk;WjQajjd*4!3U)+A^i~-4j z7U2tu?Kx4|AN5z1m&&-adTi(yX}ebi=A}H%Y@T zDcOH)!PlU=WyeE|#lpK-$idH`56X_1xjfXE2$Gccl4&r$9B>^<8=iKO?Szi5vr<#d zVfHM3xsS1rCf4ks|6z>N1aDT~%lgJIBI2rR*B1RLpJRnA>gNyL9}g;-pM8cXe8>*NHYE|dski&ln$g{bZ6xdoUtF-?}*0q@qCWv3muJ#nJSz!#-tfyT$ zLYJc6h;5q`jS+g5ZenugaH7HaQUI@x^&PYGv05?E_Q976Q3gF=%B>gue6^pwi%V#azMRW&q(LHz;~ol><;?t zV`pqKaT2(8F@re7@+|<6^jkE}pyTEBAn@x$aHDCN1_jVHT=7-acI{Tenp0er-o9Bs zuNTUFI@rPaSH?9NVmU?*6A=A3w_I;4mL#9@dt`@>po`Iaphx9G6dkFLXn>ZTIgF}b z+DdHS4Nc3rG# zKp{%eRZ+(I!RLf%}r~ld9wEBPxc*CdE8&<(+MDIhtj8 z>S?(&wV;4MgLkKJB{1xtJjxjX31`O}Ly;*9;&tNEJ8F0BZVVw)7HoylzhJ5E@ay?h zT}nf7+kxcXCoY~xYpY(**i*iR$6WVmU-duO_!V7*r}8|I7Z@rkTE$Hv^E)5PsI#RdTC__`VF?Uk%$D3Z2oT+(E)V5~)w~c@V_YWg+yo7jpyHq2` z>bXZzEn2y|&%4M<>{!9oxUZ?#=f9JnqZ=&{fqKa)-a)!1B_7vG7gOIpRy65+X}#1KTWycg#Ol|EnsOdZfEML7Hqe{w>^d!l2?% zckbs#L^!%!av{jZgIkIA-734me19oPJ4^9&Qe=G+IwiA-}0?-ye|OX`Ol5> z-ySTx86$A6k1F<87*n)3Bk*i_LiKG`oLu9*>)x`>kL!73##Au5f!4o5*GW_!Vpa9j z8l(OFkQ3p@8{+2{tuH3rxl-2MTXR?Zk?Io9uEjSR&jXNTpFxQNt>1rR*Q$YhF?wvS zcJnb?5UzzxvDC|2sY!F^QwEF~BUI1zJPoo^r=G@fBl?%TerE_(19GpX@Oy48HP6kY zeetK9-Pc9~ASYfE>0(^b$@h(Hw=td%@{OTJ)x|N%c&avff}-3JB1pK5ePA6F za0^I+pS3J|`sFD1h1Q;j)GfNR) zAYkPt5K)UIGCSE6@Xw7rq}~yV(=-Q?%0&{gsi}{>N|gJI)q+3Nl2xi8uu8B#Z)G1h z#+F)x@i?O12HDj5j+kNttAh)KB!BvXqQ*|_!6w~r5dm5lUz)$CotnUGnx z_Y@)EGR)%;OVeOuHyZr)`2d&{0ZupN5`}k*Cq@b-3cU*I&Bc+&^pR5g*pD*yTNfwH z_!6&A7#FL2t-$!s1b&n^v1hg&em`Vt4;n--nXeoG4bg*3bc`%?GgoVUpx+u4)Q{E+ z>-gaVs2Uz)J-vwn7rUQyzq+9@MGWAXf&HA~Av%`{lFC>OLSZ-fo@8;{!$)aWjoV93 z4>qI0u{*ff>Bj(E}k_z@@~2AcMUcLSga*rRu?7KftC& zc9v^#&g5J_1z^9mz@|1IA$UcA9y%tfy;VlP?N%|&Er?#?V z?3DGHcR|;mmhc^7KQo--tpq^#h-;c(|{TX0Mm~4rX0*@nNspyNr2_Dk^S= zCxjK?s24~uT7V0#t8n48ZEUgD 
znnmMp$lgXQ(|OB>#2mRn!#!CMzDv)fOdcib9sC6r-yI*qI2St`R_?LCp5B?NW?#)G zYuF#WBpYy7GQaRY)*p#7$=_zxYF2btNDb$R5t|`ps7Dt%`Z}E+t=iq%fgFUNr$qxU z@}Bl;+|hdF693pwfs;j$Ju^>MTyUI%oq6Nn2`fevg*o^kzIi6BFFA^2qTrwALJH2|DGk&%v^PI``3~CkD;_&XYGANds&bNv8G?h z)PL2BDoeiJ^K$}c-Tnq~4?^*mKoLID{!g28?M)@BTUL?S6G~h&Pp8oy#kSVP^H#y(F+k{r?DE?u^dF*|&z56NQxekZd1Q&*u8?7e^JDuDC=-0x%uy}=zo zsTSCwMktYni>VhVq&#jS#1pPm`8&B79TF(*I?jb`ZhTlxxk@S34I#}>cj$xE@fAr{Y|4>XqF z^3Cg!wb|g5e|!IR>O+|t=FUzrU43wttFTcW9Alf0M6A8WP|~1yYbr2z6pkIx3MeB; zsvBqUaBn}Wy}pbas&QtEI0TicOYaa>Y@qQVTy02CfD*F4bft7~5>Ke9U#HIPT`ZPj zn|-Tr;Dcg=uG@$d*eCzz8>lQk9Jys-=2PzZxgbL`J|koH9u7s_x7OmhBLmtvekw>A zA8RZ?)Zi|_bm6KLPr?_b_C$YF8&>d6J}r}MSfzJU(}kz$#4vkD#;$e$XQg-S(~UUU zmfi?nZdZ>4vaN^cBgOt4-2o!{?`U{iqnJN|96Au9PElx{+)wmvES%CcvRtZdefC<; z$nV(A=%WU+zggYL_LO8AY($n)G)b{W;YAfo6U@ugwr_NBny6!jeO-s34c7lLYU{}q z1TRTd=5$-`M9JQko@3R882{+6S7S{Za6a>WW6(B~kdDyQX}yiCN6uC?O$;10w=tQ}stO||LRc9cS8V7=I3pwf8EAv6Y&hiZQrKFTR5%VAW*cKzFB{_a>MFE zrLCb*5_7~B&uD0;mt|>btEv8_?XsXi5r1JMm$`u8x{>%A=$sT=Ts-}@@!Q_6Gy9_0C6q$QI`sgd*QKNJ4%f;C6!E?I7?Zm)x7?K@clZJ#b2^% z@?fdEa#JAh>)%G}Jl%rjdxcKNLZ+qmolIJLj=ieS0D`JNUhrFccSF8<(R(4xTjs(P zogcMxnA}A*E7Kf@iIVf*jm2J6JnWWW4V~vFV0gAT!r?CXHD0@myHtpHuden*r$FMAQ~$q#C3NqjBtN_*yCRg3i|u~+sSmaC74kDg$>@#3AK zcJOJYrI^!iLy!D?6$U?&Et;#lD|;et=D6<(?kcEpB@99zhLnTdK{Tz!CU#mfR)=53 zUgkZ$2S1+kRH*#8!s{oOWG|^b7mWzjhK~A?V+NrvxQHRR!N8-~H@uS~b{ocOSa0z} zzH(`nIi*WNeoSMHap)#biUe*sDW8)4sn)qIwl7LgnYi)8QK1$}MB1B^gVl&OU0nC> zJE$w7R=fJd`eSGdzwP^zMS&gBmPQ3fRvOXM0n;vo!3Xl_O!4E&#YN^)SIikc++Zbr z35Z;tCzYE51efCnjISb2Em#8Uz;{sFB-Q8?a(SbRvFC)ew^L__(-KUF4gj#SFMaLy zy5`fY5Kg!Q9=UAJ>S$jQm-W5KN7-myN3Cj_WnfC%xj;YsaZhrQ37^-3 zjITx=g*)9LuTOqge>OH{kS&sQ<1FWIci1X1BccHw_nF9ve?IoU7@2vIspxqW`3&v3FAU<}F+Dv2Hzr{tn4iIYvGoGMXPbgHz@Vp(8x!$Lkw#kDN5n5E>eB2XX-ZcxVd|WmlCm9aZ@S(xRSH#jB;3 zC9a0$*zePKtg=idpk!TdkXc@_F?k(FL& zG>|y$?{v(@L{QNh$rjeaRT~O*I{g>@NH<*rZh&|OIRuB1^kg{mtHU>Ai?7(e)=AK; zIzP@Dsx~oMqp^v$M1Mw)my=b9=+-r6>N!f6bHH>DhEE?qs@Iae;8)i0rj`6~CFYEi 
zpytai`lI!SX%pJtoB?38}Xb`32;FTpGIwl#dP{FzHMtYX?XjEsa*Tpe=#6(lO zRvKpSVo3Moep+^Kg8K7H1gA?}R`I85LD9G5ljcyN`u>6o2^H?Enj&99Zqq+uK7=3( zegsCwhnzw*d*a`Cwlu2NxNCKrq`#3DDzuk~FqBA~G#vh1rg$Rb;gr)zDAe~aYL~yp zc&;9ZMjWC@56Z0ZyV-?AyX$0LzUHca)vkU0X4O|0+jH)}Ww1Y#!S{`3lZOTyPg6Rk z8zV`4t$MtJ59+0+lipFh4Oa3c`ns3_sXY6^PvV0uV&+3iN_&3TW=<6E~HUAXkbSN#|_wylQ3=es5lrawODPa zEuDIFn{Vg|?Pd5VYl!}cE#qN&wcHXQkh#Z`JgjxJ`_oZ_<6&m+e~Xauw>V_&dMS%L z@Bly4I*;5Ro}0&ReI09FnYO!Y`E+tpJ;nCUiL1vHZC^)E9k^no_q6D%{kV+jr9n5A zA1OgWQIk?mUdjG57Dey;4j3_yg+dh@73erR<3OV>{3H?7VsX)#+_glta=(yF#oG9} zv1#r5PEoH|Qw5;TtW>6i8qy%F0NZ@Oe&$>3-C=e~8% zO{+H6^lIKxCZ?wYo#N9$!87rK5Z`g9VPV=2s|5HcWN=+x0R zZmc&8@gPY*IFx4}>*pVjz4F8G zlsR)h*5QWV6Fmq;29J!qMMSpHc>B%{6i*tREd3C5VV_Q(&x`C(n{~I+e{C^Oy}I7M z2))E{;Vk48P)u6E;W;9!9hnE|N2wQypSK4oBR$3>%Y8vXU2-Z}Qt_mmYe@{x^I2(@ zF?)S@Y&#?z#pk}}LFe~J8<8$+`E-bUPc!}QXTsFYa`NNzH#wo=&LuKakg*~1A)+VJ zgUVS57o`N?f1nAZ;*|X9{7*51CdE=YN{)wF66OQKqwg$^PB-pQ+=$M2Kxp8$1PyQG z9nw)am?!Q%(UCh|#G>7LBwNEV=iKot%((-9kzIA5Dsd|pvf<1WZ32Xoa>0Q}>xiR5 z+3M`?W3gAeWQ=6{^R26lES&c8Sr`<(!Uh{#(4P_Gd1L{KJLv)*iv&R}^QQ|ol#hi5 zE^;~F)qP|)%!OZ^Z#6j8Y|megGB=_Du2=*Jr!!GCmoHrLSDs7Oo3fJ#&tYF`C}jeV>Z_Mq^SK$42MZAVU&@&8 zj5-Hnnu@HgnBSc%E7h4)Uvw{Oe2vTowAUe#nUW+>+4`L!a{fA{uxPQ$&%(ie(pd7f z-F5qjQQhY8C$sTqudaLv4%^eFcOwpjFq{RAzLA;7Vq0p-`8i`_u4Or2TE9qqjW;-d zD$NqoxS#r)&&pZI)t^jcKVLouD&WY}@BRPFp!BaGm8l)>Nn&aPl4VoG)bK|p!B#FR zq>vR=>?j%iYT$*EVjw$hiI2{OdXCzZ_=cG3M}*sqfRbKK3sC0hqfe;%Rqj-Ckshln zbm*ua;1iN8@Waj_P1v4=8^Z^6mVRD{`RhUmF@p*s=PWgF?ZGgr%)os#3eZf1zy6&; z_9xYAmk4BZv%XIY6k6v&q4jVTgrWlIWvG%+0q76Y4`9H41KGo--RZww#7quWQ0~Rx zHUB6>x<}I4m?;2QI^5T8zRUFhO1uwkZs3i9PM^4k==sSUj@ITN_vi*xKwnB=+=>Ci7rE ziSC;H87~wFr5oyH8I$XD_Woh$^8dd#2m|-C=_h%esUP;hw2J?4&7~v91$_wVXgPE? 
z5*(k{IZp4(nG3-FoN>f|%{UayZ5Jpp|7&io-(r)NFrs>g{hh2n_QQ<`YitXF$w@nY zynboL6YQ0Zj1jf0-b#Bw%Wif0>#& zF|rL6jyvjkZwSsZy~5S1{&*l@<6JJR$xqzN<8VWY?x=-aK!|71}m&M;EmGaVV<+);m&{m^E5+-)znybD1-!W@2@?PB3K!o)I=$=)so zt*j$v1JCR?f#pr^OI-ZS@eh`o_kTL*cFG3Si?V$?x3+<`LCY5cGJXG8FR;JX3-ae3 z5&eHI4W51qtX=OIsXcQS6XsjU_PAX$b7J+;641c2&BhZH1a69$U(JX~Z*7z&_Jyeg zz|T$7?0PN^PTet5P(3_2HX&$r@d$Bye+T#cGq{IYYQ zSyCKwZ3a4p3EpAIk;V{8KXiK34Y6vESh%dZCa6w*3fj4rCLuINw@p#5Ubi`aBkNZ2 zwB(AFJ9~}usMu^lpV{aEOPJlk83FY`-FqeeCp! zD6ueG1hJM1ssR@Xt+1Csw*m@k!HGEp)b=KhCSkm(MaAh79+)wS=*UX7y&ty@$=93D zzHmVBg~fvn6vZI~K$vg6rWLyXkWcyB+Xlgyb?EfZvf=zN0u`N7r5qun9Bg5=xAaXh{*O1d-L`=VcwNIawara)F$(5?X zM|;H(MZ5)TQk6 zPrQGyP~4lLb0KG2d?-GUB18`W05L@hEGxqGWa0-7iWOZDG{!y1LK%YMxw4di;1f@I}w?Thy5zP%_ves#7<<0u=O`A7I6J54b{>raX7% z3qdHJy9SO4TVH+h11hbkD}$n+)HmNDVu%Wq0|;k>op>0X5B&r)pNR%N#pxKjsvR9d z`8c)4<@V4&$joiH;iHjwwETm-D%R3{pU)+83p3hjh=#4B)fx!|gD_w)9oImaoh@(& zBG{+ce&NW1_vG&6p2QwMwn_S-&*fd-`f@u;8<0J-t?O4{qo$_F=4*}H^gVpX7 za;y6T`WiZus>Vo3BCIOY(}8b)S86>x|1rIX=-qaD!*0yIuC(WTYDHR@76-+#@gQJ# z3dGQlcz}WW3dk|wY=94K3qj?c042GCMMYia*-OkhAqdc>buy1ifl>IVO#Q${HiR)$ z1)%QQrwaq^OX>)!EArRB=F1PhS2oiik`m1Wod*TFPnaoY@*~8nnOw>akv=AoIlpJY zj}^)^Ra)TmJ}YqVIb+wV+qYtm+x1m3tNSkdoa}t|{8$MSyIw?NAf=y~DosPsYg)$X znJ3ZS#CdAyqqW`5YH(J?4d&irJ5zpHrLbGLD!CeS5gY`H zI(-l4mA+Y--_t{RY02h)^)nDsSOQ%?zuHMRT!mp#Ppyx}x{cbEm)_FhFy1K|kIxHh zw`?wuTCW4gZ;0##8ax_OFdvW+|6&#(&SJrS!fo_fgW${*2WVtUvYGlrEN=!{zxnS& zXukXdh$sH1q2*f>(4o&u^(Dy}dXGw*i`CM7j(H|OJuOQL7r4zVQ48ldw`2VpZpTFD zJlzYOlga@*#*OQvsiaMkMuESdzWlx9wTbak9>pVPyc;{pfePpq|7$IQk(X zXco&hZhIF(%BBnMwIDVXa0uKFM0?}mx1s<18&E?Xu07-XA!;ODWQ5!!w>lTkHwkwW zZAPtKQ)Z<4TzC1d53i`Wt1Ve!Zl{cR>HPEn7X@xJ%n)VPm!>4C$!HD)HUPSL4f|ue z6}fX%M;}Nr_n>LK!;BsJq@_>Fmy*uc76D?`k~D{L8K%zTQBlXqCqcmV%S04_rXU6o z-mtWw$VbXoerLFxQpd_+Os(Ub7BTj{YyM462*&4`uxOxQD*4$P%>Y(ErFaPU(wQHEw;D`FZwDmE7tK|%CJ?+1T3+$W{2ByI=(F9|3A~2fqzbCnzk@I@`|9v zvix_3YEy@mhjut>DnDY`_IHNy+!8W|ozS-(?_~*DWZU0fCu5Hwj?0j*?FP6t=Pw}s|j@hAJz=&7!b)5Fh zs9r?eauBik%EtXPhxr)aq?G$@pC1qERwy0Nm?(EGS!USCJJ|@75j 
zLrx|mW9ko_Ax@TZ|5lW}9OhmT&mmrM;yTjrQlBQecWpEk4_V0~`B;$*R(y%*HZ8tq z@{K1K`3lTR-N~_$CqvTFt@@+l;xMn*j)XtX0;-&pfS&_|;j7UGw>ZH)+cz8UGIGc1UI7e6{r%#o+&$nmI{;o%A z<=RI~b#jh;%&usj0+qX*tP=~SZJ#p~xcg z6G!Hg!_i|peP#GprJyNc&-qO|Sk4*Nvuar&#s#jLXzYG+-o_+v=n*J>N$=W!-)H-% zw80Wd*6UwCy^?sAz%vxl`N8IOM}nD@>A4Nn;?wre_TgcqVFiir}?>YRbA3N4#dvN3jACjVZB}*RJ9D`r_E?6E7!W zKhC=E%`fVd+Y8!Wa|@?1F^#<50|(8pIwqi7^>+p#j5A8;el4tRZNERv+#bc@I&`jT z`*Pw$x1v$gkvmrY^qZWKq-@sA8wbswB}-rTGA(gd9Sy3goOqKwkzOvJ6EN|92Vz1& zl2>S`3d9q22t;MV(ZR%U@8WK795Ufl!`58RzL~KirSQ9A$LpUxxTqcdgChy9Li?!4 zMjwP7-omjgrd8$RahzRKEB4p0@u|)}0z-0H(bOF|d0XT95AC0F56ia1tQW%KLLfj2 zb}I9C27c-#9m!mOr&cZ92((*?t;-0hIDfd+t9+u_QuN*pUQO9U$IDsIY(gWns-H$u zxgL*c+ZUfvmBB;=79GhKXYr9O9s3>-#dp%`?wB8|@Xe&bT1VUPY-8g`?+Te&-X5#Jaf-_-s;ZnGmy{cG(}vMZ&b5HUMswA?ESJpe8@=N zYNXb6a>eDhA75VtJ+?t z7hRx+R&uFYF0Z;}|GIB7&anBZ;ish=ZCENdmaJceG_Nr~2K#;v9<^?IF!>Q_G+H28 zziTuKH`_aDBgHI?6(Jx-*T^musXa}qdp$+uKyd>-Z_2?=dw{*jm-+tf@K^9Sm7KE= zycsqD--^i;=Ex+XNm70~)wmquPJCSb{<~6>V14!Djqmk7+6TT5N<}NS&Kv+rH;;-W z*y}^eqmqp#y~4+h7OVZ{#&h3EebKWSb@BOu<^f^TCCnYnAxaE{!+}z{))K{<7V-M7 z?i`AFjK_9=Drf%Xm76y5#*ZEx)rS*6=y{x!7omdYr1v6@maGvxIqWDUa<*$(wB>tU z@YmGg(Km}uI}zXKE1f8wO>`b8(Xen7S|%ITgJ=0-VZcjT3;MCjU)-WL(SEh!R#zvJ zh)i>c7%kG80@>r#pVp6<>vp&570Rjq)~~}NeZqD>>V#<+vmgIbo8Rn5k7M!Ds;BdV z$}&X8{3?WM2keI|q%G6qQ1;539e9J4y**{>9a39$3+AvJ_KKVJO!2e8H_u0|PAWZL z37=s!NqAH6?Ow;ey1sZ^W%Z}E(ZR(I6H0P%7jI$yiInlnAHasP^>#n<%-blDD8u8* zFZeh6REd&Q?$>m|t^nfW9z2%p1UEM4by>>p$lifIu;(|MZoQYTTwm|deG61M@h`_ptWJorZ*CA!}cy>?)2B2 z_4mbSV+l&(vLW~GN#?5#Ae!Wm&9Si2#~N>gPwAL?>jZL{ z?%3~LUmg6l&syr!)$q&4a~XWnM%vn4YOCKLTa`T?eveIe)mB1Nxg^^i`G~0H^l5|; z1$8ydxPMvaYT@LK$qDsT?kg+N)n{~j&oX!$G3+z-cU~FV^9||42qT=$IpOM*AVh(~ zqF-PH;3XegNE#nAtE%?ukr;J2%dao)(H>;|MT#B7pF}ejQ2X9#Id2}z>WeG)Aec|x ztwJ!o$g5guAi;ivR9{3LPNg4S9YL^&4+>p~u2&ngb6H{B$K}KPnMd#D4AW*6Fs;4Z(${$rgGKo`&26D&=qou!@*zuc%zp97ieUc398ekN83|P(IrW zR+@8*e51a$e*VO*qa@`(Wzx4yA8a@L5a ztowXp1&RBi{r9e8^`-hc+Y*+SZWS*%w- zKv>Uq^0x&u5yJ?4p&?on363ZznSm-HBl88ovr-e3Hqo#oHA799*A21%0hr1f~xq#_y! 
z0I7OmilPO9BFArX{?PU(;4ypb0e{js*!6Cq{Rc2JT+X1q9aM0Cu)xYFz5w)DL=h0X zm(BUk&`)lvfP(nmMkrBEh;E!3AGrrQdXr8C@&wF*R{K*X9niG}%7mt}uu1)*h7a_XM1J|p51V+pUQ{IQ++p51E02E8a3J_i z$yiVDTUTC1AQ|c6b&RRwi?~_gZUXEL=7V{3$O(U@PXPE!*YP5Dv<1p%Wpz0$b;?VWSR%3RIEN2mK3?aZXUsJgq~b#chSS~hN4eowxb)K8N@ zt5X{KTA-W=EQ&sCycx5Bh{dg6rh**U5wUkwCLI9wA=z{d`u*CUv>aK>lYF4z_`lSa zQG~S}&iV@=V=lzej%{WAjpDrl6kvnSUr_v%jcLSlIyZGcQ4cc} z1ml>PocQjN2G5_?y{eLa)l;kUMu$pbv*=CB2hY#QDu;SG_5kb<2{}j?aU5TO9EH36 zMnPEA#7iB>S)pF-iRL~!&f;+-NSWc#yy_0{83zF3l%zwkN9_typ#YWiSeo%fimDNF z8%R!soM$5;97LI#P%=}%O^-FUpV~gkwq4?|t)7$1}5K2Y}8jmSE@ zVPq?Brf|7toUYHK>jY?q z#E?wXJAwM}22v^+Ls2I}+XWH89dLcgUkni+mZOi-QnOsv7yYi-Jx@{@Xi1E%G-2G1 ziTgJGCHMmDrKt8$$SF7zt!`@#=}z2mq&&fU3i`XUN%wlP+ZC^TN&fD(kN=I$d!&zj zLgK@#O-o^+v>!A z{7u{FM!VTi*z+)0p{nKxdnTWN7I!0$`^2`6j}t`9q%Qr=u+5vX-eQZ?=QzmQ4ebxcGUT|!R{aLZ(FfQ#B3^I>nitf_H@fY%}C8J*ia zoh|^kC;7#MNWd=>F)`D`iOle+@luiSUQ}V5Z{W@SYr*FkmD4X>7icL8rVvoP6qdb* z3(yMY6_IK5Gf~a~ESTFXb|>O-X{ouh!ot0 zRt#q(T1}7PtVZguIeESh&YfSrZF;&->SOf1u{PFMmJe>el+~VEr1uIMba?Kk*F`Cumq@pRoLQw*M27MfP}{+4E4 z+3>BdGNiiNv%&tg^_xNa8?;HZ5~UJs7>S5)*Gz!f)f5i7eQuOPbml;4;VKxKOiZKG zaWMHQ5ULk-01KmdZ)&5T!`4l7jDSt4D3sa6eP%SZhkHBE72j0fgo_qXvg&1svhw9GOIugmE8bJ=+^prV|d$$^u! z7wz7sShnHIqNO{iy3RGq$Z8Ua%l2#0mJCG5&0YK|#cs`TsDA4FPPvM44mqsRB0JT? 
zxI`JNn@GJ(v=r>4o|rd|F=vTWg{$}`_-Jc_fw3i{%bmK#9g-wg>Z&$Y&WgJDPA$1Z zTgZWpc_lVuj*C{XF}1)4vFho%`7hc9?i{+!-e$zPI`QNi$!ZJ8CPzH2T^T_K94JnS z)f&y|iELXTyfyq_FQKk|0RFLkm|-Snyz(nm(|Jm0g~OVp8eiukk|ZMkY21XoCt`US zN|?OB-%C`Oeb!qhsH0IGv;Y=gvly(5{Z`bQEQX2n^<|fxi+=mO%hNklq@-8q zPLoeUWxQ7CEh-aOK9%^j-4#iCOdw5^k_6_NISR8Z03w99V;GH)eZo{b;x^3>{D;17 z2oi1xCYq@gMrH+@;eh*DVe^(gpuC~^bN7P+X2Ne+5#k2@q5?bOn{*$zM9y~*nku9x zQ1$fj`yOG|DfSsMdhP+@U!;4xc8Glt{QwmRk0YVkQHW$iM8t4YVy{M;Yq_~lFhkD< zbrAO(A`z?ZL-*$488T8Gs$akPm!j+M7W(V_7|W?$uu4h2V3~=#m+nU{n`-k^Bg~nw z_f3`@oMX$*khyGUHy62rTBA zg%Q)>AR>9|Re+dssHP13axuJP_e}<(57iExdqnd_B9>Nltzy>E6jQyEV4gI-LW%O@ zKYb-o^;4j6#a;WIVc!a-9l52g0S+`kpCf?Ig#l)3TeMI=z?v$7;zaz&a79u0-03Qa zW)~74J-Qt{?vuwjg2BJ<1G$ln+ucK6M=Mh>L{u|Qh|2h$szNHqNkkX&hm6$v_Sl(9 zUpUz*rgU1;Nhz-9xgx)+J)O%u3bSdZYEESVp|l_ruR@9aS#h-lrK>tmQnA#-OkXqC z94%AT0>Aj*8T>Ql!>k@G!da$J>{NB4N8FUcq4D6POZ~E0AIhFp&Dq>?$o3JnOA@?`)(U1-icLG*mK`Ejjcjn`dpa7mr*z0mN*GX;j? zJCFC@R^;~GwMP6hqmaKp-v}r#=ol=ApnK%azIRXTZA*^h(O-5U5>P4^GuaSvRK#tM z#nN8chn$^VlCLqO?)#P)^!Z{t9r2Y(3B1f1DD^8>DF@rBtgV;|j?glJkCxnCs`g)~%8XA01JGah)#U%Gzp`&r#S z)y?X^kIgS5XL~_x+EBAbO5U9|i$>S0!&p}kopX=ON7JX~kVNBgav9q+4HHwAw5F^U^FA+Zctoq7G|jR1rhQM8q>%69OFoy9RY_98CHS$tGdbWRw zmIhdSHf$*g)8o4CmBoRC6?L>*LDzm$iGGZ<^o#5x8t9x~7j5=0i@N7sUNf&V-Ao{r7T*;1pu#UC|K6iP!eb!d?q@|KJ&2 zU?-=wdbgY;3j({fi!*5l1S$7dc3%OSc5F*5tIM|nO_sf*jd{f@XG_D4(4Wz?yB~O(|YNro8)G9y0){s zB;5`&teM0-JJ4(avV9>cZ-OU(KP z8*{n)+c(GcAwT?mx*5<|c?`^&cTcPn+xP9({?X)Tz%S>5@HqhDsP66i=? 
z`QZ&^`JLe|Xq3bqZsc=C9Hc^XjNbd3+oVkgEs5NQRm;xJUcX}{pFMlxphl-oSf<;& zt~vd1z}~6FYi3SEs^dZ~@03_%OkMHrPU5T0m-dLiZ3<|hOEAA|Ilo%(qcRVPJWb2o zcNvu->tA;kC9RJPnr~^kp+@uY!B`}mnRL>|6A&^r3N3^g_P=uY5bkazc8JRWIa$@R z9WA_Jhhl4F1k4!M2rxl#GP#-4(V&n*Cn;2>)O>V#>yq%Mf9*YeNE4QL#SFXA5EiGEK-WqD?i$%ENB977fFo*P%;P)xacSGpPlqafaG8O&i z+{ubG?`n&t2c6wnP6-I{HEqjb?wlD>;RtWdrm*8lN59+mDau=gZTyXp3 z>X_gP>z?!(pQdAc?YWbV|T+SH{I-c`jc| zZc>RVfH&F`qe5u~^@pcCW1$u7$b1(lCmODPi?>uQH9#$0hQ6kEqyK4VgHQE+X`8Si zrX}<<2y=)i{0Pys8f3H)b`}eE`sEcleidbX+M%K@@4W4dAosg;Lm3zt-Xrj;e~T!7 zonF!zO^%~hk!8v1T|w_RWE8|d);`iY0ps9%v_-E+UVmF(_=`9D)!U(=G_zjld>;X_ zp|U$G3bOO>{~PfcWbv^V&=x`PgQn2n|2N~a=n!Sl;wntPALrEZTusTvapbU#vaX~Za%xy)j+%Ajr#`#@%ueM<8oHlVm-y)?Ftf7+? zbEzc*vKM1ERaDOPq{%ErTBpq0^U-G6GS2C9``71l8MYxYr_moY`t||HG2j0OqB<93 z*Gkq|)(5~8lZG=Dzfw)`=A7d$39dM2r3-SYV}}$kl_FoEJicC|A2olD+tjiF4PeD? z8|=rYzCYH6T;sWPbvC)$V^_RRG#4UZ#yt#B4gs_}a4K8`bsBpLlp4o-5Bnc$ zbe7~UHS2LV4!2hZy6R}&|O_iKw|F{vD=!10e7xZOS0 za^#KT8B4R!L~n%Pz~6?98q4=-^v&gqi_Km}RolU^tfdP=M*0%*-EvG9LhTPdS~6@) zR~}!fj)tdriYDnPe%@|mM|RaO*q+eoWFhkh(Im- zJM7`$pQ&0iWQzbYjvoh8|NCUQu0_y={6}!@wVttnzDdaaipz)Z7lf~j(lS5~o|J_* zyN09=wzB0rB>octAA>$!Q@A`qb}Nk+<1>5o{dQ0#d^dNGY*=zU-5 zg8oGRYC94~c)gK(ZcC8~cNwtD%uj8{+V!01Yp#3jVm+<|B3JW(+w1Y*3YVP>f7h5d z`ck6dXC*5GFx2BH0nCp_rJ$^82Zs2NWV`3Ch%AC=Rk?$rUijRzleLXm*M+VealC6D z2|cI>p-K(cn-}_@Y&X$H@w<8!xqja0QcPPn6ByP!!AM`6vd+h*gN@@bDV-i^0-^F0 zLx@k|_do;r;7@GV&DuKD$1ZU`-$HJunttA9*W}`B7!X!Vqy>S_y`OyJ)-@DA`dKYM zS|F$Z{HR1TU_-b;M~OtT*IQS*Vp&S>59#jM+-I^p$sM2HadyY-DqOB`W78VxdZk$% zp%Kd%YJBp?|+f>^~oYBU`ojrUW^%>k9(3|Vd(_>>A zEb2c|%WZ`o9OVJr}*mz&aG zJQF46_T<=MTV{PF_sO%Hl040b|K)f&I#VxDhjs{H7F2d=@;}{yLM&`uO7c(eD(7#@ zBKtjDmVzn~=M7woeX#lrp>8Qq5u#0a2HFDb=}#(uPtNWQzYB9 zJfmkv@Uij!#*Zf((MnQNw3DM=y86eDy05r-b`|m^b7GM0_^M#6qF9lc-DrI0 zb;yMd)7#ceUY0k>R+WS%h+=bWP&$j;94okcvIX8SF+cJq*ldrV~jvF$`CiqU?fi_{#ReijQw28fFk0MgXr@8HN8 z@zC~ko`~i7<IM9El_ctLNsygRl)n`S|8}v+#-l}Cn+cg@C5e)WgDE9z)RwX z3E~-F4N0}-<*n8R7Sd+&2G=IP|9Zf*M(=V{W0~<-SOws6`03B6bGDMQ9qT^_ZuYl> 
z(aaxjUm^^(R%NN+MiR66?vnx%OOA_{lnjWfr{*f%pOWLnbzUZ3Uw z2>>gs-T~85fnr=s3Cw#^?BnWTtM`8Bg9}f*=zFAB;uWq-w&X|pzelWCRX(?u*4(YPK=@r;x3!B3An5A;inqgeCU#2;q3m)+5T8 z*`^tQPlPC(xn`gl>3E%R#XxHdfO34v_P)*RF$3S=IOFDQg(>c@3LlXjOTFpgMmOmR|M&bu##`!~fK1kY|k4 z8_lO(@T?fE(A8y0Q*4dFSVu?1Yp6UH7pHj&Z)(HQolwi$kj`k5q=5Qnk6r~s0?OuW zo&(L4!~97jpX#-jBIY*Kbc&lAO16Vl#M+7g0eduQTHpOyOTnpz;eUyHw zs`QPS%>WozT2Bu6%Czr*h%T_YM1%hR4cG>-SUbOhuTtd!uDB$es3-bg&OJ^l8X zBmU_-vX{KE&KYbr4rMC}4I2a32%&QVKXM_jB5y0Gfbe^uN2ER2?mySYPPHIL*0N%K zZ5{vZ;+QE-j@n_n{(Pndn##eIe~oTRq5q@s|1nAbzt)O>r#k&>g!|umhuj#^=iy)R zv?3*E*1&d!yE?eUC#lYXKo-bkBp_4uv>Cm${t9aBz4NAIKWD0Dwr zuOel6jT~Yny(GAwbDZP1ZIKuMfs{?Wc@lxVMp(hD!~9Rmh{iyw*<6fJBQJo-E)F7bI`9Wr6F{}@M!RhoM=@66;|80p$f{E3bfb5`@~G^D-`^uy}p6Hu`XwC)oliGn5@7fSHIf z^vdO?bdryEm=Rjq9ye>uD2F~*WQNZ}CfHhzoFkniCuhr6{Sx0hRo$~I{rJki{fXXS zLR?o88LX#e8JYQ4e6HVcy67@cwul#TFjt&sWooXy_I%%ETXes@JY`t?s6t`!^>seB z#pmEt*|Ba6lU|xVr$A@AfvnmhpSYz*bNg02geP4afi@K& z-=W-B>ECk>LRDzC0ugTbCB=JIh{m}z=3i`e53*1yJ?ou*cQo32+mt$m5IhWQRQ?7X;*7o zrJUw=*}TMa$}P{Ou`6D4-77e2f&f?l++S3-A0R(UrKQRf$9}QMp|ev_-~)~QZzyU% zj)d9$#SvviJ2-_XbU(1PEWzwup6-dk(g@E$d22=5mI$`nBia7YzWGMo&WHC_n-Q2m zURxOer^Firr(QFoSuW19XObpaIYOkDXS9ag-Cxv%Kpv@pdRJZ`uVl~@xc3!@>On>3 zKAJ7cGNX!j{rxctk_@v{yPh|oc6@5P)e(%U2~ma+=H3NIFs8K+h;>Cn>4|m=ePJ22 zfn3I@S41}F+~TecuMHk|b)PEgyN)AseQ#ZU*<#aXd9>e_2+RcGB-^>31_x5Bz|SPk zIcx_f$$zS;EsI7VxRAr5TTgZ!jWtTEr3bGOH|xxJjK27wN$00a%n1I?f#L)UJiien znv*WYKN7^^;KY}3M>o`tbM(Mo&h~!C2Fum1tY?m$&&k00<&*i_<;P~-sdhJV)2}SW zPZ;ECSqzH-ZhhVBv^OjwDv^{jlEGNE%ZEBACwCcd$xTDzH_8*7Q0Z7MoeReI6uFv=Wp z6;IO_Dy0iQKQ%eNVKDwJ#j|zlshN+{gfxA9ST>V7jEVfy-kJ)i$GO=-57Gj@qznq| zn2{PRA@VMu2&O5?(TWz{`EOec%(Hnn)oW~&Z9OPxX?O1DZlcRcqZt%%djvI6odC*% zsS$=@cj3i^iee9{${_>MP5+)oQbM_g;IV`6hI0iYX#5%P~4FL$K$zb~opu7}Bq(l(9TP&G? 
z1im9~ww2syfIG}Q?Mac9*el&o+c8}Q9-X2d0{;ztTk=iF_|QDiMGg%OygV$4hz?W+ z(^oh|?kZIrR+vGqjkQEvohYa|97zRZjru+Qwo#^(bXUK8>Nl3~-)~F*3a0oYqC>tc z8V21l5r%I9wfvLG`S#%7y%gH^fuHpHmF@mOG7Sg}1L}wN`^bKS7yIp)mA@`6FP-XL zxbDu|kQQ}mYD({Fljdasi}CPP#`_7-chE0J*ntw53t%=Oq3I$KqeI%mts^Tz(^qe5 z^DxDKd28`5HU)bjxnoiM^QxFd2Jv#%f&xsMK*t{>`EEkkrrjO>^lI#O&dWzzvq7&} zT$wq%Gt{7OXD*h;W1P#eK~PGNRIxFTg;A`fiYlEF&O~WmDoJw1@t2E5x50>hH!MAS##JEsKByu|Yno7cnR*uKMM&*EOlHpzF_tx{(Zo;{|<$=doGyyzql#6Esn& zpc@p7b;@&iRqlfRz+3Ldq<)qDvdZZsLyCK(f#{Eimc=AyfXzU$QpX@x{NO&6bjG=B zWj)P0+FxCM9@l|) z@#En@m4Gwi98o^C#V=?1g#&Kgq{Q(_HT>E-W@6^qR5$ImaIO_6riCGW4>L;#Om^M= z8Xgi=?+eGvnBnMNdI&#W(C#mvrMr#OYCEm$3#*FtqA!$CkYM5wFl5PNZ_J%bcvCWH z>Tz-|$@2aE#o@;vQ~mawCcOKBoLv~)X*(7SV8GdIhopn>GR+T4Ep!d>)bi#T&Oz9V zzT%Tp+D%*L9C(ECJH$t2Dj$iq3_23X#7DDzhCjJYzW5bdKq|b^B(FlewCa)*%S@c( z1G#5$2dVi4HPVHumDrc*G}4o|XL0W)p3w|>kBjWDOv?5;#uiEiC8}ptP1z(3yej>i zC!`Hs{1!g~p3D2V45~mi&vN%bWNZ&Bo!CmZt7H)ab#U+|J|K_Nt7TXE`)s*Wd*$ZB z1Pbgi4IruB@f1=mSOu!RMmzfGYsB=IS`^SdlgE7xWwHkdK0>N9-T`BSUHU|1?N`3G zkxtWZgYzt&-my!lIww4k!-OvtYn6;?KKO8!Tvo2c)CcjnY(a8b>+?LSFIOl^ zZM-O!G|qV3=&aMY>14W=r?)5OYZ|>ImqU@zkX|*Zwl5w*oFJUwg%)DJLW99H%I69G z(AX9b1JN~KJ5wE_`|7y##vIklFCr9me8J*fW6f1PD*Vq0IXL)!$w`_bl_$QtX_nO! 
z!o1E(VfTp;q&YGqAJF=RQ6h|SJfxL>3mYC`olo;ZKi8^j`ona*v!m$Y7rUhbZJE0w zPlvtHIr{^C8AXZs4D4NeUpx4mq65xb75Acy2`PgI1B6%mpA?s+C$UBcnl58w?`l>m zV0b)YKNW~qLIIbBg2YLrMJh9oA|)oNx_cqnDFNlT`=>M+9=*MIO~)J2S3yR>)PY0A zLd)8UaS9m3SGhg9FeD%T>j=!|Su_*M4SjL4mwNx%jta}$ywvKm?6|B1^Rb@BZ!~xG zyU>Bf0Az5bSU#vo%DdNg@(bLn5C?W$MYL`MRqm|YNI}xzj4;7ufi65{rJ3#&fnB&< zcfQV{)7g`4Z-ON|{+pnQkx2W>)n7iH0tzYzz)$I-W~9=8w4XFNY%|&;#RnC|K3J|J z8S#cE#l6K-N!_%~Yz-~0UDnZy4-Cd18g@Lcb1io_b;`SDYEd}tR9|)YR+m4Y)pDTW zSz6J4pg+_ic4R^bZvzxPSsd#J4$hwX?#yNmvl^`zKhj%j@w=dYW2a)H)y32y`j-IG zvkVEzyO}c)rBMie@RwTkoY9Qzabu`O=O^TpMHpwgS4QtD^PS6Uz8X21J0=1MyhZZqV_O01%!!ZUld4 zFa4Da^|2wgR3pDPi^0pXhY;8~;Ae^XtedA4d4u=C;uJi`L zF?}I3=kc*2QqJeskEGK_O|m_3|8}H;G}J0st6tYbTK+ z!1KyD$O+{Q`1WdvDg0in)MBb*l~+sw$}Bau8hu^cL}F(_^&|@DISSf$MWbxCSsN;X z(XZ-nd7{WRpwG<0a#XB2Z-)ivyfBuaH_;g4hrB_T!j$kf#LIW3Ky*G303rtWc?c`A zIX^oC7Wek#%GzdDQtN?&#Id3*Q=Ma*0 z@t-d_AlXK8>Ce=dGw?jKWPjQ0miStJ>j_tWA&5r#3~2zw2=Fb_FzmlN$;}#4R00Lf z*_PAM+KC@FTDt()N5#m8WOgj69>L?!*#%a%;XjMW?}R7+Zj<}$gq{DyI+~%yN=|sQ zq5{#i?T8W$0qePMw&31ain%xy?ELbRLc3lx$`?~$7^YMU~ZjWIfs zyvoIs`-;P~hM47X9UTDmzA^@&lch9x(#*HiaF>*N~=BI;}$j&vAbJ*?gS4kfw&W@XYvu*L^@$DD8fImwiA^4z?47~~_^3R+HQa($uCno@y z#KuD66OQL~|n?;1$Uo)3kA( zW60hZ9G_}mYA@8I?!IwXt-iyGhU-*_hFq_&bU3hbqD~TFo|sQ4n`v%!JU|T$`P5mU znrH6~wMl97o#cTRSGnAQWoL=YKi8s3IRUx?|G)-ZPow6;7uxvGtRHRZv^?JTC@D&; zu_*V3zUziDAqjQ)KD09j3n3YKc7fMZmX~tN2H$Ht($u^@yl_P^^{r-fyDQHgA{Fe6 zdI`RSlqZVeRzi`?#*^!;>B$%^DLJc}nurmKi)QA0-a1|9XV!1hx&#z%{P8r_UhktZ zh|H8v@hg1)J&7;P)ADlKP(#rs?3yCb0dq!dN&>g>wYsB^c$+zobQbNI1-n8>1YtYf z@soEyS3#(z2;wxnB?ik9Tqq{|jj*dz))PDrPL)QT>J6~_ovMNUrh^Zzr?ctXqsx%R z9*1o5ZzGj;q-!0tkSK{I8ihIU7znlu6B{L;7{%Y1(jF8NmV6o`AvAR}*n*nt@cARN zWo}R!YQRdrUnzbv;w+Kkb^yYDgwv2u|cZd8%t@NX6 zD;}dY#2?9uj>qTX8L4Mm(?b^C*QM}wZe58Z@nw+QSqTgz3Pq+YL+S^eB{E|m^yYTy zY=rK)VhxL@UrkU)r@NQ8%v`U%^IGB#3*`;jJFMnGC;6;2&*JU?nn~U?i_3%AeDJYp zl;_O0sz7^g=w|XHKWKarP%7Ng?y#_|HQ?>Rt15K_Y^Z;-Nk0vgLB1xH4ABg#Sui@? 
zl+MjakzCr4>M-pGwE)je0pM$*n*j*5IUqVPk+FFUcg_RuZPSi3c7HcabRMjT?;7Kh z{IUg4Js;}1z!l4b2P$j&CZP+67P??RL`&=`g7dAR`tuk7e%3!t)X0QWE2?tYORU>n zblTYa`Ui@HSe+B;Ka^vE{LseEHG^(Fh6nu~rLrBoe%|)`jtYg_hh;#%1_S+ym=KzW zV9%GpU?(ZhzGqyT@cBkz0aqF2H4aTr)%JL58&2K9p`*oO{Fy7Ly@04N1T-$hvWyWh z(C8GSP(|>gPXxDJy#Yxjj)6rPG}Vh6LtAgKlW&@J8=qkpxYtpCXqgHmaRrWT#R;S6 z2gBiptvVOPbG+h>mc09Mv18ry@TfrAz&=Y(LcqeLanW9}vcp08m8ZI|4AYmj87^t) zyw5uA;tMrz^R zaqeeI7ZTU6-%p$zTdEMIo3~5|aL9wM*4}cTt{lGigpM;frtb~ISN;YkV6%t9U0oqv zoEG|(j4Tr{XF^NGmfQD+rOu01KqasYY3+WAvYVIbZ&Ip89K?ny3n6R5C57z;Uwx{s zN*j3YLB3VWy?v_xfb%5qo*?EErqm$QKrx>PUL?*E$v4Yl=#m&H+0_NzB^#EG5WMJZ zc|f#gekWSsc_@K_6#TavKzu}gCAhx^jD5fy^qnWPBlp?O3`_yw2 zHF_|;{Z}&T3p*}yR?V2g$Lt6!t5@ZyQc<(C@q1os@#uyW`-7Q%V8h>F)%vg8_Wy6r z^_P&r-|#ER!vW+ghZ#14F9uFeFdf1keS4Q>&zusF+e8YOZm5q};~eOAj=TTR&HV*5 zF5}D7a6AOy_3;pmo}k1E7f<=D)95ImE)(PzHBZv)SS=p=Gk>1PL}#9{N_ccTF6uGr zVZ9>;Oi&|oln}}`@2x|}Y8_`X_Z1%1`p1508a%uUfI@nG+R{QaL873SP)`vY#JdDf zyu-d+{jdijhBYt3W6aZnLHl!Uc|7Ku&7@y#T1}W)h-ypBd1`UvjCwM#C5ay((kMeU zA}*-{eqXqy+~9Y;r><-P5k@vimy;DZU0G4_(J`!gS(&R2x%fUTISw+92$Ae#A*{63 zO2>V4ut(%t_IZUUELzX7&OWlbIi}im`lfmy|GLqHXtO9#Z`lWkxGgAbWj+xd<*89u zSv?FBc0^2Um+Lcs5t~&-K7QM+e()AOKD}#A zM??BG^?dh3gbJ~36i;oxvNehIshsL$)EL@dJBddWdG>0|V!VPE2|h1;kx!$?#4hAg zJfU%82{XJI+6gfKghS6?#nElPm`@F`GQw?#<>Hgu()cZqkR6Q5GObw3-4<8o`^da(C~FABvg!Uf{gzy8OykNAOJ0i^j7xkPuKs z>?RhU+ET?yN3dHc7Bw4nY(|Y?ytJRjC_4L4M5Mck_}U)OHUtNPVX8QzhzdmLY~?`^ zl6gXh1{Gc2y(Wp3(~%heWld>ao!#f#YGNc8Nd9HDl294Yg$aH>dr?`Cco{jQEM>8g zE`S@^D>u~)`QAq{Y3D8Nz{6?STD-Ymi9d%O@#YE6enLrMy{iJsPD#f09rLZRMw#ko z;)OIqwsa3YM#IWeGpk?p=<_uNu93g2o3&5qkLy*#2klRvq$AG}wwp8Ph>jRDTj#yi ziFK35uENMP>qmBv)0Y;e%gyNvFFZGCraXS1@hv0Nk`7k^usj{?$Bk@BrV5HK5Qa=8RxRh^DYT!|UstiXUNVaj;OrVZX3gG#WOmsPyzYgMqpoRo{1wn@Dm zEgbK4vBP+d>>VYUYkxl_wl7psEz74$We9d}zB?N^SmAH%{H!GFTW9eWy`r0litH1v zs265ZEdZ%!RS-aO!mWkRYd%h_$8F_NzRS~p-B7E2T0-MO`!(+COSDs;7ARaJu%F21 z#wtWXgdlVLiqt79ICJp5w=PYz?xORBGsCFTfCGR)fE`_NA9^bEI=EMbn2BR3j zHo8D4Bef|36_;v^V@50QN<5=;BJ=g%ZzgP_Itc82vTX*f`e}0G{NwPro`6UU-3i^% 
z$_j{ZcV9FN{2jcJ@g6=8RbJ1ijNLrQBC5bcnQMtC=a55>&TWHHqam&atE!IpO*&z7 znJ&mQWj_1DdhpdZ;;*|@xME&$W~8}|e3Q`8mXEhTwh-NsvtJ%hlT6c=3Mhvzx`Q7R zf`Gds)f~2y7+2&oB9OesK7&I|;R||?7HVu?yzR(L?xFlZ&i=Teww|&DzGEzjI>5r? zN{CsqZ4dIL47|&&_sz0U*LzDn?e*@bVVQteBgj@fB`Ng8eK(dUaLn8SIs)zWeFNi4 zK1S@IMyZ=gfq_)SSkYr|px_h?K10$J{pmS1$B@bWs#L1t$`T%!izONI}Az83i~c@B_OdUmY;| zFcNigCt+)WLtkh>`EL&<)D3L*;wMFcEvRF#|Fqw`T-T_a&4%|_AN2;N&F{O4lgzr~ z{&EwzNI&86?vww1QZ3Y7`ic4kXIIQw+DRz8;!+lcG zD3$#{ENKKZh3eSNN?WG0L)F0Jvi^9`JTC*~kVPz^e|jW_le$;{ui{VR(Mp73oWqq_ zM?8w7uAgP5mSSo1>r_~+dyo44)Pa7Mtzjb?x|Z3H1J*-aF)E_m#Pjte zZF+5RP0f_-TH_!i#R z*9eaTPgVn*B!|GA);7dx~M5z|*BkH^o*&W&U5GpZ480xsJ43I;H zc>V^6h3v@a?GJp9*kZa38B$2%mKRU}zRi5%4mqJPDvs>Mzizx!$Z|ZmjEpo>lIP47 zm2PdO{3Y9_s0Q(I^@P^@9|@NbgDKFNQZHK>jiJ#W(qldl8=pw683ZXWLu)Skcg%8I z1+(zg=VUykQ@;lYpl_sNdjlf`coyA%bk_e?WQFp5GBAd*(O5-3q|_*1lvx1*%kEZsU3%nSfD z3pOwv{lh68tjo3?>}b4uxlDqctUz3w{{xvdI$ac~#R{gzNa2&78>fAs(Vyf=h_dS0 zNEhmj@qecabPw>7G}G8OZYDR&9Ek47{E?|hPs%%a*aKc!nL~AC{8c-JGYJ_O{o(~0 z-Dc4Ri^sB?u<5cJ5hlb+Ll%mMHs1E%LBH4WUAnVCBd-yjau6ut(9pEpw#;>WZzV7i zW%sSKP4r2fWoTZ?@>s2*6|4IfSICnQ_J1V88HKRh1osaPVgz?{yhe21dKvwtu65Fk zP>689@3Z-ugI2G(kUL$f6xQ@^=al5wS$8e{-{yGV-B<+FHr@LzulJ89R$OU0ij9Ry zWOWyIFQoa+Keo1rzzZtG$j8N8htgU49JH`XRhBZxRE#}X6uVf~(&|-esdWW=n*eln zUVDNDc!GbdBJT55#Bf&VaXfsr9ihEE*0;G7wx52WRB2|q@?qB;I?Nad_fMIY1~?Xf zKO#R{uullZ49sj(w_SE+4L&63@_?8&cYw~giHZJ&*VY|eI8}tS+}0D(5JzJ2At%he zzR64vP|j%7AD;0#DHJ12-m-6V7q^^aS6~`y+q3VU)A9(-VlhxAqM@PTZYG%yf6H(4 zL*)6>twv+=fazT#{bW^9gsX*wTnL*_tSY)W%wJ@6p8%I=!H9`4=6OEdw;0%SynGjG9}xC`)JXLg zUatS?Z~g*!{}b9#iG6z`l3_^7P6`Xw7{XQvR{18oOsH`*Zy!(Q+^2P)74Gv}v;9Ep zZjK8LuD^}z#Bf||zUKft)YutiOW^dwif1_z#h_$?1K=;{7UzpZpD%>zIWQeTqPysGFQ>2u)DpUXGoc6>OZ&3 z994S6HIo6JZoA~Eo!HHw1^F&WJ^^Ky%E9nPmb@II?PtsG{Qr{=U}ec8-X0? 
zY%~VXTav8Qu(eWNA#N7V;jQTNDDI_vuMlBQPQ0G?tUe%UFe_R`tDcjn05-yqxv6*q3OqYBhr{v~a;6OK_;TPjEE;do<18*bI;w@&f z9?eHPLq5_PItPFk=8Z;gDd9NpY;6izv5k#G-bAaF6}8tsj!O#@&LpXB z`NB};4&{d{g+D0g&ig<~zaWJHjJ{YPzuDhvjPE_!N}S7}Ur(`5t}7e#q>A+9xLsf>-?BtJEOO7Lb3-ae&dQOpkh=_DXJ)VscTT4l9bR@@| ziB4{eHZykA(US!Dbc-8`%Fbt(h1^MneVJBvtXVvu{0`R~d?v|pt^Jq5Of!FB`3W{% z+dGe}0C>p(6%I1P(-l4V%Xv(hB2IR4o&ySY8zhZ)(SB}`cM`YxMQ>sA5QnT%bIL#5 zyv7_LZSP2o_CN8V3j;>tvmJbU0`M;(S4qMUoK{gRp#t2 ze@|zT{rgh^-=;F$p>~!+O4+N=lWcRyXtUouq+EgCAhph1#B07(J~Oe(DLPnE9-u81 z{q$)?LsY9kjQ_ZGbY*%^(K)3?ga%`3qfV;7juNV2*Cvy-cWz0qAq6PHMDDJyn+hOg zcl#(Hhr;nE;QcY92J}l{Yrr7wz+j>dp5 z`cSr*Qoa(sr#HGMEWSS2w4>pv>mR;gZ9>KMl+D|Q>k`0Q(gpNwo+r{0q>dHVz;v76 zCxuXvRUca=&59K~Qug+%pJ*g4SsU>0RBbQ4EV)TG4b6r#E2?7psEFctzJ@tHz5w@@ z2T7MWL?M+?y*JSMw_Gt|-4w#hH@6BLc2nn)3X5UVsU(KjE5vg^P1JU{X~a7z$fb2A z`aXdqwWNq1ZE4{-XzdF|{TVv;dtE2l57*UK(>7D|Pznt}H&&vQvMs}BIjRi5eISQ(^H=DN4&&uS$G)$59t+?e)x@na+H>B)_Q9ArqzPdWnjPuT+vW z*M;6LlD+0QKha!9ZnV*wRGhAuo)-)=Y$x*kU~wt|*5@2S%Z-_OAq;{T_1Cw~Tdo7G zv2Wou^9{VfH#m{tlRmmN(Vj)3cjksAFV3=NewKC}K9-W6_sG@G;nSuaO?E=2s(|73 zmJg3L&T-8JLE~o2t6m@}fov-DBavai<18fnbO2Z5LilF(m#P?&oq>t-(-G^Y=Yfy! 
zNL;2kEnb5drHe3yMtLGaEw2rdIM+ermVDJi%NEynxqOWtIz=spkh`+Esgip+%t69{ zXp9Wem>}qj0~r%UF@iAnBD;Y#=85R<^wG3}AuR#wPTg@Mw+ml1bIP_k2)tR~bS+%ufb+L7X#yM)4lik()J@?)g^CwHR*r!B_1pf4H`P{lwpR`;hq& z3cKC2FE+b%#rfEOm&Ml`u#6Bm{JPnsnZ~Z3WDz5GjMR*O6BV zLl_V_%(pfP)7My;LH}Ket}%(`ZR=8LE!ckKi5eP98_e}h?9vMR?hza>@zvD36?Do0 zPFLYySSTdyHBv0XU*?if^i0(;N|k+qlIta)8Ha>%pzgI#7dQJ-GWLwmY-m zB;DBU(VgZT0qZ@^G< zB+I;Y8phE1G)^d;kH21ji4;|XrG@gSf(;7w#J?~%>S4EpfT0~*AwAiCaTk8V=!;^q z!>W&F75J_Q#2W*|Qh_-t<%IzRt| z9!uI+rX5ai;bSyc6{DzXMV&BhvMRGYArP1C%OB4aA$8$HX}9dE(#9_Ld~PWY4)CG( zk@w;=_C0C_zuQy*bv7`^d}A}cWy|i^?~!4yD=o2CJ8Trf8YkahIME{@=vuT%91i)o zl;sI@YqvdA$G>{enAK<8tlKlr1niUfn(4}!7Yi(iS0sMoFNE1+6{2wRoFeF4y(+LB zfa>ZBJKrD4j1gvPyq3hDjuDDp30z)5gU%0x%Kb4liAzoaY*R>kGq6 zsGdJR@6N^LBG>p$x&P!{AoF_0S<+b_$ox#z%?yK~8{IRnCkI4CtnISH*=jZVK68e$ zyP7`TiTW^28cmtmKqW&&tB0iG-VB8)@}lP1e&kb=295ISwal6gnp?#L%`$-iYYG zKtQ)zQsRxG;f~<*K9TnaMPpwnQq2Pzt#A+($vo z;QoYL6XJZHoRS(GhcIy(|~kRDDXQn-lq4V62Eo8D5}w`;IW z$5+p_c->rspgBls0(=e;&U)LFF6O(Lus0-Roox(@_{eapifP5e;EbY^T8C2pGvcYa z25_5SXo~^|Qy1N(^~zEc%k_MEW^bl`daDR+`7c+=$t%bL&jEPNYw*&lh&n_(XBswIfoheq3KZ7&SV?B{t0GT}9a2CW z62dw1_KTfcz-#l9_es;JywqJAxfQ9Sk&i?P0(^d&A83bx(%Ds}IH7#q;RU9e&(hVg zBlA5weQc+{Nak&CQ7Uab$K`FJjui=Ptu8(bjkB~wg^wGfeLSz%nWQhOy0Yoym}d#F zc?;3Ij`dUS9^;yu0@?5@U;7x>Kun76xaP(gSx0Y!YW&$uOrh>uqN7!Tnuqo7#~PD? 
zvHZKGk2K6L%BCYUXXj>)*brL|1&>Y zYTU zS4>E4;^Z9q9sp^HkBl~5?m82^{nS7!-m#kc@UeT;uEieNWq_F;MxN=ws-ICrC}BIE z^tWSckqVT7o+f%oAT40~2-@`ko|>weDDfPi0ucy0^q!J5G@{m#Xyl^w zMXGWwNnI@Z!^#cXvkK_&0gGx8_Pmfc*}2n0X5R8Rb1;NXP+PAqw{k0>21uvMgaD{h zLQjBY_aYF3y#IF(qp9u+FSv|aSvZdCB;^B(bw=!#@)S^Jl?sp^GJXM;Xkb%9C;-p% z1C)qoo}qBpz@+O)?vJs1za$fha5uU={U1ly{S_k5%+JpT?EKFEKy?1wr48{Mm;Z9D z*YE<5AJ>d^)`RGQuJ8;8qAAD4NT?pDz^`OkFAjFA5FCE!N+3}FyOGya_p*|8O6QK> zbx67nc11_duX`SfO4mHjsw^ z?{3_K@UFlv8F>x&ebmSW!Y)D`d%0jeW9?kvz(GE@ zj5(xIYifYPRpimK!*?3_mlyG>|mW^&(6L-RL=n@Nbl zBM@Z>23AnXWL&7n>&Q`C=MBCC@SO7fB(t#uZQ}2gD!inmTsPGhQ|=X`PE)DM&vF_W zh#IYUC33Byem?BX6L5%_C4>%_rigy0VGtBu9&ZuACWk^(7fuKW^v)J6{YoYouu!s| zVvS<*m?V_U`QO(f*Z(3I^#~;QZ0_MPy8Q2?H_+O2-LH}0oDc1EbrI$Eh1HnErjU7% zQ?8}#L>9x_EUmTAmbq+NJ-E)>zOoIqu9rL7%ft@u2QE)envwXYmSubBzBJXw+vzZy zIj#(S2V0^3`v(O5#cFglP+8)6V!cG2a&CO8JBv}7bKfhWG60~rQjK&K0t~ip?pzE` zAa8JcL=@Vuc%p4YFx++)$`DOnsqqqJP*l91e7!CZTkP=NW;6DmD>ot1e*wgCQoK1g z;(4i~1@?vEeISPrADo%^OI3L9OwFgZuTiJ2t_FT=&?1QP15B_&Kq`&y&BOIiD2J!m zCpmjylX~6)yCeW8#&dFfYc6KyuKQBY&h@s#xO2@6LGN9A{f++Kd@dP)N&6#Eou8QmIB+JMF5GI4_RTm0p8PuPATK9I!vF>6y|I1g-kzQzX zY>7d-h~ePPY4|y3%XFru8CNHwi5|I06&5M_e1=fVeTllF~Z-M+0m%Y znbK)U`b92#0EjmfMtjXnSQyMNNN?&S@kr1D$`Z)fdUt<=zSveC>z(w~qq@5J)l(oK!u=bLDlzxh@mZm3 z%sbj&>uWL|cFq->S=Qmr%x0l_Qq`9p%RDxE(at&I-{u!n=LmPJG;(iw9{p1KD@cG; zzVcP)x|~%pI%R*W*lM(8r6%A;dYXy5o6biIQE#e)ilV>_U=;O!TIE2v1j0`%7c`>E z2+pKDI0$z%Z+9H=>Y6Mi7G67OwUn;74WhTFI5=6`uqY?8?`ND91jnsCvVCHl%&2y; zXVyW%Z|nrv9Q?!e1jT`5@F{FbCh<@e+b4Thmc9{YAB;c8pK^3WwQ+3|uM*#QGJ5~-_G4~LrGI`+7hoVs%MJE?JE1r_wMvmvE5*_;pcUXUoQzr{x7!1aDYf# zSJ40jb|zEhveNlVhrY)1(Pqccp5t+G*wt*ux@TP}aTHn7%2_QK+vv032N2%9&4xt- zB`hcUd|hwYsMWf&b+w9@slGIl<*|v1Q821&BFgC-J7dk2G%%R&4t=CkC@TC@THp9) zgB)c$|0-DJ=(i(@y+HuE?%~Pn-@1R3AI#SQjQ-#jT*)>Nf2{ z$@7|wqjT=g>AMy(5sDxT_^gH5xKC1V(Q<8-*;~V~hXY`>L>_C-bL(`bSpd_L8D1lg zDSG}`&>6woY5bAps^$Z$f&1P(FI294H_GxUoEL2oJ+n4DF>`H@=Vb>yLS9}cE=l%O zmxN>I0*{qc?=;~av=q0L94S()V`mywOjFUecTH38YxIY$D-(V(!>7ItSa^pLsW6Dk 
zcIXLY^y4R`L#i@H;##kpuTWJlEk3#TqO(zUd)VmOIqsN%cD0{S-tMZR@Q+9=DbbI0@YgBrx1=?X5x6%FbO2tKymn=1@ zuOGXdZ(KVv>RrKw80zCu4deTpvFSZuQr>E}C2SlpRwx)h!f9kt3)ftbqi`KbjyTNI zF`mPdh1jvGa$OLIP!6RW4YeeBy(ah_O;GzPL#GVZ>5P)To(hq#1Nq;;xs)d8OxE? zO0R=rZ^`+jTU?u~uDwQRtg}=3yT0k`UGn}XLen7+6SY)xhVE^AHrLOJ%G7rai8lc1lg-&l?3=cdJgzLgoY z#~#{n@r-(XxeBHKF0fIxSQ+q{lJ zfY3e`Qkp)YCRK8KE-fp%tsZ4Ou$ftzVlpOi+O=~Jk&V_GQh!;AF>aIVQ;XQ4f$I-d z>Rx{9w64ACZi<@;z!vH-`|p?TOv$85H(+6imr}@<~m{(u2Jz4K~VJx zpV#eF^NhNJq-fiKl(&@wq>&Rrh`;ZH{a3#JE2PF{0Q&hA@h<+L3I4o) zBamXaMDo<9jiaZDMGpI)4|i9O3@5)#S!h(tdXkZi&Aq1nBrIYG3a z+{3_|CezL??=Nmhul&OWj&2!0nun+ zWdwkbnazzYEcbePM5XKhc2U{0=c;iz+iUK*OETmUL|du-(%YB-SA zKI1~Yb^ylgPO?yz-BI5--wx9dRv@ZmmcQ#1m&7MtO)^liGQ|ntUq3;F`<(!pToydH zI&uVIy;EGw`JgJvm+n^7?a~mdegQfD=jThZFe_LE9QT~zNqQpwUK^7<kkVjF2;+T9_rp|u)o*9#0ffVgh$jTOV_TnDbHc*HJ0zznCYE}&)OLoa7WbW% zl;MA-A)9`BLa_SfN~g2D-1&1^UbNfw=gCfzM}Vd|P(O)P6(4Dc_eyY{tNWCdLSt8Q zf8zF+v`^fR;(xsEodqNEm;@$80e)rt>^^9?T-x&AYJH9^{WbZ$AcGj9}t~KX;=JPyb z`jt_w!umsgPfK??@(41D@Agx*7Nk#!28()9@zw3|M6f;O))eG@qoOk7 zOTy=;v&qeYcLZ>`pVBWAoHH#p(;57(Ky@)5W#94f4yim^&#qbUsoflI7p4$>`L%)f z&2&r;dH=^_iFOE(%c_XnX2Mb}s)e83l}*QJdxI~{-kV;pe;B3F@-oV-(_$MVb%(3t znv?Y)YX+SIkP)3flfmz!7YVAmOw)GN@m4z8dQ@>y-FfC4VR~~EA(?tx&H2KQ{AqO* z5UN<4ZQU~^n98`KdM5Ni*BzZqPqa{W?YM0Ja)-l3QD)7s@=L+r)b5&$g?#LEz#%Y^ zo!}4i4Df|?;x+tJpyO?Bzx|Zck{UuksbVA67{9{tU3A%GNs>o8&1!+Buid&y+jsHI z6~a!m&^h{&u z@M|2m8F_S0UGOV5eLfeFFEa|HrEgU_uONECQ&bq#))0_>o2Nl?_;q``d8RXpR-H3w zQ;G?yCv|SNVa+sh7iRii1U(GO2)=xDF}xj@w4fwzWU3MCgygSXz~+|uSDSJq?*{~+0v{P}X7ulmu^d30ql`6C?vYI~}XiG#}2;LCo# zzOuUTN%XV6=mPm0p#|?%VRZ#CQEWD-dUb1mA^e3HLDLHD{CNUcf3;~sb0VRk7fL(a zOP#{XR#eY^y`6W?zT?j`-ur#RhaLsuZ=%5$*FX%4DvLByBdtWiG%80yBdMK=`gpX- zPtlMo<^{4^&&7v1E*d9H`_0amYtKa%ZblIv4J-A3(}=bZN!0VO)8XB_ZreT*2zG0a zpes>8bj@FKP-4N^Z0Vm#Lq+>9=gUUo5;RK3-@JOj({YqMU1!7=H}PVg`a{S&*=knT zkh2DJtmkJCkunv%Hm_D@JA$_8@5!tV`Zmb5_^|xabd7iCqkzu)DQ=ke;QM~81S7l; zF1Zy*$fDzG91*rPv^cT;GO%b_ilS2()6hhH`Ntny4nGU`&4?6dc9#SSvKzC=&)v<3j{(0vgj7Fz6 
zN0toyb~1-`FhsO%kuh0Tjccl$GY2aOnYufUiL@G>R=08ickHua!o04xqhfZ+DuS_>R`1 z>rrAqLrw-4;GbUNJli?=k%o7iqVn{Sb5a3!-%;jo2gEwL{$k%yViL}4(G7i+MPh13 zLjGcjeWOvZ_yJabVbo8ce&ojJp)-d?&|+CV{HAFOeSjII+3V(KVb2rDhTlsQd@xiJ zkzg8kXC^xjjF@iAR8Hrw*hK!}8T2dO*!_nOaGeHcfG~nhzp)kzP#90%uzD~KQ_Ta+ zOHcG#m(WGlp-9s18}BzbVLRpKo1rVQvK;R>ouX%SXcT+cJhNpd+74_ zS92{ox}=4Jhmjw+1(Ms9M1EFYxwMo9stwR^eBhYmAd)e7En1RS^dqI3$`5%3%==p~ zyKg`l9F~A&!5I>>r#$kj6_kct_?L4gzHIU|I^T3Gq5Rll)TzRX$RNmI7xnO|SoP*4 z{uKYYDXvEwn4^X0uNidmcU8^?G)dJYV`H0(Qe_k!K+TcNzX;Dm+)B$dIzP*1OZpx~ z1ha~ImKS}vFL@FqDrMl3C>PSGzA*S6PDfBdhgMfL)r^(YDt1UQ*Unxm9Exs;$ohW$ zy)9V)z6{5rJ1VA+hPUCT+FIG~p6Fkt)ciiv{_AzDycO$P9gc(-wP7S8ZN#`kod&?1 zXbC<)^KcXIuWN%l#x-cGGMC5{$P#(B+7FF6!Lx7f!7BLPsq#J$mVT$!VZq5{Z7qNO z<6rQq}L|(Eww6`+(+Vix!67jos%SMS& zB&=kyM5cF_>zO;4bop!)K&Ew!nnyO_UMC;;Xj6P9CWiydwC_(y`6!mI5G*H!sQardTFwFyH+?`gxmPOX|+toG7syrKKqT-e=+ zbQQ}7x$9*c-rqVrI;<#Jr5Kj{nAT5W0T7beit=j3j+4tIZM5ViB3)wQvoeqhA?3_> zj}xD09y9GLXj7<#d%LBJXcHm`#Bj*>g*Y+3Qrv_^N$-T|EBX!HOO5T9-(05dy60lP z#eTac#IbnABihuQYf?d8=H14IM%6_9}9e%+_Au6T~Ox@-p|6NZ>bT6L=zoIyqYYAt%@ zt~*j$Ip%n{ot-cL@^%~J)k_gibCf5^R}48&!jWCE%D7Leq4xPxR@L#~b4rv{`^v8n zKrlp1K|0IHk*j*QYA_<7GEQ*20H&SYu%0^|IaU&+icSvjdR zpYn7}ckH^FYTiSSYP$~tyKR*yN(*35A!d(nX>tG|!RBDr$c=zqKyd1^R33TK)YclSuS=4n%1KL{=i3$)i;27 zy130r)_qX_5&N+1`avtBiTWiylfa6U!=)yo&8(IXLW$)wdzFk~GAe;r$vd}RPNC?S89T>+1DB*q{TQ}P#1fe3omhV-;~-mD}Jk zBF+0M`FhyIEEucvH8XUve`nws-eaHd9eBdb6A|o`<(nBKzuU##i3D7TzekrSgv;5O zL>=H&K18#!f;NjcId_bE9UP8KPA{v6yi1kecL!<*C0*O+K)4i|p7}o+9{u^Oe}zlq z=lf0Nm7|UVrKo_rp>{vJMD@OPb7TSOtq(>3h9hJzotq#zg7CYFpJ?X3Fe1Gz0H?S0 z&IzM$x#;1svavU=L7%<%gxcvv{FDw@_z-A=U4c(JFIXU2;2+Z{u!BL;;XGjGHE&UN zzpU@iROr2|l^?uZcopj1g-=SJqJ=g`oIZcpgadtDfP=uaoYI=Q^~dd8)Owab1G3+%@*G4#WDF!ee! 
z`y-tPkE&%$rviC1UoZ&hj@Jtdk%dH}QSW&IR_4Go>l9pD`S*In)88=LSvjulk#cjz z)lpAg-%}_VA!FUvQYCR^SNA9IW_rL*DQlkRl&0@IAU4px2EoYWuieqBS# z+~jA9Fvie@u2%RZD9EfKw4HtTvs-j->g#7;`V7A|bWukj?Ul2)ReziqyfQ{?AX%|1 zb)Gi9Me82M6L)+P#RTfAL+q}*hE+Q9RNRo~%)eX%;Gm7@$e^-QQzCbL>9j+xID;#B zgGDn8v>I0j4Jkf|d+n3V?wS^7t_>DR1hIGi+OVUjHfy8g_&{+t?4D1-)2`?Vq*=9iQ3{DK(KSvg9p(e9mH7StQW&=Q^UnW6 zAF6uo$)#y7W7A3N5ID`2{hP`(AGG!?oq${vBR zv-8S(wqYC}@9qD&vC99ohSUANn8f)-FNbfHvA{u!*%wbnhEvE7J5p?IWEed z@Q{Bp7h3S9%u^-d6-QSt`F!-hvZli6FJev5#+=AVk1yf4OQuqu24ywL#rS5vycqSN zWto(3TY+=@-Zj#)B7AIz62I+Wa8*X;o$A321h;iC{7If+6&dncEW-gLA)aE&K7h@pqYBd?!cJzPgzu3Z2uw$8V zOdeuAHhXy9S2LA5b3CD6z3wCq>5Erek9S%nD^^kckGlL{wfVpPq*TsoKwC9?GJ8s8 zd9xjg$meex<~a=ZV_f=uJ;TDhaf6Tcb(x^G+pwdIy`Pd z+H!2t+r>#cez$`M%iNaxg*`%HJ)7<$r|O?VHMLtzc!hiJN2l!FT`=~P_Ec7-dc^vQ zlv67}@F#9A(eZT+d%KXrtSkGDvnS{IJ%@FN!2qJ5oP#L4Z$SEmxYwgx@A;u80h+6* z3#FJjh2FiSFE1kUSru-|^z&6$PmJwW>iel@g~UrpF2%FQCJ2vrH$Vo>92?43J;rN( zLK!yL93L~3*J&hr2|s%ty=w`1k-MSmQ66Naf9xtQ>-QQNK7L?OUYY(lRNvxZ{bipo zCtvQmKW#p%>VoRF8k@#vJKiYMesa^QR8mh_=~IM5YpY8DbA*Zht(2IsgnLsiLx|nl zY2uvSaoHx$yIns5;{QoP;b})pLN^i;>^g$DiQf)_{SA7bb84vK6mPmG^j z9js+`@z>=8EgvE>57Q|hBL%bs%HqX#GP7cL#qXWIY}{>UXEfW~;}N+YqI}sq%y@jM z(F};ZuB~@lK3SQ)r=*=O_i3Q;Nod=+d9}9XEWBhDfbsrZE&sXWlxda|d(pY_0bdX8 ztw*j=4ifF-39;opt44c!4`kZ;7}PValZceyT`)|6YIXM~#$&QrJ7$x}t$A&iy#|-{ z@Qa2?7GkdgO~!1#-0>HGv6qmtM?2JwXp6T=d=P5inImJT)1$4q?zF*RbQtrSgwB|0 z?P-2|!Eo~GSXB-Jt{`}!Uj93|^rd*lRys@H8qVUF(O>rC?apf)E>19wbviK|9rlZ7$O$l+ZH@xlbaSUBI&I`#2&Z{ZU1+tZ^$>z58{%fT@WvR8i zANNTI4evE=_G^bPnFequ9QrBgGbt-9rMOroT*7^EWrqfv&cwvP1qi=MxEdg?tEGpV%foMDv~?8c{jwTqti@2l zj@Hmbrs4Y(tVEFIp>D@pO^NUwdZvhz1y06@Ady+|Mp5Z|YOQYW&wH9by?OuWpvzEc zY`H#$baC+p=`Zv928=CdAIYa0q$2pTzSg~F^|kr|Kn8tB%i=OyizmXf3uTMRGn$}k&)(fch&jv2u5;JjMeUQtkb;&!~J2R2YlATPkF3zeXHck-cs0mxvq0^fVcF$^_6hP-@Ze0GG6X>8-R#S<4dflyalLOs;uUx4>owJMOrBoAaKW zL!DUUuO6XqC{b|6{r?w4Hh)q>{CWMqPjs_4-36kCdATl%d2DcTTU|R1pD8PYCcM+U zU0zKE>{z@Z%W$+IK^Z_!0l3T~5K#g0n(hkJrUSyH_-^jTNCrxF*zOynkl=HlcV?`- 
z`4{S)@doKM@HNZw-y}%0zYtX6{ZN3jsY91aFC{s=%BVVDiD=^}dTo`7Z}21VY}ntP ztxga|1%VC#r9)!dZ;~#NT?xDa{O=C~I8FrHAg3%vs9gZ9fGHdVNI1ej0H_>20=RGF z?2hjcfY}vB{QVheb=aC9(ZjEe_gr7&Hg12RZ#JQl{Y}#F<7MMn(m5k=*i*+&5I=89 zoGqj91t*amNCxa|LYE^)n@18%(k`~?Ys{F+-Y*T>t+3VfVs8a!_7^S{DK|%AmP6yK zCt-aB&vPy&+k63P_yu_2O_YHik}WXdT_oNReigh%BXNEn_?slccn*U7{#Vz)&n5s4 z2CMlGM4GT}0O~-(1U@hJ7a%PaNM&=70m`U3cz7b>$n7j^VIFS?`P)MPTo)w*Lw-&Q z;HXfg0HUntte40H`rCj2_@mF7fBQLL4DWOJ>L`kCpKfdYSiJ>>EkIs}{{&hL&**-W za00^#X{S3S(*hU}U;HLnj5|&s2!sFjQh$A8EE;i6H4ZuMg#!q-cD3f;B!l^WpcT;M z$WG({8V#oBdW(11(ml_;tc^UF4R1KznLhT~1g0KaK>XWxAxuMCmDs_RigPS?+yphB zcI4810S_^q+JK>}2-8!mKW6r4pZo~b$0OLm(g`NJ*fCxESSru|%dSmD*`G5k1pH=W z92xQbFaV+b1;;P@0{9@x$iKgJYxZB_;r9;{&iCdCh5#-FM_&rw-+%<3BmqRny7fu` zi0iZA-=6{ca=X zT2AK9@S+afs&UGG`k26ncSt|^PvigD;ZnjJz#YM3uxuY(RUPj!p)d42=casK8P?{X zi{SsvA@bZ*x`v|*mlMJ<6f;@;hB+-BpPC6hQ#*bo8?U z@7-6S_9>>gTbP0&hU)DyH?El_aN@XTI(<;on=F*xX5o7M zR%9bYB?kbD`QN+DUxV`d68-l2`KC-f3rXfN%R|$ZjWtHrKe0aIIoyVN+FpC80rJ&| zoCUKpUu#8vy1?9#ym~{)_4zj;ntU@9)B^kLk_~Lj0g|Ssa)2}7dNxDVi9O1(1FIZ4 z-XC0USyW4P;%_rXr|N|FL2|rc z``*SNy7pylpuxzC7q|R1JllUL-wo9O>X%Yv>}!f9SZ9nM;@UeZiK=>y9Mt0QO;($d zhs{0{-=XBK+-##nS!h<<@u?f*`uNNJAJuPRwS>BthSLMki;c4w=RPRAQwRRp%)LTw#)n{U;dhmks|be{+o3lgC9c#-1GqpPY8 zWS_mBq8VP)T5QBr>p*`nEqg#NDe>WBA%X4n9-k(>@AVcIZMr&hNabEzQ0!@2(Nnm9 z+{6!Ik6TR1+s(@3qmXmU|9<%(M|g3!$>Ls& zbI_^jh&b#4O5?>6%}bV4%gAxVRJ&>dP`~7L-LnIDuOcqBK&Ws(KmJ7#+qJoo{%aRa z-i{z8=rxQHEHZ)3*+esYra9I(ckWF?FO@LnrbQs7SEmR-WBX4pSL95Aces!3>Z}!$ zrV=W8k&FcVC>pc%jiQikRAG`%IW{$^}ob0|Ha{k=u5xmH!@!Z4?@|2g5VdGE<)v6>N{%Q zW6IJ@s(Pmj?*_;0v8IsdsjxW?D4!^ku^~q63CbVQc^v@b7a02l6d4buK=RKJ|Hdf-PoK_t7CXew*2LNo3E-XN}lUR*yd8dbbni9uzejJ_{3$#+Zc`7W;zqZLl)-J zvApP#bPU(K)4w!kn8J^P6g7^Oj0)Yod>qS7^bA1tH!hFziZ;D7OFD3r?OZ(4KF>$; z;X!Er@nTo>F&^x`sNrUz^`v$*i#S;pV_A+>1xZapG-gu^(Uew^{txO{3HOw!BGu=Cf?A8?V70-@`)5 z)WhQC$)ZFU2A=>nWuG<((kraZHcR?AmaA)I}2tJ3?lJ{Vo;yIu>NR&#HAbGPY9a;_7u! 
z^|qq0!R_zj(=4Kw&3?LeALTH%zLrIg{>YHjZhK3Pyn{iJiuuv56Q$3AldLT$!n7+G zrlqKP)u~*o`kqC`2@(zfPLlll5dX6sDvTaUPi7bRVe^z3~$!(h~a&UQU>PNph0JLg92V_d?c@0Y`==JoN z@+*qM-PKtUOw&hG5!5cGn*;3hl%Fz z)lp18Ct;R&92#*Ic595_@O@owN}^qXC-h8dc%;YYgd3dux3Z{dWPh{kgzJ3AOnRhZe#ckcV#qCcYunxI%t4`w0v4*V8fWFT zOk8>qBtF-cG=><^4MS4k=zxAITTnpKP7Nk>)`JVuK3y!jqx`HA|EZzfh4j4|?d`BA zkprjv+bgxn0i`Fe>BvIbjE@Q$`2~jU1acRx-O}R6s*qD49w4zHs15%4n`E6~V#08b z54`7AvG}h`_d|1iBnz>z?NEu4stWHJ<>wbmOX8E z46!zR#d`Jtp{}fAGvJ!?irToR#%c?16M(8hmg(hK>!h-}KGIfj7HZ*VDs0SdVw#UW zK4;p4Y&m0S;O*(u5(t=j{;d7P(@o>Ip`mrAHbSA?B(DeAy2<8{m6>-7Q()TBx&r+o z%kBa>!g|Y&wyE_Su!J+f`hJ`&M4s-XpPJD4IwYNl906&M==~PZ;UxH=oc(n45RpFs zUo!b{kWTyp>!8Doo=`ly0||jC6mPHx#Aa=H)YQ~J_7~ZN^3#b`h7Y}Z+7r8LHg+;C z*yVxAW|V=39!U*Qd->0AhiT(S>cFI+c_0TU7CH-b&6PO&UO+tuWr8!K|RX zPdbLzGca*+GX-kw^Lb5v@w02b$K*lM9L1J!x@mjan2q@F{Av9HYPG?%BsO7OJxVXS zxjGysR#>(s^4GjswiycmdbxrLGvtJup@d(#Q^cl0eLP!EfI#zX#UwI#^4bGZpZNPQi+! z*JWAml;pW^<)o%a>AJYwz8E6(O>bbQi`3A7L38cWYbd0{j}2EH=!kJ9x3Bh|i}q%n zo`3eZhI#+u%P_r%{AScw%Rk@!qW<@L_j^JXKyTSgCIZGPU$32jI%bF?>{ou1 zX2} z+Lq zs(w7{5bTrPkbS>F(6vLirlCU!8%zl`qLy0HlG{%cP@D@f&*pYD9W2c*FuU{?1?7CJ zd2lXw6ntJtAjO5EFQ?;@4Y6aZpZz2zDYyr<%Vo=LS2X07bhfmlUqnwPhH#cPcRKW@ zQ(@43xtJtmf~QAFY;|htVU1&dyfB^BLE0*2x3y50Iq$s_;YH>HjL8+MY|VVLVw_;c z-s9vwU&(a)gGWR!umk7=HyK_F>+NXVT+cbD#C?5&XS_-x9OMwNVp&8qw1v?2~WAJNFRC=#wW((1jEUZnfYLM9@IE4)=_+*9i~ z^Xm-(1#>Gu^p)jKEOdD^%f0T*yY@XFu8FE8WLTil151GfrXlDD*rYe3M{ao4# zHoDTw%q#j#i1+*Xhx0@w;SDf{9~o2v6Hsx{Xey#1H@l{+Ip+n7WQFR@Fot$sDF#wO z5=`CgH!;stw|V&%-5N(+Rz~X$>t>TA{CZaM?6f5{GZ71+(qG60@|o2Ub~ zxWqGMc(>#zc(WKq3}ec}suMMx{xZE>fLvhM*bTLm5_t}w`=&L2lYEU#*zm4mI-xkz z21s?;V01XS&Lrkw9AnEIO)0jcG*)fsc^RC3)3kBzLyrZ2ZIlW5YOw4ZbH+_w-RX~u zHjzrq1gf8YAd|If2m?}L#?ovM?nQ-6wxUf}jOxCl(wI8@X69NvRn>bo5%V&1ig|u( zf&K8|h`LCVn1i4XqntHyd8uE`V9ez+CFTg?+H77j82cEI}jiiYUi&{5fV>j-2MpW7^ z4igGRGRibp#oB_tMHQCnDYsmFwX6+`#(~g?4=9_+aQFpj2rX0xeg$u%j$;Z2F*B=p z3I-dwF>G5`iE;DWOvjtQS@PReSZ~E{y`B}WI8JJGEU=$IRy$Y4tZcsI5BAB)1eg_5 
zf5E$5s{jfGC-}M0ql3q*QBZ_pDKI0eKJ+ouZ_U}q-k;!A9vn%(hQ2(de`9^aXaJ?khnzyZXYiw zx0UzZL5}?kk`oouxpRDW-Go3EVboWCZz913-8W*7IFZOn2V>twnBo_Sn!oe zW@h-0E%lG5xWmTx6^+Kn84GzIiv0`9CdV%1o} zahg&1Z&clum^rLE{U=h!n6mId&~?uJQA*q(sb4d^!nrN|njNP*F*EK5225#&Dd)dbQ)xj|+GfyQ9s#5u2S1%c+R|BJH!DVVjN6cRk)ww+PGnw^ zF%IG8AKmmUrWa|Io9|yZ%EJd9Ipd#VBB`KME%lwLF5(H;@qHZ>cFVz2N~ZDD+)}$N zdaUg+ll+Ztn$e|+27tg!0A_F=j3j|UaP24tMa`TIW^Uew4`K-C18u#{M=?j&IPTB* zCOu?;W!&U*x=xxVBGC>ig>P*lY`HnHTc(zUS<`r3sharCo`%WnY_Mpb zCf`KnUmOJ|Xn=M=7^vrLmc;!g$?k08X^eat+673!04HcOO>E9Um35I!tg5u})mHLJ z^skw_ul9WJ8lv!8N&@hqyGWpwe-kJgpsr*5*!s#$QcdyR#-UZI6B;>VX`LL5kFI@r zb$Gkc9dM$9?b=>2Z48H5~agir!NQt0A%=h88mkvEo za*MA`+uuCU+^U4QY{&qpp>TkpWXX;0blr-O)Py233x?b~tdfMO%VbrX3_F{54Uxq|qOjG5)V!&5(Awq+pO5kw+S_S|f^_NDdrqJPGwNZEf1J z_r=&PNPuvf{;Uu5(`>z7kv&F-F>wOO6VT53sfJSe~q?>?!=wcp~O9j(3< z7@=Vk@@_%o((}hY_ZKc*^frbAqUA=&8z0d{G{wCncvn%qPStX`X;|i!HtWE+bV0o9 z#8d@0Jjz-XTj)M<3OjNu1xWYORW~&cTGtf#P{unu@BSwq) zxg+C1=<3e!KqwKGc;Vtn0;+cy8HErBka-mtn{&p7T>@=2P(}X!=Z$=E z>H&uGEPNNC|MreRyz^J1KAg7CqsYWbX~eP*(I&$0CwNZ@%1dm>BgQ{~n64n@@WswJ zx^|=>c05b&qYQ^^Y22gEq8c7Oao*TVn4YdK%uPRv=ww}}Or-Q93ODv5N)T?t53`>X- z12p8g{kB-YFeRm{OW8+J>4)O;@&duH+n$Y}inn$rL9NdW!JKesK(r5R11Jx`)e zVcg+X@$%1!=@$(|o8}rYRy$nN?e6YpQ{zr9zg{$VR(^G!+$Ur&^=GZe0(*LEsV!3E z@*8AKVli+nf+Ro?)k5SLCJ1IcIP#TKtEefh4CSv@ab)je?Ouv1kkZvRH~u%1WwQ8= z`$k6&`@v3pLyP^fQ@_PYlR70H>cDc6CW9R$>M(YK04&Bi7me>58dxgYnTBA1-EH@E z=+c9V??4Zqx>Qd(IgVxR&w}D5K8Al?pKJQ~$FD6R@}N_C8G}Bp4c9*jnRSY9s8Ey@ z`0{czUB8XMa>i#zq{R+)!f8}7b~~|)Oml_q(*3W4rtHS9Lmzz$1+OUJu4IFo$JLc zlwWp|wKutj%Qt_Gy}Y$)09ZZm(R&~`u=)<4FoU17ifAna5@Ke_iFMjQESFt!8^c3* zg6sP11XE!7;4H@O8UCr*(`mu$4M)avuR_0z$r$*@M!`J16NGebRwm|7qS4+gnX`xA zXZ+bOgpRpPBK)POVEOTRo=i#ga@BG53dAIDyAF{P`IVq`Mvjl!#mKcgGvPs)xcLBS zc!-wJ#FnvoRa1R?b6^Bje!$w|OC686H=M35`RPQ8{j3&jB>MP+8-#P?j&TT)b$%9t z_vwg_cTrbV+j*UciJ58O?W&E$405$tgJ1lyI{qAzz#|(*)0+<*;DV~X2w$!$^NguaV6$7)HpX~Evn;VQ zr(D^06vvI`Xax=(jUhqjNT%CR=-ezW}+ za{hep{|8Xq-#ER$g$VsGjig%VU`q-xeIs2Vl+VE%5lCBjCGbrDVT$Pn^DlSDpN|Zy 
z<$A*wu-bsQ>f!<@3?_4C4O2W%74=hq832tn&VlYUmU}x8dda2;AGNwGWw*lAt_B(3 zh>jGz=<=R!)>~wMvklIPdOi4SxkQN)dbUVggK>NV;W+C>(D%9&&Dr&OL>H*Lx;i>p zRey54WOSc$mv4b{V#LL72t>}056&WpaPxkEsbj-(cf=Qh^O-$yAPuP97v|$Lot$8#SMQ9<#*g&z)J1)c7!sgAp>yR;{l*_v zdL#6?2z}O>9B~XlOQGzFFGBFNM-TF+CZ{dzliaG0>8nD`BY35{8B}kQ`&+#>KagK} zuax|sLk0|8(YC(v@PXC?c#%bm)tjPg{jS0fSjB!azsnM=k;$@@#LesDpXTCOh!Ucf z@W2v@>p`7*#xySb(y;pvaFu7qO5!zTS;94sdJD@>kU@u1=HwGD2c;<$35qe5y!~=S z)>(@qW6L}UpusDFUOlIt0u!mlfXI1S+Zo5$?5W@7;D`|uFv)d~kRLvsO`MIm@STA{ zQ;>vK6Y&ETaR;%L=$gIu7A8<;=FmCr7?C9uvQ4?Z+0YMadPxgqmaY53) zSX;1riB{*hX|Pi1SIlj@H!1GEEv<)4UP}vHSH1RQNz=;Y$+G!n(1U&@tc7e0cq8Te>$P^ZeT@ z9C@7^<<=bgmwll>vX-Ly^6$ZLFXPPLu2`%IcSnyQ%7G z(jh23nmTH_;*ccgp{wSH@uz!`WO{-)GQ%3yd?Ipj{HpA(43=q#^%&o?&ojOslF1Wu z*Iz?;jkFnq{DLTLdC712Nkm7nxNsUWCWz<74qWfGT|S9>LqnN1d@D9mCZ?%oSPI|@ z)s#N2eX7bd3jNQLySZQlKn4rOTU}0c$B{wGe!d)cMh|bXA3eQV+nBCou9>DKtMsvs zX?6nRyshPe(IxjAF$H&^B70fF-W<$dHC;zh_;J`V3Z*}ZF^crda|^uAYF@3_w%EP)g(x%AhV5G^XcVp_u#>j=D?C1VcH0}TwYa(_N zLtn>DhEt2gNR(}zsUH~^ z(;l;~7+rYn+kcIUrrj2s&&SLWn1})rIVkYL7Z-K%_W6jY4%!cIg1_4}tD^0a79Xl4 z%9qC_PA-62wLrxre$zvLQ!6kAV^B;`cr%e>6i|6-_;s)iQm=~NJKZtg3bCRVjrM=vxNWaTN@=FKTpA|K%W^WB zHYrVd3bMk%(Llui!EHXc44V8nuyv}n^T@P#w$H<(frQ(5~06ISP#752M(3lrC4Nh>~|IVhiF;+KUWGr$AYwf_{@!#}NzB z?O0S>WFVN_T|;k(XPl$~Ctl`oD~7E-c1DBdd-MZ3(Y=Ced)t*{r#HSuo0vSfd67O! z{XUJ@KiXFQ@Aj4dFaC25^&LN|pUvBU`%!SE=d-)~j*{4V8Vzo@8SW3Rplhb0aW%((@vv87 zosh9z8j;bZwYxQ=qkXtlTJV+OS=SEc&1;^>8jn8W_RRW`TqI|L5$^F2*HC3piNR_9 z^@Vd`7wSjE=c+VeP9!$mi5Yb5bf09CRMWRy0j_6bBsMHby;Fi}{Z9WtgPoMjY$xjN z#z1{>~dV?i_gBXXBP<$c$G!?r!+isRBYLf0_ zX;P=ZgXHS~e!3aFQ zNeRs%uQn5A? 
z{snG*&;2aq5b~Z|_NVlEWbarIvlGxakJ#R*OaDGUc zn4Dt%m<6@btp3>uwx&(bo6Lxy@^Pc@47o>gxS^}(r8tpFcmjCNW6WVwIkdC6X)G3S zre+HMz*)M%zNc}^Y2%&3kCqmY40QmK>-=5Vm%_XI5RLA;cH=YE$CgN+^g@iHdhfoF zVl0=A8>Uv>sw#CtuEV$b#rPh&}~L3PiBHvR?hp8*)08QD0Tl zR2@ICBPn`2OPxnf_{Tya*_KL&*Vo1@3d`FoIox)F2;ruAi^!tx+UYyJ8$)h5ci&Vw zmLCP0YTa|=O~u30AW^@8bmm48U-a%Jm}rHa&DcmQvv+d$Rl19|sCwJkFEQmJv?^S< ziICI!#s_HUtwZd{!V#jtA&s&kiKE!yjYpc*tpKiR!uN_EfgP8DxA!jj__Ey-HB=O} zTNLZ79B~k%H+Ko^k@RXZ*U=o@%G;H@E3hmuQ>i^^F)7l77zKvsU@-bW6)Fi@=iikm zO!2pm2zXwc6a1=uQpaw1qoaMHosW=@qttk}71gEEPD834m<^fd#;B{7kuGx|}6C;R@&A0n; zV#Pj>mrgi)%g{WN&2ixzI6HpmL{;Qt7WXTQJbF-OZpyC@=oPA*4_`eB7_Pihr0Bq( za^yN^S{5Mk!S(&VHYJ+-U6%7#H4Mdot~BYc!M{2T|FSEl6Cj?r1jbe;gIlykFyUQc znOI~FcF{P{a;R8>*JSI^QB4*iFqzTf#n0!rOH!~L8eYeHP>uA%51vU9Zs5<0@w(>t z%$)85)JKHnQRifybyZ@DD_5UlcG(W)uj>rP7vn^;$#DBeG+2O-c}fq=4HZ24hMgVA z!AY1;On1pW*@&{R!)e(lQgJdUuMNHAGYk9`p*$^gv+k|>i!&a-E>s01(ocrqI|S2S z=(SMS!P3jbPgL-WJiTMF;G=1}H-Hwol;7o1P4H|kTPR^EC1J^QKg z0nFKB5u5Kx4-aUCjr#aAy)Qk@K&WzGiB8YZ|P@yDiIkb9*^$ zPsQ?rPmt92t?%!Uj>g!Mt=TSWSUt<)cuHc({hpfA_kR$X`0r4U|3WVQdzQohE!Ek- OhY|k=ekS`p`F{ZiZ(~0I literal 0 HcmV?d00001 diff --git a/examples/ernie_sat/README.md b/examples/ernie_sat/README.md new file mode 100644 index 00000000..d3bd1337 --- /dev/null +++ b/examples/ernie_sat/README.md @@ -0,0 +1,137 @@ +ERNIE-SAT 是可以同时处理中英文的跨语言的语音-语言跨模态大模型,其在语音编辑、个性化语音合成以及跨语言的语音合成等多个任务取得了领先效果。可以应用于语音编辑、个性化合成、语音克隆、同传翻译等一系列场景,该项目供研究使用。 + +## 模型框架 +ERNIE-SAT 中我们提出了两项创新: +- 在预训练过程中将中英双语对应的音素作为输入,实现了跨语言、个性化的软音素映射 +- 采用语言和语音的联合掩码学习实现了语言和语音的对齐 + +[外链图片转存失败,源站可能有防盗链机制,建议将图片保存下来直接上传(img-3lOXKJXE-1655380879339)(.meta/framework.png)] + +## 使用说明 + +### 1.安装飞桨与环境依赖 + +- 本项目的代码基于 Paddle(version>=2.0) +- 本项目开放提供加载 torch 版本的 vocoder 的功能 + - torch version>=1.8 + +- 安装 htk: 在[官方地址](https://htk.eng.cam.ac.uk/)注册完成后,即可进行下载较新版本的 htk (例如 3.4.1)。同时提供[历史版本 htk 下载地址](https://htk.eng.cam.ac.uk/ftp/software/) + + - 1.注册账号,下载 htk + - 2.解压 htk 文件,**放入项目根目录的 tools 文件夹中, 以 htk 文件夹名称放入** + - 3.**注意**: 如果您下载的是 3.4.1 或者更高版本, 需要进入 
HTKLib/HRec.c 文件中, **修改 1626 行和 1650 行**, 即把**以下两行的 dur<=0 都修改为 dur<0**,如下所示: + ```bash + 以htk3.4.1版本举例: + (1)第1626行: if (dur<=0 && labid != splabid) HError(8522,"LatFromPaths: Align have dur<=0"); + 修改为: if (dur<0 && labid != splabid) HError(8522,"LatFromPaths: Align have dur<0"); + + (2)1650行: if (dur<=0 && labid != splabid) HError(8522,"LatFromPaths: Align have dur<=0 "); + 修改为: if (dur<0 && labid != splabid) HError(8522,"LatFromPaths: Align have dur<0 "); + ``` + - 4.**编译**: 详情参见解压后的 htk 中的 README 文件(如果未编译, 则无法正常运行) + + + +- 安装 ParallelWaveGAN: 参见[官方地址](https://github.com/kan-bayashi/ParallelWaveGAN):按照该官方链接的安装流程,直接在**项目的根目录下** git clone ParallelWaveGAN 项目并且安装相关依赖即可。 + + +- 安装其他依赖: **sox, libsndfile**等 + +### 2.预训练模型 +预训练模型 ERNIE-SAT 的模型如下所示: +- [ERNIE-SAT_ZH](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/ernie_sat/old/model-ernie-sat-base-zh.tar.gz) +- [ERNIE-SAT_EN](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/ernie_sat/old/model-ernie-sat-base-en.tar.gz) +- [ERNIE-SAT_ZH_and_EN](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/ernie_sat/old/model-ernie-sat-base-en_zh.tar.gz) + + +创建 pretrained_model 文件夹,下载上述 ERNIE-SAT 预训练模型并将其解压: +```bash +mkdir pretrained_model +cd pretrained_model +tar -zxvf model-ernie-sat-base-en.tar.gz +tar -zxvf model-ernie-sat-base-zh.tar.gz +tar -zxvf model-ernie-sat-base-en_zh.tar.gz +``` + +### 3.下载 + +1. 本项目使用 parallel wavegan 作为声码器(vocoder): + - [pwg_aishell3_ckpt_0.5.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/pwgan/pwg_aishell3_ckpt_0.5.zip) + + 创建 download 文件夹,下载上述预训练的声码器(vocoder)模型并将其解压: + + ```bash + mkdir download + cd download + unzip pwg_aishell3_ckpt_0.5.zip + ``` + +2. 
本项目使用 [FastSpeech2](https://arxiv.org/abs/2006.04558) 作为音素(phoneme)的持续时间预测器: + - [fastspeech2_conformer_baker_ckpt_0.5.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/fastspeech2/fastspeech2_conformer_baker_ckpt_0.5.zip) 中文场景下使用 + - [fastspeech2_nosil_ljspeech_ckpt_0.5.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/fastspeech2/fastspeech2_nosil_ljspeech_ckpt_0.5.zip) 英文场景下使用 + + 下载上述预训练的 fastspeech2 模型并将其解压: + + ```bash + cd download + unzip fastspeech2_conformer_baker_ckpt_0.5.zip + unzip fastspeech2_nosil_ljspeech_ckpt_0.5.zip + ``` + +3. 本项目使用 HTK 获取输入音频和文本的对齐信息: + + - [aligner.zip](https://paddlespeech.bj.bcebos.com/Parakeet/released_models/ernie_sat/old/aligner.zip) + + 下载上述文件到 tools 文件夹并将其解压: + ```bash + cd tools + unzip aligner.zip + ``` + + +### 4.推理 + +本项目当前开源了语音编辑、个性化语音合成、跨语言语音合成的推理代码,后续会逐步开源。 +注:当前英文场下的合成语音采用的声码器默认为 vctk_parallel_wavegan.v1.long, 可在[该链接](https://github.com/kan-bayashi/ParallelWaveGAN)中找到; 若 use_pt_vocoder 参数设置为 False,则英文场景下使用 paddle 版本的声码器。 + +我们提供特定音频文件, 以及其对应的文本、音素相关文件: +- prompt_wav: 提供的音频文件 +- prompt/dev: 基于上述特定音频对应的文本、音素相关文件 + + +```text +prompt_wav +├── p299_096.wav # 样例语音文件1 +├── p243_313.wav # 样例语音文件2 +└── ... +``` + +```text +prompt/dev +├── text # 样例语音对应文本 +├── wav.scp # 样例语音路径 +├── mfa_text # 样例语音对应音素 +├── mfa_start # 样例语音中各个音素的开始时间 +└── mfa_end # 样例语音中各个音素的结束时间 +``` +1. `--am` 声学模型格式符合 {model_name}_{dataset} +2. `--am_config`, `--am_checkpoint`, `--am_stat` 和 `--phones_dict` 是声学模型的参数,对应于 fastspeech2 预训练模型中的 4 个文件。 +3. `--voc` 声码器(vocoder)格式是否符合 {model_name}_{dataset} +4. `--voc_config`, `--voc_checkpoint`, `--voc_stat` 是声码器的参数,对应于 parallel wavegan 预训练模型中的 3 个文件。 +5. `--lang` 对应模型的语言可以是 `zh` 或 `en` 。 +6. `--ngpu` 要使用的 GPU 数,如果 ngpu==0,则使用 cpu。 +7. `--model_name` 模型名称 +8. `--uid` 特定提示(prompt)语音的 id +9. `--new_str` 输入的文本(本次开源暂时先设置特定的文本) +10. `--prefix` 特定音频对应的文本、音素相关文件的地址 +11. `--source_lang` , 源语言 +12. `--target_lang` , 目标语言 +13. `--output_name` , 合成语音名称 +14. 
`--task_name` , 任务名称, 包括:语音编辑任务、个性化语音合成任务、跨语言语音合成任务 + +运行以下脚本即可进行实验 +```shell +./run_sedit_en.sh # 语音编辑任务(英文) +./run_gen_en.sh # 个性化语音合成任务(英文) +./run_clone_en_to_zh.sh # 跨语言语音合成任务(英文到中文的语音克隆) +``` diff --git a/examples/ernie_sat/local/align.py b/examples/ernie_sat/local/align.py new file mode 100755 index 00000000..025877dd --- /dev/null +++ b/examples/ernie_sat/local/align.py @@ -0,0 +1,441 @@ +""" Usage: + align.py wavfile trsfile outwordfile outphonefile +""" +import os +import sys + +PHONEME = 'tools/aligner/english_envir/english2phoneme/phoneme' +MODEL_DIR_EN = 'tools/aligner/english' +MODEL_DIR_ZH = 'tools/aligner/mandarin' +HVITE = 'tools/htk/HTKTools/HVite' +HCOPY = 'tools/htk/HTKTools/HCopy' + + +def get_unk_phns(word_str: str): + tmpbase = '/tmp/tp.' + f = open(tmpbase + 'temp.words', 'w') + f.write(word_str) + f.close() + os.system(PHONEME + ' ' + tmpbase + 'temp.words' + ' ' + tmpbase + + 'temp.phons') + f = open(tmpbase + 'temp.phons', 'r') + lines2 = f.readline().strip().split() + f.close() + phns = [] + for phn in lines2: + phons = phn.replace('\n', '').replace(' ', '') + seq = [] + j = 0 + while (j < len(phons)): + if (phons[j] > 'Z'): + if (phons[j] == 'j'): + seq.append('JH') + elif (phons[j] == 'h'): + seq.append('HH') + else: + seq.append(phons[j].upper()) + j += 1 + else: + p = phons[j:j + 2] + if (p == 'WH'): + seq.append('W') + elif (p in ['TH', 'SH', 'HH', 'DH', 'CH', 'ZH', 'NG']): + seq.append(p) + elif (p == 'AX'): + seq.append('AH0') + else: + seq.append(p + '1') + j += 2 + phns.extend(seq) + return phns + + +def words2phns(line: str): + ''' + Args: + line (str): input text. + eg: for that reason cover is impossible to be given. + Returns: + List[str]: phones of input text. 
+ eg: + ['F', 'AO1', 'R', 'DH', 'AE1', 'T', 'R', 'IY1', 'Z', 'AH0', 'N', 'K', 'AH1', 'V', 'ER0', + 'IH1', 'Z', 'IH2', 'M', 'P', 'AA1', 'S', 'AH0', 'B', 'AH0', 'L', 'T', 'UW1', 'B', 'IY1', + 'G', 'IH1', 'V', 'AH0', 'N'] + + Dict(str, str): key - idx_word + value - phones + eg: + {'0_FOR': ['F', 'AO1', 'R'], '1_THAT': ['DH', 'AE1', 'T'], '2_REASON': ['R', 'IY1', 'Z', 'AH0', 'N'], + '3_COVER': ['K', 'AH1', 'V', 'ER0'], '4_IS': ['IH1', 'Z'], '5_IMPOSSIBLE': ['IH2', 'M', 'P', 'AA1', 'S', 'AH0', 'B', 'AH0', 'L'], + '6_TO': ['T', 'UW1'], '7_BE': ['B', 'IY1'], '8_GIVEN': ['G', 'IH1', 'V', 'AH0', 'N']} + ''' + dictfile = MODEL_DIR_EN + '/dict' + line = line.strip() + words = [] + for pun in [',', '.', ':', ';', '!', '?', '"', '(', ')', '--', '---']: + line = line.replace(pun, ' ') + for wrd in line.split(): + if (wrd[-1] == '-'): + wrd = wrd[:-1] + if (wrd[0] == "'"): + wrd = wrd[1:] + if wrd: + words.append(wrd) + ds = set([]) + word2phns_dict = {} + with open(dictfile, 'r') as fid: + for line in fid: + word = line.split()[0] + ds.add(word) + if word not in word2phns_dict.keys(): + word2phns_dict[word] = " ".join(line.split()[1:]) + + phns = [] + wrd2phns = {} + for index, wrd in enumerate(words): + if wrd == '[MASK]': + wrd2phns[str(index) + "_" + wrd] = [wrd] + phns.append(wrd) + elif (wrd.upper() not in ds): + wrd2phns[str(index) + "_" + wrd.upper()] = get_unk_phns(wrd) + phns.extend(get_unk_phns(wrd)) + else: + wrd2phns[str(index) + + "_" + wrd.upper()] = word2phns_dict[wrd.upper()].split() + phns.extend(word2phns_dict[wrd.upper()].split()) + return phns, wrd2phns + + +def words2phns_zh(line: str): + dictfile = MODEL_DIR_ZH + '/dict' + line = line.strip() + words = [] + for pun in [ + ',', '.', ':', ';', '!', '?', '"', '(', ')', '--', '---', u',', + u'。', u':', u';', u'!', u'?', u'(', u')' + ]: + line = line.replace(pun, ' ') + for wrd in line.split(): + if (wrd[-1] == '-'): + wrd = wrd[:-1] + if (wrd[0] == "'"): + wrd = wrd[1:] + if wrd: + words.append(wrd) + + ds = 
set([]) + word2phns_dict = {} + with open(dictfile, 'r') as fid: + for line in fid: + word = line.split()[0] + ds.add(word) + if word not in word2phns_dict.keys(): + word2phns_dict[word] = " ".join(line.split()[1:]) + + phns = [] + wrd2phns = {} + for index, wrd in enumerate(words): + if wrd == '[MASK]': + wrd2phns[str(index) + "_" + wrd] = [wrd] + phns.append(wrd) + elif (wrd.upper() not in ds): + print("出现非法词错误,请输入正确的文本...") + else: + wrd2phns[str(index) + "_" + wrd] = word2phns_dict[wrd].split() + phns.extend(word2phns_dict[wrd].split()) + + return phns, wrd2phns + + +def prep_txt_zh(line: str, tmpbase: str, dictfile: str): + + words = [] + line = line.strip() + for pun in [ + ',', '.', ':', ';', '!', '?', '"', '(', ')', '--', '---', u',', + u'。', u':', u';', u'!', u'?', u'(', u')' + ]: + line = line.replace(pun, ' ') + for wrd in line.split(): + if (wrd[-1] == '-'): + wrd = wrd[:-1] + if (wrd[0] == "'"): + wrd = wrd[1:] + if wrd: + words.append(wrd) + + ds = set([]) + with open(dictfile, 'r') as fid: + for line in fid: + ds.add(line.split()[0]) + + unk_words = set([]) + with open(tmpbase + '.txt', 'w') as fwid: + for wrd in words: + if (wrd not in ds): + unk_words.add(wrd) + fwid.write(wrd + ' ') + fwid.write('\n') + return unk_words + + +def prep_txt_en(line: str, tmpbase, dictfile): + + words = [] + + line = line.strip() + for pun in [',', '.', ':', ';', '!', '?', '"', '(', ')', '--', '---']: + line = line.replace(pun, ' ') + for wrd in line.split(): + if (wrd[-1] == '-'): + wrd = wrd[:-1] + if (wrd[0] == "'"): + wrd = wrd[1:] + if wrd: + words.append(wrd) + + ds = set([]) + with open(dictfile, 'r') as fid: + for line in fid: + ds.add(line.split()[0]) + + unk_words = set([]) + with open(tmpbase + '.txt', 'w') as fwid: + for wrd in words: + if (wrd.upper() not in ds): + unk_words.add(wrd.upper()) + fwid.write(wrd + ' ') + fwid.write('\n') + + #generate pronounciations for unknows words using 'letter to sound' + with open(tmpbase + '_unk.words', 'w') as fwid: + 
for unk in unk_words: + fwid.write(unk + '\n') + try: + os.system(PHONEME + ' ' + tmpbase + '_unk.words' + ' ' + tmpbase + + '_unk.phons') + except Exception: + print('english2phoneme error!') + sys.exit(1) + + #add unknown words to the standard dictionary, generate a tmp dictionary for alignment + fw = open(tmpbase + '.dict', 'w') + with open(dictfile, 'r') as fid: + for line in fid: + fw.write(line) + f = open(tmpbase + '_unk.words', 'r') + lines1 = f.readlines() + f.close() + f = open(tmpbase + '_unk.phons', 'r') + lines2 = f.readlines() + f.close() + for i in range(len(lines1)): + wrd = lines1[i].replace('\n', '') + phons = lines2[i].replace('\n', '').replace(' ', '') + seq = [] + j = 0 + while (j < len(phons)): + if (phons[j] > 'Z'): + if (phons[j] == 'j'): + seq.append('JH') + elif (phons[j] == 'h'): + seq.append('HH') + else: + seq.append(phons[j].upper()) + j += 1 + else: + p = phons[j:j + 2] + if (p == 'WH'): + seq.append('W') + elif (p in ['TH', 'SH', 'HH', 'DH', 'CH', 'ZH', 'NG']): + seq.append(p) + elif (p == 'AX'): + seq.append('AH0') + else: + seq.append(p + '1') + j += 2 + + fw.write(wrd + ' ') + for s in seq: + fw.write(' ' + s) + fw.write('\n') + fw.close() + + +def prep_mlf(txt: str, tmpbase: str): + + with open(tmpbase + '.mlf', 'w') as fwid: + fwid.write('#!MLF!#\n') + fwid.write('"' + tmpbase + '.lab"\n') + fwid.write('sp\n') + wrds = txt.split() + for wrd in wrds: + fwid.write(wrd.upper() + '\n') + fwid.write('sp\n') + fwid.write('.\n') + + +def _get_user(): + return os.path.expanduser('~').split("/")[-1] + + +def alignment(wav_path: str, text: str): + ''' + intervals: List[phn, start, end] + ''' + tmpbase = '/tmp/' + _get_user() + '_' + str(os.getpid()) + + #prepare wav and trs files + try: + os.system('sox ' + wav_path + ' -r 16000 ' + tmpbase + '.wav remix -') + except Exception: + print('sox error!') + return None + + #prepare clean_transcript file + try: + prep_txt_en(line=text, tmpbase=tmpbase, dictfile=MODEL_DIR_EN + '/dict') + except 
Exception: + print('prep_txt error!') + return None + + #prepare mlf file + try: + with open(tmpbase + '.txt', 'r') as fid: + txt = fid.readline() + prep_mlf(txt, tmpbase) + except Exception: + print('prep_mlf error!') + return None + + #prepare scp + try: + os.system(HCOPY + ' -C ' + MODEL_DIR_EN + '/16000/config ' + tmpbase + + '.wav' + ' ' + tmpbase + '.plp') + except Exception: + print('HCopy error!') + return None + + #run alignment + try: + os.system(HVITE + ' -a -m -t 10000.0 10000.0 100000.0 -I ' + tmpbase + + '.mlf -H ' + MODEL_DIR_EN + '/16000/macros -H ' + MODEL_DIR_EN + + '/16000/hmmdefs -i ' + tmpbase + '.aligned ' + tmpbase + + '.dict ' + MODEL_DIR_EN + '/monophones ' + tmpbase + + '.plp 2>&1 > /dev/null') + except Exception: + print('HVite error!') + return None + + with open(tmpbase + '.txt', 'r') as fid: + words = fid.readline().strip().split() + words = txt.strip().split() + words.reverse() + + with open(tmpbase + '.aligned', 'r') as fid: + lines = fid.readlines() + i = 2 + intervals = [] + word2phns = {} + current_word = '' + index = 0 + while (i < len(lines)): + splited_line = lines[i].strip().split() + if (len(splited_line) >= 4) and (splited_line[0] != splited_line[1]): + phn = splited_line[2] + pst = (int(splited_line[0]) / 1000 + 125) / 10000 + pen = (int(splited_line[1]) / 1000 + 125) / 10000 + intervals.append([phn, pst, pen]) + # splited_line[-1]!='sp' + if len(splited_line) == 5: + current_word = str(index) + '_' + splited_line[-1] + word2phns[current_word] = phn + index += 1 + elif len(splited_line) == 4: + word2phns[current_word] += ' ' + phn + i += 1 + return intervals, word2phns + + +def alignment_zh(wav_path: str, text: str): + tmpbase = '/tmp/' + _get_user() + '_' + str(os.getpid()) + + #prepare wav and trs files + try: + os.system('sox ' + wav_path + ' -r 16000 -b 16 ' + tmpbase + + '.wav remix -') + + except Exception: + print('sox error!') + return None + + #prepare clean_transcript file + try: + unk_words = prep_txt_zh( + 
line=text, tmpbase=tmpbase, dictfile=MODEL_DIR_ZH + '/dict') + if unk_words: + print('Error! Please add the following words to dictionary:') + for unk in unk_words: + print("非法words: ", unk) + except Exception: + print('prep_txt error!') + return None + + #prepare mlf file + try: + with open(tmpbase + '.txt', 'r') as fid: + txt = fid.readline() + prep_mlf(txt, tmpbase) + except Exception: + print('prep_mlf error!') + return None + + #prepare scp + try: + os.system(HCOPY + ' -C ' + MODEL_DIR_ZH + '/16000/config ' + tmpbase + + '.wav' + ' ' + tmpbase + '.plp') + except Exception: + print('HCopy error!') + return None + + #run alignment + try: + os.system(HVITE + ' -a -m -t 10000.0 10000.0 100000.0 -I ' + tmpbase + + '.mlf -H ' + MODEL_DIR_ZH + '/16000/macros -H ' + MODEL_DIR_ZH + + '/16000/hmmdefs -i ' + tmpbase + '.aligned ' + MODEL_DIR_ZH + + '/dict ' + MODEL_DIR_ZH + '/monophones ' + tmpbase + + '.plp 2>&1 > /dev/null') + + except Exception: + print('HVite error!') + return None + + with open(tmpbase + '.txt', 'r') as fid: + words = fid.readline().strip().split() + words = txt.strip().split() + words.reverse() + + with open(tmpbase + '.aligned', 'r') as fid: + lines = fid.readlines() + + i = 2 + intervals = [] + word2phns = {} + current_word = '' + index = 0 + while (i < len(lines)): + splited_line = lines[i].strip().split() + if (len(splited_line) >= 4) and (splited_line[0] != splited_line[1]): + phn = splited_line[2] + pst = (int(splited_line[0]) / 1000 + 125) / 10000 + pen = (int(splited_line[1]) / 1000 + 125) / 10000 + intervals.append([phn, pst, pen]) + # splited_line[-1]!='sp' + if len(splited_line) == 5: + current_word = str(index) + '_' + splited_line[-1] + word2phns[current_word] = phn + index += 1 + elif len(splited_line) == 4: + word2phns[current_word] += ' ' + phn + i += 1 + return intervals, word2phns diff --git a/examples/ernie_sat/local/inference.py b/examples/ernie_sat/local/inference.py new file mode 100644 index 00000000..196d9c6d --- /dev/null 
+++ b/examples/ernie_sat/local/inference.py @@ -0,0 +1,601 @@ +#!/usr/bin/env python3 +import os +import random +from typing import Dict +from typing import List + +import librosa +import numpy as np +import paddle +import soundfile as sf +from align import alignment +from align import alignment_zh +from align import words2phns +from align import words2phns_zh +from paddle import nn +from sedit_arg_parser import parse_args +from utils import eval_durs +from utils import get_voc_out +from utils import is_chinese +from utils import load_num_sequence_text +from utils import read_2col_text + +from paddlespeech.t2s.datasets.am_batch_fn import build_mlm_collate_fn +from paddlespeech.t2s.models.ernie_sat.mlm import build_model_from_file + +random.seed(0) +np.random.seed(0) + + +def get_wav(wav_path: str, + source_lang: str='english', + target_lang: str='english', + model_name: str="paddle_checkpoint_en", + old_str: str="", + new_str: str="", + non_autoreg: bool=True): + wav_org, output_feat, old_span_bdy, new_span_bdy, fs, hop_length = get_mlm_output( + source_lang=source_lang, + target_lang=target_lang, + model_name=model_name, + wav_path=wav_path, + old_str=old_str, + new_str=new_str, + use_teacher_forcing=non_autoreg) + + masked_feat = output_feat[new_span_bdy[0]:new_span_bdy[1]] + + alt_wav = get_voc_out(masked_feat) + + old_time_bdy = [hop_length * x for x in old_span_bdy] + + wav_replaced = np.concatenate( + [wav_org[:old_time_bdy[0]], alt_wav, wav_org[old_time_bdy[1]:]]) + + data_dict = {"origin": wav_org, "output": wav_replaced} + + return data_dict + + +def load_model(model_name: str="paddle_checkpoint_en"): + config_path = './pretrained_model/{}/config.yaml'.format(model_name) + model_path = './pretrained_model/{}/model.pdparams'.format(model_name) + mlm_model, conf = build_model_from_file( + config_file=config_path, model_file=model_path) + return mlm_model, conf + + +def read_data(uid: str, prefix: os.PathLike): + # 获取 uid 对应的文本 + mfa_text = 
read_2col_text(prefix + '/text')[uid] + # 获取 uid 对应的音频路径 + mfa_wav_path = read_2col_text(prefix + '/wav.scp')[uid] + if not os.path.isabs(mfa_wav_path): + mfa_wav_path = prefix + mfa_wav_path + return mfa_text, mfa_wav_path + + +def get_align_data(uid: str, prefix: os.PathLike): + mfa_path = prefix + "mfa_" + mfa_text = read_2col_text(mfa_path + 'text')[uid] + mfa_start = load_num_sequence_text( + mfa_path + 'start', loader_type='text_float')[uid] + mfa_end = load_num_sequence_text( + mfa_path + 'end', loader_type='text_float')[uid] + mfa_wav_path = read_2col_text(mfa_path + 'wav.scp')[uid] + return mfa_text, mfa_start, mfa_end, mfa_wav_path + + +# 获取需要被 mask 的 mel 帧的范围 +def get_masked_mel_bdy(mfa_start: List[float], + mfa_end: List[float], + fs: int, + hop_length: int, + span_to_repl: List[List[int]]): + align_start = np.array(mfa_start) + align_end = np.array(mfa_end) + align_start = np.floor(fs * align_start / hop_length).astype('int') + align_end = np.floor(fs * align_end / hop_length).astype('int') + if span_to_repl[0] >= len(mfa_start): + span_bdy = [align_end[-1], align_end[-1]] + else: + span_bdy = [ + align_start[span_to_repl[0]], align_end[span_to_repl[1] - 1] + ] + return span_bdy, align_start, align_end + + +def recover_dict(word2phns: Dict[str, str], tp_word2phns: Dict[str, str]): + dic = {} + keys_to_del = [] + exist_idx = [] + sp_count = 0 + add_sp_count = 0 + for key in word2phns.keys(): + idx, wrd = key.split('_') + if wrd == 'sp': + sp_count += 1 + exist_idx.append(int(idx)) + else: + keys_to_del.append(key) + + for key in keys_to_del: + del word2phns[key] + + cur_id = 0 + for key in tp_word2phns.keys(): + if cur_id in exist_idx: + dic[str(cur_id) + "_sp"] = 'sp' + cur_id += 1 + add_sp_count += 1 + idx, wrd = key.split('_') + dic[str(cur_id) + "_" + wrd] = tp_word2phns[key] + cur_id += 1 + + if add_sp_count + 1 == sp_count: + dic[str(cur_id) + "_sp"] = 'sp' + add_sp_count += 1 + + assert add_sp_count == sp_count, "sp are not added in dic" + return 
dic + + +def get_max_idx(dic): + return sorted([int(key.split('_')[0]) for key in dic.keys()])[-1] + + +def get_phns_and_spans(wav_path: str, + old_str: str="", + new_str: str="", + source_lang: str="english", + target_lang: str="english"): + is_append = (old_str == new_str[:len(old_str)]) + old_phns, mfa_start, mfa_end = [], [], [] + # source + if source_lang == "english": + intervals, word2phns = alignment(wav_path, old_str) + elif source_lang == "chinese": + intervals, word2phns = alignment_zh(wav_path, old_str) + _, tp_word2phns = words2phns_zh(old_str) + + for key, value in tp_word2phns.items(): + idx, wrd = key.split('_') + cur_val = " ".join(value) + tp_word2phns[key] = cur_val + + word2phns = recover_dict(word2phns, tp_word2phns) + else: + assert source_lang == "chinese" or source_lang == "english", \ + "source_lang is wrong..." + + for item in intervals: + old_phns.append(item[0]) + mfa_start.append(float(item[1])) + mfa_end.append(float(item[2])) + # target + if is_append and (source_lang != target_lang): + cross_lingual_clone = True + else: + cross_lingual_clone = False + + if cross_lingual_clone: + str_origin = new_str[:len(old_str)] + str_append = new_str[len(old_str):] + + if target_lang == "chinese": + phns_origin, origin_word2phns = words2phns(str_origin) + phns_append, append_word2phns_tmp = words2phns_zh(str_append) + + elif target_lang == "english": + # 原始句子 + phns_origin, origin_word2phns = words2phns_zh(str_origin) + # clone 句子 + phns_append, append_word2phns_tmp = words2phns(str_append) + else: + assert target_lang == "chinese" or target_lang == "english", \ + "cloning is not support for this language, please check it." 
+ + new_phns = phns_origin + phns_append + + append_word2phns = {} + length = len(origin_word2phns) + for key, value in append_word2phns_tmp.items(): + idx, wrd = key.split('_') + append_word2phns[str(int(idx) + length) + '_' + wrd] = value + new_word2phns = origin_word2phns.copy() + new_word2phns.update(append_word2phns) + + else: + if source_lang == target_lang and target_lang == "english": + new_phns, new_word2phns = words2phns(new_str) + elif source_lang == target_lang and target_lang == "chinese": + new_phns, new_word2phns = words2phns_zh(new_str) + else: + assert source_lang == target_lang, \ + "source language is not same with target language..." + + span_to_repl = [0, len(old_phns) - 1] + span_to_add = [0, len(new_phns) - 1] + left_idx = 0 + new_phns_left = [] + sp_count = 0 + # find the left different index + for key in word2phns.keys(): + idx, wrd = key.split('_') + if wrd == 'sp': + sp_count += 1 + new_phns_left.append('sp') + else: + idx = str(int(idx) - sp_count) + if idx + '_' + wrd in new_word2phns: + left_idx += len(new_word2phns[idx + '_' + wrd]) + new_phns_left.extend(word2phns[key].split()) + else: + span_to_repl[0] = len(new_phns_left) + span_to_add[0] = len(new_phns_left) + break + + # reverse word2phns and new_word2phns + right_idx = 0 + new_phns_right = [] + sp_count = 0 + word2phns_max_idx = get_max_idx(word2phns) + new_word2phns_max_idx = get_max_idx(new_word2phns) + new_phns_mid = [] + if is_append: + new_phns_right = [] + new_phns_mid = new_phns[left_idx:] + span_to_repl[0] = len(new_phns_left) + span_to_add[0] = len(new_phns_left) + span_to_add[1] = len(new_phns_left) + len(new_phns_mid) + span_to_repl[1] = len(old_phns) - len(new_phns_right) + # speech edit + else: + for key in list(word2phns.keys())[::-1]: + idx, wrd = key.split('_') + if wrd == 'sp': + sp_count += 1 + new_phns_right = ['sp'] + new_phns_right + else: + idx = str(new_word2phns_max_idx - (word2phns_max_idx - int(idx) + - sp_count)) + if idx + '_' + wrd in new_word2phns: 
+ right_idx -= len(new_word2phns[idx + '_' + wrd]) + new_phns_right = word2phns[key].split() + new_phns_right + else: + span_to_repl[1] = len(old_phns) - len(new_phns_right) + new_phns_mid = new_phns[left_idx:right_idx] + span_to_add[1] = len(new_phns_left) + len(new_phns_mid) + if len(new_phns_mid) == 0: + span_to_add[1] = min(span_to_add[1] + 1, len(new_phns)) + span_to_add[0] = max(0, span_to_add[0] - 1) + span_to_repl[0] = max(0, span_to_repl[0] - 1) + span_to_repl[1] = min(span_to_repl[1] + 1, + len(old_phns)) + break + new_phns = new_phns_left + new_phns_mid + new_phns_right + ''' + For that reason cover should not be given. + For that reason cover is impossible to be given. + span_to_repl: [17, 23] "should not" + span_to_add: [17, 30] "is impossible to" + ''' + return mfa_start, mfa_end, old_phns, new_phns, span_to_repl, span_to_add + + +# mfa 获得的 duration 和 fs2 的 duration_predictor 获取的 duration 可能不同 +# 此处获得一个缩放比例, 用于预测值和真实值之间的缩放 +def get_dur_adj_factor(orig_dur: List[int], + pred_dur: List[int], + phns: List[str]): + length = 0 + factor_list = [] + for orig, pred, phn in zip(orig_dur, pred_dur, phns): + if pred == 0 or phn == 'sp': + continue + else: + factor_list.append(orig / pred) + factor_list = np.array(factor_list) + factor_list.sort() + if len(factor_list) < 5: + return 1 + length = 2 + avg = np.average(factor_list[length:-length]) + return avg + + +def prep_feats_with_dur(wav_path: str, + mlm_model: nn.Layer, + source_lang: str="English", + target_lang: str="English", + old_str: str="", + new_str: str="", + mask_reconstruct: bool=False, + duration_adjust: bool=True, + start_end_sp: bool=False, + fs: int=24000, + hop_length: int=300): + ''' + Returns: + np.ndarray: new wav, replace the part to be edited in original wav with 0 + List[str]: new phones + List[float]: mfa start of new wav + List[float]: mfa end of new wav + List[int]: masked mel boundary of original wav + List[int]: masked mel boundary of new wav + ''' + wav_org, _ = 
librosa.load(wav_path, sr=fs) + + mfa_start, mfa_end, old_phns, new_phns, span_to_repl, span_to_add = get_phns_and_spans( + wav_path=wav_path, + old_str=old_str, + new_str=new_str, + source_lang=source_lang, + target_lang=target_lang) + + if start_end_sp: + if new_phns[-1] != 'sp': + new_phns = new_phns + ['sp'] + # 中文的 phns 不一定都在 fastspeech2 的字典里, 用 sp 代替 + if target_lang == "english" or target_lang == "chinese": + old_durs = eval_durs(old_phns, target_lang=source_lang) + else: + assert target_lang == "chinese" or target_lang == "english", \ + "calculate duration_predict is not support for this language..." + + orig_old_durs = [e - s for e, s in zip(mfa_end, mfa_start)] + if '[MASK]' in new_str: + new_phns = old_phns + span_to_add = span_to_repl + d_factor_left = get_dur_adj_factor( + orig_dur=orig_old_durs[:span_to_repl[0]], + pred_dur=old_durs[:span_to_repl[0]], + phns=old_phns[:span_to_repl[0]]) + d_factor_right = get_dur_adj_factor( + orig_dur=orig_old_durs[span_to_repl[1]:], + pred_dur=old_durs[span_to_repl[1]:], + phns=old_phns[span_to_repl[1]:]) + d_factor = (d_factor_left + d_factor_right) / 2 + new_durs_adjusted = [d_factor * i for i in old_durs] + else: + if duration_adjust: + d_factor = get_dur_adj_factor( + orig_dur=orig_old_durs, pred_dur=old_durs, phns=old_phns) + d_factor = d_factor * 1.25 + else: + d_factor = 1 + + if target_lang == "english" or target_lang == "chinese": + new_durs = eval_durs(new_phns, target_lang=target_lang) + else: + assert target_lang == "chinese" or target_lang == "english", \ + "calculate duration_predict is not support for this language..." 
+ + new_durs_adjusted = [d_factor * i for i in new_durs] + + new_span_dur_sum = sum(new_durs_adjusted[span_to_add[0]:span_to_add[1]]) + old_span_dur_sum = sum(orig_old_durs[span_to_repl[0]:span_to_repl[1]]) + dur_offset = new_span_dur_sum - old_span_dur_sum + new_mfa_start = mfa_start[:span_to_repl[0]] + new_mfa_end = mfa_end[:span_to_repl[0]] + for i in new_durs_adjusted[span_to_add[0]:span_to_add[1]]: + if len(new_mfa_end) == 0: + new_mfa_start.append(0) + new_mfa_end.append(i) + else: + new_mfa_start.append(new_mfa_end[-1]) + new_mfa_end.append(new_mfa_end[-1] + i) + new_mfa_start += [i + dur_offset for i in mfa_start[span_to_repl[1]:]] + new_mfa_end += [i + dur_offset for i in mfa_end[span_to_repl[1]:]] + + # 3. get new wav + # 在原始句子后拼接 + if span_to_repl[0] >= len(mfa_start): + left_idx = len(wav_org) + right_idx = left_idx + # 在原始句子中间替换 + else: + left_idx = int(np.floor(mfa_start[span_to_repl[0]] * fs)) + right_idx = int(np.ceil(mfa_end[span_to_repl[1] - 1] * fs)) + blank_wav = np.zeros( + (int(np.ceil(new_span_dur_sum * fs)), ), dtype=wav_org.dtype) + # 原始音频,需要编辑的部分替换成空音频,空音频的时间由 fs2 的 duration_predictor 决定 + new_wav = np.concatenate( + [wav_org[:left_idx], blank_wav, wav_org[right_idx:]]) + + # 4. 
get old and new mel span to be mask + # [92, 92] + + old_span_bdy, mfa_start, mfa_end = get_masked_mel_bdy( + mfa_start=mfa_start, + mfa_end=mfa_end, + fs=fs, + hop_length=hop_length, + span_to_repl=span_to_repl) + # [92, 174] + # new_mfa_start, new_mfa_end 时间级别的开始和结束时间 -> 帧级别 + new_span_bdy, new_mfa_start, new_mfa_end = get_masked_mel_bdy( + mfa_start=new_mfa_start, + mfa_end=new_mfa_end, + fs=fs, + hop_length=hop_length, + span_to_repl=span_to_add) + + # old_span_bdy, new_span_bdy 是帧级别的范围 + return new_wav, new_phns, new_mfa_start, new_mfa_end, old_span_bdy, new_span_bdy + + +def prep_feats(mlm_model: nn.Layer, + wav_path: str, + source_lang: str="english", + target_lang: str="english", + old_str: str="", + new_str: str="", + duration_adjust: bool=True, + start_end_sp: bool=False, + mask_reconstruct: bool=False, + fs: int=24000, + hop_length: int=300, + token_list: List[str]=[]): + wav, phns, mfa_start, mfa_end, old_span_bdy, new_span_bdy = prep_feats_with_dur( + source_lang=source_lang, + target_lang=target_lang, + mlm_model=mlm_model, + old_str=old_str, + new_str=new_str, + wav_path=wav_path, + duration_adjust=duration_adjust, + start_end_sp=start_end_sp, + mask_reconstruct=mask_reconstruct, + fs=fs, + hop_length=hop_length) + + token_to_id = {item: i for i, item in enumerate(token_list)} + text = np.array( + list(map(lambda x: token_to_id.get(x, token_to_id['']), phns))) + span_bdy = np.array(new_span_bdy) + + batch = [('1', { + "speech": wav, + "align_start": mfa_start, + "align_end": mfa_end, + "text": text, + "span_bdy": span_bdy + })] + + return batch, old_span_bdy, new_span_bdy + + +def decode_with_model(mlm_model: nn.Layer, + collate_fn, + wav_path: str, + source_lang: str="english", + target_lang: str="english", + old_str: str="", + new_str: str="", + use_teacher_forcing: bool=False, + duration_adjust: bool=True, + start_end_sp: bool=False, + fs: int=24000, + hop_length: int=300, + token_list: List[str]=[]): + batch, old_span_bdy, new_span_bdy = 
prep_feats( + source_lang=source_lang, + target_lang=target_lang, + mlm_model=mlm_model, + wav_path=wav_path, + old_str=old_str, + new_str=new_str, + duration_adjust=duration_adjust, + start_end_sp=start_end_sp, + fs=fs, + hop_length=hop_length, + token_list=token_list) + + feats = collate_fn(batch)[1] + + if 'text_masked_pos' in feats.keys(): + feats.pop('text_masked_pos') + + output = mlm_model.inference( + text=feats['text'], + speech=feats['speech'], + masked_pos=feats['masked_pos'], + speech_mask=feats['speech_mask'], + text_mask=feats['text_mask'], + speech_seg_pos=feats['speech_seg_pos'], + text_seg_pos=feats['text_seg_pos'], + span_bdy=new_span_bdy, + use_teacher_forcing=use_teacher_forcing) + + # 拼接音频 + output_feat = paddle.concat(x=output, axis=0) + wav_org, _ = librosa.load(wav_path, sr=fs) + return wav_org, output_feat, old_span_bdy, new_span_bdy, fs, hop_length + + +def get_mlm_output(wav_path: str, + model_name: str="paddle_checkpoint_en", + source_lang: str="english", + target_lang: str="english", + old_str: str="", + new_str: str="", + use_teacher_forcing: bool=False, + duration_adjust: bool=True, + start_end_sp: bool=False): + mlm_model, train_conf = load_model(model_name) + mlm_model.eval() + + collate_fn = build_mlm_collate_fn( + sr=train_conf.feats_extract_conf['fs'], + n_fft=train_conf.feats_extract_conf['n_fft'], + hop_length=train_conf.feats_extract_conf['hop_length'], + win_length=train_conf.feats_extract_conf['win_length'], + n_mels=train_conf.feats_extract_conf['n_mels'], + fmin=train_conf.feats_extract_conf['fmin'], + fmax=train_conf.feats_extract_conf['fmax'], + mlm_prob=train_conf['mlm_prob'], + mean_phn_span=train_conf['mean_phn_span'], + seg_emb=train_conf.encoder_conf['input_layer'] == 'sega_mlm') + + return decode_with_model( + source_lang=source_lang, + target_lang=target_lang, + mlm_model=mlm_model, + collate_fn=collate_fn, + wav_path=wav_path, + old_str=old_str, + new_str=new_str, + use_teacher_forcing=use_teacher_forcing, + 
duration_adjust=duration_adjust, + start_end_sp=start_end_sp, + fs=train_conf.feats_extract_conf['fs'], + hop_length=train_conf.feats_extract_conf['hop_length'], + token_list=train_conf.token_list) + + +def evaluate(uid: str, + source_lang: str="english", + target_lang: str="english", + prefix: os.PathLike="./prompt/dev/", + model_name: str="paddle_checkpoint_en", + new_str: str="", + prompt_decoding: bool=False, + task_name: str=None): + + # get origin text and path of origin wav + old_str, wav_path = read_data(uid=uid, prefix=prefix) + + if task_name == 'edit': + new_str = new_str + elif task_name == 'synthesize': + new_str = old_str + new_str + else: + new_str = old_str + ' '.join([ch for ch in new_str if is_chinese(ch)]) + + print('new_str is ', new_str) + + results_dict = get_wav( + source_lang=source_lang, + target_lang=target_lang, + model_name=model_name, + wav_path=wav_path, + old_str=old_str, + new_str=new_str) + return results_dict + + +if __name__ == "__main__": + # parse config and args + args = parse_args() + + data_dict = evaluate( + uid=args.uid, + source_lang=args.source_lang, + target_lang=args.target_lang, + prefix=args.prefix, + model_name=args.model_name, + new_str=args.new_str, + task_name=args.task_name) + sf.write(args.output_name, data_dict['output'], samplerate=24000) + print("finished...") diff --git a/examples/ernie_sat/local/sedit_arg_parser.py b/examples/ernie_sat/local/sedit_arg_parser.py new file mode 100644 index 00000000..21c6d0b4 --- /dev/null +++ b/examples/ernie_sat/local/sedit_arg_parser.py @@ -0,0 +1,84 @@ +import argparse + + +def parse_args(): + # parse args and config and redirect to train_sp + parser = argparse.ArgumentParser( + description="Synthesize with acoustic model & vocoder") + # acoustic model + parser.add_argument( + '--am', + type=str, + default='fastspeech2_csmsc', + choices=[ + 'speedyspeech_csmsc', 'fastspeech2_csmsc', 'fastspeech2_ljspeech', + 'fastspeech2_aishell3', 'fastspeech2_vctk', 'tacotron2_csmsc', + 
'tacotron2_ljspeech', 'tacotron2_aishell3'
+        ],
+        help='Choose acoustic model type of tts task.')
+    parser.add_argument(
+        '--am_config',
+        type=str,
+        default=None,
+        help='Config of acoustic model. Use default config when it is None.')
+    parser.add_argument(
+        '--am_ckpt',
+        type=str,
+        default=None,
+        help='Checkpoint file of acoustic model.')
+    parser.add_argument(
+        "--am_stat",
+        type=str,
+        default=None,
+        help="mean and standard deviation used to normalize spectrogram when training acoustic model."
+    )
+    parser.add_argument(
+        "--phones_dict", type=str, default=None, help="phone vocabulary file.")
+    parser.add_argument(
+        "--tones_dict", type=str, default=None, help="tone vocabulary file.")
+    parser.add_argument(
+        "--speaker_dict", type=str, default=None, help="speaker id map file.")
+
+    # vocoder
+    parser.add_argument(
+        '--voc',
+        type=str,
+        default='pwgan_aishell3',
+        choices=[
+            'pwgan_csmsc', 'pwgan_ljspeech', 'pwgan_aishell3', 'pwgan_vctk',
+            'mb_melgan_csmsc', 'wavernn_csmsc', 'hifigan_csmsc',
+            'hifigan_ljspeech', 'hifigan_aishell3', 'hifigan_vctk',
+            'style_melgan_csmsc'
+        ],
+        help='Choose vocoder type of tts task.')
+    parser.add_argument(
+        '--voc_config',
+        type=str,
+        default=None,
+        help='Config of voc. Use default config when it is None.')
+    parser.add_argument(
+        '--voc_ckpt', type=str, default=None, help='Checkpoint file of voc.')
+    parser.add_argument(
+        "--voc_stat",
+        type=str,
+        default=None,
+        help="mean and standard deviation used to normalize spectrogram when training voc."
+ ) + # other + parser.add_argument( + "--ngpu", type=int, default=1, help="if ngpu == 0, use cpu.") + + parser.add_argument("--model_name", type=str, help="model name") + parser.add_argument("--uid", type=str, help="uid") + parser.add_argument("--new_str", type=str, help="new string") + parser.add_argument("--prefix", type=str, help="prefix") + parser.add_argument( + "--source_lang", type=str, default="english", help="source language") + parser.add_argument( + "--target_lang", type=str, default="english", help="target language") + parser.add_argument("--output_name", type=str, help="output name") + parser.add_argument("--task_name", type=str, help="task name") + + # pre + args = parser.parse_args() + return args diff --git a/examples/ernie_sat/local/utils.py b/examples/ernie_sat/local/utils.py new file mode 100644 index 00000000..836942a2 --- /dev/null +++ b/examples/ernie_sat/local/utils.py @@ -0,0 +1,162 @@ +from pathlib import Path +from typing import Dict +from typing import List +from typing import Union + +import numpy as np +import paddle +import yaml +from sedit_arg_parser import parse_args +from yacs.config import CfgNode + +from paddlespeech.t2s.exps.syn_utils import get_am_inference +from paddlespeech.t2s.exps.syn_utils import get_voc_inference + + +def read_2col_text(path: Union[Path, str]) -> Dict[str, str]: + """Read a text file having 2 column as dict object. 
+
+    Examples:
+        wav.scp:
+            key1 /some/path/a.wav
+            key2 /some/path/b.wav
+
+        >>> read_2col_text('wav.scp')
+        {'key1': '/some/path/a.wav', 'key2': '/some/path/b.wav'}
+
+    """
+
+    data = {}
+    with Path(path).open("r", encoding="utf-8") as f:
+        for linenum, line in enumerate(f, 1):
+            sps = line.rstrip().split(maxsplit=1)
+            if len(sps) == 1:
+                k, v = sps[0], ""
+            else:
+                k, v = sps
+            if k in data:
+                raise RuntimeError(f"{k} is duplicated ({path}:{linenum})")
+            data[k] = v
+    return data
+
+
+def load_num_sequence_text(path: Union[Path, str], loader_type: str="csv_int"
+                           ) -> Dict[str, List[Union[float, int]]]:
+    """Read a text file indicating sequences of number
+
+    Examples:
+        key1 1 2 3
+        key2 34 5 6
+
+        >>> d = load_num_sequence_text('text')
+        >>> np.testing.assert_array_equal(d["key1"], np.array([1, 2, 3]))
+    """
+    if loader_type == "text_int":
+        delimiter = " "
+        dtype = int
+    elif loader_type == "text_float":
+        delimiter = " "
+        dtype = float
+    elif loader_type == "csv_int":
+        delimiter = ","
+        dtype = int
+    elif loader_type == "csv_float":
+        delimiter = ","
+        dtype = float
+    else:
+        raise ValueError(f"Not supported loader_type={loader_type}")
+
+    # path looks like:
+    #   utta 1,0
+    #   uttb 3,4,5
+    #   -> return {'utta': np.ndarray([1, 0]),
+    #              'uttb': np.ndarray([3, 4, 5])}
+    d = read_2col_text(path)
+    # Using for-loop instead of dict-comprehension for debuggability
+    retval = {}
+    for k, v in d.items():
+        try:
+            retval[k] = [dtype(i) for i in v.split(delimiter)]
+        except (ValueError, TypeError):
+            print(f'Error happened with path="{path}", id="{k}", value="{v}"')
+            raise
+    return retval
+
+
+def is_chinese(ch):
+    if u'\u4e00' <= ch <= u'\u9fff':
+        return True
+    else:
+        return False
+
+
+def get_voc_out(mel):
+    # vocoder
+    args = parse_args()
+    with open(args.voc_config) as f:
+        voc_config = CfgNode(yaml.safe_load(f))
+    voc_inference = get_voc_inference(
+        voc=args.voc,
+        voc_config=voc_config,
+        voc_ckpt=args.voc_ckpt,
+        voc_stat=args.voc_stat)
+
+    with paddle.no_grad():
+        wav
= voc_inference(mel) + return np.squeeze(wav) + + +def eval_durs(phns, target_lang="chinese", fs=24000, hop_length=300): + args = parse_args() + + if target_lang == 'english': + args.am = "fastspeech2_ljspeech" + args.am_config = "download/fastspeech2_nosil_ljspeech_ckpt_0.5/default.yaml" + args.am_ckpt = "download/fastspeech2_nosil_ljspeech_ckpt_0.5/snapshot_iter_100000.pdz" + args.am_stat = "download/fastspeech2_nosil_ljspeech_ckpt_0.5/speech_stats.npy" + args.phones_dict = "download/fastspeech2_nosil_ljspeech_ckpt_0.5/phone_id_map.txt" + + elif target_lang == 'chinese': + args.am = "fastspeech2_csmsc" + args.am_config = "download/fastspeech2_conformer_baker_ckpt_0.5/conformer.yaml" + args.am_ckpt = "download/fastspeech2_conformer_baker_ckpt_0.5/snapshot_iter_76000.pdz" + args.am_stat = "download/fastspeech2_conformer_baker_ckpt_0.5/speech_stats.npy" + args.phones_dict = "download/fastspeech2_conformer_baker_ckpt_0.5/phone_id_map.txt" + + if args.ngpu == 0: + paddle.set_device("cpu") + elif args.ngpu > 0: + paddle.set_device("gpu") + else: + print("ngpu should >= 0 !") + + # Init body. 
+ with open(args.am_config) as f: + am_config = CfgNode(yaml.safe_load(f)) + + am_inference, am = get_am_inference( + am=args.am, + am_config=am_config, + am_ckpt=args.am_ckpt, + am_stat=args.am_stat, + phones_dict=args.phones_dict, + tones_dict=args.tones_dict, + speaker_dict=args.speaker_dict, + return_am=True) + + vocab_phones = {} + with open(args.phones_dict, "r") as f: + phn_id = [line.strip().split() for line in f.readlines()] + for tone, id in phn_id: + vocab_phones[tone] = int(id) + vocab_size = len(vocab_phones) + phonemes = [phn if phn in vocab_phones else "sp" for phn in phns] + + phone_ids = [vocab_phones[item] for item in phonemes] + phone_ids.append(vocab_size - 1) + phone_ids = paddle.to_tensor(np.array(phone_ids, np.int64)) + _, d_outs, _, _ = am.inference(phone_ids, spk_id=None, spk_emb=None) + pre_d_outs = d_outs + phu_durs_new = pre_d_outs * hop_length / fs + phu_durs_new = phu_durs_new.tolist()[:-1] + return phu_durs_new diff --git a/examples/ernie_sat/path.sh b/examples/ernie_sat/path.sh new file mode 100755 index 00000000..d46d2f61 --- /dev/null +++ b/examples/ernie_sat/path.sh @@ -0,0 +1,13 @@ +#!/bin/bash +export MAIN_ROOT=`realpath ${PWD}/../../` + +export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:${PATH} +export LC_ALL=C + +export PYTHONDONTWRITEBYTECODE=1 +# Use UTF-8 in Python to avoid UnicodeDecodeError when LC_ALL=C +export PYTHONIOENCODING=UTF-8 +export PYTHONPATH=${MAIN_ROOT}:${PYTHONPATH} + +MODEL=ernie_sat +export BIN_DIR=${MAIN_ROOT}/paddlespeech/t2s/exps/${MODEL} \ No newline at end of file diff --git a/examples/ernie_sat/prompt/dev/text b/examples/ernie_sat/prompt/dev/text new file mode 100644 index 00000000..f79cdcb4 --- /dev/null +++ b/examples/ernie_sat/prompt/dev/text @@ -0,0 +1,3 @@ +p243_new For that reason cover should not be given. +Prompt_003_new This was not the show for me. +p299_096 We are trying to establish a date. 
diff --git a/examples/ernie_sat/prompt/dev/wav.scp b/examples/ernie_sat/prompt/dev/wav.scp new file mode 100644 index 00000000..eb0e8e48 --- /dev/null +++ b/examples/ernie_sat/prompt/dev/wav.scp @@ -0,0 +1,3 @@ +p243_new ../../prompt_wav/p243_313.wav +Prompt_003_new ../../prompt_wav/this_was_not_the_show_for_me.wav +p299_096 ../../prompt_wav/p299_096.wav diff --git a/examples/ernie_sat/run_clone_en_to_zh.sh b/examples/ernie_sat/run_clone_en_to_zh.sh new file mode 100755 index 00000000..68b1c754 --- /dev/null +++ b/examples/ernie_sat/run_clone_en_to_zh.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +set -e +source path.sh + +# en --> zh 的 语音合成 +# 根据 Prompt_003_new 作为提示语音: This was not the show for me. 来合成: '今天天气很好' +# 注: 输入的 new_str 需为中文汉字, 否则会通过预处理只保留中文汉字, 即合成预处理后的中文语音。 + +python local/inference.py \ + --task_name=cross-lingual_clone \ + --model_name=paddle_checkpoint_dual_mask_enzh \ + --uid=Prompt_003_new \ + --new_str='今天天气很好.' \ + --prefix='./prompt/dev/' \ + --source_lang=english \ + --target_lang=chinese \ + --output_name=pred_clone.wav \ + --voc=pwgan_aishell3 \ + --voc_config=download/pwg_aishell3_ckpt_0.5/default.yaml \ + --voc_ckpt=download/pwg_aishell3_ckpt_0.5/snapshot_iter_1000000.pdz \ + --voc_stat=download/pwg_aishell3_ckpt_0.5/feats_stats.npy \ + --am=fastspeech2_csmsc \ + --am_config=download/fastspeech2_conformer_baker_ckpt_0.5/conformer.yaml \ + --am_ckpt=download/fastspeech2_conformer_baker_ckpt_0.5/snapshot_iter_76000.pdz \ + --am_stat=download/fastspeech2_conformer_baker_ckpt_0.5/speech_stats.npy \ + --phones_dict=download/fastspeech2_conformer_baker_ckpt_0.5/phone_id_map.txt diff --git a/examples/ernie_sat/run_gen_en.sh b/examples/ernie_sat/run_gen_en.sh new file mode 100755 index 00000000..a0641bc7 --- /dev/null +++ b/examples/ernie_sat/run_gen_en.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +set -e +source path.sh + +# 纯英文的语音合成 +# 样例为根据 p299_096 对应的语音作为提示语音: This was not the show for me. 来合成: 'I enjoy my life.' 
+ +python local/inference.py \ + --task_name=synthesize \ + --model_name=paddle_checkpoint_en \ + --uid=p299_096 \ + --new_str='I enjoy my life, do you?' \ + --prefix='./prompt/dev/' \ + --source_lang=english \ + --target_lang=english \ + --output_name=pred_gen.wav \ + --voc=pwgan_aishell3 \ + --voc_config=download/pwg_aishell3_ckpt_0.5/default.yaml \ + --voc_ckpt=download/pwg_aishell3_ckpt_0.5/snapshot_iter_1000000.pdz \ + --voc_stat=download/pwg_aishell3_ckpt_0.5/feats_stats.npy \ + --am=fastspeech2_ljspeech \ + --am_config=download/fastspeech2_nosil_ljspeech_ckpt_0.5/default.yaml \ + --am_ckpt=download/fastspeech2_nosil_ljspeech_ckpt_0.5/snapshot_iter_100000.pdz \ + --am_stat=download/fastspeech2_nosil_ljspeech_ckpt_0.5/speech_stats.npy \ + --phones_dict=download/fastspeech2_nosil_ljspeech_ckpt_0.5/phone_id_map.txt diff --git a/examples/ernie_sat/run_sedit_en.sh b/examples/ernie_sat/run_sedit_en.sh new file mode 100755 index 00000000..eec7d640 --- /dev/null +++ b/examples/ernie_sat/run_sedit_en.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +set -e +source path.sh + +# 纯英文的语音编辑 +# 样例为把 p243_new 对应的原始语音: For that reason cover should not be given.编辑成 'for that reason cover is impossible to be given.' 对应的语音 +# NOTE: 语音编辑任务暂支持句子中 1 个位置的替换或者插入文本操作 + +python local/inference.py \ + --task_name=edit \ + --model_name=paddle_checkpoint_en \ + --uid=p243_new \ + --new_str='for that reason cover is impossible to be given.' 
\ + --prefix='./prompt/dev/' \ + --source_lang=english \ + --target_lang=english \ + --output_name=pred_edit.wav \ + --voc=pwgan_aishell3 \ + --voc_config=download/pwg_aishell3_ckpt_0.5/default.yaml \ + --voc_ckpt=download/pwg_aishell3_ckpt_0.5/snapshot_iter_1000000.pdz \ + --voc_stat=download/pwg_aishell3_ckpt_0.5/feats_stats.npy \ + --am=fastspeech2_ljspeech \ + --am_config=download/fastspeech2_nosil_ljspeech_ckpt_0.5/default.yaml \ + --am_ckpt=download/fastspeech2_nosil_ljspeech_ckpt_0.5/snapshot_iter_100000.pdz \ + --am_stat=download/fastspeech2_nosil_ljspeech_ckpt_0.5/speech_stats.npy \ + --phones_dict=download/fastspeech2_nosil_ljspeech_ckpt_0.5/phone_id_map.txt diff --git a/examples/ernie_sat/test_run.sh b/examples/ernie_sat/test_run.sh new file mode 100755 index 00000000..75b6a569 --- /dev/null +++ b/examples/ernie_sat/test_run.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +rm -rf *.wav +./run_sedit_en.sh # 语音编辑任务(英文) +./run_gen_en.sh # 个性化语音合成任务(英文) +./run_clone_en_to_zh.sh # 跨语言语音合成任务(英文到中文的语音克隆) \ No newline at end of file diff --git a/examples/ernie_sat/tools/.gitkeep b/examples/ernie_sat/tools/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/examples/vctk/ernie_sat/README.md b/examples/vctk/ernie_sat/README.md new file mode 100644 index 00000000..055e7903 --- /dev/null +++ b/examples/vctk/ernie_sat/README.md @@ -0,0 +1 @@ +# ERNIE SAT with VCTK dataset diff --git a/paddlespeech/t2s/datasets/am_batch_fn.py b/paddlespeech/t2s/datasets/am_batch_fn.py index 0b278aba..1c70b1cd 100644 --- a/paddlespeech/t2s/datasets/am_batch_fn.py +++ b/paddlespeech/t2s/datasets/am_batch_fn.py @@ -11,10 +11,21 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Collection +from typing import Dict +from typing import List +from typing import Tuple + import numpy as np import paddle from paddlespeech.t2s.datasets.batch import batch_sequences +from paddlespeech.t2s.datasets.get_feats import LogMelFBank +from paddlespeech.t2s.modules.nets_utils import get_seg_pos +from paddlespeech.t2s.modules.nets_utils import make_non_pad_mask +from paddlespeech.t2s.modules.nets_utils import pad_list +from paddlespeech.t2s.modules.nets_utils import phones_masking +from paddlespeech.t2s.modules.nets_utils import phones_text_masking def tacotron2_single_spk_batch_fn(examples): @@ -335,3 +346,182 @@ def vits_single_spk_batch_fn(examples): "speech": speech } return batch + + +# for ERNIE SAT +class MLMCollateFn: + """Functor class of common_collate_fn()""" + + def __init__( + self, + feats_extract, + mlm_prob: float=0.8, + mean_phn_span: int=8, + seg_emb: bool=False, + text_masking: bool=False, + attention_window: int=0, + not_sequence: Collection[str]=(), ): + self.mlm_prob = mlm_prob + self.mean_phn_span = mean_phn_span + self.feats_extract = feats_extract + self.not_sequence = set(not_sequence) + self.attention_window = attention_window + self.seg_emb = seg_emb + self.text_masking = text_masking + + def __call__(self, data: Collection[Tuple[str, Dict[str, np.ndarray]]] + ) -> Tuple[List[str], Dict[str, paddle.Tensor]]: + return mlm_collate_fn( + data, + feats_extract=self.feats_extract, + mlm_prob=self.mlm_prob, + mean_phn_span=self.mean_phn_span, + seg_emb=self.seg_emb, + text_masking=self.text_masking, + attention_window=self.attention_window, + not_sequence=self.not_sequence) + + +def mlm_collate_fn( + data: Collection[Tuple[str, Dict[str, np.ndarray]]], + feats_extract=None, + mlm_prob: float=0.8, + mean_phn_span: int=8, + seg_emb: bool=False, + text_masking: bool=False, + attention_window: int=0, + pad_value: int=0, + not_sequence: Collection[str]=(), +) -> Tuple[List[str], Dict[str, paddle.Tensor]]: + uttids = [u 
for u, _ in data] + data = [d for _, d in data] + + assert all(set(data[0]) == set(d) for d in data), "dict-keys mismatching" + assert all(not k.endswith("_lens") + for k in data[0]), f"*_lens is reserved: {list(data[0])}" + + output = {} + for key in data[0]: + + array_list = [d[key] for d in data] + + # Assume the first axis is length: + # tensor_list: Batch x (Length, ...) + tensor_list = [paddle.to_tensor(a) for a in array_list] + # tensor: (Batch, Length, ...) + tensor = pad_list(tensor_list, pad_value) + output[key] = tensor + + # lens: (Batch,) + if key not in not_sequence: + lens = paddle.to_tensor( + [d[key].shape[0] for d in data], dtype=paddle.int64) + output[key + "_lens"] = lens + + feats = feats_extract.get_log_mel_fbank(np.array(output["speech"][0])) + feats = paddle.to_tensor(feats) + feats_lens = paddle.shape(feats)[0] + feats = paddle.unsqueeze(feats, 0) + + text = output["text"] + text_lens = output["text_lens"] + align_start = output["align_start"] + align_start_lens = output["align_start_lens"] + align_end = output["align_end"] + + max_tlen = max(text_lens) + max_slen = max(feats_lens) + + speech_pad = feats[:, :max_slen] + + text_pad = text + text_mask = make_non_pad_mask( + text_lens, text_pad, length_dim=1).unsqueeze(-2) + speech_mask = make_non_pad_mask( + feats_lens, speech_pad[:, :, 0], length_dim=1).unsqueeze(-2) + span_bdy = None + if 'span_bdy' in output.keys(): + span_bdy = output['span_bdy'] + + # dual_mask 的是混合中英时候同时 mask 语音和文本 + # ernie sat 在实现跨语言的时候都 mask 了 + if text_masking: + masked_pos, text_masked_pos = phones_text_masking( + xs_pad=speech_pad, + src_mask=speech_mask, + text_pad=text_pad, + text_mask=text_mask, + align_start=align_start, + align_end=align_end, + align_start_lens=align_start_lens, + mlm_prob=mlm_prob, + mean_phn_span=mean_phn_span, + span_bdy=span_bdy) + # 训练纯中文和纯英文的 -> a3t 没有对 phoneme 做 mask, 只对语音 mask 了 + # a3t 和 ernie sat 的区别主要在于做 mask 的时候 + else: + masked_pos = phones_masking( + xs_pad=speech_pad, + 
src_mask=speech_mask, + align_start=align_start, + align_end=align_end, + align_start_lens=align_start_lens, + mlm_prob=mlm_prob, + mean_phn_span=mean_phn_span, + span_bdy=span_bdy) + text_masked_pos = paddle.zeros(paddle.shape(text_pad)) + + output_dict = {} + + speech_seg_pos, text_seg_pos = get_seg_pos( + speech_pad=speech_pad, + text_pad=text_pad, + align_start=align_start, + align_end=align_end, + align_start_lens=align_start_lens, + seg_emb=seg_emb) + output_dict['speech'] = speech_pad + output_dict['text'] = text_pad + output_dict['masked_pos'] = masked_pos + output_dict['text_masked_pos'] = text_masked_pos + output_dict['speech_mask'] = speech_mask + output_dict['text_mask'] = text_mask + output_dict['speech_seg_pos'] = speech_seg_pos + output_dict['text_seg_pos'] = text_seg_pos + output = (uttids, output_dict) + return output + + +def build_mlm_collate_fn( + sr: int=24000, + n_fft: int=2048, + hop_length: int=300, + win_length: int=None, + n_mels: int=80, + fmin: int=80, + fmax: int=7600, + mlm_prob: float=0.8, + mean_phn_span: int=8, + seg_emb: bool=False, + epoch: int=-1, ): + feats_extract_class = LogMelFBank + + feats_extract = feats_extract_class( + sr=sr, + n_fft=n_fft, + hop_length=hop_length, + win_length=win_length, + n_mels=n_mels, + fmin=fmin, + fmax=fmax) + + if epoch == -1: + mlm_prob_factor = 1 + else: + mlm_prob_factor = 0.8 + + return MLMCollateFn( + feats_extract=feats_extract, + mlm_prob=mlm_prob * mlm_prob_factor, + mean_phn_span=mean_phn_span, + seg_emb=seg_emb) diff --git a/paddlespeech/t2s/exps/syn_utils.py b/paddlespeech/t2s/exps/syn_utils.py index 6b9f41a6..cabea989 100644 --- a/paddlespeech/t2s/exps/syn_utils.py +++ b/paddlespeech/t2s/exps/syn_utils.py @@ -147,14 +147,14 @@ def get_frontend(lang: str='zh', # dygraph -def get_am_inference( - am: str='fastspeech2_csmsc', - am_config: CfgNode=None, - am_ckpt: Optional[os.PathLike]=None, - am_stat: Optional[os.PathLike]=None, - phones_dict: Optional[os.PathLike]=None, - tones_dict: 
Optional[os.PathLike]=None, - speaker_dict: Optional[os.PathLike]=None, ): +def get_am_inference(am: str='fastspeech2_csmsc', + am_config: CfgNode=None, + am_ckpt: Optional[os.PathLike]=None, + am_stat: Optional[os.PathLike]=None, + phones_dict: Optional[os.PathLike]=None, + tones_dict: Optional[os.PathLike]=None, + speaker_dict: Optional[os.PathLike]=None, + return_am: bool=False): with open(phones_dict, "r") as f: phn_id = [line.strip().split() for line in f.readlines()] vocab_size = len(phn_id) @@ -203,7 +203,10 @@ def get_am_inference( am_inference = am_inference_class(am_normalizer, am) am_inference.eval() print("acoustic model done!") - return am_inference + if return_am: + return am_inference, am + else: + return am_inference def get_voc_inference( diff --git a/paddlespeech/t2s/models/__init__.py b/paddlespeech/t2s/models/__init__.py index 0b6f2911..d8df4368 100644 --- a/paddlespeech/t2s/models/__init__.py +++ b/paddlespeech/t2s/models/__init__.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from .ernie_sat import * from .fastspeech2 import * from .hifigan import * from .melgan import * diff --git a/paddlespeech/t2s/models/ernie_sat/__init__.py b/paddlespeech/t2s/models/ernie_sat/__init__.py new file mode 100644 index 00000000..dc86fa51 --- /dev/null +++ b/paddlespeech/t2s/models/ernie_sat/__init__.py @@ -0,0 +1,14 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from .mlm import * diff --git a/paddlespeech/t2s/models/ernie_sat/mlm.py b/paddlespeech/t2s/models/ernie_sat/mlm.py new file mode 100644 index 00000000..c9c3d67a --- /dev/null +++ b/paddlespeech/t2s/models/ernie_sat/mlm.py @@ -0,0 +1,601 @@ +import argparse +from typing import Dict +from typing import List +from typing import Optional +from typing import Tuple +from typing import Union + +import paddle +import yaml +from paddle import nn +from yacs.config import CfgNode + +from paddlespeech.t2s.modules.activation import get_activation +from paddlespeech.t2s.modules.conformer.convolution import ConvolutionModule +from paddlespeech.t2s.modules.conformer.encoder_layer import EncoderLayer +from paddlespeech.t2s.modules.layer_norm import LayerNorm +from paddlespeech.t2s.modules.masked_fill import masked_fill +from paddlespeech.t2s.modules.nets_utils import initialize +from paddlespeech.t2s.modules.tacotron2.decoder import Postnet +from paddlespeech.t2s.modules.transformer.attention import LegacyRelPositionMultiHeadedAttention +from paddlespeech.t2s.modules.transformer.attention import MultiHeadedAttention +from paddlespeech.t2s.modules.transformer.attention import RelPositionMultiHeadedAttention +from paddlespeech.t2s.modules.transformer.embedding import LegacyRelPositionalEncoding +from paddlespeech.t2s.modules.transformer.embedding import PositionalEncoding +from paddlespeech.t2s.modules.transformer.embedding import RelPositionalEncoding +from paddlespeech.t2s.modules.transformer.embedding import ScaledPositionalEncoding +from 
paddlespeech.t2s.modules.transformer.multi_layer_conv import Conv1dLinear +from paddlespeech.t2s.modules.transformer.multi_layer_conv import MultiLayeredConv1d +from paddlespeech.t2s.modules.transformer.positionwise_feed_forward import PositionwiseFeedForward +from paddlespeech.t2s.modules.transformer.repeat import repeat +from paddlespeech.t2s.modules.transformer.subsampling import Conv2dSubsampling + + +# MLM -> Mask Language Model +class mySequential(nn.Sequential): + def forward(self, *inputs): + for module in self._sub_layers.values(): + if type(inputs) == tuple: + inputs = module(*inputs) + else: + inputs = module(inputs) + return inputs + + +class MaskInputLayer(nn.Layer): + def __init__(self, out_features: int) -> None: + super().__init__() + self.mask_feature = paddle.create_parameter( + shape=(1, 1, out_features), + dtype=paddle.float32, + default_initializer=paddle.nn.initializer.Assign( + paddle.normal(shape=(1, 1, out_features)))) + + def forward(self, input: paddle.Tensor, + masked_pos: paddle.Tensor=None) -> paddle.Tensor: + masked_pos = paddle.expand_as(paddle.unsqueeze(masked_pos, -1), input) + masked_input = masked_fill(input, masked_pos, 0) + masked_fill( + paddle.expand_as(self.mask_feature, input), ~masked_pos, 0) + return masked_input + + +class MLMEncoder(nn.Layer): + """Conformer encoder module. + + Args: + idim (int): Input dimension. + attention_dim (int): Dimension of attention. + attention_heads (int): The number of heads of multi head attention. + linear_units (int): The number of units of position-wise feed forward. + num_blocks (int): The number of decoder blocks. + dropout_rate (float): Dropout rate. + positional_dropout_rate (float): Dropout rate after adding positional encoding. + attention_dropout_rate (float): Dropout rate in attention. + input_layer (Union[str, paddle.nn.Layer]): Input layer type. + normalize_before (bool): Whether to use layer_norm before the first block. 
+ concat_after (bool): Whether to concat attention layer's input and output. + if True, additional linear will be applied. + i.e. x -> x + linear(concat(x, att(x))) + if False, no additional linear will be applied. i.e. x -> x + att(x) + positionwise_layer_type (str): "linear", "conv1d", or "conv1d-linear". + positionwise_conv_kernel_size (int): Kernel size of positionwise conv1d layer. + macaron_style (bool): Whether to use macaron style for positionwise layer. + pos_enc_layer_type (str): Encoder positional encoding layer type. + selfattention_layer_type (str): Encoder attention layer type. + activation_type (str): Encoder activation function type. + use_cnn_module (bool): Whether to use convolution module. + zero_triu (bool): Whether to zero the upper triangular part of attention matrix. + cnn_module_kernel (int): Kernerl size of convolution module. + padding_idx (int): Padding idx for input_layer=embed. + stochastic_depth_rate (float): Maximum probability to skip the encoder layer. + + """ + + def __init__(self, + idim: int, + vocab_size: int=0, + pre_speech_layer: int=0, + attention_dim: int=256, + attention_heads: int=4, + linear_units: int=2048, + num_blocks: int=6, + dropout_rate: float=0.1, + positional_dropout_rate: float=0.1, + attention_dropout_rate: float=0.0, + input_layer: str="conv2d", + normalize_before: bool=True, + concat_after: bool=False, + positionwise_layer_type: str="linear", + positionwise_conv_kernel_size: int=1, + macaron_style: bool=False, + pos_enc_layer_type: str="abs_pos", + pos_enc_class=None, + selfattention_layer_type: str="selfattn", + activation_type: str="swish", + use_cnn_module: bool=False, + zero_triu: bool=False, + cnn_module_kernel: int=31, + padding_idx: int=-1, + stochastic_depth_rate: float=0.0, + text_masking: bool=False): + """Construct an Encoder object.""" + super().__init__() + self._output_size = attention_dim + self.text_masking = text_masking + if self.text_masking: + self.text_masking_layer = 
MaskInputLayer(attention_dim) + activation = get_activation(activation_type) + if pos_enc_layer_type == "abs_pos": + pos_enc_class = PositionalEncoding + elif pos_enc_layer_type == "scaled_abs_pos": + pos_enc_class = ScaledPositionalEncoding + elif pos_enc_layer_type == "rel_pos": + assert selfattention_layer_type == "rel_selfattn" + pos_enc_class = RelPositionalEncoding + elif pos_enc_layer_type == "legacy_rel_pos": + pos_enc_class = LegacyRelPositionalEncoding + assert selfattention_layer_type == "legacy_rel_selfattn" + else: + raise ValueError("unknown pos_enc_layer: " + pos_enc_layer_type) + + self.conv_subsampling_factor = 1 + if input_layer == "linear": + self.embed = nn.Sequential( + nn.Linear(idim, attention_dim), + nn.LayerNorm(attention_dim), + nn.Dropout(dropout_rate), + nn.ReLU(), + pos_enc_class(attention_dim, positional_dropout_rate), ) + elif input_layer == "conv2d": + self.embed = Conv2dSubsampling( + idim, + attention_dim, + dropout_rate, + pos_enc_class(attention_dim, positional_dropout_rate), ) + self.conv_subsampling_factor = 4 + elif input_layer == "embed": + self.embed = nn.Sequential( + nn.Embedding(idim, attention_dim, padding_idx=padding_idx), + pos_enc_class(attention_dim, positional_dropout_rate), ) + elif input_layer == "mlm": + self.segment_emb = None + self.speech_embed = mySequential( + MaskInputLayer(idim), + nn.Linear(idim, attention_dim), + nn.LayerNorm(attention_dim), + nn.ReLU(), + pos_enc_class(attention_dim, positional_dropout_rate)) + self.text_embed = nn.Sequential( + nn.Embedding( + vocab_size, attention_dim, padding_idx=padding_idx), + pos_enc_class(attention_dim, positional_dropout_rate), ) + elif input_layer == "sega_mlm": + self.segment_emb = nn.Embedding( + 500, attention_dim, padding_idx=padding_idx) + self.speech_embed = mySequential( + MaskInputLayer(idim), + nn.Linear(idim, attention_dim), + nn.LayerNorm(attention_dim), + nn.ReLU(), + pos_enc_class(attention_dim, positional_dropout_rate)) + self.text_embed = 
nn.Sequential( + nn.Embedding( + vocab_size, attention_dim, padding_idx=padding_idx), + pos_enc_class(attention_dim, positional_dropout_rate), ) + elif isinstance(input_layer, nn.Layer): + self.embed = nn.Sequential( + input_layer, + pos_enc_class(attention_dim, positional_dropout_rate), ) + elif input_layer is None: + self.embed = nn.Sequential( + pos_enc_class(attention_dim, positional_dropout_rate)) + else: + raise ValueError("unknown input_layer: " + input_layer) + self.normalize_before = normalize_before + + # self-attention module definition + if selfattention_layer_type == "selfattn": + encoder_selfattn_layer = MultiHeadedAttention + encoder_selfattn_layer_args = (attention_heads, attention_dim, + attention_dropout_rate, ) + elif selfattention_layer_type == "legacy_rel_selfattn": + assert pos_enc_layer_type == "legacy_rel_pos" + encoder_selfattn_layer = LegacyRelPositionMultiHeadedAttention + encoder_selfattn_layer_args = (attention_heads, attention_dim, + attention_dropout_rate, ) + elif selfattention_layer_type == "rel_selfattn": + assert pos_enc_layer_type == "rel_pos" + encoder_selfattn_layer = RelPositionMultiHeadedAttention + encoder_selfattn_layer_args = (attention_heads, attention_dim, + attention_dropout_rate, zero_triu, ) + else: + raise ValueError("unknown encoder_attn_layer: " + + selfattention_layer_type) + + # feed-forward module definition + if positionwise_layer_type == "linear": + positionwise_layer = PositionwiseFeedForward + positionwise_layer_args = (attention_dim, linear_units, + dropout_rate, activation, ) + elif positionwise_layer_type == "conv1d": + positionwise_layer = MultiLayeredConv1d + positionwise_layer_args = (attention_dim, linear_units, + positionwise_conv_kernel_size, + dropout_rate, ) + elif positionwise_layer_type == "conv1d-linear": + positionwise_layer = Conv1dLinear + positionwise_layer_args = (attention_dim, linear_units, + positionwise_conv_kernel_size, + dropout_rate, ) + else: + raise NotImplementedError("Support 
only linear or conv1d.") + + # convolution module definition + convolution_layer = ConvolutionModule + convolution_layer_args = (attention_dim, cnn_module_kernel, activation) + + self.encoders = repeat( + num_blocks, + lambda lnum: EncoderLayer( + attention_dim, + encoder_selfattn_layer(*encoder_selfattn_layer_args), + positionwise_layer(*positionwise_layer_args), + positionwise_layer(*positionwise_layer_args) if macaron_style else None, + convolution_layer(*convolution_layer_args) if use_cnn_module else None, + dropout_rate, + normalize_before, + concat_after, + stochastic_depth_rate * float(1 + lnum) / num_blocks, ), ) + self.pre_speech_layer = pre_speech_layer + self.pre_speech_encoders = repeat( + self.pre_speech_layer, + lambda lnum: EncoderLayer( + attention_dim, + encoder_selfattn_layer(*encoder_selfattn_layer_args), + positionwise_layer(*positionwise_layer_args), + positionwise_layer(*positionwise_layer_args) if macaron_style else None, + convolution_layer(*convolution_layer_args) if use_cnn_module else None, + dropout_rate, + normalize_before, + concat_after, + stochastic_depth_rate * float(1 + lnum) / self.pre_speech_layer, ), + ) + if self.normalize_before: + self.after_norm = LayerNorm(attention_dim) + + def forward(self, + speech: paddle.Tensor, + text: paddle.Tensor, + masked_pos: paddle.Tensor, + speech_mask: paddle.Tensor=None, + text_mask: paddle.Tensor=None, + speech_seg_pos: paddle.Tensor=None, + text_seg_pos: paddle.Tensor=None): + """Encode input sequence. 
+ + """ + if masked_pos is not None: + speech = self.speech_embed(speech, masked_pos) + else: + speech = self.speech_embed(speech) + if text is not None: + text = self.text_embed(text) + if speech_seg_pos is not None and text_seg_pos is not None and self.segment_emb: + speech_seg_emb = self.segment_emb(speech_seg_pos) + text_seg_emb = self.segment_emb(text_seg_pos) + text = (text[0] + text_seg_emb, text[1]) + speech = (speech[0] + speech_seg_emb, speech[1]) + if self.pre_speech_encoders: + speech, _ = self.pre_speech_encoders(speech, speech_mask) + + if text is not None: + xs = paddle.concat([speech[0], text[0]], axis=1) + xs_pos_emb = paddle.concat([speech[1], text[1]], axis=1) + masks = paddle.concat([speech_mask, text_mask], axis=-1) + else: + xs = speech[0] + xs_pos_emb = speech[1] + masks = speech_mask + + xs, masks = self.encoders((xs, xs_pos_emb), masks) + + if isinstance(xs, tuple): + xs = xs[0] + if self.normalize_before: + xs = self.after_norm(xs) + + return xs, masks + + +class MLMDecoder(MLMEncoder): + def forward(self, xs: paddle.Tensor, masks: paddle.Tensor): + """Encode input sequence. + + Args: + xs (paddle.Tensor): Input tensor (#batch, time, idim). + masks (paddle.Tensor): Mask tensor (#batch, time). + + Returns: + paddle.Tensor: Output tensor (#batch, time, attention_dim). + paddle.Tensor: Mask tensor (#batch, time). 
+ + """ + xs = self.embed(xs) + xs, masks = self.encoders(xs, masks) + + if isinstance(xs, tuple): + xs = xs[0] + if self.normalize_before: + xs = self.after_norm(xs) + + return xs, masks + + +# encoder and decoder is nn.Layer, not str +class MLM(nn.Layer): + def __init__(self, + token_list: Union[Tuple[str, ...], List[str]], + odim: int, + encoder: nn.Layer, + decoder: Optional[nn.Layer], + postnet_layers: int=0, + postnet_chans: int=0, + postnet_filts: int=0, + text_masking: bool=False): + + super().__init__() + self.odim = odim + self.token_list = token_list.copy() + self.encoder = encoder + self.decoder = decoder + self.vocab_size = encoder.text_embed[0]._num_embeddings + + if self.decoder is None or not (hasattr(self.decoder, + 'output_layer') and + self.decoder.output_layer is not None): + self.sfc = nn.Linear(self.encoder._output_size, odim) + else: + self.sfc = None + if text_masking: + self.text_sfc = nn.Linear( + self.encoder.text_embed[0]._embedding_dim, + self.vocab_size, + weight_attr=self.encoder.text_embed[0]._weight_attr) + else: + self.text_sfc = None + + self.postnet = (None if postnet_layers == 0 else Postnet( + idim=self.encoder._output_size, + odim=odim, + n_layers=postnet_layers, + n_chans=postnet_chans, + n_filts=postnet_filts, + use_batch_norm=True, + dropout_rate=0.5, )) + + def inference( + self, + speech: paddle.Tensor, + text: paddle.Tensor, + masked_pos: paddle.Tensor, + speech_mask: paddle.Tensor, + text_mask: paddle.Tensor, + speech_seg_pos: paddle.Tensor, + text_seg_pos: paddle.Tensor, + span_bdy: List[int], + use_teacher_forcing: bool=False, ) -> Dict[str, paddle.Tensor]: + ''' + Args: + speech (paddle.Tensor): input speech (1, Tmax, D). + text (paddle.Tensor): input text (1, Tmax2). + masked_pos (paddle.Tensor): masked position of input speech (1, Tmax) + speech_mask (paddle.Tensor): mask of speech (1, 1, Tmax). + text_mask (paddle.Tensor): mask of text (1, 1, Tmax2). 
+ speech_seg_pos (paddle.Tensor): n-th phone of each mel, 0<=n<=Tmax2 (1, Tmax). + text_seg_pos (paddle.Tensor): n-th phone of each phone, 0<=n<=Tmax2 (1, Tmax2). + span_bdy (List[int]): masked mel boundary of input speech (2,) + use_teacher_forcing (bool): whether to use teacher forcing + Returns: + List[Tensor]: + eg: + [Tensor(shape=[1, 181, 80]), Tensor(shape=[80, 80]), Tensor(shape=[1, 67, 80])] + ''' + + z_cache = None + if use_teacher_forcing: + before_outs, zs, *_ = self.forward( + speech=speech, + text=text, + masked_pos=masked_pos, + speech_mask=speech_mask, + text_mask=text_mask, + speech_seg_pos=speech_seg_pos, + text_seg_pos=text_seg_pos) + if zs is None: + zs = before_outs + + speech = speech.squeeze(0) + outs = [speech[:span_bdy[0]]] + outs += [zs[0][span_bdy[0]:span_bdy[1]]] + outs += [speech[span_bdy[1]:]] + return outs + return None + + +class MLMEncAsDecoder(MLM): + def forward(self, + speech: paddle.Tensor, + text: paddle.Tensor, + masked_pos: paddle.Tensor, + speech_mask: paddle.Tensor, + text_mask: paddle.Tensor, + speech_seg_pos: paddle.Tensor, + text_seg_pos: paddle.Tensor): + # feats: (Batch, Length, Dim) + # -> encoder_out: (Batch, Length2, Dim2) + encoder_out, h_masks = self.encoder( + speech=speech, + text=text, + masked_pos=masked_pos, + speech_mask=speech_mask, + text_mask=text_mask, + speech_seg_pos=speech_seg_pos, + text_seg_pos=text_seg_pos) + if self.decoder is not None: + zs, _ = self.decoder(encoder_out, h_masks) + else: + zs = encoder_out + speech_hidden_states = zs[:, :paddle.shape(speech)[1], :] + if self.sfc is not None: + before_outs = paddle.reshape( + self.sfc(speech_hidden_states), + (paddle.shape(speech_hidden_states)[0], -1, self.odim)) + else: + before_outs = speech_hidden_states + if self.postnet is not None: + after_outs = before_outs + paddle.transpose( + self.postnet(paddle.transpose(before_outs, [0, 2, 1])), + [0, 2, 1]) + else: + after_outs = None + return before_outs, after_outs, None + + +class 
class MLMDualMaksing(MLM):
    """MLM variant that reconstructs masked speech AND predicts masked text.

    NOTE(review): the class name keeps the historical misspelling ("Maksing")
    because configs/checkpoints reference it; prefer the ``MLMDualMasking``
    alias below in new code.
    """

    def forward(self,
                speech: paddle.Tensor,
                text: paddle.Tensor,
                masked_pos: paddle.Tensor,
                speech_mask: paddle.Tensor,
                text_mask: paddle.Tensor,
                speech_seg_pos: paddle.Tensor,
                text_seg_pos: paddle.Tensor):
        # feats (Batch, Length, Dim) -> encoder_out (Batch, Length2, Dim2)
        encoder_out, h_masks = self.encoder(
            speech=speech,
            text=text,
            masked_pos=masked_pos,
            speech_mask=speech_mask,
            text_mask=text_mask,
            speech_seg_pos=speech_seg_pos,
            text_seg_pos=text_seg_pos)
        if self.decoder is not None:
            zs, _ = self.decoder(encoder_out, h_masks)
        else:
            zs = encoder_out
        speech_hidden_states = zs[:, :paddle.shape(speech)[1], :]
        # BUGFIX: text_outs was unbound (NameError at the return statement)
        # whenever self.text_sfc is None; default it explicitly.
        text_outs = None
        if self.text_sfc:
            text_hidden_states = zs[:, paddle.shape(speech)[1]:, :]
            text_outs = paddle.reshape(
                self.text_sfc(text_hidden_states),
                (paddle.shape(text_hidden_states)[0], -1, self.vocab_size))
        if self.sfc is not None:
            before_outs = paddle.reshape(
                self.sfc(speech_hidden_states),
                (paddle.shape(speech_hidden_states)[0], -1, self.odim))
        else:
            before_outs = speech_hidden_states
        if self.postnet is not None:
            after_outs = before_outs + paddle.transpose(
                self.postnet(paddle.transpose(before_outs, [0, 2, 1])),
                [0, 2, 1])
        else:
            after_outs = None
        return before_outs, after_outs, text_outs


# correctly-spelled, backward-compatible alias for the class above
MLMDualMasking = MLMDualMaksing


def build_model_from_file(config_file, model_file):
    """Build an ERNIE-SAT model from a yaml config and load its checkpoint.

    Args:
        config_file: path to the yaml training config; a dual-masking model is
            selected when the path contains
            'conformer_combine_vctk_aishell3_dual_masking'.
        model_file: path to the paddle checkpoint (state dict).
    Returns:
        (model, conf): the loaded model and the parsed config node.
    """
    state_dict = paddle.load(model_file)
    model_class = MLMDualMaksing if 'conformer_combine_vctk_aishell3_dual_masking' in config_file \
        else MLMEncAsDecoder

    # build the model from the yaml config
    with open(config_file) as f:
        conf = CfgNode(yaml.safe_load(f))
    model = build_model(conf, model_class)
    model.set_state_dict(state_dict)
    return model, conf


# select encoder and decoder here
def build_model(args: argparse.Namespace, model_class=MLMEncAsDecoder) -> MLM:
    """Instantiate an MLM model (encoder + optional decoder) from parsed args.

    Args:
        args: config namespace; must provide token_list, encoder/decoder names
            and their conf dicts, input_size, use_scaled_pos_enc, model_conf, init.
        model_class: concrete MLM subclass to construct.
    Raises:
        RuntimeError: when ``args.token_list`` is neither a path nor a list.
        ValueError: for an unknown conformer rel_pos_type.
    """
    if isinstance(args.token_list, str):
        with open(args.token_list, encoding="utf-8") as f:
            token_list = [line.rstrip() for line in f]
        # Overwriting token_list to keep it as "portable".
        args.token_list = list(token_list)
    elif isinstance(args.token_list, (tuple, list)):
        token_list = list(args.token_list)
    else:
        raise RuntimeError("token_list must be str or list")

    vocab_size = len(token_list)
    odim = 80

    pos_enc_class = ScaledPositionalEncoding if args.use_scaled_pos_enc else PositionalEncoding

    if "conformer" == args.encoder:
        conformer_self_attn_layer_type = args.encoder_conf[
            'selfattention_layer_type']
        conformer_pos_enc_layer_type = args.encoder_conf['pos_enc_layer_type']
        # only the legacy relative-position variants are supported here
        conformer_rel_pos_type = "legacy"
        if conformer_rel_pos_type == "legacy":
            if conformer_pos_enc_layer_type == "rel_pos":
                conformer_pos_enc_layer_type = "legacy_rel_pos"
            if conformer_self_attn_layer_type == "rel_selfattn":
                conformer_self_attn_layer_type = "legacy_rel_selfattn"
        elif conformer_rel_pos_type == "latest":
            assert conformer_pos_enc_layer_type != "legacy_rel_pos"
            assert conformer_self_attn_layer_type != "legacy_rel_selfattn"
        else:
            raise ValueError(f"Unknown rel_pos_type: {conformer_rel_pos_type}")
        args.encoder_conf[
            'selfattention_layer_type'] = conformer_self_attn_layer_type
        args.encoder_conf['pos_enc_layer_type'] = conformer_pos_enc_layer_type
        if "conformer" == args.decoder:
            args.decoder_conf[
                'selfattention_layer_type'] = conformer_self_attn_layer_type
            args.decoder_conf[
                'pos_enc_layer_type'] = conformer_pos_enc_layer_type

    # Encoder
    encoder_class = MLMEncoder

    if 'text_masking' in args.model_conf.keys() and args.model_conf[
            'text_masking']:
        args.encoder_conf['text_masking'] = True
    else:
        args.encoder_conf['text_masking'] = False

    encoder = encoder_class(
        args.input_size,
        vocab_size=vocab_size,
        pos_enc_class=pos_enc_class,
        **args.encoder_conf)

    # Decoder
    if args.decoder != 'no_decoder':
        decoder_class = MLMDecoder
        decoder = decoder_class(
            idim=0,
            input_layer=None,
            **args.decoder_conf, )
    else:
        decoder = None

    # Build model
    model = model_class(
        odim=odim,
        encoder=encoder,
        decoder=decoder,
        token_list=token_list,
        **args.model_conf, )

    # Initialize
    if args.init is not None:
        initialize(model, args.init)

    return model
# loss for ERNIE SAT
class MLMLoss(nn.Layer):
    """Masked-LM loss: per-position speech reconstruction averaged over the
    masked frames, plus an optional masked-text cross-entropy term.

    Args:
        lsm_weight (float): label-smoothing weight; values > 50 switch the
            speech loss from L1 to MSE (kept from the original behaviour).
        ignore_id (int): token id ignored by the text cross-entropy.
        text_masking (bool): whether a text loss is computed in forward().
    """

    def __init__(self,
                 lsm_weight: float=0.1,
                 ignore_id: int=-1,
                 text_masking: bool=False):
        super().__init__()
        if text_masking:
            self.text_mlm_loss = nn.CrossEntropyLoss(ignore_index=ignore_id)
        # BUGFIX: reduction must be 'none' so the per-position loss can be
        # masked before averaging; the original MSELoss branch omitted it,
        # which made the later paddle.sum(..., axis=-1) operate on a scalar.
        if lsm_weight > 50:
            self.l1_loss_func = nn.MSELoss(reduction='none')
        else:
            self.l1_loss_func = nn.L1Loss(reduction='none')
        self.text_masking = text_masking

    def forward(self,
                speech: paddle.Tensor,
                before_outs: paddle.Tensor,
                after_outs: paddle.Tensor,
                masked_pos: paddle.Tensor,
                text: paddle.Tensor=None,
                text_outs: paddle.Tensor=None,
                text_masked_pos: paddle.Tensor=None):
        """Compute the masked reconstruction loss (and text loss if enabled).

        Returns:
            loss_mlm, or (loss_mlm, loss_text) when text_masking is True.
        """
        xs_pad = speech
        # BUGFIX: the original read self.odim / self.vocab_size, which were
        # never set in __init__ (AttributeError at runtime); infer the
        # dimensions from the tensors instead.
        odim = paddle.shape(xs_pad)[-1]
        mlm_loss_pos = masked_pos > 0
        loss = paddle.sum(
            self.l1_loss_func(
                paddle.reshape(before_outs, (-1, odim)),
                paddle.reshape(xs_pad, (-1, odim))),
            axis=-1)
        if after_outs is not None:
            loss += paddle.sum(
                self.l1_loss_func(
                    paddle.reshape(after_outs, (-1, odim)),
                    paddle.reshape(xs_pad, (-1, odim))),
                axis=-1)
        # average only over the masked frames (epsilon avoids division by zero)
        loss_mlm = paddle.sum((loss * paddle.reshape(
            mlm_loss_pos, [-1]))) / paddle.sum((mlm_loss_pos) + 1e-10)

        if self.text_masking:
            vocab_size = paddle.shape(text_outs)[-1]
            loss_text = paddle.sum((self.text_mlm_loss(
                paddle.reshape(text_outs, (-1, vocab_size)),
                paddle.reshape(text, (-1))) * paddle.reshape(
                    text_masked_pos,
                    (-1)))) / paddle.sum((text_masked_pos) + 1e-10)

            return loss_mlm, loss_text

        return loss_mlm
def pad_list(xs, pad_value):
    """Right-pad a list of variable-length tensors into one batch tensor.

    Args:
        xs: list of tensors with shape (T_i, ...).
        pad_value: scalar used for the padded positions.
    Returns:
        Tensor of shape (len(xs), max_i T_i, ...), same dtype as xs[0].
    """
    n_batch = len(xs)
    max_len = max(x.shape[0] for x in xs)
    padded = paddle.full(
        [n_batch, max_len, *xs[0].shape[1:]], pad_value, dtype=xs[0].dtype)
    for i, x in enumerate(xs):
        padded[i, :x.shape[0]] = x
    return padded


def make_pad_mask(lengths, xs=None, length_dim=-1):
    """Make a mask tensor that is True (1) at padded positions.

    Args:
        lengths (Tensor(int64)): Batch of lengths (B,).
        xs (Tensor, optional): Reference tensor; when given, the mask is
            broadcast to its shape along ``length_dim``.
        length_dim (int, optional): Which dimension of ``xs`` is the time axis.

    Returns:
        Tensor(bool): Mask with 1 at padded indices.

    Examples:
        >>> lengths = [5, 3, 2]
        >>> make_pad_mask(lengths)
        masks = [[0, 0, 0, 0, 0],
                 [0, 0, 0, 1, 1],
                 [0, 0, 1, 1, 1]]
    """
    if length_dim == 0:
        raise ValueError("length_dim cannot be 0: {}".format(length_dim))

    bs = paddle.shape(lengths)[0]
    # without a reference tensor, the mask is as long as the longest sequence
    maxlen = lengths.max() if xs is None else paddle.shape(xs)[length_dim]

    positions = paddle.arange(0, maxlen, dtype=paddle.int64)
    positions = positions.unsqueeze(0).expand([bs, maxlen])
    mask = positions >= lengths.unsqueeze(-1)

    if xs is not None:
        assert paddle.shape(xs)[0] == bs, (paddle.shape(xs)[0], bs)
        rank = len(paddle.shape(xs))
        if length_dim < 0:
            length_dim = rank + length_dim
        # insert singleton axes everywhere except batch and the time axis,
        # then broadcast the 2-D mask up to the full shape of xs
        index = tuple(
            slice(None) if axis in (0, length_dim) else None
            for axis in range(rank))
        mask = paddle.expand(mask[index], paddle.shape(xs))
    return mask


def make_non_pad_mask(lengths, xs=None, length_dim=-1):
    """Make a mask tensor that is True (1) at NON-padded positions.

    The exact complement of :func:`make_pad_mask`; see it for the argument
    semantics.

    Examples:
        >>> lengths = [5, 3, 2]
        >>> make_non_pad_mask(lengths)
        masks = [[1, 1, 1, 1, 1],
                 [1, 1, 1, 0, 0],
                 [1, 1, 0, 0, 0]]
    """
    return paddle.logical_not(make_pad_mask(lengths, xs, length_dim))
# for ERNIE SAT
# mask phones
def phones_masking(xs_pad: paddle.Tensor,
                   src_mask: paddle.Tensor,
                   align_start: paddle.Tensor,
                   align_end: paddle.Tensor,
                   align_start_lens: paddle.Tensor,
                   mlm_prob: float=0.8,
                   mean_phn_span: int=8,
                   span_bdy: paddle.Tensor=None):
    '''Select which speech frames to mask, at phone granularity.

    Args:
        xs_pad (paddle.Tensor): input speech (B, Tmax, D).
        src_mask (paddle.Tensor): mask of speech (B, 1, Tmax).
        align_start (paddle.Tensor): frame level phone alignment start (B, Tmax2).
        align_end (paddle.Tensor): frame level phone alignment end (B, Tmax2).
        align_start_lens (paddle.Tensor): length of align_start (B, ).
        mlm_prob (float): fraction of positions to mask.
        mean_phn_span (int): mean masked-span length in phones (0 = frame level).
        span_bdy (paddle.Tensor): masked mel boundary of input speech (B, 2);
            given only at inference time.
    Returns:
        paddle.Tensor[bool]: masked position of input speech (B, Tmax).
    '''
    bz, sent_len, _ = paddle.shape(xs_pad)
    masked_pos = paddle.zeros((bz, sent_len))
    if mlm_prob == 1.0:
        masked_pos += 1
    elif mean_phn_span == 0:
        # no phone alignment: mask random frame spans directly
        length = sent_len
        mean_phn_span = min(length * mlm_prob // 3, 50)
        masked_phn_idxs = random_spans_noise_mask(
            length=length, mlm_prob=mlm_prob,
            mean_phn_span=mean_phn_span).nonzero()
        masked_pos[:, masked_phn_idxs] = 1
    else:
        for idx in range(bz):
            # for inference: caller supplies the span to mask
            if span_bdy is not None:
                for s, e in zip(span_bdy[idx][::2], span_bdy[idx][1::2]):
                    masked_pos[idx, s:e] = 1
            # for training: sample phone spans, then mask their frames
            else:
                length = align_start_lens[idx]
                if length < 2:
                    continue
                masked_phn_idxs = random_spans_noise_mask(
                    length=length,
                    mlm_prob=mlm_prob,
                    mean_phn_span=mean_phn_span).nonzero()
                masked_start = align_start[idx][masked_phn_idxs].tolist()
                masked_end = align_end[idx][masked_phn_idxs].tolist()

                for s, e in zip(masked_start, masked_end):
                    masked_pos[idx, s:e] = 1
    # never mark padded / eos frames as masked
    non_eos_mask = paddle.reshape(src_mask, paddle.shape(xs_pad)[:2])
    masked_pos = masked_pos * non_eos_mask
    masked_pos = paddle.cast(masked_pos, 'bool')

    return masked_pos


# mask speech and phones
def phones_text_masking(xs_pad: paddle.Tensor,
                        src_mask: paddle.Tensor,
                        text_pad: paddle.Tensor,
                        text_mask: paddle.Tensor,
                        align_start: paddle.Tensor,
                        align_end: paddle.Tensor,
                        align_start_lens: paddle.Tensor,
                        mlm_prob: float=0.8,
                        mean_phn_span: int=8,
                        span_bdy: paddle.Tensor=None):
    '''Select masked speech frames and, from the unmasked phones, masked text tokens.

    Args:
        xs_pad (paddle.Tensor): input speech (B, Tmax, D).
        src_mask (paddle.Tensor): mask of speech (B, 1, Tmax).
        text_pad (paddle.Tensor): input text (B, Tmax2).
        text_mask (paddle.Tensor): mask of text (B, 1, Tmax2).
        align_start (paddle.Tensor): frame level phone alignment start (B, Tmax2).
        align_end (paddle.Tensor): frame level phone alignment end (B, Tmax2).
        align_start_lens (paddle.Tensor): length of align_start (B, ).
        mlm_prob (float): fraction of speech positions to mask.
        mean_phn_span (int): mean masked-span length in phones (0 = frame level).
        span_bdy (paddle.Tensor): masked mel boundary of input speech (B, 2).
    Returns:
        paddle.Tensor[bool]: masked position of input speech (B, Tmax).
        paddle.Tensor[bool]: masked position of input text (B, Tmax2).
    '''
    bz, sent_len, _ = paddle.shape(xs_pad)
    masked_pos = paddle.zeros((bz, sent_len))
    _, text_len = paddle.shape(text_pad)
    text_mask_num_lower = math.ceil(text_len * (1 - mlm_prob) * 0.5)
    text_masked_pos = paddle.zeros((bz, text_len))

    if mlm_prob == 1.0:
        masked_pos += 1
    elif mean_phn_span == 0:
        # no phone alignment: mask random frame spans directly
        length = sent_len
        mean_phn_span = min(length * mlm_prob // 3, 50)
        masked_phn_idxs = random_spans_noise_mask(
            length=length, mlm_prob=mlm_prob,
            mean_phn_span=mean_phn_span).nonzero()
        masked_pos[:, masked_phn_idxs] = 1
    else:
        for idx in range(bz):
            # for inference
            if span_bdy is not None:
                for s, e in zip(span_bdy[idx][::2], span_bdy[idx][1::2]):
                    masked_pos[idx, s:e] = 1
            # for training
            else:
                length = align_start_lens[idx]
                if length < 2:
                    continue
                masked_phn_idxs = random_spans_noise_mask(
                    length=length,
                    mlm_prob=mlm_prob,
                    mean_phn_span=mean_phn_span).nonzero()
                # text tokens are masked only among phones whose speech is NOT masked
                unmasked_phn_idxs = list(
                    set(range(length)) - set(masked_phn_idxs[0].tolist()))
                np.random.shuffle(unmasked_phn_idxs)
                masked_text_idxs = unmasked_phn_idxs[:text_mask_num_lower]
                text_masked_pos[idx][masked_text_idxs] = 1
                masked_start = align_start[idx][masked_phn_idxs].tolist()
                masked_end = align_end[idx][masked_phn_idxs].tolist()
                for s, e in zip(masked_start, masked_end):
                    masked_pos[idx, s:e] = 1
    non_eos_mask = paddle.reshape(src_mask, paddle.shape(xs_pad)[:2])
    masked_pos = masked_pos * non_eos_mask
    # BUGFIX: the text mask must be reshaped to the TEXT shape (B, Tmax2);
    # the original reshaped it to paddle.shape(xs_pad)[:2] (the speech
    # shape), which fails whenever Tmax != Tmax2.
    non_eos_text_mask = paddle.reshape(text_mask, paddle.shape(text_pad)[:2])
    text_masked_pos = text_masked_pos * non_eos_text_mask
    masked_pos = paddle.cast(masked_pos, 'bool')
    text_masked_pos = paddle.cast(text_masked_pos, 'bool')

    return masked_pos, text_masked_pos


def get_seg_pos(speech_pad: paddle.Tensor,
                text_pad: paddle.Tensor,
                align_start: paddle.Tensor,
                align_end: paddle.Tensor,
                align_start_lens: paddle.Tensor,
                seg_emb: bool=False):
    '''Compute segment (phone index) positions for speech frames and text tokens.

    Args:
        speech_pad (paddle.Tensor): input speech (B, Tmax, D).
        text_pad (paddle.Tensor): input text (B, Tmax2).
        align_start (paddle.Tensor): frame level phone alignment start (B, Tmax2).
        align_end (paddle.Tensor): frame level phone alignment end (B, Tmax2).
        align_start_lens (paddle.Tensor): length of align_start (B, ).
        seg_emb (bool): whether segment embeddings are used; when False the
            positions are all-zero placeholders.
    Returns:
        paddle.Tensor[int]: n-th phone of each mel frame, 0<=n<=Tmax2 (B, Tmax);
            0 marks frames not covered by any phone.
        paddle.Tensor[int]: 1-based index of each phone, 0<=n<=Tmax2 (B, Tmax2).
    '''
    bz, speech_len, _ = paddle.shape(speech_pad)
    _, text_len = paddle.shape(text_pad)

    text_seg_pos = paddle.zeros((bz, text_len), dtype='int64')
    speech_seg_pos = paddle.zeros((bz, speech_len), dtype='int64')

    if not seg_emb:
        return speech_seg_pos, text_seg_pos
    for idx in range(bz):
        align_length = align_start_lens[idx]
        for j in range(align_length):
            s, e = align_start[idx][j], align_end[idx][j]
            speech_seg_pos[idx, s:e] = j + 1
            text_seg_pos[idx, j] = j + 1

    return speech_seg_pos, text_seg_pos
# randomly select the range of speech and text to mask during training
def random_spans_noise_mask(length: int,
                            mlm_prob: float=0.8,
                            mean_phn_span: float=8):
    """Build a boolean noise mask made of random alternating spans.

    Adapted from T5's ``random_spans_helper``. The number of noise tokens and
    of noise/non-noise spans is fixed deterministically:
    ``num_noise_tokens = round(length * mlm_prob)`` and
    ``num_noise_spans = round(num_noise_tokens / mean_phn_span)``; spans
    alternate non-noise / noise, starting with non-noise. Subject to those
    constraints, every mask is equally likely.

    Args:
        length: length of the incoming token sequence (int32 scalar).
        mlm_prob: approximate density of the output mask.
        mean_phn_span: mean length of a noise span.
    Returns:
        np.ndarray: boolean mask of shape [length].
    """
    total = length

    n_noise = int(np.round(length * mlm_prob))
    # avoid degeneracy: at least one noise and one non-noise token
    n_noise = min(max(n_noise, 1), length - 1)
    n_spans = max(int(np.round(n_noise / mean_phn_span)), 1)
    n_clean = length - n_noise

    def _partition(n_items, n_segs):
        """Randomly split n_items into n_segs non-empty segment lengths.

        Chooses ``n_segs - 1`` cut points among the ``n_items - 1`` gaps and
        returns the resulting segment lengths (they sum to n_items).
        """
        cuts = np.arange(n_items - 1) < (n_segs - 1)
        np.random.shuffle(cuts)
        seg_starts = np.pad(cuts, [[1, 0]])
        seg_ids = np.cumsum(seg_starts)
        return np.unique(seg_ids, return_counts=True)[1]

    noise_lens = _partition(n_noise, n_spans)
    clean_lens = _partition(n_clean, n_spans)

    # interleave: clean span, noise span, clean span, noise span, ...
    span_lens = np.reshape(
        np.stack([clean_lens, noise_lens], axis=1), [n_spans * 2])
    boundaries = np.cumsum(span_lens)[:-1]
    start_flags = np.zeros((length, ), dtype=np.int8)
    start_flags[boundaries] = True
    # odd-numbered spans are the noise spans
    return np.equal(np.cumsum(start_flags) % 2, 1)[:total]
class LegacyRelPositionMultiHeadedAttention(MultiHeadedAttention):
    """Multi-Head Attention layer with relative position encoding (old version).

    Details can be found in https://github.com/espnet/espnet/pull/2816.
    Paper: https://arxiv.org/abs/1901.02860

    Args:
        n_head (int): The number of heads.
        n_feat (int): The number of features.
        dropout_rate (float): Dropout rate.
        zero_triu (bool): Whether to zero the upper triangular part of the
            attention matrix.
    """

    def __init__(self, n_head, n_feat, dropout_rate, zero_triu=False):
        """Construct a LegacyRelPositionMultiHeadedAttention object."""
        super().__init__(n_head, n_feat, dropout_rate)
        self.zero_triu = zero_triu
        # projection applied to the positional embeddings (no bias)
        self.linear_pos = nn.Linear(n_feat, n_feat, bias_attr=False)
        # learnable biases used in matrices c and d of
        # https://arxiv.org/abs/1901.02860 Section 3.3
        self.pos_bias_u = paddle.create_parameter(
            shape=(self.h, self.d_k),
            dtype='float32',
            default_initializer=paddle.nn.initializer.XavierUniform())
        self.pos_bias_v = paddle.create_parameter(
            shape=(self.h, self.d_k),
            dtype='float32',
            default_initializer=paddle.nn.initializer.XavierUniform())

    def rel_shift(self, x):
        """Shift rows so each query position sees its relative distances.

        Args:
            x(Tensor): Input tensor (batch, head, time1, time2).
        Returns:
            Tensor: Shifted tensor of the same shape.
        """
        b, h, t1, t2 = paddle.shape(x)
        # pad one zero column, fold, and drop the first row: the classic
        # Transformer-XL relative-shift trick
        padded = paddle.concat([paddle.zeros((b, h, t1, 1)), x], axis=-1)
        folded = paddle.reshape(padded, [b, h, t2 + 1, t1])
        shifted = paddle.reshape(folded[:, :, 1:], [b, h, t1, t2])

        if self.zero_triu:
            keep = paddle.tril(paddle.ones((t1, t2)), t2 - 1)
            shifted = shifted * keep[None, None, :, :]

        return shifted

    def forward(self, query, key, value, pos_emb, mask):
        """Compute 'Scaled Dot Product Attention' with rel. positional encoding.

        Args:
            query(Tensor): Query tensor (#batch, time1, size).
            key(Tensor): Key tensor (#batch, time2, size).
            value(Tensor): Value tensor (#batch, time2, size).
            pos_emb(Tensor): Positional embedding tensor (#batch, time1, size).
            mask(Tensor): Mask tensor (#batch, 1, time2) or (#batch, time1, time2).
        Returns:
            Tensor: Output tensor (#batch, time1, d_model).
        """
        q, k, v = self.forward_qkv(query, key, value)
        # (batch, time1, head, d_k)
        q = paddle.transpose(q, [0, 2, 1, 3])

        n_batch_pos = paddle.shape(pos_emb)[0]
        p = paddle.reshape(
            self.linear_pos(pos_emb), [n_batch_pos, -1, self.h, self.d_k])
        # (batch, head, time1, d_k)
        p = paddle.transpose(p, [0, 2, 1, 3])

        # (batch, head, time1, d_k) after adding each learnable bias
        q_with_bias_u = paddle.transpose(q + self.pos_bias_u, [0, 2, 1, 3])
        q_with_bias_v = paddle.transpose(q + self.pos_bias_v, [0, 2, 1, 3])

        # matrices a + c of Section 3.3: (batch, head, time1, time2)
        matrix_ac = paddle.matmul(q_with_bias_u,
                                  paddle.transpose(k, [0, 1, 3, 2]))

        # matrices b + d: (batch, head, time1, time1), then rel-shifted
        matrix_bd = paddle.matmul(q_with_bias_v,
                                  paddle.transpose(p, [0, 1, 3, 2]))
        matrix_bd = self.rel_shift(matrix_bd)

        # (batch, head, time1, time2)
        scores = (matrix_ac + matrix_bd) / math.sqrt(self.d_k)

        return self.forward_attention(v, scores, mask)
class LegacyRelPositionalEncoding(PositionalEncoding):
    """Relative positional encoding module (old version).

    Details can be found in https://github.com/espnet/espnet/pull/2816.

    See : Appendix B in https://arxiv.org/abs/1901.02860

    Args:
        d_model (int): Embedding dimension.
        dropout_rate (float): Dropout rate.
        max_len (int): Maximum input length.
    """

    def __init__(self, d_model: int, dropout_rate: float, max_len: int=5000):
        """
        Args:
            d_model (int): Embedding dimension.
            dropout_rate (float): Dropout rate.
            max_len (int, optional): Maximum input length. Defaults to 5000.
        """
        # reverse=True: the base class is told to use descending positions
        super().__init__(d_model, dropout_rate, max_len, reverse=True)

    def extend_pe(self, x):
        """Grow the cached encoding table so it covers the length of ``x``."""
        if self.pe is not None and paddle.shape(self.pe)[1] >= paddle.shape(x)[1]:
            return
        seq_len = paddle.shape(x)[1]
        table = paddle.zeros((seq_len, self.d_model))
        if self.reverse:
            position = paddle.arange(
                seq_len - 1, -1, -1.0, dtype=paddle.float32).unsqueeze(1)
        else:
            position = paddle.arange(
                0, seq_len, dtype=paddle.float32).unsqueeze(1)
        div_term = paddle.exp(
            paddle.arange(0, self.d_model, 2, dtype=paddle.float32) *
            -(math.log(10000.0) / self.d_model))
        table[:, 0::2] = paddle.sin(position * div_term)
        table[:, 1::2] = paddle.cos(position * div_term)
        self.pe = table.unsqueeze(0)

    def forward(self, x: paddle.Tensor):
        """Compute positional encoding.

        Args:
            x (paddle.Tensor): Input tensor (batch, time, `*`).
        Returns:
            paddle.Tensor: Encoded tensor (batch, time, `*`).
            paddle.Tensor: Positional embedding tensor (1, time, `*`).
        """
        self.extend_pe(x)
        scaled = x * self.xscale
        pos_emb = self.pe[:, :paddle.shape(x)[1]]
        return self.dropout(scaled), self.dropout(pos_emb)