From 00727d227754fe58fcc45901bf45676629046d00 Mon Sep 17 00:00:00 2001 From: LDOUBLEV Date: Fri, 9 Jul 2021 08:37:36 +0000 Subject: [PATCH] fix bugbye --- deploy/cpp_infer/CMakeLists.txt | 9 +- deploy/cpp_infer/docs/windows_vs2019_build.md | 2 +- deploy/cpp_infer/imgs/cpp_infer_pred_12.png | Bin 0 -> 26798 bytes deploy/cpp_infer/include/config.h | 4 + deploy/cpp_infer/include/ocr_det.h | 5 +- deploy/cpp_infer/include/postprocess_op.h | 4 +- deploy/cpp_infer/include/utility.h | 3 + deploy/cpp_infer/inference | 1 + deploy/cpp_infer/readme.md | 50 +++++------ deploy/cpp_infer/readme_en.md | 50 ++++++----- deploy/cpp_infer/src/clipper.cpp | 22 ++--- deploy/cpp_infer/src/config.cpp | 6 +- deploy/cpp_infer/src/main.cpp | 46 ++++++---- deploy/cpp_infer/src/ocr_det.cpp | 44 +++++++++- deploy/cpp_infer/src/ocr_rec.cpp | 12 ++- deploy/cpp_infer/src/postprocess_op.cpp | 60 +++++++++++-- deploy/cpp_infer/src/preprocess_op.cpp | 79 +++++------------- deploy/cpp_infer/src/utility.cpp | 39 ++++++++- deploy/cpp_infer/tools/build.sh | 10 ++- deploy/cpp_infer/tools/config.txt | 7 +- 20 files changed, 293 insertions(+), 160 deletions(-) create mode 100644 deploy/cpp_infer/imgs/cpp_infer_pred_12.png create mode 120000 deploy/cpp_infer/inference diff --git a/deploy/cpp_infer/CMakeLists.txt b/deploy/cpp_infer/CMakeLists.txt index 90f62345..4f2dc88a 100644 --- a/deploy/cpp_infer/CMakeLists.txt +++ b/deploy/cpp_infer/CMakeLists.txt @@ -13,7 +13,6 @@ SET(TENSORRT_DIR "" CACHE PATH "Compile demo with TensorRT") set(DEMO_NAME "ocr_system") - macro(safe_set_static_flag) foreach(flag_var CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE @@ -134,7 +133,11 @@ if(WITH_MKL) endif () endif() else() - set(MATH_LIB ${PADDLE_LIB}/third_party/install/openblas/lib/libopenblas${CMAKE_STATIC_LIBRARY_SUFFIX}) + if (WIN32) + set(MATH_LIB ${PADDLE_LIB}/third_party/install/openblas/lib/openblas${CMAKE_STATIC_LIBRARY_SUFFIX}) + else () + set(MATH_LIB ${PADDLE_LIB}/third_party/install/openblas/lib/libopenblas${CMAKE_STATIC_LIBRARY_SUFFIX}) + endif () endif() # Note: libpaddle_inference_api.so/a must put before libpaddle_inference.so/a @@ -158,7 +161,7 @@ endif(WITH_STATIC_LIB) if (NOT WIN32) set(DEPS ${DEPS} - ${MATH_LIB} ${MKLDNN_LIB} + ${MATH_LIB} ${MKLDNN_LIB} glog gflags protobuf z xxhash ) if(EXISTS "${PADDLE_LIB}/third_party/install/snappystream/lib") diff --git a/deploy/cpp_infer/docs/windows_vs2019_build.md b/deploy/cpp_infer/docs/windows_vs2019_build.md index 21fbf4e0..0f243bf8 100644 --- a/deploy/cpp_infer/docs/windows_vs2019_build.md +++ b/deploy/cpp_infer/docs/windows_vs2019_build.md @@ -14,7 +14,7 @@ PaddleOCR在Windows 平台下基于`Visual Studio 2019 Community` 进行了测 ### Step1: 下载PaddlePaddle C++ 预测库 fluid_inference -PaddlePaddle C++ 预测库针对不同的`CPU`和`CUDA`版本提供了不同的预编译版本,请根据实际情况下载: [C++预测库下载列表](https://www.paddlepaddle.org.cn/documentation/docs/zh/develop/advanced_guide/inference_deployment/inference/windows_cpp_inference.html) +PaddlePaddle C++ 预测库针对不同的`CPU`和`CUDA`版本提供了不同的预编译版本,请根据实际情况下载: [C++预测库下载列表](https://www.paddlepaddle.org.cn/documentation/docs/zh/develop/guides/05_inference_deployment/inference/windows_cpp_inference.html) 解压后`D:\projects\fluid_inference`目录包含内容为: ``` diff --git a/deploy/cpp_infer/imgs/cpp_infer_pred_12.png b/deploy/cpp_infer/imgs/cpp_infer_pred_12.png new file mode 100644 index 0000000000000000000000000000000000000000..eb5f64e1f6c329f7ae772c50edce7fc8afcb1211 GIT binary patch literal 26798 zcmagFV~{3Y)UH{!ZQC}wY_rR@ZQHhOblFB%J>@QT*|whI^!uH2=EuZL%%6;m9Xod9 
zzBATZcjmfww6dZUA{-tZ2nYzGjI_8a2ncBAzk3fDsDD=o*zJOU1Af+GV#+dNVkF8g zjuzH-<{%(6$)-j|bTSMylg7qIMw2u2v~Vt-s!>sKszw7NgENC8B%?-SBn4S|db>C% zyCXs9em&!ZFeS_vApqHj5AUUlHoq?{uEuN^O-jDbr(jS|SsQ0IaByKt_c?&0;Y?31Q53K%19HFOf++(6VMm!8Fm)4iU`cK-R%z zP}-rQ@{x>ngVuiIM$`Uihn{4L!XFG;8j%><98_IR4(gGC>f$YD;r~%QmYRc&hu{K@ zGR@+}(#b-@lEse0qRn>7#*cY>{sWnN*KM#BRp#-%aDE8J1kO3@}QHN?*R zQ%P!Qa&R7`3<<*$rBoO?Sius5KM`fxaB|S78ZbDx8Hys8Gd|elj|?6)8# zU<(yte*%XVi?)Yk`7tgUWteAS2Vwlc!^+aM{r>&kf(H)%P5~7$fd2jc-FN!^JrEoG z41RF=0||t}(|R|~77ZB$a2n94QIdY)l`JNu9jkLfMiz@X`8{e_zYoT4?1~cVy`W zlV=5t;Z-XsS;bxlFWHQl*68mFwg{o4>Cqd(E7beJv6A)VvOP_ zv|fQpgv;Xp8cCwP{ZQNeF8HRK;!L9@QTtI5Bp+<40Giz;U>h?H{Ed@BuAP}&b$8u( zQE!DG6o_b=4=3B~@@`#ChG}mw;cIZs^28Q-vWajgWcDNw8cEu81#Xv>9#)EstHs^f ze}5LS&=;5jyr^KjCg%Q}C~xd%`#txc60fc*KVrKGW(4CR5CP5|7*=Z~m<09!r-w__ zJZ7l#tS!mC8R#Nx8vomqnlaP#3Y&cSgup~|-F)Tm*NR9Bh7y{_M<489B8$%EY{1c) zDP}@)ZN{8-j1t#(O_!jbo-NOP zklbYTB`J+pShmq=^U~#u8*V1 zJI^+e67t_kt>26x&Sw~vi9<#`^!u@icpR7fU1VBwe4bBH6|9E!qhl*ZDABX$2_^;CnRXbh_6qWGbBQfy zR0&nWQ7iSDFmDVYFeN;B_o@0D0psOCv%!?Y!H{S4)XOJz`B9BsTa=BtLEfL^f>L$) z-*QQGSTafo9{ug>TC??=kLg@I5QgzTx2as$t700yeUxBQF&u7EnsEw}LSx!Tl3J`$ zpnaR)elJF8MFIWAn(nhWH9zke`EqqxzDYuXN zlb_jUO3vl8zNyC6b@iR5XeB?5S!Si$o3WKp!{1FFnxIiJIuG*K%icY$eAtNoPBi~7 zcjjPtSsjy+r4_D`uwqpkP0^O4&=>2YTeE~HP*bR+k@3pmTkmqU{L06yeR z7|5!A-}z56<3`OQ)wh~dDB;s1dJPgX>pXHW=DHi$S= zJGj@ds6O)D{E#rA!safSE6b2B+ydu1?6h2}FxkW!1MFD6>6EiNsUiB08LU?1OYLDU zBJIKQ-x;Wo{$g8fauGpuNq-N3QeE~*PiDb3$sytYFDrlHz^UVE*{a6v@7$=vQiY*- z#9vZKs=ApldtiScv9pTG%_6)c5UXzC^v1Z&D%&a?(b?&iMwi3NA{|u|-RXp(ztc+) zcKyX1iPCS{`@rYzlCjnk_UvM--&kqd&o6`20;&%r#;KEK(U>Xd*j%M$Qh!xg7^+; zHZfuz6|Pf!J-`r8p#N8<22E9Vle1!fayC1Bx}}ZuGuzcQ7*9#L5|@=e`?#su5Tg+g-Q!jVAgi~do*URh~>(NvP_4#8X z*ujsg)m6;>tU{}+i@>;y&Qvly_Is`Z#valMl-`D;5Mt$z2Gc*EZgxYjQ0IxhOvayY z*2ozW7F7@tzSWfI0~CjNzF_an*5ZIsh<1@dFoA5D92zJGtL^zHyKcnQxWBXkJk5tF zY>)*4km@5wZM3@T?~*`K(ni1IXN=_5)5-B$c!5~T9d-#APP!?|p*BnM6 zH}R9p8xwuvPwC+m&vD)Ud6Y@6#04_BbrpYv!$w4fE%)v&r)y{5J)hpSz1XA|D+$=Z zp&?`y3d+YF1P0YrxZ2D1E!$3HllI7?_uBm#hL-<(70{E|3+e)GyVR-~$Cg^$Xm&e6 zI~NlfnRyH<5C;=2tX9Zh;&U02b$_AUQ9}26#~v>(Hk)XXS*v!q2W+h$kr@`lz zv`u;={pClq?NVe442RVhhHGiKnGFnkvn8xY<%|Iw=#_$M75z~h$D0LkF+q%>ShyFH=Ue2~_WQ8_ zN%vXhFRHGeSUf~RQ9nXYE`T68szJFt5re92u$M)=v&Dne8G zIN>r_@OLU+)UV-*5ru3UGj?qVC_f*BB)VU~UXy>%9v3=kvA@WyQfmm&HroUcaegNo z$_GJ9BzZ@tc4jlBVY;Ajneq1TaD?E0u(Np4r;~I>n$djP#Hb%N8@v$-hZr)Y4^3Bc zZ#*a+#mrh5o!2oeUy13@#LXrU(ed&&{g<^l%LdiJ;ad)X3D1gc%6<=6F0Qs*t{9OV zFIPChv46zje+IzKu8ehh-7RoLARIAocR7jradCgMA_bw*F165(9*h4Dhv%uO%-fHX z13y-CsXu?OG!$aOBWC@^dnOD$TjfCP+w&?IZ|Oo`Q0y}4at7;~AnHe6yCe96AAe)| z@|-teW!hJsO8j5_RR)I>;5=3h?X87C1yDu;j$TthVdScC#cEQbKK}ET|DO0?UU7m` z_Z9t4q{~i{JgWiuRxwY0NdG_PQ>t{Z(B|mgN+Zeif4R?pkz5!L7TBFWT^Rqv#rwY( z|39X5kCuczGqN(9Kjrkf=LpFVj$!HEEZY$&$!0T1G-Mk2qqQAjS42Y&#k;z z`5f4W1?a`#=qG?(7pTg>$)dUT6X>-**Y3s9Uhw^PO7xIHN$Lb}zjrBP?_mflvvQ;Z z!3-xz+?F;Y6hCTelba0KbC$$?*sNx}>M*WbgoN?^Gm&K~)cmxNKRn-bH}rX8&AAr0 z*nb8Q`s;7Z4nI>uSWf7E3*12uIbSsMleR*g@iqBZ*8W{xTEzzK-@dIsW-h8^7O++R zipK9~wlA?w8X!3Qtvm<{IwS&fK@4H2wB=Z9!5h_%4F4S2s1=(dF%S3j3LNP|5Bd4E zYh<{xr!rNfQB>(t`i)Ya&z<29boAog*d2rq8170?BiGJZ*1#F%ScJgTH=OAC;&?s1 zf$TUdMd^WzB6_4_9LU`*jWdfYf%Q@d$<4+zc3&eoGb%~rnDczR+(+`I-sNQGCJ?c0 zb#@M!x#)Ki`UFW8CIZod_7z2{ip#|`L56PBQ#iPkW=Ux`=EX4pP4iJweg|zXgD+DDrA}06)n}!n^Y^~(^QV8$(4&r>}lGm z<0E3bV-uxB?$yHpC6Ea@Jgi&}Igrfd=E$h!RYvHdq6f~wsg=amD!mO3e4?Mrmr$F0 zk0t3Zc!!Q=ABY9X(%PvWI$anHxWS%Fn{mgDm}j&*RtfH^nxN>%#;RUgPewKt#SY<> zdTGMe_sAG4AVhzPyr>{pFWx#zwbL;p3zET;c6x<+d0z7X+o5vi70UaK)oN_)--Os4 zc0t_{vt@+!7K`s|Vdm#fUakTbn{QBzI{bT3^$L}Qyy4`~$Rrw^R<&X}az!?Dx}XW| 
zgDOb44DB7P0cF~;z4E}kVKzJN%M(5N4!x5PGBmgd?V-O7OJDS_7|w4hQKRu9Cnia z7!~>EVc3i*qwp+FnHa$12NnuBwYYVPhz6Tu0+nU2Crk#{p9f3j$HB z5B!In=W!Y~597qE+Z@lPOM78~R~|xBz(9zv71_|TxUUrli=6txvBo3vWjz-H9p)7>3x*%6HMU&K5ep#X`V_&?0%b0aE2Dd9(kaVa=*BKOo;4+B0~Z2mkOk@6EE zVnCcsk1;DBP;@D|xFfR-GQZblLiwzBP`-|vSe)xnWbAU=LzA2kiG+yW|q+Q_uPTnz$k7Gkv#+%x{rvDHI~D~QW=uZ~WhCF7T<*$Jq@ za6{%L1)KoO_Dz}7tm4n8-MFomZ@0QQ_soK=@~xu{S$w;Sijc!2)ffJZUNBsO4~=W9 zVHR;63Nm9FlyzgAU-EpUz>{#66B0hFdTJNZm}ER&czO*~w}yt~hh|z?&QfQokHmEd zH-6~cl^E~)@WU5t%L}-QGBdKWbs3~D4QNK#;m%7Ks;7bwwHi_O!X6HA4l4sN(GA1l z)_!*9aC*MY-Z(JCaWaHpq`*dUQe3V*V*h>T*id?honYHTv{o1Eh}1}=gF$aK>nF`h zW8YSno_HvEUCX_2((mgD%vr=boEOsPO?CCfsb5$FbMLJ_MB9Tn{;Fku z-R*2IB}c50!~pXORo6(_hrls%K`kcn3j{j-I2v0?`FRY#(H*j_3V#DK*~)5Dsi1n_ z(O?KzyJIgjGd?;7q0UUYY>s%^ChMM+xhdq$EeJ_GIvDwEb+dLR!I1n0tbpuJCj_yX z!jb6C(RCDmYuD1kd|+AVi^FvGzH`E4hu!83*T0rx-@B2aS+2f z3X3QxS0}0&k8uwPkty}wo_(4C*s8>cnXQFlbAP=yYbdE{d>WonB9*_mMD1u^% zw#Zc6F=2P#iqa&#jZ;VIHBc(-Zc&d?x9;$NR5t+KE-c`T10YjWV~+ zABYdX?F1?(h8_lO0K&%!)~vcsmddQ>D~uy6&Ij(7ZPhLZZKDVxa+<|0skAY&Q`QmT zE@!#8S#8-R?JI*y6N;rLt;{F}c|i@K^kXL5rj>(<^lc>%$1sHGkoYk-L5Q=5B4temZzBY)Y=zV+YduuF(WYtBk@@?Ghk9W zbhp~2MBHlm-JJuJD5ud^YDiCPeQ5Oivh&*jgv-6EvtjM&&1ej#TdE}q`!Zei8HnM> zi{`itl#!_9cIoMTpLxn2CQQP^3}pHi zL+$Er>iX0Y;R(1&GL7TX8W*g*hg?-M&@VX3U@rPaEQF)J8}jTQx?8Q-w1!PQvm&23$ZtEnOU zBo>eObpcQj;}%s*=@zjSS{#}LW1X^o#A7JMNEUxwIZTC0n7M}i8-g&dg>2fcW0g$( zes-HsDCLB;>L%U#kG|{ZF>VPM{1#B7_6KEFmZvErF^L=`W0Vnk)F-T|ykDgbutTxi!yM}(yiHq4 z=++@I5w1nw%w-J;k75U_r7BmJBtr1Lbg~?kj3>gG+8gLPU6B!NxI&spuyWU=LNGd? zMvx2W>Dh<_nD@C!nx?H{7LRv-B(iC{fVq3DT4q&S8Zvl&!9BWhhzxDeqG2uUs*K-r z3w*8YL5O!2e!uXSU{jJ!E=BL$D}40Yf=kXU8?Z(@bpvB9=N8Z2hYQWW0+V}b#!u^M z$EzCFv{)>iP?%=2;s><6j3%>=(M)e~h+=GgQa@DO5koG4jta%Kt)tV)zpIUGEDM))IB@ z*2p109#Zi;92?tjxW7ielSS+3Mtbuu~G1Ja}^loemLNUO@Ue|C;Xs5EpQ$E8)rE!vPMkE*m>x4 z3-V;(Y_Vw5mBf}!kF&~;GuzR)i)7hZ?f4xLG$dEF`8>=Sk<;lSxIv^F#0_P2QvWl- z<`Y{9owhaQXOvSvXzHYPSPNKkH>+zyup4X_i|Y?&J)KrO(3E(x@iI=QxrWWd%ta78 zag(--p@#S_7u6!a;@;MWtEwRi`km|+;XFNl)ZmmM&9MhQy)C_kmh_Z+h+EV%3!P=)p|h`BpfhV|Ie1}u0BD-11|ILjLnt_yl1yI;sW;TA zewl%Rp&oob@A}qGIXq%X?vq<+(A1tRbLNAA7SmkPSa6$XjqKNgJB7?JQa9|$!srLl z%Zp-G%~p5(Hdf=fw|3;&Z{EkyLq;1cP+`! 
z`fwU_X-m4vO1;k=4a-qY=lqO<^UP%wNy>gPhL{( zynZO5yjxJX2nU|P{C$Bj-G3b%$bG1}ortXCcWu%=NJzj$rm+<0LsYy%pw`Nxub&Y==M} zJDb7+OEtWS+K+k3=eG#($$X7NwNQk9auC%=C9^XYM{3rk(iSS475C*~g%;y3M^Xo7 z1P_L--gap*=Nb8<8FVF-h68!jEFh6>T($rP(1hDysl)IBxh`OenonJgO9)!Cg=0_q zax}k4I6%2Ypp|7rFr_ZF>|z#oKK01Q<$Sr%mNIM{BiPet4unN^Lg85-4`@bT7CxPL z(e<$sq3f%qi_SVN(;F-yclk^x1bB?Mx&s)wj<(0L2JfWk^3Gdblw|>q@dRSU5QsaC?XmD?aXO88dR9VG{T0`Y$Pj z@Z=T5k!BU2&6v!k;;EQhaduveH}QR0%jVRr#e@@WhQNqxzQa&P;Soi;Nn50(>DT*W z_mhg#&kv3o&*hK8-l{VxUKOZ_m(!vQ*vx!V`j^8UkEm4-&2^$A4VtKn_jN67+5aS`) z=~UvVWB8oc7RX^)puig>5~YHg6?gmzSo7CAq=Ao#rscr{!v}?@KU9cl;8t9G7CU^$ zH26wX+$;)5wZ&iU)jUnmvSVlD%=m_rf4V=^O*15r$UuWW8Q-sRe-Zr_h$6d2UtjB` zxH7EKY-2*S&$^?e^%Vosg9-E)5+0kx8x_nswb4r0cVMZ~=6r-EjUZkR?|;bvfO@_e z@0T&aiK8+!wPkh8byt9|)flKrfF4wyreej~C=ov=$AHl~I8c~g3x!oi+mbb8jWb3+ zy|i|`DuqO#l?t;_u!p?pp})uicdIwm;SSwNpA2VCFM~^wDlmjei_8 zojq?hXu~IEobYVA(;u`h_pQ3&Vxl)x{$2o*{p`N{$Y=Eoq@_<3RZ;#{)T@ZZ=XCsq zzZ;i{aj#=F5o7LMdc;p0v)wpyJ!x;3u~+Q zMDuqe)ih=xdt|{GaE9YN>+AwHC7`tOM79;$4{hz@h_XHC5Pg88C1uX z6#jDBYj10%cKu`ky`X{XrEH5O)0WsnG>fiZX)y%_W+5bXs{-{9g z=QBwN);o$e?jYe^w=$0#`k%6nIsUdEVMc)WPPd@>dj-FhVohJF;T*D#J1JXl9l1vC zd*ynhQwH<3``S1vmS$Kh@@6DZJqqLdxLPdz3B4s^|ANO5D1MJSx3l(|8!sKYzO6FOa}BZ4EbdGYHiTiICLY476IV{SENCkRnL^3&R2C_-iI+YoLN6;*3@LuzZP z1}$iKTM;gBhBQnT5cgK<*;mtZ3RQV!XADhX%!O5{m@Ixb-u|6>%iwT@%e30Vy2EXU zieC6JvOpF_d7#N2IS=z{)6?K14WjT*|QGVwTORK}yaHKMf=LVOZ zP%u55W*{fjJ7+`Zqg^(ExZ#q!W*Bd}FcsPK;07R+x*#Bkl;7tiogFxgnZFiIikC`p zmwYi}sF7glw){ax^UM<;#k4&~b0KWV6`#jHZzf}zmTnbrxhc*}7m{j8&2xsa9#^{6 z_c#?f>;EPf=lw1^#yU|du<51_)B(T+ZgTW5azHHGpZFD&EQ5mYW0580hG#nyo3v#8 zb|?(*^e8_){^XLEdp7aTQO#f^Oew-MGhfjWf;Yw=1IT_FO!hM7=ah!0%OX_bRL!`x z&g61o`;tTQL@85@J_fy7QAxx}(4}|xQa2tOIgF^CiAcX3c@vR#(=F1ZO2X!FIRc6^ zut1rv#qLYi*H-t~?Q9IgJ_Y`G&&3HAQZLTr^g$6fgm7GtjcRBSbe_;EvlPGXVI3R6x_c^#vvQ|b3&~F7(5h8DTW2fVw z!6eFie>$px)5dSiJJN|tkHyU4vQ1!9TamjG|1P!?x1|Gn^m##CJfh&LP)UnL0)8N z$w89eNH{BQ)8OJ#g0r}dL$9cF{fpH(DHfqYU`ZJ&HGmYm2<_$ciO}va zTl4EDqb3umV1AP9rYEE236x00}f1d38e<7OSYBJS^HZkWam1Lgh>$sgh* z4)zM78AjT&C-|%&YnkGlX3OYf%U$*0(}Cfkq4+NDfP^*HDB0Nyd3p#$CNqs@+lu_0 zd&LyP9nqR&sX;zS-(+4pttg16G1sT3iP~9{C)JqVxU`(3a%R$H_67#%M5~y3SnG42 zDuc70Zq?ii2g!^Th{1G8u*Ca6{Z68wreQDOifC+);+0Y4!^oEHzcy{UbzSclQGTYZ z6Sd2kRk_jWu{bhPKVdQ-Qc_xvF2j-T{NsX_)c*C50^g`o;Qu%3G(ZITbUqM(YuNrV_#45HZ0ldhx`;6eA6&I91O886*V2L918%FNanF+AopnaP5N}y2N7vs8?2)!r z3s!Z2`}fF-AS74kK|q=Yj2-1{oo4s_uprK+Y03(3=v2pCX(Q65{W;wnF3Lo}bl`lt z<^Jdyi{`~=y(b% zbV;2BBc(tk{>5h_=?`Lm|{#=ECbCQG`(#Bto-DmLVPkeZLFb-_#Cc-xb(3 z_lras2E+HF02mTinNN7muY<#ns0uQCN+z zvK9F@xCKPBOs+b?>VA{R*GQ)!lv4zPJF9ZV@Pp4H2rZy17sgfGlL)WZdehgdJwc%* z8Ginzx$R^b*joTq{adMo8}0R5M;wcYLr2MN*5Z_X9{)n9->xYzoEj%u2}MwhWthd8 z4d#Jfh2m@lpdi07OU-_f>c3a{WRNpu31rC_8R0)oC7jtGR z{{XNB_|B7Gg;mk5*(pNyF=l0|^cD)nH460W;hKKSGh6sHW$SJy1K(m`^lE~d`mr%g zdvBP_Tdp4I#sRhV%C5i5zGTX(0Vu3y+sp&1qkj>Y^uvPHjy8&NzRU%LveQ9vj8BA8 ziI9#n>D#n)VBnFtRYqk}E(;hT5!f<=aBErXTm}E3S7EfvI3B+3WByZJTm7N2W;mFa zabPPKsz~+Ae4WlgGplT%s`|7{4c{OS-vx0vc;52)U-&-btD&j!nD=U2ySIY;5MeL7 zt69N+dUd^k_INP$mbLj9S782ae&|e`EmB(F*EGDzq4s~#SzR#|%9OD_9aJD+LMAJ$ zX89EgdEfAc)pdR1eta>9TF-n_C7fK3f6&TdSV(rt@U0cncR-h~{Sr{=vE%bR4lc&A zHcl7@G$#AVTR+HC0}SspBY^e&%NS7N7bVfB?Nia|zLDdq5NeT{=TXZ~jX9&-5agG1 zKa07*9&A@(*)1PQC-X4%49_4yms2`CuEpq%t7^%7i9*4a*|2wDNWiZ_dN5ht{}>0eB&1j?kvZsaRB2p4sHho`Nvk*eOJt5>%pn?IJ#2Sey_fv zvAfAHF8OAMl$;mL7bk`nnmQgH?e&e3u?3=Qq0gj8f34A{>%`D6)TrX$kCD#alK}x) z@Rr?d0LQ2L@1<>Yao}Ybt05Yty$l^guNcahM(kl2r8D{Gw3Q!~Iy#QA7|Q^ryG}{0 zmggEfV>;_R)Qmo_$e(u9N}AS>cMjU3%r39g=Fv#kq~eUVAZ%$l0KW;?c%)Y;W?Od{ zJt>IN`G>pqSJJPIS5jh9J7w~<^;|g=Ln6}By<(fOS%230mU~IhVWr8?rnNhm7N+6G zva{4mMfwV^z`jJDy$SR6l- 
ziA>UZXN_R+TO+wZB1C%&h6I_txrw!{Vl8^@vT~EvmxlG!5`9~X{2h!AIt(zg=yjj8 ztqaWJF+=RW!9`?m%gfU-6KH(U!Im<(iq=RKMJ-NXrymJEA|}(U4yk{5X7;Ba`M`7N zcW(}(i*gUDwY1+9bj8CCco|~~*-0&Q@MTWZ&x*5m!-n}2ClsiGdHCFnAdYAo z=g%)$3{FK|MKc6L#Peu)R3a3npRCE`9xH;>PLO*A z7M+3Rl)MK~EVvEPVavWgh?WVhg!@yGSiYt|=^w0#b{)Y5ULB7Q`QG%Rw~SM6eti3Q zCQbza6RK2<0Gf}S2LtQvuIa-Fl{cSqP)Ya3uu=5Y3YT^U9w$0{DSs@+)FDZBO0o=e zhg&IYo$rk8m!@p7m;WQgtF4xhW-Z79p~EH=CU!UI>dDkL>7>Wg!tyM2I)xe7DdlfU z4;}xy()f8r+1>=Mpxqu2#9nMbu$&EzzEoshy-`{=64u!ER36y_HR+ zv*f+ZBUC<_#r#7W=J@35VH-#MrNkB@K;bl2Jb%C_2C7hAX5jV%B|BE*>jeUwKORb? zboZnTbc?>~>W^lQd0Qb1C}r44n_mmD=#RMPHg#}r+ju2SCiPZi;Y_YPh`a?G(rujJ z>V&^<mx2ah&|)x+8&>+2BM%veLND2$LWDuxxaxcD0L1GgIg z())&24$A;bb$o*3b5LX;auwE2XLh8uv5HNbSLkOf9D2n5Dum=334l3UPH@5lCVH>1 zn@(b_=b2q++RMQGWEt~w$=j@nC7!Cf34S=yS6z8j3w1TVGNDTc#e#7(AUd+O1|1a+ z$RvKJ$lm_|a;V(z8;)}eDu68F1pk9e0 z5Qtv^RC&s!K21Eip78uTFs50ex)Lovsz`L*>s_rJvc!0DKc1Xp02xglrJ%@p6zL-S zS8m2m`C`cNH8?6u0LOkF5}-6>C;UFeBH}3H z!;>?>O$ZLz7zbSG46fEP5`^uw;ypXk$^cKI|NA22qrilE$XPPlzWN|;Q}@${z4)uT z`+>&$zEb@;S#xX`S&Q9f^(z{mDMS_D)~OSJ&xTv}L|VW)?bmg6Xy?re=8VKQPDAKPvLy37U1)f1CLKnX&u5Tz|r%z%N%UyGCfx*jb|Q>idVTD`?tBF~xSmyZ>5` z{Mgvv&bQ|53t2+cJ~gQ+&~_E?isMV*FK4;)KKm{#2I9mLFg0@c?DE@5RD#_yuwAf22kgZEmEuu5O}(hgy@F}zmu^o}^ElgnVsuxf?c_2TiSI z&LCNtTe)B^2`1!o1-h~Tl}V~)j23ni2*jx{q&i4a2i@T0xEp)^EF5-GJ7VAB2-7U^ z&5f|ytYXdKV0~OAsL##X1B{(pqhfWSpfWZd^bq8PSH7L^0^wOkkH9<*KPqzV+g>pH ziFa?gALfrcW?rvpuI?wVd5eOAyaUkn1@so?FVSZ80wz5$S!M+La0htp912Gv1$USs z5L^d$78>Ho@mAT4iPAsS2Pb_!Z(!yIx=Nb2zAFci@uQfLT7)!#g(~ni#3hFyR3l+J zrG}HWPIYa$&#}BD?cRjM3WQ}RyA;I%5A05l+U9Z^Ywi+(1;)+DXK|Tt&l7z8wLeVV zoa_=q5soj>BdV&{K3eHQB|jn4ehJ!9zys2)rG-KQ5D{#k-)pJc)B_BEwTE;`JW26r zRJ)z7;bpihRXLQ4_%p<_ZWQ6JP;va|TD%?pVzw}`_E=^nr#a>PQ}3&2pSHLn43d9! zVG^i`1h)e=+_xP>593-lD{|KSFcz{73Wl6iWD;+c4Im1BI2meD2T4dT9j-wv&bdM@ z^3(j=i$kkcc^_2GCiK1&8GCG%_wQfXkCTE(4QMaPv+y0PN&vZ)(E@{-JKH-Ffo?xS zq1M|7uW`y(i(p9bjeL-&*i$f-g%wr)?Q?EBy7%`ASBjcmR&bI5b3rymzUHgkmXJo4 zB_(A|xUpiV0XYuv{NUu_lAxZ1BL3L3jO^VDgF;DSvJT<5=x`#xWNE{B`Z-;9{W@Z& z`)Rq)y?nzze&uKXDNAw59T{XGGGEgpmiIj5{#Kx7; z<|^?7w^qo6`=W!Sa#OFGegrM%EfKXucIrp@C_`&@d9y4Cbw&z)SRG|R@Rhn+ zEkQA}#vDRS;pU?kF4J>|mY*qnRE_9K%%^fp@_GSj|2S9Gi+Ix3Ji5M2Ia;y%q3(;( zTmJ-V?mghICe~d+9nUGK4d~=Qk~jb9S(5KXG~onk`~!Mgt-p%K{p>j2uyZ~bEU*2Y z*N$P{9w=p$DQPvxL$_U(opAY$t{RjYSqb+5NH9W)=z?Ook2t_*3Vht==Lvj*8gN^N zmWri+i~(O=4y09T%x}$xS4bH6C%G6E#84{@_y{)-=~E^njiN!szQM>s_OW&=!4>V* zf_7WgHfL9ANI@U0C#nW?r5*3!IYNQSsTR(o*4TWP%QBm>nbfXLM7vBz%uX4T>HQB~ zluF#coGwCK)y6qnsNbes4h1~_L5T#+6a$G-8d@xrl<1^0ifB|nbDN`DyZuolFF;RF zc%OvYzwR03J^e#nxFJh#;fc#;XU8g;ak1iU?@3PniOtrP9`S>D%!p~X2>Dw81-{S6 zIG-)U48?8k2gB1%c=~eQH8$a|)>90JoL3V2B`e6@7Cx=L;M(kc}1fD@sW- zF#51^oc8w7xZnmq*tp*WN3vl5WTE&EsqA5g&nhEm3$;v~-OZw#yu9pqE@JxgHcv{- za>UW(HwkB%{BRq08^_4pwD2HL`PqA^H9V?+4ed5?mEx7>D*|Z7Rj_oh>utn&Ozb+* zn#H)rUX?}G$Yw{#ZD5Zqs}FsRtbpK1x%cYD_y$sMb!^cCoId>xDW&pkd5V`kJc}2T zR~Ttz4J}-!F7@ZYyOeTFhRf-}6U1uO+DVmfJS!0L1xXMin6?Ayc(>QvR|ATg=hyH0 z_$-!&BrH(3Eq^noh|{`v?E|3?*Jf4v;S?nddST9Mc?lsQolaza(;m|?%dfDKl`oW` z-^-DXFGKlIplhfGlDX>i&5fJ{(xzo=uGE*I59J%E!s&AAj&Xi_NF(?ysq=y$5s&# zR_|a)k|dqNP7DY^?+UIa=l+aIAbFP+bi1od{8i%0qp6DR=#*HliIf-%lxDVzLB@)@cNrir!UFT_ zD}TW>7-2~XEfGxSR7nY~$JKbz(evL?2dw0GlW#_oHC_td7P&+^ap?~?(d}u7YI7Zb zrD8(a{|s?hoi$ZC)D!e2LegeCDnxS|!7Cs70Z)rro)pdAYHm0&QhTlNXKH^+(h^hc ztq4_emN>Mbox!Ght%;S|wwxx$fZg?YtCSG!er$2T=A{j>AVIs(r?dM4odMI=Ag{~* z>rPCsKp4chR#EJS3Uz-Q`pEVNO6|Cw#OMjH>Th*nY-WQ!p=_E;Ttp72()!6Uoz`G} zC`8ART9m7F^ox9ywYg#e`3P@|2hD@HA&%8%EdNCqgciw-ZkBr_Nu#_O+t_}sFfrdX zsrAMaN%#FL8G`OT805^Fi_2Xg{d}sa)<1SGYHoS*71z7gFQ1uB-fFx~x^o6>OtKoL 
zSvY92i`L5UPuO?~Gf86;jN&$G+N!R}SQ(JUZZFZ;G0S92Gd`Q!vR~~DAz1$n`*edI zB_o5ong4}PF9-XeQfv^%VKEo$O5#g}y?1*#T&%Q&y=G&ha?V!o{(AJR{d-gnD-%`e z&!pF~+;w2x)F+n3MYOuif~I@JKXi_wG;6;v_8$rEGx(JG51o4~+Dyl?TdmNK4D4i1 zfl2kVT3yvn?bNgvyE?5|Kh0H%%NRj}b=FVtCblg8p6F;gco{IwN~FwbwHsp9a&toJ ze8MkQ_}p+hInAvlCwk0Bg)Z!-nvBb>Z#y%Tlq3ag+-$IbC0IzT;qmvvnI@y1B*xqO zbJRJ;bTcWk^!$v%&rlo{V;)~`;m-Ef7o^Ag!E(ZE>Q8BPtumz%y)!VjEr-f=Q`@@V z>$FSLDr*Q{Xbj)nup%CI2G+3LoL! z>bmQoIJΜJCXE!JVLkli#jdf!jz?9(ged-j2tIY8juv9QdWU?MDXJ)l zKK*Rw?G|&frCm?#VWsM4GgR!%n^!9x{y4K5oNp)OifFFec9w*jD`-zU_>M)tct^15 zw=p-U+Rl*I#g2I`~jn+yF7$+E|xCbX3SUa1*r}XILIYY{E3w^qH|G&nVlE zw_PbVWBFNc$cJQ=(&3iK3$~p06Heab@8tpxoGBKBw^y}MRPTBKV=&pymi# z19@j?nL~XzkULVeID3|B8W}}RrltL-7kzH&{D+~sg1GA$WTK&K$j;vfzQXGXlJ*eP z3Zo1eW?eO;-`{ivQ6~OSKWC15CZtSchuhPw@NAwP;{a zfo5i;jQ27%U;Mx|_Oe!dI>a9%nD0HT6o3iv%EP{?ddBSMH}o%f-yB?Ml$+XBJVk%x zo7YfwL|ma7OpM7e*ZeBfhd%LXpZJYPvoDqEYQ)wJz?OE9 zN&>C=g`1O}9NOs@MnR6ZHQ0_s;2$^lSHW?_7nQGnadYO8$#sQx>^j0$s?9C7-gbfl zlP_8e<2394Pw0fajlX=tu0~;07ZKmoi1S0Pzzb+_YPtz(#AZx3J{YqmP14nZo1LJA z2+}HkX-0u8K8f1(K52Y6#Fa)WGOOM89h)fcU@91hE)lj=`;-yn!!oTtP$+s=7wUO# zwr3MdlA}Y5S$Q$JUiM{I^JHIaT}+7pnf2A3K{-H$BxCdMyEAup8aN+K5frd+=9Bas zAgf~02Q>?rKvT~(7LbL8NbCY^TgV^jlF#GT79BQi#ktMra#G)Funs4RQ&RoJU~0V= zO!Cy&9k3VkCfHJIyUVhEot0y-&ofZ$Nl7(FUxWT(OC3UpT~9jf@YZzB9$;P8Qr+EegT@9NFaTQAF@6n zB6SdNMHa_%~>}suSW#%22k9zbcL!**We z%1FjXYa*O2?Bt%_Cb2JF9q33$KeO_;RGua;7SP^^8;d=?w#jk$K%rptTS|9Fu^^g= zUR5jLBlBNU(n?$K)ii70Kn1iEw8u<79shQzxv7`w;Hx~I?Ti?k%f1R|z4K%df<|G1 z%)fZ538ID^qJu5YHZFUQcnx5$eY1g+co^L3oDE)b3SsLd1z?>Q26AaW1X>4H|F{tq zi`nsd_Cr4exk>@u7x)}+5vGxg!rsu#KlYM+fpLz1uVgw8N^~~A#5&x;2I)hiF}KB^ zd~2^IsJw4JR4E=rw0&*hwCU0?i<;Iu{D^hXy>yGonwN#W9~m}>xnnZAORD*U^5u(@ z%5ni`adbAXN7dWop=XI!y*x{S4}yEoDa#Bp>={jzxIQt1t{VMWq0aCdaTv&dP?$QL z!16VkSSv8q1M1HZp%?yIl%;bS?o=-0BVJJHAw7Ob%(tGI?zMdn=E>%cqeD=)H<9CEd?LreCkW1hGcvWeD0zFWQC9w*xjNmhQ zTADv7r;q+zM&;Lc+yOutCH28AF2~hM3ZoxpZ2ou}gXqU+Q|VncSQeIERY#D2PZCOhN3`^h$XIp%t zBM{VoDqkMc(XXYsbh)Ev2k)+pl*Aoz)sga-mShf=Z!bPdn`oZsIYNGv5sed8t&Oz3 zCvu?-o%`xHlvLGBDhXL^JVvFrjoNN=Ly1ySr7v)6*gWA$kCke|I;lR$OXrpRhc!rewVG1VCe*SW7u9%@12iTI2f!JOzpuN7R-S~#kpl}6MOu#w` ziiSn-kT&%EDgJFW-^Bj=$FXX*CdsnHjgElBbilln!(;X$uDZI@abotjZRlZ z^ef(qAZ)MqHJ3F!SFvt$-wgvNHRYhSz{zkEFtTj&n+i><37n|P~T2Xs7@TB+h&93t$!nvwUg>Y^h-FRCnDs_9;5A+pbxJlfMM}bH`x24 zQ~#dD{9)vQo%dTGRrRDU#0Bi)raIlkossp4P$!gyqe!R5&m%NaRkx{-%xvM za}bi!FS5te!r<%?$VyH}&Yml27_iFIbz?q*?waY-3Qg?g5K%KL$ARpXCw zpXiI{mr2;%@f@la)yp{iIixxJB8{3v_5Gs=FTP==Ph#oSKUEf`kJk9vaZ)u+u^e50 z>oiq!iLo#f_J!Z}kii2dL1E+{FFfeSu6v{WB_Gz0z+Z9pnwM{Me{r_U0K&cYjs$ud zBMbo+$uV*V>*A(i1qm2P+cBHcqmx|Hu9x%aSKI*%w5s!we^S9~j9hAoC6|x`>ARc1 z0}UA4GcHtU$J*$}^T$Kcnz)|1R%V{~cCSKGmfv- zJ#VPXX)0^{@5TFuR{t2PFnj;ztN$j^KN2KnjLsHRW>@^DY{36(k0~`N|Bt%Q4Qg(; zFcDqxM&n}x+a$L4uNVX zEAn(>E8a#U*l}uNU4PuopJ27)EvRU<^?ol}xrUtnh-EYOZsj+TVedKRw#jjblZ3Kwl7M_`kJ3w;&M!zq9f2S(Ao)&Gi~)vQR>iv+%x)8|2F3ycJbS z0;V9}dA9fjPGW@4dgkT{Bx=X8aOhy(O-UYB!<0Wh?jQcNl)A=qk^r)wQcR+wNF>uY zX`KYC@GC`p9nruL?z+aSf9gpoRco29%@$Cg`QUVMW&Dk3><6>oU)ag4eaEX7fndGx zSGJts4__3=x`DC(x~lNfv&wl!hhdb^-!S37xt;maq}&z+jMPF;XJx3}xskDx{E224 z3bbLdy~0pPEu^otv*(Bz`>|)c%>Qyzei1%64!M8#U6tiMi%|-}ceG>$D^z+#d!FFyxI1 z7krGrQyjU8u=v*C$l9hNV~ULYS{#eb`SsTKc}C1>x%Ni+{YNZ7NC5b>x6Fua)5Ogs z#ZV}p{fC?s65OjhV$3YQmAfG71LR0MH8=lok)*io>jpN<;B#QtBwx+*vZ^E5tJeV_WZjH-%- z2P^q2FhCq;>kM^XH~h_K8JAB~$OKt@ki=Dn`U#*pUO;J0{KMM$twEstSHLml|973c z2)VI*RgS(a%1i2Px;q#=)ZFY8HUCN1$xfyHPK^?zRQl z=iXjEZ-B zl`&JQwo~Z-EPusMCHA#WnjB2g3#@t*Rx{D`ej~Dd3V<73&nc^R1jl*HeE#|I5iX2m zwv%zdm{C2e#YI$Z;G-E&YB+)w(qLQaK!qH4+X=sPkm$QTexYvJ*N6MMY3P=ux(F4p 
zPZ92lyu?92IAN}nwDEhw1q|(BeZa`slByVoOVo=qjlR`#(dHEW&pWHq%uSJ2wHNUZ z%}6pPt1IIIE!VH7Az-{pZqIss0(I@DV#pU2#v4j6D>dnk<@62kFSY5Qr! zY1|K3PtbNTmB1;gH+;o|bIgYg2}_Z}FELiX*25VLt?G713>6=r5ekVNC(S!K6q^$j z^Y{>XA2fJOG=7&_MRBOBmyv7$##mex`ZDSa!FHw(8|^_oSyGPBC()Z85@JGw9=A2N z>P)R69RZTP#xH0jwgINV&VgBCYc1*dXrc&dg^Le@v?5l<2;V0Eb`&SKwq<}m;#0ntsLZlapg9`U0GSQZipy7ZrS2}UO zn{TyXOVd*R*j#wMDNB>1d+|DUIV>(h=-G28BkkV!rR@L9V!>Q-C1!e{b9BB?d|asi zt(@u})bXVQH9d2wh~;Z^YW7g~uhS?Veq{)W`S>|r%Fuc^CC<2Y*)%~1XBm1C^Q};5 z{S6@K*$8PCI>Tt$JsnMKd1Nnp8mt&H+p z$9Z(q`U^x^f}xtjU&+`Iv&W2d_%%NO0aUokTWK)qM&OJ?GH4`V0p`U4kx*-AJK9@fyzPas0iAxynD5^O@d(oz zZNG>}g-^7*VWEg4{b`#Xp)0%nT02Bf0Ary0QC>ajt^h z|8ZAD(e08RykHS^pWYehMF>JdqvrD1uGtqY-zq(jM+2Y4%?Pgi>Hrgp;))o;F+8!Z zM~?%+0Sn!#jtV$^jH@@XW8j!})(1+9t(0i@v)#xes>#R#zH_fg; z9c8fBg63^Q;u~f%g77=1KjbC4Zf-yMvhO6Da3~w~ir@sU4nZTi7XxDNCf}BkqEMT@ zr}12wFw !=}Q2+Ox10g2m|hZWH!ryxLchtPW}IF1FOmr(Yx% za*t$U=r+TTmR!vk(5=(wRj|+^aAlL=`K5q}ZrXVjTd4L~qOayh3u7ta8Ls`CIqH?< zUWD=nKJWsJ_-%8b)Lj@GcaZdWRE$Gz&CG+naoN6;F82dbYJ^K6yo=9=$9s$$qN-?C zoatbZ?m18)5t&SI4k{yq`)O1TXo-(t^uzS) z>xv-p_lg?sgjZc!CctMN>FTM2cZ4& z3$b?KY-tM}?jK)$OKMAnU#Y+H`U995oD^^WLCJ1F9yRWBwP{XZ;E*X9U``jykg^Dhg4&U-} zJI*_jaFex2fEKV*vyk^7=Fe{PJv|FY(1XD>H{rG5ROmy4p(QP_lVr=3VC;OPmvBT_ z57=3qO%IZLPJ0Nj$k7;cAkY{)XFj%j>FByZS{0K+eC22bBQSWF2(-2z)_etaGLqhC zWiL0rzED=Sp_oboo)y7H*PbQO4`1gO8v{B@1I^W+eD6iAK7VnSrs+$_l5cBt<>z{M zx&K=*3n{KyY0ReKU#$uJKFWHiwxhcSb8E0a_ik(XA^F7LQ8>_!^M=2}DOE?$QzTYN z>RE>8Y$vwll;|v?QaX+Vp!M=JPS%#u!yf0ZQ4@f;YjiufYE;r=doKNKf1ZASxtd4o zoml<()8#FTS#IqwN{Y;2`T{8MS7gY7Q|83WjQLg)v`~P`R?#T}J@#n3Zi{IR>LAem zM4s|GNqivTK0Ny#pCIDb-5T~3E)Q-{?shH`H9aG`b^Cbzi1((NA9Wf5kBRt%-D*{N zUrnUfakIZf^?;v&o?rtrEK2a#kVTAbja?AIYJHFJKyVsHMPbD9cme_~gtQnEI61VW)J02PyUesPmZPSvc);_wu1!>PF!t2pXV(rO<`u$$`ew~g7+_r4Ia5?mP;IM0J z;}@HYnF=m*-@#PB6&0og*K|1L-cHvAsm^2fS;MtxffUmt?yNbzUC}T;));W48_v>_m8{?y743bdh-4A(nTYQ zm#O2uh5K17HSV0e{L#PI7*6qCe_t($!6y#>FHNf;Ga1_!;aE2voUEYev!hbCpa)IF zt$mdC^y`)-M?sfxCZn&EW%seSoSh#zb58CByPuqv|Yg7x=8gX1U> zC_+wWvh?y;E4T6jb>~-4+BGwB_Pl%)-SN>AlyEhZ=RMcmmO!T-*_-VK`=8nQFB3K5 z3%7#iiF#|5fv)TZGwGfya`-8M-@1&Py5z#_B2nNghA@fUNyIqV=n>qqezDZYeIiMz z+pwhEksj&=jN~T_j*Q$KD*zmwR|9SDhmP_L##%C=QYDHqPoJ||E8=nrN?b@cO2Dk)3Jx-K&Ph}B{;TRB$U6K-lHN=>y2ouqo3}A;{ zoLY-#^8dR*cAq8>wmk6PUyiMCGtEr~YSy3s-S_Qim!!%6{PoX~K>VVlBbvrp_^?f6uu zzmQn$7Fbc2?X)PCg7vf@Be}~r)U;)fO+tDE-zTX3mzX=|a!m9MjDwru?)m#2jE_(t z3{Bi?Hp>w_Ujt=>Kz_$kk{Fl0)HH>bAH*bAjEkG0UGKxebaIvt*IA&82&2Km&c?F3 zBg56L>NK=IDK^ddIzW?QDN_;V?Ad5-=;XmOfwRJYI!Og~4ebT1h8EzxA&WsbbVO7H ziRNKqLh(y6-&n;>VJ?NCI1U)aUl(Qvb!x^I?+{8cVSJQ9{vCe1UIa{4Rj?IZ3{y9p zUy&fto9A$-)8Gik4NR{6J8zTAke6a71CB&E{8SUB~F zL41L$;pCwha1~HF#T2TNZ?TcALn=r@N>&wr2DpHy0}9FdDB#Dg!+T{i2KV|$`YeZ; za(!+5Rn&ZlAkt_6h#keaZ@jH-%KZ8+JvZ!k4+Qu}mO&!kN>8!yCI_T%017rR2^Id7 z$V#T*gI{q3;1boR4DNr9$_5pa%=c4=gD2Ff*vR*GSksC%jc*F$A_+L5e#q2`oLdx8 z@m;l>A8(lVIsJDi_d4SSY4gP0wFvy{e)jn4T^anRmy}=!;U5#QEQS9>BIg71)<#=&(C!#-IY#x94`7`+229Dz5wW)Q_{# zm=i-Dm6$_vvc~6Cg1}6&;O`V)pAhI+Cp1hhA%ZzPKCQAzL&BS43EPG^i(f)|Re1*F zf12}S^jsj0t+}&*Y>Ms~G)dyqpK6)|I8-0$z++L3j-+>BacnnLPjeLMhcV^K8(m}21?lf6Pi2t1lHnkAR1k%NajIIE* za0{E`Tn%2-g;Ol~xR(P%Kabbnch#K`@}m2DauvF7=>BR0 zL87-hP3*)@<=l}UFkPGxpLFNJM#pNu9<)DSNke9S^DK^%sp zW&t_Fk=YNIkEAI+YX3}pU6^Q#ZtSX~E;z$zlCm!!vDU(7UDfQbNLpU+TDBZr%XnR81qa`?C84E`LF?sF;Q)gAN zUIMk{0GS;m^N@bz5fLLikII1aFA6vi&VANuw&x{lZVjE#b35a7nRS2VWTJW03swE> z)Kq_bkKDw+HPeCJ@@1Vc_9qJlr1E)q$%UYdxe3M}7h`E-J$PnSs6UF-J9=J&w1x(O zgCn-P7JsF-K%R7b;W2)<*xd$ zJ`xw{)zx8&8@bGDT`udX^iwVK!h2rL%%Y}iVwq^UO5DD*#b}wTrTmOfGq}-X=XR3D zB*sAaJz3V_^m?FC*>sW$jia4Z!IQXm+^fD#t(~BFV5Fk7QaRbsJ6k 
zqab>6Nc9AtiJ{bAhXI^pd7a(6xG#Yli90i&M<+*WZzhEsvlzX14wM`E?|yd+GqJ5E z$TcefzW`MY#}+b@1^+XG4A2g;V;u6`ktsWF9))z~pSD(7KKaHmV)7 zcI&YafsKBW$od)_UHe)NQpwYO)s7JAErrnwdSWGSBhettk|QW;3Cn5Pif%C6P6dM? zV4)OAJ@+s!24knN3hBYX#NS}ow&sGc6-UaZ(`(&X!??IiNkMF^^UC4bRHwFCc^DsX zc)5drk(ra0PjYR;32G=9Pssa#D9qj#vyoQcf4ColPWT#ReR$1gke-h1Fqkziny-#7 zc8zRn9Cz5j#pY20_tvDYuPkC5QyK3XEw-ntR~c{Qb;CVs02#VBoPyDuuDubS-7?&= zr_-dOa+$DszeEfEOwOB9vktD;SMtygSgA?-6o$2%I;oo6e=kHDuq|9L@;IExs;#F# zai!Iolz?@)V9f`OTdIlCB9VU#W!JEaY0Y zlLN-k9#0kk+$Z)W^u_Uibkb)|Jm#oAyzu&7JPJu#R{hD#T=?7ucinik(3Vr&+hphx z|6nloc%N3g1w+dU?1EU&;wno*K46lOhAe@`QKtucnh%nfTc$z9IVOgPd`s3MoV9|U zuoA`Igj+pNNC^@)N3PF2?y+3lKQh#tEd?{2%5vWu{ioY<-oII2P+dvyb!LkVUUP+y zE?vN18~j0DE(!I58fIK{j-rc+AlBF^&!Cv>=;x7`j791b$7(s}X9BuD;b2_&{@79m zR?@ssa(VuYdEi&XiS8tW@|;srWthk)jb8A9!t&CN!00JR|5WEG4ZFFgt!1RlMQ&pV z$gz+JU42hbm0P&lc~4{ry|BoP#&h@- zaG<)SyFaOYv-fURQ|9Y@cC12n6mX;RzU>l2q>}nf+oeNmyDL(MF7}ya-fF6S*>_q9 zEzTQ9!W&ljLEA`mYbp&Y)XZ2rba`hq5MIq2DIeS458wCRcDqw9toWZ{Eat&tywU7> zr&;3n-vIZ%RZEOt@8tY3c?6sB-*B-QZX@$8==HT#8%39U8y;_GONI!g+{fzg2ctMz SynTlVMovmuvgVU<@c#lG=@t?I literal 0 HcmV?d00001 diff --git a/deploy/cpp_infer/include/config.h b/deploy/cpp_infer/include/config.h index dbfbc2df..cd02a997 100644 --- a/deploy/cpp_infer/include/config.h +++ b/deploy/cpp_infer/include/config.h @@ -49,6 +49,8 @@ public: this->det_db_unclip_ratio = stod(config_map_["det_db_unclip_ratio"]); + this->use_polygon_score = bool(stoi(config_map_["use_polygon_score"])); + this->det_model_dir.assign(config_map_["det_model_dir"]); this->rec_model_dir.assign(config_map_["rec_model_dir"]); @@ -86,6 +88,8 @@ public: double det_db_unclip_ratio = 2.0; + bool use_polygon_score = false; + std::string det_model_dir; std::string rec_model_dir; diff --git a/deploy/cpp_infer/include/ocr_det.h b/deploy/cpp_infer/include/ocr_det.h index bab9c95f..18318c9c 100644 --- a/deploy/cpp_infer/include/ocr_det.h +++ b/deploy/cpp_infer/include/ocr_det.h @@ -44,7 +44,8 @@ public: const bool &use_mkldnn, const int &max_side_len, const double &det_db_thresh, const double &det_db_box_thresh, - const double &det_db_unclip_ratio, const bool &visualize, + const double &det_db_unclip_ratio, + const bool &use_polygon_score, const bool &visualize, const bool &use_tensorrt, const bool &use_fp16) { this->use_gpu_ = use_gpu; this->gpu_id_ = gpu_id; @@ -57,6 +58,7 @@ public: this->det_db_thresh_ = det_db_thresh; this->det_db_box_thresh_ = det_db_box_thresh; this->det_db_unclip_ratio_ = det_db_unclip_ratio; + this->use_polygon_score_ = use_polygon_score; this->visualize_ = visualize; this->use_tensorrt_ = use_tensorrt; @@ -85,6 +87,7 @@ private: double det_db_thresh_ = 0.3; double det_db_box_thresh_ = 0.5; double det_db_unclip_ratio_ = 2.0; + bool use_polygon_score_ = false; bool visualize_ = true; bool use_tensorrt_ = false; diff --git a/deploy/cpp_infer/include/postprocess_op.h b/deploy/cpp_infer/include/postprocess_op.h index 44ca3531..b384b79b 100644 --- a/deploy/cpp_infer/include/postprocess_op.h +++ b/deploy/cpp_infer/include/postprocess_op.h @@ -51,10 +51,12 @@ public: float &ssid); float BoxScoreFast(std::vector> box_array, cv::Mat pred); + float PolygonScoreAcc(std::vector contour, cv::Mat pred); std::vector>> BoxesFromBitmap(const cv::Mat pred, const cv::Mat bitmap, - const float &box_thresh, const float &det_db_unclip_ratio); + const float &box_thresh, const float &det_db_unclip_ratio, + const bool &use_polygon_score); std::vector>> FilterTagDetRes(std::vector>> boxes, diff --git a/deploy/cpp_infer/include/utility.h 
b/deploy/cpp_infer/include/utility.h index 367e37e4..6e8173e0 100644 --- a/deploy/cpp_infer/include/utility.h +++ b/deploy/cpp_infer/include/utility.h @@ -44,6 +44,9 @@ public: inline static size_t argmax(ForwardIterator first, ForwardIterator last) { return std::distance(first, std::max_element(first, last)); } + + static void GetAllFiles(const char *dir_name, + std::vector &all_inputs); }; } // namespace PaddleOCR \ No newline at end of file diff --git a/deploy/cpp_infer/inference b/deploy/cpp_infer/inference new file mode 120000 index 00000000..c7785f47 --- /dev/null +++ b/deploy/cpp_infer/inference @@ -0,0 +1 @@ +/paddle/test/PaddleOCR/deploy/cpp_infer/inference \ No newline at end of file diff --git a/deploy/cpp_infer/readme.md b/deploy/cpp_infer/readme.md index 3e5c1286..6a57044b 100644 --- a/deploy/cpp_infer/readme.md +++ b/deploy/cpp_infer/readme.md @@ -74,12 +74,26 @@ opencv3/ * 有2种方式获取Paddle预测库,下面进行详细介绍。 -#### 1.2.1 预测库源码编译 + +#### 1.2.1 直接下载安装 + +* [Paddle预测库官网](https://www.paddlepaddle.org.cn/documentation/docs/zh/2.0/guides/05_inference_deployment/inference/build_and_install_lib_cn.html) 上提供了不同cuda版本的Linux预测库,可以在官网查看并选择合适的预测库版本(*建议选择paddle版本>=2.0.1版本的预测库* )。 + +* 下载之后使用下面的方法解压。 + +``` +tar -xf paddle_inference.tgz +``` + +最终会在当前的文件夹中生成`paddle_inference/`的子文件夹。 + +#### 1.2.2 预测库源码编译 * 如果希望获取最新预测库特性,可以从Paddle github上克隆最新代码,源码编译预测库。 -* 可以参考[Paddle预测库官网](https://www.paddlepaddle.org.cn/documentation/docs/zh/advanced_guide/inference_deployment/inference/build_and_install_lib_cn.html)的说明,从github上获取Paddle代码,然后进行编译,生成最新的预测库。使用git获取代码方法如下。 +* 可以参考[Paddle预测库安装编译说明](https://www.paddlepaddle.org.cn/documentation/docs/zh/2.0/guides/05_inference_deployment/inference/build_and_install_lib_cn.html#congyuanmabianyi) 的说明,从github上获取Paddle代码,然后进行编译,生成最新的预测库。使用git获取代码方法如下。 ```shell git clone https://github.com/PaddlePaddle/Paddle.git +git checkout release/2.1 ``` * 进入Paddle目录后,编译方法如下。 @@ -102,7 +116,7 @@ make -j make inference_lib_dist ``` -更多编译参数选项可以参考Paddle C++预测库官网:[https://www.paddlepaddle.org.cn/documentation/docs/zh/advanced_guide/inference_deployment/inference/build_and_install_lib_cn.html](https://www.paddlepaddle.org.cn/documentation/docs/zh/advanced_guide/inference_deployment/inference/build_and_install_lib_cn.html)。 +更多编译参数选项介绍可以参考[文档说明](https://www.paddlepaddle.org.cn/documentation/docs/zh/2.0/guides/05_inference_deployment/inference/build_and_install_lib_cn.html#congyuanmabianyi)。 * 编译完成之后,可以在`build/paddle_inference_install_dir/`文件下看到生成了以下文件及文件夹。 @@ -117,19 +131,6 @@ build/paddle_inference_install_dir/ 其中`paddle`就是C++预测所需的Paddle库,`version.txt`中包含当前预测库的版本信息。 -#### 1.2.2 直接下载安装 - -* [Paddle预测库官网](https://www.paddlepaddle.org.cn/documentation/docs/zh/advanced_guide/inference_deployment/inference/build_and_install_lib_cn.html)上提供了不同cuda版本的Linux预测库,可以在官网查看并选择合适的预测库版本(*建议选择paddle版本>=2.0.1版本的预测库* )。 - - -* 下载之后使用下面的方法解压。 - -``` -tar -xf paddle_inference.tgz -``` - -最终会在当前的文件夹中生成`paddle_inference/`的子文件夹。 - ## 2 开始运行 @@ -140,11 +141,11 @@ tar -xf paddle_inference.tgz ``` inference/ |-- det_db -| |--inference.pdparams -| |--inference.pdimodel +| |--inference.pdiparams +| |--inference.pdmodel |-- rec_rcnn -| |--inference.pdparams -| |--inference.pdparams +| |--inference.pdiparams +| |--inference.pdmodel ``` @@ -183,7 +184,7 @@ cmake .. 
\ make -j ``` -`OPENCV_DIR`为opencv编译安装的地址;`LIB_DIR`为下载(`paddle_inference`文件夹)或者编译生成的Paddle预测库地址(`build/paddle_inference_install_dir`文件夹);`CUDA_LIB_DIR`为cuda库文件地址,在docker中;为`/usr/local/cuda/lib64`;`CUDNN_LIB_DIR`为cudnn库文件地址,在docker中为`/usr/lib/x86_64-linux-gnu/`。 +`OPENCV_DIR`为opencv编译安装的地址;`LIB_DIR`为下载(`paddle_inference`文件夹)或者编译生成的Paddle预测库地址(`build/paddle_inference_install_dir`文件夹);`CUDA_LIB_DIR`为cuda库文件地址,在docker中为`/usr/local/cuda/lib64`;`CUDNN_LIB_DIR`为cudnn库文件地址,在docker中为`/usr/lib/x86_64-linux-gnu/`。 * 编译完成之后,会在`build`文件夹下生成一个名为`ocr_system`的可执行文件。 @@ -211,6 +212,7 @@ max_side_len 960 # 输入图像长宽大于960时,等比例缩放图像,使 det_db_thresh 0.3 # 用于过滤DB预测的二值化图像,设置为0.-0.3对结果影响不明显 det_db_box_thresh 0.5 # DB后处理过滤box的阈值,如果检测存在漏框情况,可酌情减小 det_db_unclip_ratio 1.6 # 表示文本框的紧致程度,越小则文本框更靠近文本 +use_polygon_score 1 # 是否使用多边形框计算bbox score,0表示使用矩形框计算。矩形框计算速度更快,多边形框对弯曲文本区域计算更准确。 det_model_dir ./inference/det_db # 检测模型inference model地址 # cls config @@ -226,15 +228,15 @@ char_list_file ../../ppocr/utils/ppocr_keys_v1.txt # 字典文件 visualize 1 # 是否对结果进行可视化,为1时,会在当前文件夹下保存文件名为`ocr_vis.png`的预测结果。 ``` -* PaddleOCR也支持多语言的预测,更多细节可以参考[识别文档](../../doc/doc_ch/recognition.md)中的多语言字典与模型部分。 +* PaddleOCR也支持多语言的预测,更多支持的语言和模型可以参考[识别文档](../../doc/doc_ch/recognition.md)中的多语言字典与模型部分,如果希望进行多语言预测,只需将修改`tools/config.txt`中的`char_list_file`(字典文件路径)以及`rec_model_dir`(inference模型路径)字段即可。 最终屏幕上会输出检测结果如下。
- +
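
The runtime options listed above all come from the plain key/value file `tools/config.txt`. As a minimal sketch of how such a file can be consumed (this is not the project's actual `OCRConfig` class; `LoadConfig` and the fallback values are invented for the example), each non-comment line is split into a key and a value, numeric fields go through `stod`/`stoi`, and 0/1 flags such as the new `use_polygon_score` are interpreted as booleans:

```cpp
// config_sketch.cpp -- minimal illustration (not the project's OCRConfig class)
// of parsing a whitespace-separated "key value" file such as tools/config.txt.
#include <fstream>
#include <iostream>
#include <map>
#include <sstream>
#include <string>

std::map<std::string, std::string> LoadConfig(const std::string &path) {
  std::map<std::string, std::string> cfg;
  std::ifstream in(path);
  std::string line;
  while (std::getline(in, line)) {
    if (line.empty() || line[0] == '#') // skip blank lines and comments
      continue;
    std::istringstream ss(line);
    std::string key, value;
    if (ss >> key >> value) // first token is the key, second the value
      cfg[key] = value;
  }
  return cfg;
}

int main() {
  std::map<std::string, std::string> cfg = LoadConfig("tools/config.txt");
  // Numeric fields are converted with stod/stoi; 0/1 flags such as
  // use_polygon_score are then interpreted as booleans.
  double unclip_ratio = cfg.count("det_db_unclip_ratio")
                            ? std::stod(cfg["det_db_unclip_ratio"])
                            : 1.6; // fallback only for this sketch
  bool use_polygon_score = cfg.count("use_polygon_score") &&
                           std::stoi(cfg["use_polygon_score"]) != 0;
  std::cout << "det_db_unclip_ratio = " << unclip_ratio
            << ", use_polygon_score = " << use_polygon_score << std::endl;
  return 0;
}
```

A new switch like `use_polygon_score` therefore only needs one extra line in `tools/config.txt` plus the corresponding lookup in the config class, which is what this patch adds in `include/config.h`.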
### 2.3 注意 -* 在使用Paddle预测库时,推荐使用2.0.0-beta0版本的预测库。 +* 在使用Paddle预测库时,推荐使用2.0.0版本的预测库。 diff --git a/deploy/cpp_infer/readme_en.md b/deploy/cpp_infer/readme_en.md index a5197732..6c0a18db 100644 --- a/deploy/cpp_infer/readme_en.md +++ b/deploy/cpp_infer/readme_en.md @@ -76,17 +76,30 @@ opencv3/ * There are 2 ways to obtain the Paddle inference library, described in detail below. +#### 1.2.1 Direct download and installation -#### 1.2.1 Compile from the source code +[Paddle inference library official website](https://www.paddlepaddle.org.cn/documentation/docs/zh/2.0/guides/05_inference_deployment/inference/build_and_install_lib_cn.html). You can view and select the appropriate version of the inference library on the official website. + + +* After downloading, use the following method to uncompress. + +``` +tar -xf paddle_inference.tgz +``` + +Finally you can see the following files in the folder of `paddle_inference/`. + +#### 1.2.2 Compile from the source code * If you want to get the latest Paddle inference library features, you can download the latest code from Paddle github repository and compile the inference library from the source code. It is recommended to download the inference library with paddle version greater than or equal to 2.0.1. * You can refer to [Paddle inference library] (https://www.paddlepaddle.org.cn/documentation/docs/en/advanced_guide/inference_deployment/inference/build_and_install_lib_en.html) to get the Paddle source code from github, and then compile To generate the latest inference library. The method of using git to access the code is as follows. ```shell git clone https://github.com/PaddlePaddle/Paddle.git +git checkout release/2.1 ``` -* After entering the Paddle directory, the compilation method is as follows. +* After entering the Paddle directory, the commands to compile the paddle inference library are as follows. ```shell rm -rf build @@ -106,7 +119,7 @@ make -j make inference_lib_dist ``` -For more compilation parameter options, please refer to the official website of the Paddle C++ inference library:[https://www.paddlepaddle.org.cn/documentation/docs/en/advanced_guide/inference_deployment/inference/build_and_install_lib_en.html](https://www.paddlepaddle.org.cn/documentation/docs/en/advanced_guide/inference_deployment/inference/build_and_install_lib_en.html). +For more compilation parameter options, please refer to the [document](https://www.paddlepaddle.org.cn/documentation/docs/zh/2.0/guides/05_inference_deployment/inference/build_and_install_lib_cn.html#congyuanmabianyi). * After the compilation process, you can see the following files in the folder of `build/paddle_inference_install_dir/`. @@ -122,22 +135,6 @@ build/paddle_inference_install_dir/ Among them, `paddle` is the Paddle library required for C++ prediction later, and `version.txt` contains the version information of the current inference library. - -#### 1.2.2 Direct download and installation - -* Different cuda versions of the Linux inference library (based on GCC 4.8.2) are provided on the -[Paddle inference library official website](https://www.paddlepaddle.org.cn/documentation/docs/en/advanced_guide/inference_deployment/inference/build_and_install_lib_en.html). You can view and select the appropriate version of the inference library on the official website. - - -* After downloading, use the following method to uncompress. - -``` -tar -xf paddle_inference.tgz -``` - -Finally you can see the following files in the folder of `paddle_inference/`. - - ## 2. 
Compile and run the demo ### 2.1 Export the inference model @@ -147,11 +144,11 @@ Finally you can see the following files in the folder of `paddle_inference/`. ``` inference/ |-- det_db -| |--inference.pdparams -| |--inference.pdimodel +| |--inference.pdiparams +| |--inference.pdmodel |-- rec_rcnn -| |--inference.pdparams -| |--inference.pdparams +| |--inference.pdiparams +| |--inference.pdmodel ``` @@ -220,6 +217,7 @@ max_side_len 960 # Limit the maximum image height and width to 960 det_db_thresh 0.3 # Used to filter the binarized image of DB prediction, setting 0.-0.3 has no obvious effect on the result det_db_box_thresh 0.5 # DDB post-processing filter box threshold, if there is a missing box detected, it can be reduced as appropriate det_db_unclip_ratio 1.6 # Indicates the compactness of the text box, the smaller the value, the closer the text box to the text +use_polygon_score 1 # Whether to use polygon box to calculate bbox score, 0 means to use rectangle box to calculate. Use rectangular box to calculate faster, and polygonal box more accurate for curved text area. det_model_dir ./inference/det_db # Address of detection inference model # cls config @@ -235,16 +233,16 @@ char_list_file ../../ppocr/utils/ppocr_keys_v1.txt # dictionary file visualize 1 # Whether to visualize the results,when it is set as 1, The prediction result will be save in the image file `./ocr_vis.png`. ``` -* Multi-language inference is also supported in PaddleOCR, for more details, please refer to part of multi-language dictionaries and models in [recognition tutorial](../../doc/doc_en/recognition_en.md). +* Multi-language inference is also supported in PaddleOCR, you can refer to [recognition tutorial](../../doc/doc_en/recognition_en.md) for more supported languages and models in PaddleOCR. Specifically, if you want to infer using multi-language models, you just need to modify values of `char_list_file` and `rec_model_dir` in file `tools/config.txt`. The detection results will be shown on the screen, which is as follows.
- +
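
To make the new `use_polygon_score` option concrete: when it is enabled, the box score is the mean of the DB probability map inside the detected polygon rather than inside its minimum-area rectangle. The sketch below illustrates that idea with OpenCV; it is a simplified stand-in for the `PolygonScoreAcc` function this patch adds to `src/postprocess_op.cpp`, and `PolygonMeanScore`, the synthetic probability map, and the sample polygon are invented for the example.

```cpp
// polygon_score_sketch.cpp -- mean of the DB probability map inside a polygon,
// the idea selected by use_polygon_score=1 (BoxScoreFast uses the rotated
// rectangle instead). Names and test data below are illustrative only.
#include <iostream>
#include <opencv2/opencv.hpp>
#include <vector>

float PolygonMeanScore(const std::vector<cv::Point> &contour,
                       const cv::Mat &pred /* CV_32FC1 probability map */) {
  // Clip the contour's bounding box to the map so the crop below stays valid.
  cv::Rect bbox =
      cv::boundingRect(contour) & cv::Rect(0, 0, pred.cols, pred.rows);
  if (bbox.width <= 0 || bbox.height <= 0)
    return 0.f;

  // Rasterise the polygon, shifted into the crop's coordinate frame, as a mask.
  cv::Mat mask = cv::Mat::zeros(bbox.size(), CV_8UC1);
  std::vector<std::vector<cv::Point>> shifted(1);
  for (const cv::Point &p : contour)
    shifted[0].push_back(cv::Point(p.x - bbox.x, p.y - bbox.y));
  cv::fillPoly(mask, shifted, cv::Scalar(1));

  // Average the probability map only where the mask is set.
  return static_cast<float>(cv::mean(pred(bbox), mask)[0]);
}

int main() {
  cv::Mat pred(100, 100, CV_32FC1, cv::Scalar(0.2f));
  pred(cv::Rect(20, 20, 41, 31)).setTo(cv::Scalar(0.9f)); // fake text region
  std::vector<cv::Point> poly = {cv::Point(20, 20), cv::Point(60, 20),
                                 cv::Point(60, 50), cv::Point(20, 50)};
  std::cout << "polygon score: " << PolygonMeanScore(poly, pred) << std::endl;
  return 0;
}
```

Rectangle scoring (`use_polygon_score 0`) remains available and is faster, while the polygon mask follows curved text regions more closely, as noted in the config comments above.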
### 2.3 Notes -* Paddle2.0.0-beta0 inference model library is recommended for this toturial. +* Paddle2.0.0 inference model library is recommended for this toturial. diff --git a/deploy/cpp_infer/src/clipper.cpp b/deploy/cpp_infer/src/clipper.cpp index b35c25f2..5f5d2216 100644 --- a/deploy/cpp_infer/src/clipper.cpp +++ b/deploy/cpp_infer/src/clipper.cpp @@ -668,7 +668,7 @@ void DisposeOutPts(OutPt *&pp) { //------------------------------------------------------------------------------ inline void InitEdge(TEdge *e, TEdge *eNext, TEdge *ePrev, const IntPoint &Pt) { - std::memset(e, 0, sizeof(TEdge)); + std::memset(e, int(0), sizeof(TEdge)); e->Next = eNext; e->Prev = ePrev; e->Curr = Pt; @@ -1895,17 +1895,17 @@ void Clipper::InsertLocalMinimaIntoAEL(const cInt botY) { TEdge *rb = lm->RightBound; OutPt *Op1 = 0; - if (!lb) { + if (!lb || !rb) { // nb: don't insert LB into either AEL or SEL InsertEdgeIntoAEL(rb, 0); SetWindingCount(*rb); if (IsContributing(*rb)) Op1 = AddOutPt(rb, rb->Bot); - } else if (!rb) { - InsertEdgeIntoAEL(lb, 0); - SetWindingCount(*lb); - if (IsContributing(*lb)) - Op1 = AddOutPt(lb, lb->Bot); + //} else if (!rb) { + // InsertEdgeIntoAEL(lb, 0); + // SetWindingCount(*lb); + // if (IsContributing(*lb)) + // Op1 = AddOutPt(lb, lb->Bot); InsertScanbeam(lb->Top.Y); } else { InsertEdgeIntoAEL(lb, 0); @@ -2547,13 +2547,13 @@ void Clipper::ProcessHorizontal(TEdge *horzEdge) { if (dir == dLeftToRight) { maxIt = m_Maxima.begin(); while (maxIt != m_Maxima.end() && *maxIt <= horzEdge->Bot.X) - maxIt++; + ++maxIt; if (maxIt != m_Maxima.end() && *maxIt >= eLastHorz->Top.X) maxIt = m_Maxima.end(); } else { maxRit = m_Maxima.rbegin(); while (maxRit != m_Maxima.rend() && *maxRit > horzEdge->Bot.X) - maxRit++; + ++maxRit; if (maxRit != m_Maxima.rend() && *maxRit <= eLastHorz->Top.X) maxRit = m_Maxima.rend(); } @@ -2576,13 +2576,13 @@ void Clipper::ProcessHorizontal(TEdge *horzEdge) { while (maxIt != m_Maxima.end() && *maxIt < e->Curr.X) { if (horzEdge->OutIdx >= 0 && !IsOpen) AddOutPt(horzEdge, IntPoint(*maxIt, horzEdge->Bot.Y)); - maxIt++; + ++maxIt; } } else { while (maxRit != m_Maxima.rend() && *maxRit > e->Curr.X) { if (horzEdge->OutIdx >= 0 && !IsOpen) AddOutPt(horzEdge, IntPoint(*maxRit, horzEdge->Bot.Y)); - maxRit++; + ++maxRit; } } }; diff --git a/deploy/cpp_infer/src/config.cpp b/deploy/cpp_infer/src/config.cpp index 303c3c12..a0ac1d08 100644 --- a/deploy/cpp_infer/src/config.cpp +++ b/deploy/cpp_infer/src/config.cpp @@ -21,10 +21,10 @@ std::vector OCRConfig::split(const std::string &str, std::vector res; if ("" == str) return res; - char *strs = new char[str.length() + 1]; + char strs[str.length() + 1]; std::strcpy(strs, str.c_str()); - char *d = new char[delim.length() + 1]; + char d[delim.length() + 1]; std::strcpy(d, delim.c_str()); char *p = std::strtok(strs, d); @@ -61,4 +61,4 @@ void OCRConfig::PrintConfigInfo() { std::cout << "=======End of Paddle OCR inference config======" << std::endl; } -} // namespace PaddleOCR \ No newline at end of file +} // namespace PaddleOCR diff --git a/deploy/cpp_infer/src/main.cpp b/deploy/cpp_infer/src/main.cpp index 49c68f55..f25e674b 100644 --- a/deploy/cpp_infer/src/main.cpp +++ b/deploy/cpp_infer/src/main.cpp @@ -27,9 +27,12 @@ #include #include +#include #include #include #include +#include +#include using namespace std; using namespace cv; @@ -47,14 +50,15 @@ int main(int argc, char **argv) { config.PrintConfigInfo(); std::string img_path(argv[2]); - - cv::Mat srcimg = cv::imread(img_path, cv::IMREAD_COLOR); + std::vector 
all_img_names; + Utility::GetAllFiles((char *)img_path.c_str(), all_img_names); DBDetector det(config.det_model_dir, config.use_gpu, config.gpu_id, config.gpu_mem, config.cpu_math_library_num_threads, config.use_mkldnn, config.max_side_len, config.det_db_thresh, config.det_db_box_thresh, config.det_db_unclip_ratio, - config.visualize, config.use_tensorrt, config.use_fp16); + config.use_polygon_score, config.visualize, + config.use_tensorrt, config.use_fp16); Classifier *cls = nullptr; if (config.use_angle_cls == true) { @@ -70,18 +74,30 @@ int main(int argc, char **argv) { config.use_tensorrt, config.use_fp16); auto start = std::chrono::system_clock::now(); - std::vector>> boxes; - det.Run(srcimg, boxes); - - rec.Run(boxes, srcimg, cls); - auto end = std::chrono::system_clock::now(); - auto duration = - std::chrono::duration_cast(end - start); - std::cout << "Cost " - << double(duration.count()) * - std::chrono::microseconds::period::num / - std::chrono::microseconds::period::den - << "s" << std::endl; + + for (auto img_dir : all_img_names) { + LOG(INFO) << "The predict img: " << img_dir; + + cv::Mat srcimg = cv::imread(img_dir, cv::IMREAD_COLOR); + if (!srcimg.data) { + std::cerr << "[ERROR] image read failed! image path: " << img_path + << "\n"; + exit(1); + } + std::vector>> boxes; + + det.Run(srcimg, boxes); + + rec.Run(boxes, srcimg, cls); + auto end = std::chrono::system_clock::now(); + auto duration = + std::chrono::duration_cast(end - start); + std::cout << "Cost " + << double(duration.count()) * + std::chrono::microseconds::period::num / + std::chrono::microseconds::period::den + << "s" << std::endl; + } return 0; } diff --git a/deploy/cpp_infer/src/ocr_det.cpp b/deploy/cpp_infer/src/ocr_det.cpp index 489940f0..33ad468a 100644 --- a/deploy/cpp_infer/src/ocr_det.cpp +++ b/deploy/cpp_infer/src/ocr_det.cpp @@ -30,6 +30,42 @@ void DBDetector::LoadModel(const std::string &model_dir) { this->use_fp16_ ? 
paddle_infer::Config::Precision::kHalf : paddle_infer::Config::Precision::kFloat32, false, false); + std::map> min_input_shape = { + {"x", {1, 3, 50, 50}}, + {"conv2d_92.tmp_0", {1, 96, 20, 20}}, + {"conv2d_91.tmp_0", {1, 96, 10, 10}}, + {"nearest_interp_v2_1.tmp_0", {1, 96, 10, 10}}, + {"nearest_interp_v2_2.tmp_0", {1, 96, 20, 20}}, + {"nearest_interp_v2_3.tmp_0", {1, 24, 20, 20}}, + {"nearest_interp_v2_4.tmp_0", {1, 24, 20, 20}}, + {"nearest_interp_v2_5.tmp_0", {1, 24, 20, 20}}, + {"elementwise_add_7", {1, 56, 2, 2}}, + {"nearest_interp_v2_0.tmp_0", {1, 96, 2, 2}}}; + std::map> max_input_shape = { + {"x", {1, 3, this->max_side_len_, this->max_side_len_}}, + {"conv2d_92.tmp_0", {1, 96, 400, 400}}, + {"conv2d_91.tmp_0", {1, 96, 200, 200}}, + {"nearest_interp_v2_1.tmp_0", {1, 96, 200, 200}}, + {"nearest_interp_v2_2.tmp_0", {1, 96, 400, 400}}, + {"nearest_interp_v2_3.tmp_0", {1, 24, 400, 400}}, + {"nearest_interp_v2_4.tmp_0", {1, 24, 400, 400}}, + {"nearest_interp_v2_5.tmp_0", {1, 24, 400, 400}}, + {"elementwise_add_7", {1, 56, 400, 400}}, + {"nearest_interp_v2_0.tmp_0", {1, 96, 400, 400}}}; + std::map> opt_input_shape = { + {"x", {1, 3, 640, 640}}, + {"conv2d_92.tmp_0", {1, 96, 160, 160}}, + {"conv2d_91.tmp_0", {1, 96, 80, 80}}, + {"nearest_interp_v2_1.tmp_0", {1, 96, 80, 80}}, + {"nearest_interp_v2_2.tmp_0", {1, 96, 160, 160}}, + {"nearest_interp_v2_3.tmp_0", {1, 24, 160, 160}}, + {"nearest_interp_v2_4.tmp_0", {1, 24, 160, 160}}, + {"nearest_interp_v2_5.tmp_0", {1, 24, 160, 160}}, + {"elementwise_add_7", {1, 56, 40, 40}}, + {"nearest_interp_v2_0.tmp_0", {1, 96, 40, 40}}}; + + config.SetTRTDynamicShapeInfo(min_input_shape, max_input_shape, + opt_input_shape); } } else { config.DisableGpu(); @@ -48,7 +84,7 @@ void DBDetector::LoadModel(const std::string &model_dir) { config.SwitchIrOptim(true); config.EnableMemoryOptim(); - config.DisableGlogInfo(); + // config.DisableGlogInfo(); this->predictor_ = CreatePredictor(config); } @@ -109,9 +145,9 @@ void DBDetector::Run(cv::Mat &img, cv::Mat dilation_map; cv::Mat dila_ele = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(2, 2)); cv::dilate(bit_map, dilation_map, dila_ele); - boxes = post_processor_.BoxesFromBitmap(pred_map, dilation_map, - this->det_db_box_thresh_, - this->det_db_unclip_ratio_); + boxes = post_processor_.BoxesFromBitmap( + pred_map, dilation_map, this->det_db_box_thresh_, + this->det_db_unclip_ratio_, this->use_polygon_score_); boxes = post_processor_.FilterTagDetRes(boxes, ratio_h, ratio_w, srcimg); diff --git a/deploy/cpp_infer/src/ocr_rec.cpp b/deploy/cpp_infer/src/ocr_rec.cpp index 76873dad..b09282b0 100644 --- a/deploy/cpp_infer/src/ocr_rec.cpp +++ b/deploy/cpp_infer/src/ocr_rec.cpp @@ -25,8 +25,9 @@ void CRNNRecognizer::Run(std::vector>> boxes, std::cout << "The predicted text is :" << std::endl; int index = 0; - for (int i = boxes.size() - 1; i >= 0; i--) { + for (int i = 0; i < boxes.size(); i++) { crop_img = GetRotateCropImage(srcimg, boxes[i]); + if (cls != nullptr) { crop_img = cls->Run(crop_img); } @@ -105,6 +106,15 @@ void CRNNRecognizer::LoadModel(const std::string &model_dir) { this->use_fp16_ ? 
paddle_infer::Config::Precision::kHalf : paddle_infer::Config::Precision::kFloat32, false, false); + std::map> min_input_shape = { + {"x", {1, 3, 32, 10}}}; + std::map> max_input_shape = { + {"x", {1, 3, 32, 2000}}}; + std::map> opt_input_shape = { + {"x", {1, 3, 32, 320}}}; + + config.SetTRTDynamicShapeInfo(min_input_shape, max_input_shape, + opt_input_shape); } } else { config.DisableGpu(); diff --git a/deploy/cpp_infer/src/postprocess_op.cpp b/deploy/cpp_infer/src/postprocess_op.cpp index 8c44a54a..22494d67 100644 --- a/deploy/cpp_infer/src/postprocess_op.cpp +++ b/deploy/cpp_infer/src/postprocess_op.cpp @@ -159,6 +159,52 @@ std::vector> PostProcessor::GetMiniBoxes(cv::RotatedRect box, return array; } +float PostProcessor::PolygonScoreAcc(std::vector contour, + cv::Mat pred) { + int width = pred.cols; + int height = pred.rows; + std::vector box_x; + std::vector box_y; + for (int i = 0; i < contour.size(); ++i) { + box_x.push_back(contour[i].x); + box_y.push_back(contour[i].y); + } + + int xmin = + clamp(int(std::floor(*(std::min_element(box_x.begin(), box_x.end())))), 0, + width - 1); + int xmax = + clamp(int(std::ceil(*(std::max_element(box_x.begin(), box_x.end())))), 0, + width - 1); + int ymin = + clamp(int(std::floor(*(std::min_element(box_y.begin(), box_y.end())))), 0, + height - 1); + int ymax = + clamp(int(std::ceil(*(std::max_element(box_y.begin(), box_y.end())))), 0, + height - 1); + + cv::Mat mask; + mask = cv::Mat::zeros(ymax - ymin + 1, xmax - xmin + 1, CV_8UC1); + + cv::Point *rook_point = new cv::Point[contour.size()]; + + for (int i = 0; i < contour.size(); ++i) { + rook_point[i] = cv::Point(int(box_x[i]) - xmin, int(box_y[i]) - ymin); + } + const cv::Point *ppt[1] = {rook_point}; + int npt[] = {int(contour.size())}; + + cv::fillPoly(mask, ppt, npt, 1, cv::Scalar(1)); + + cv::Mat croppedImg; + pred(cv::Rect(xmin, ymin, xmax - xmin + 1, ymax - ymin + 1)) + .copyTo(croppedImg); + float score = cv::mean(croppedImg, mask)[0]; + + delete[] rook_point; + return score; +} + float PostProcessor::BoxScoreFast(std::vector> box_array, cv::Mat pred) { auto array = box_array; @@ -197,10 +243,9 @@ float PostProcessor::BoxScoreFast(std::vector> box_array, return score; } -std::vector>> -PostProcessor::BoxesFromBitmap(const cv::Mat pred, const cv::Mat bitmap, - const float &box_thresh, - const float &det_db_unclip_ratio) { +std::vector>> PostProcessor::BoxesFromBitmap( + const cv::Mat pred, const cv::Mat bitmap, const float &box_thresh, + const float &det_db_unclip_ratio, const bool &use_polygon_score) { const int min_size = 3; const int max_candidates = 1000; @@ -234,7 +279,12 @@ PostProcessor::BoxesFromBitmap(const cv::Mat pred, const cv::Mat bitmap, } float score; - score = BoxScoreFast(array, pred); + if (use_polygon_score) + /* compute using polygon*/ + score = PolygonScoreAcc(contours[_i], pred); + else + score = BoxScoreFast(array, pred); + if (score < box_thresh) continue; diff --git a/deploy/cpp_infer/src/preprocess_op.cpp b/deploy/cpp_infer/src/preprocess_op.cpp index 87d8dbbd..23c51c20 100644 --- a/deploy/cpp_infer/src/preprocess_op.cpp +++ b/deploy/cpp_infer/src/preprocess_op.cpp @@ -47,16 +47,13 @@ void Normalize::Run(cv::Mat *im, const std::vector &mean, e /= 255.0; } (*im).convertTo(*im, CV_32FC3, e); - for (int h = 0; h < im->rows; h++) { - for (int w = 0; w < im->cols; w++) { - im->at(h, w)[0] = - (im->at(h, w)[0] - mean[0]) * scale[0]; - im->at(h, w)[1] = - (im->at(h, w)[1] - mean[1]) * scale[1]; - im->at(h, w)[2] = - (im->at(h, w)[2] - mean[2]) * scale[2]; - } + 
std::vector bgr_channels(3); + cv::split(*im, bgr_channels); + for (auto i = 0; i < bgr_channels.size(); i++) { + bgr_channels[i].convertTo(bgr_channels[i], CV_32FC1, 1.0 * scale[i], + (0.0 - mean[i]) * scale[i]); } + cv::merge(bgr_channels, *im); } void ResizeImgType0::Run(const cv::Mat &img, cv::Mat &resize_img, @@ -77,28 +74,13 @@ void ResizeImgType0::Run(const cv::Mat &img, cv::Mat &resize_img, int resize_h = int(float(h) * ratio); int resize_w = int(float(w) * ratio); - if (resize_h % 32 == 0) - resize_h = resize_h; - else if (resize_h / 32 < 1 + 1e-5) - resize_h = 32; - else - resize_h = (resize_h / 32) * 32; - if (resize_w % 32 == 0) - resize_w = resize_w; - else if (resize_w / 32 < 1 + 1e-5) - resize_w = 32; - else - resize_w = (resize_w / 32) * 32; - if (!use_tensorrt) { - cv::resize(img, resize_img, cv::Size(resize_w, resize_h)); - ratio_h = float(resize_h) / float(h); - ratio_w = float(resize_w) / float(w); - } else { - cv::resize(img, resize_img, cv::Size(640, 640)); - ratio_h = float(640) / float(h); - ratio_w = float(640) / float(w); - } + resize_h = max(int(round(float(resize_h) / 32) * 32), 32); + resize_w = max(int(round(float(resize_w) / 32) * 32), 32); + + cv::resize(img, resize_img, cv::Size(resize_w, resize_h)); + ratio_h = float(resize_h) / float(h); + ratio_w = float(resize_w) / float(w); } void CrnnResizeImg::Run(const cv::Mat &img, cv::Mat &resize_img, float wh_ratio, @@ -117,23 +99,12 @@ void CrnnResizeImg::Run(const cv::Mat &img, cv::Mat &resize_img, float wh_ratio, resize_w = imgW; else resize_w = int(ceilf(imgH * ratio)); - if (!use_tensorrt) { - cv::resize(img, resize_img, cv::Size(resize_w, imgH), 0.f, 0.f, - cv::INTER_LINEAR); - cv::copyMakeBorder(resize_img, resize_img, 0, 0, 0, - int(imgW - resize_img.cols), cv::BORDER_CONSTANT, - {127, 127, 127}); - } else { - int k = int(img.cols * 32 / img.rows); - if (k >= 100) { - cv::resize(img, resize_img, cv::Size(100, 32), 0.f, 0.f, - cv::INTER_LINEAR); - } else { - cv::resize(img, resize_img, cv::Size(k, 32), 0.f, 0.f, cv::INTER_LINEAR); - cv::copyMakeBorder(resize_img, resize_img, 0, 0, 0, int(100 - k), - cv::BORDER_CONSTANT, {127, 127, 127}); - } - } + + cv::resize(img, resize_img, cv::Size(resize_w, imgH), 0.f, 0.f, + cv::INTER_LINEAR); + cv::copyMakeBorder(resize_img, resize_img, 0, 0, 0, + int(imgW - resize_img.cols), cv::BORDER_CONSTANT, + {127, 127, 127}); } void ClsResizeImg::Run(const cv::Mat &img, cv::Mat &resize_img, @@ -151,15 +122,11 @@ void ClsResizeImg::Run(const cv::Mat &img, cv::Mat &resize_img, else resize_w = int(ceilf(imgH * ratio)); - if (!use_tensorrt) { - cv::resize(img, resize_img, cv::Size(resize_w, imgH), 0.f, 0.f, - cv::INTER_LINEAR); - if (resize_w < imgW) { - cv::copyMakeBorder(resize_img, resize_img, 0, 0, 0, imgW - resize_w, - cv::BORDER_CONSTANT, cv::Scalar(0, 0, 0)); - } - } else { - cv::resize(img, resize_img, cv::Size(100, 32), 0.f, 0.f, cv::INTER_LINEAR); + cv::resize(img, resize_img, cv::Size(resize_w, imgH), 0.f, 0.f, + cv::INTER_LINEAR); + if (resize_w < imgW) { + cv::copyMakeBorder(resize_img, resize_img, 0, 0, 0, imgW - resize_w, + cv::BORDER_CONSTANT, cv::Scalar(0, 0, 0)); } } diff --git a/deploy/cpp_infer/src/utility.cpp b/deploy/cpp_infer/src/utility.cpp index c1c9d938..2cd84f7e 100644 --- a/deploy/cpp_infer/src/utility.cpp +++ b/deploy/cpp_infer/src/utility.cpp @@ -12,12 +12,14 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+#include +#include #include #include +#include +#include #include -#include - namespace PaddleOCR { std::vector Utility::ReadDict(const std::string &path) { @@ -57,4 +59,37 @@ void Utility::VisualizeBboxes( << std::endl; } +// list all files under a directory +void Utility::GetAllFiles(const char *dir_name, + std::vector &all_inputs) { + if (NULL == dir_name) { + std::cout << " dir_name is null ! " << std::endl; + return; + } + struct stat s; + lstat(dir_name, &s); + if (!S_ISDIR(s.st_mode)) { + std::cout << "dir_name is not a valid directory !" << std::endl; + all_inputs.push_back(dir_name); + return; + } else { + struct dirent *filename; // return value for readdir() + DIR *dir; // return value for opendir() + dir = opendir(dir_name); + if (NULL == dir) { + std::cout << "Can not open dir " << dir_name << std::endl; + return; + } + std::cout << "Successfully opened the dir !" << std::endl; + while ((filename = readdir(dir)) != NULL) { + if (strcmp(filename->d_name, ".") == 0 || + strcmp(filename->d_name, "..") == 0) + continue; + // img_dir + std::string("/") + all_inputs[0]; + all_inputs.push_back(dir_name + std::string("/") + + std::string(filename->d_name)); + } + } +} + } // namespace PaddleOCR \ No newline at end of file diff --git a/deploy/cpp_infer/tools/build.sh b/deploy/cpp_infer/tools/build.sh index 60653948..3b36e8f1 100755 --- a/deploy/cpp_infer/tools/build.sh +++ b/deploy/cpp_infer/tools/build.sh @@ -1,7 +1,7 @@ -OPENCV_DIR=your_opencv_dir -LIB_DIR=your_paddle_inference_dir -CUDA_LIB_DIR=your_cuda_lib_dir -CUDNN_LIB_DIR=your_cudnn_lib_dir +OPENCV_DIR=/paddle/test/opencv-3.4.7/opencv3 +LIB_DIR=/paddle/test/PaddleOCR/deploy/paddle_inference +CUDA_LIB_DIR=/usr/local/cuda/lib64 +CUDNN_LIB_DIR=/usr/lib/x86_64-linux-gnu/ BUILD_DIR=build rm -rf ${BUILD_DIR} @@ -18,3 +18,5 @@ cmake .. \ -DCUDA_LIB=${CUDA_LIB_DIR} \ make -j + + diff --git a/deploy/cpp_infer/tools/config.txt b/deploy/cpp_infer/tools/config.txt index 28085ca4..d4d66d65 100644 --- a/deploy/cpp_infer/tools/config.txt +++ b/deploy/cpp_infer/tools/config.txt @@ -3,13 +3,14 @@ use_gpu 0 gpu_id 0 gpu_mem 4000 cpu_math_library_num_threads 10 -use_mkldnn 1 +use_mkldnn 0 # det config max_side_len 960 det_db_thresh 0.3 det_db_box_thresh 0.5 det_db_unclip_ratio 1.6 +use_polygon_score 1 det_model_dir ./inference/ch_ppocr_mobile_v2.0_det_infer/ # cls config @@ -19,10 +20,10 @@ cls_thresh 0.9 # rec config rec_model_dir ./inference/ch_ppocr_mobile_v2.0_rec_infer/ -char_list_file ../../ppocr/utils/ppocr_keys_v1.txt +char_list_file ../../ppocr/utils/ppocr_keys_v1.txt # show the detection results -visualize 1 +visualize 0 # use_tensorrt use_tensorrt 0 -- GitLab
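
For reference, the sketch below strings the new batch pieces together outside of PaddleOCR: it expands a directory argument into individual file paths, much like the `GetAllFiles` helper this patch adds to `src/utility.cpp`, and then reads and times each image in turn as the reworked `main.cpp` loop does. `CollectInputs` and `ProcessImage` are placeholder names (the real pipeline calls `DBDetector::Run` and `CRNNRecognizer::Run`), unreadable files are skipped here instead of aborting, and the POSIX `dirent`/`stat` calls make it Linux-only, as in the patch.

```cpp
// batch_infer_sketch.cpp -- rough sketch of the new batch behaviour: the input
// argument may be a directory, every entry inside it is collected (similar to
// GetAllFiles in src/utility.cpp), and each image is read and timed in turn.
#include <dirent.h>
#include <sys/stat.h>

#include <chrono>
#include <iostream>
#include <opencv2/opencv.hpp>
#include <string>
#include <vector>

// Collect inputs: a plain file is returned as-is, a directory is expanded to
// the paths of its entries (ignoring "." and "..").
std::vector<std::string> CollectInputs(const std::string &path) {
  std::vector<std::string> inputs;
  struct stat s;
  if (stat(path.c_str(), &s) != 0) {
    std::cerr << "cannot stat " << path << std::endl;
    return inputs;
  }
  if (!S_ISDIR(s.st_mode)) {
    inputs.push_back(path);
    return inputs;
  }
  DIR *dir = opendir(path.c_str());
  if (dir == nullptr)
    return inputs;
  struct dirent *entry;
  while ((entry = readdir(dir)) != nullptr) {
    std::string name = entry->d_name;
    if (name == "." || name == "..")
      continue;
    inputs.push_back(path + "/" + name);
  }
  closedir(dir);
  return inputs;
}

void ProcessImage(const cv::Mat &img) {
  // Placeholder for the detection + classification + recognition pipeline.
  (void)img;
}

int main(int argc, char **argv) {
  if (argc < 2) {
    std::cerr << "usage: " << argv[0] << " <image-or-directory>" << std::endl;
    return 1;
  }
  for (const std::string &file : CollectInputs(argv[1])) {
    cv::Mat img = cv::imread(file, cv::IMREAD_COLOR);
    if (img.empty()) { // skip unreadable files instead of aborting
      std::cerr << "skip unreadable file: " << file << std::endl;
      continue;
    }
    auto start = std::chrono::steady_clock::now();
    ProcessImage(img);
    auto end = std::chrono::steady_clock::now();
    std::cout << file << " cost "
              << std::chrono::duration<double>(end - start).count() << "s"
              << std::endl;
  }
  return 0;
}
```

The user-visible effect of this part of the patch is that the second argument to `ocr_system` may now point at a directory of images rather than a single file.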