Commit ffecf106 authored by lvxiangxiang

iOS OCR demo

Parent 199fd0e0
#!/bin/bash
set -e
OCR_MODEL_URL="https://paddleocr.bj.bcebos.com/deploy/lite/ocr_v1_for_cpu.tar.gz"
PADDLE_LITE_LIB_URL="https://paddlelite-demo.bj.bcebos.com/libs/ios/paddle_lite_libs_v2_6_0.tar.gz"
OPENCV3_FRAMEWORK_URL="https://paddlelite-demo.bj.bcebos.com/libs/ios/opencv3.framework.tar.gz"
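# Download a .tar.gz archive from the given URL into a temporary directory and extract it into the destination directory.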
download_and_extract() {
local url="$1"
local dst_dir="$2"
local tempdir=$(mktemp -d)
echo "Downloading ${url} ..."
curl -L ${url} > ${tempdir}/temp.tar.gz
echo "Download ${url} done "
if [ ! -d ${dst_dir} ];then
mkdir -p ${dst_dir}
fi
echo "Extracting ..."
tar -zxvf ${tempdir}/temp.tar.gz -C ${dst_dir}
echo "Extract done "
rm -rf ${tempdir}
}
echo -e "[Download ios ocr demo denpendancy]\n"
download_and_extract "${OCR_MODEL_URL}" "./ios-demo/ocr_demo/models"
download_and_extract "${PADDLE_LITE_LIB_URL}" "./ios-demo/ocr_demo"
download_and_extract "${OPENCV3_FRAMEWORK_URL}" "./ios-demo/ocr_demo"
echo -e "[done]\n"
This diff has been collapsed.
//
// AppDelegate.h
// seg_demo
//
// Created by Li,Xiaoyang(SYS) on 2018/11/13.
// Copyright © 2018 Li,Xiaoyang(SYS). All rights reserved.
//
#import <UIKit/UIKit.h>
@interface AppDelegate : UIResponder <UIApplicationDelegate>
@property (strong, nonatomic) UIWindow *window;
@end
//
// AppDelegate.m
// seg_demo
//
// Created by Li,Xiaoyang(SYS) on 2018/11/13.
// Copyright © 2018 Li,Xiaoyang(SYS). All rights reserved.
//
#import "AppDelegate.h"
@interface AppDelegate ()
@end
@implementation AppDelegate
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
// Override point for customization after application launch.
return YES;
}
- (void)applicationWillResignActive:(UIApplication *)application {
// Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
// Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
}
- (void)applicationDidEnterBackground:(UIApplication *)application {
// Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
// If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}
- (void)applicationWillEnterForeground:(UIApplication *)application {
// Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
}
- (void)applicationDidBecomeActive:(UIApplication *)application {
// Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}
- (void)applicationWillTerminate:(UIApplication *)application {
// Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}
@end
{
"images" : [
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "3x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "83.5x83.5",
"scale" : "2x"
},
{
"idiom" : "ios-marketing",
"size" : "1024x1024",
"scale" : "1x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}
\ No newline at end of file
{
"info" : {
"version" : 1,
"author" : "xcode"
}
}
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.16" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.12"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
</document>
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="16097" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<device id="retina4_7" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16087"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="ViewController" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<switch opaque="NO" contentMode="scaleToFill" horizontalHuggingPriority="750" verticalHuggingPriority="750" contentHorizontalAlignment="center" contentVerticalAlignment="center" translatesAutoresizingMaskIntoConstraints="NO" id="yZw-YR-x44">
<rect key="frame" x="114.5" y="624" width="51" height="31"/>
</switch>
<switch opaque="NO" contentMode="scaleToFill" horizontalHuggingPriority="750" verticalHuggingPriority="750" contentHorizontalAlignment="center" contentVerticalAlignment="center" translatesAutoresizingMaskIntoConstraints="NO" id="wN7-2M-FdP">
<rect key="frame" x="16" y="624" width="56" height="31"/>
</switch>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="前/后摄像头" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="lu6-Fq-OIg">
<rect key="frame" x="93" y="594" width="92" height="21"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" horizontalCompressionResistancePriority="751" text="开启相机" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="VfD-z1-Okj">
<rect key="frame" x="8" y="594" width="70" height="21"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
<imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" translatesAutoresizingMaskIntoConstraints="NO" id="ptx-ND-Ywq">
<rect key="frame" x="0.0" y="0.0" width="375" height="564"/>
</imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="T9y-OT-OQS">
<rect key="frame" x="33" y="574" width="309" height="10"/>
<constraints>
<constraint firstAttribute="height" constant="10" id="pMg-XK-d3N"/>
</constraints>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="HJ5-UE-PrR">
<rect key="frame" x="302" y="620.5" width="43" height="38"/>
<fontDescription key="fontDescription" type="system" pointSize="21"/>
<state key="normal" title="拍照"/>
<connections>
<action selector="cap_photo:" destination="BYZ-38-t0r" eventType="touchUpInside" id="PbV-pB-BRY"/>
</connections>
</button>
<switch opaque="NO" contentMode="scaleToFill" horizontalHuggingPriority="750" verticalHuggingPriority="750" contentHorizontalAlignment="center" contentVerticalAlignment="center" translatesAutoresizingMaskIntoConstraints="NO" id="rc6-ZX-igF">
<rect key="frame" x="208" y="624" width="51" height="31"/>
<connections>
<action selector="swith_video_photo:" destination="BYZ-38-t0r" eventType="valueChanged" id="I05-92-4FW"/>
</connections>
</switch>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="视频/拍照" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="0tm-fo-hjF">
<rect key="frame" x="195" y="595" width="75" height="21"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="VfD-z1-Okj" firstAttribute="top" secondItem="T9y-OT-OQS" secondAttribute="bottom" constant="10" id="BZ1-F0-re1"/>
<constraint firstItem="wN7-2M-FdP" firstAttribute="top" secondItem="rc6-ZX-igF" secondAttribute="top" id="Gzf-hC-X6O"/>
<constraint firstItem="wN7-2M-FdP" firstAttribute="leading" secondItem="8bC-Xf-vdC" secondAttribute="leadingMargin" id="JB8-MT-bdB"/>
<constraint firstItem="lu6-Fq-OIg" firstAttribute="leading" secondItem="VfD-z1-Okj" secondAttribute="trailing" constant="15" id="JbA-wd-hE8"/>
<constraint firstItem="wN7-2M-FdP" firstAttribute="centerX" secondItem="VfD-z1-Okj" secondAttribute="centerX" id="LW4-R4-nh2"/>
<constraint firstItem="0tm-fo-hjF" firstAttribute="leading" secondItem="lu6-Fq-OIg" secondAttribute="trailing" constant="10" id="NDI-W8-717"/>
<constraint firstItem="6Tk-OE-BBY" firstAttribute="trailing" secondItem="ptx-ND-Ywq" secondAttribute="trailing" id="V5z-FH-SFs"/>
<constraint firstItem="ptx-ND-Ywq" firstAttribute="leading" secondItem="6Tk-OE-BBY" secondAttribute="leading" id="dET-yr-Mon"/>
<constraint firstItem="ptx-ND-Ywq" firstAttribute="top" secondItem="6Tk-OE-BBY" secondAttribute="top" id="fn8-6Z-tv4"/>
<constraint firstItem="T9y-OT-OQS" firstAttribute="leading" secondItem="6Tk-OE-BBY" secondAttribute="leading" constant="33" id="iMB-Zg-Hsa"/>
<constraint firstItem="VfD-z1-Okj" firstAttribute="leading" secondItem="6Tk-OE-BBY" secondAttribute="leading" constant="8" id="izE-la-Fhu"/>
<constraint firstItem="wN7-2M-FdP" firstAttribute="top" secondItem="VfD-z1-Okj" secondAttribute="bottom" constant="9" id="jcU-7c-FNS"/>
<constraint firstItem="HJ5-UE-PrR" firstAttribute="centerY" secondItem="rc6-ZX-igF" secondAttribute="centerY" id="lpA-wq-cXI"/>
<constraint firstItem="6Tk-OE-BBY" firstAttribute="trailing" secondItem="T9y-OT-OQS" secondAttribute="trailing" constant="33" id="mD1-P0-mgB"/>
<constraint firstItem="rc6-ZX-igF" firstAttribute="centerX" secondItem="0tm-fo-hjF" secondAttribute="centerX" id="p5w-6o-OqW"/>
<constraint firstItem="rc6-ZX-igF" firstAttribute="top" secondItem="0tm-fo-hjF" secondAttribute="bottom" constant="8" id="rzr-oM-f7f"/>
<constraint firstItem="6Tk-OE-BBY" firstAttribute="trailing" secondItem="HJ5-UE-PrR" secondAttribute="trailing" constant="30" id="tYA-x1-MRj"/>
<constraint firstItem="T9y-OT-OQS" firstAttribute="top" secondItem="ptx-ND-Ywq" secondAttribute="bottom" constant="10" id="vNp-h8-QF9"/>
<constraint firstItem="VfD-z1-Okj" firstAttribute="baseline" secondItem="lu6-Fq-OIg" secondAttribute="baseline" id="wcZ-9g-OTX"/>
<constraint firstItem="6Tk-OE-BBY" firstAttribute="bottom" secondItem="wN7-2M-FdP" secondAttribute="bottom" constant="12" id="xm2-Eb-dxp"/>
<constraint firstItem="wN7-2M-FdP" firstAttribute="top" secondItem="yZw-YR-x44" secondAttribute="top" id="yHi-Fb-V4o"/>
<constraint firstItem="yZw-YR-x44" firstAttribute="centerX" secondItem="lu6-Fq-OIg" secondAttribute="centerX" id="yXW-Ap-sa7"/>
<constraint firstItem="VfD-z1-Okj" firstAttribute="centerY" secondItem="lu6-Fq-OIg" secondAttribute="centerY" id="zQ1-gg-Rnh"/>
</constraints>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
<connections>
<outlet property="flag_back_cam" destination="yZw-YR-x44" id="z5O-BW-sm7"/>
<outlet property="flag_process" destination="wN7-2M-FdP" id="i8h-CM-ida"/>
<outlet property="flag_video" destination="rc6-ZX-igF" id="Uch-KB-gwF"/>
<outlet property="imageView" destination="ptx-ND-Ywq" id="XjA-C2-hvm"/>
<outlet property="result" destination="T9y-OT-OQS" id="6kB-Ha-dfo"/>
</connections>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53.600000000000001" y="62.518740629685162"/>
</scene>
</scenes>
</document>
//
// Created by chenxiaoyu on 2018/5/5.
// Copyright (c) 2018 baidu. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import "OcrData.h"
@interface BoxLayer : CAShapeLayer
/**
* Render the OCR result (detected polygon and optional text label) onto this layer.
*/
-(void) renderOcrPolygon: (OcrData *)data withHeight:(CGFloat)originHeight withWidth:(CGFloat)originWidth withLabel:(bool) withLabel;
@end
//
// Created by chenxiaoyu on 2018/5/5.
// Copyright (c) 2018 baidu. All rights reserved.
//
#include "BoxLayer.h"
#import "Helpers.h"
@implementation BoxLayer {
}
#define MAIN_COLOR UIColorFromRGB(0x3B85F5)
- (void)renderOcrPolygon:(OcrData *)d withHeight:(CGFloat)originHeight withWidth:(CGFloat)originWidth withLabel:(bool)withLabel {
if ([d.polygonPoints count] != 4) {
NSLog(@"poloygonPoints size is not 4");
return;
}
CGPoint startPoint = [d.polygonPoints[0] CGPointValue];
NSString *text = d.label;
CGFloat x = startPoint.x * originWidth;
CGFloat y = startPoint.y * originHeight;
CGFloat width = originWidth - x;
CGFloat height = originHeight - y;
UIFont *font = [UIFont systemFontOfSize:16];
NSDictionary *attrs = @{
// NSStrokeColorAttributeName: [UIColor blackColor],
NSForegroundColorAttributeName: [UIColor whiteColor],
// NSStrokeWidthAttributeName : @((float) -6.0),
NSFontAttributeName: font
};
if (withLabel) {
NSAttributedString *displayStr = [[NSAttributedString alloc] initWithString:text attributes:attrs];
CATextLayer *textLayer = [[CATextLayer alloc] init];
textLayer.wrapped = YES;
textLayer.string = displayStr;
textLayer.frame = CGRectMake(x + 2, y + 2, width, height);
textLayer.contentsScale = [[UIScreen mainScreen] scale];
// Adding a shadow makes it look a bit cluttered
// textLayer.shadowColor = [MAIN_COLOR CGColor];
// textLayer.shadowOffset = CGSizeMake(2.0, 2.0);
// textLayer.shadowOpacity = 0.8;
// textLayer.shadowRadius = 0.0;
[self addSublayer:textLayer];
}
UIBezierPath *path = [UIBezierPath new];
[path moveToPoint:CGPointMake(startPoint.x * originWidth, startPoint.y * originHeight)];
for (NSValue *val in d.polygonPoints) {
CGPoint p = [val CGPointValue];
[path addLineToPoint:CGPointMake(p.x * originWidth, p.y * originHeight)];
}
[path closePath];
self.path = path.CGPath;
self.strokeColor = MAIN_COLOR.CGColor;
self.lineWidth = 2.0;
self.fillColor = [MAIN_COLOR colorWithAlphaComponent:0.2].CGColor;
self.lineJoin = kCALineJoinBevel;
}
- (void)renderSingleBox:(OcrData *)data withHeight:(CGFloat)originHeight withWidth:(CGFloat)originWidth {
[self renderOcrPolygon:data withHeight:originHeight withWidth:originWidth withLabel:YES];
}
@end
//
// Helpers.h
// EasyDLDemo
//
// Created by chenxiaoyu on 2018/5/14.
// Copyright © 2018 baidu. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIImage.h>
#define UIColorFromRGB(rgbValue) \
[UIColor colorWithRed:((float)((rgbValue & 0xFF0000) >> 16))/255.0 \
green:((float)((rgbValue & 0x00FF00) >> 8))/255.0 \
blue:((float)((rgbValue & 0x0000FF) >> 0))/255.0 \
alpha:1.0]
#define SCREEN_HEIGHT [UIScreen mainScreen].bounds.size.height
#define SCREEN_WIDTH [UIScreen mainScreen].bounds.size.width
#define HIGHLIGHT_COLOR UIColorFromRGB(0xF5A623)
//#define BTN_HIGHTLIGH_TEXT_COLOR UIColorFromRGB(0xF5A623)
@interface Helpers : NSObject {
}
@end
//
// Helpers.m
// EasyDLDemo
//
// Created by chenxiaoyu on 2018/5/14.
// Copyright © 2018 baidu. All rights reserved.
//
#import "Helpers.h"
@implementation Helpers
@end
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleDisplayName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>0.1</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>NSCameraUsageDescription</key>
<string>The camera is used to capture images for text recognition.</string>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
</dict>
</plist>
//
// Created by Lv,Xiangxiang on 2020/7/11.
// Copyright (c) 2020 Li,Xiaoyang(SYS). All rights reserved.
//
#import <Foundation/Foundation.h>
@interface OcrData : NSObject
@property(nonatomic, copy) NSString *label;
@property(nonatomic) int category;
@property(nonatomic) float accuracy;
@property(nonatomic) NSArray *polygonPoints;
@end
\ No newline at end of file
//
// Created by Lv,Xiangxiang on 2020/7/11.
// Copyright (c) 2020 Li,Xiaoyang(SYS). All rights reserved.
//
#import "OcrData.h"
@implementation OcrData {
}
@end
\ No newline at end of file
//
// ViewController.h
// seg_demo
//
// Created by Li,Xiaoyang(SYS) on 2018/11/13.
// Copyright © 2018 Li,Xiaoyang(SYS). All rights reserved.
//
#import <UIKit/UIKit.h>
@interface ViewController : UIViewController
@end
//
// Created by lvxiangxiang on 2020/7/10.
// Copyright (c) 2020 baidu. All rights reserved.
//
#import <opencv2/opencv.hpp>
#import <opencv2/imgcodecs/ios.h>
#import <opencv2/videoio/cap_ios.h>
//#import <opencv2/highgui/ios.h>
#import "ViewController.h"
#import "BoxLayer.h"
#include "include/paddle_api.h"
#include "timer.h"
#import "pdocr/ocr_db_post_process.h"
#import "pdocr/ocr_crnn_process.h"
using namespace paddle::lite_api;
using namespace cv;
struct Object {
int batch_id;
cv::Rect rec;
int class_id;
float prob;
};
std::mutex mtx;
std::shared_ptr<PaddlePredictor> net_ocr1;
std::shared_ptr<PaddlePredictor> net_ocr2;
Timer tic;
long long count = 0;
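// Mean of all elements in a float tensor.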
double tensor_mean(const Tensor &tin) {
auto shape = tin.shape();
int64_t size = 1;
for (int i = 0; i < shape.size(); i++) {
size *= shape[i];
}
double mean = 0.;
auto ptr = tin.data<float>();
for (int i = 0; i < size; i++) {
mean += ptr[i];
}
return mean / size;
}
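// Resize the image so that its longer side is at most max_size_len and both sides are multiples of 32 (as the DB detection model expects); the effective h/w resize ratios are returned through ratio_h/ratio_w.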
cv::Mat resize_img_type0(const cv::Mat &img, int max_size_len, float *ratio_h, float *ratio_w) {
int w = img.cols;
int h = img.rows;
float ratio = 1.f;
int max_wh = w >= h ? w : h;
if (max_wh > max_size_len) {
if (h > w) {
ratio = float(max_size_len) / float(h);
} else {
ratio = float(max_size_len) / float(w);
}
}
int resize_h = int(float(h) * ratio);
int resize_w = int(float(w) * ratio);
if (resize_h % 32 == 0)
resize_h = resize_h;
else if (resize_h / 32 < 1)
resize_h = 32;
else
resize_h = (resize_h / 32 - 1) * 32;
if (resize_w % 32 == 0)
resize_w = resize_w;
else if (resize_w / 32 < 1)
resize_w = 32;
else
resize_w = (resize_w / 32 - 1) * 32;
cv::Mat resize_img;
cv::resize(img, resize_img, cv::Size(resize_w, resize_h));
*ratio_h = float(resize_h) / float(h);
*ratio_w = float(resize_w) / float(w);
return resize_img;
}
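// Convert interleaved RGB float pixels to planar CHW layout while subtracting the per-channel mean and dividing by the per-channel scale; processes 4 pixels at a time with NEON intrinsics, with a scalar loop for the tail.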
void neon_mean_scale(const float *din, float *dout, int size, std::vector<float> mean, std::vector<float> scale) {
float32x4_t vmean0 = vdupq_n_f32(mean[0]);
float32x4_t vmean1 = vdupq_n_f32(mean[1]);
float32x4_t vmean2 = vdupq_n_f32(mean[2]);
float32x4_t vscale0 = vdupq_n_f32(1.f / scale[0]);
float32x4_t vscale1 = vdupq_n_f32(1.f / scale[1]);
float32x4_t vscale2 = vdupq_n_f32(1.f / scale[2]);
float *dout_c0 = dout;
float *dout_c1 = dout + size;
float *dout_c2 = dout + size * 2;
int i = 0;
for (; i < size - 3; i += 4) {
float32x4x3_t vin3 = vld3q_f32(din);
float32x4_t vsub0 = vsubq_f32(vin3.val[0], vmean0);
float32x4_t vsub1 = vsubq_f32(vin3.val[1], vmean1);
float32x4_t vsub2 = vsubq_f32(vin3.val[2], vmean2);
float32x4_t vs0 = vmulq_f32(vsub0, vscale0);
float32x4_t vs1 = vmulq_f32(vsub1, vscale1);
float32x4_t vs2 = vmulq_f32(vsub2, vscale2);
vst1q_f32(dout_c0, vs0);
vst1q_f32(dout_c1, vs1);
vst1q_f32(dout_c2, vs2);
din += 12;
dout_c0 += 4;
dout_c1 += 4;
dout_c2 += 4;
}
for (; i < size; i++) {
*(dout_c0++) = (*(din++) - mean[0]) / scale[0];
*(dout_c1++) = (*(din++) - mean[1]) / scale[1];
*(dout_c2++) = (*(din++) - mean[2]) / scale[2];
}
}
// Fill the input tensor from a cv::Mat, applying per-channel mean/scale normalization (NEON accelerated)
void fill_tensor_with_cvmat(const Mat &img_in, Tensor &tout, int width, int height,
std::vector<float> mean, std::vector<float> scale, bool is_scale) {
cv::Mat img_rgb = img_in;
if (img_in.channels() == 4) {
cv::cvtColor(img_in, img_rgb, CV_RGBA2RGB);
}
cv::Mat im;
cv::resize(img_rgb, im, cv::Size(width, height), 0.f, 0.f);
cv::Mat imgf;
float scale_factor = is_scale ? 1 / 255.f : 1.f;
im.convertTo(imgf, CV_32FC3, scale_factor);
const float *dimg = reinterpret_cast<const float *>(imgf.data);
float *dout = tout.mutable_data<float>();
neon_mean_scale(dimg, dout, width * height, mean, scale);
}
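// Parse detection output rows of 6 floats ([class_id, score, xmin, ymin, xmax, ymax], normalized coordinates): keep boxes whose score exceeds thresh, clip them to the image, draw them, and return them. Not referenced by the OCR code in this file.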
std::vector<Object> detect_object(const float *data,
int count,
const std::vector<std::vector<uint64_t>> &lod,
const float thresh,
Mat &image) {
std::vector<Object> rect_out;
const float *dout = data;
for (int iw = 0; iw < count; iw++) {
int oriw = image.cols;
int orih = image.rows;
if (dout[1] > thresh && static_cast<int>(dout[0]) > 0) {
Object obj;
int x = static_cast<int>(dout[2] * oriw);
int y = static_cast<int>(dout[3] * orih);
int w = static_cast<int>(dout[4] * oriw) - x;
int h = static_cast<int>(dout[5] * orih) - y;
cv::Rect rec_clip = cv::Rect(x, y, w, h) & cv::Rect(0, 0, image.cols, image.rows);
obj.batch_id = 0;
obj.class_id = static_cast<int>(dout[0]);
obj.prob = dout[1];
obj.rec = rec_clip;
if (w > 0 && h > 0 && obj.prob <= 1) {
rect_out.push_back(obj);
cv::rectangle(image, rec_clip, cv::Scalar(255, 0, 0));
}
}
dout += 6;
}
return rect_out;
}
@interface ViewController () <CvVideoCameraDelegate>
@property(weak, nonatomic) IBOutlet UIImageView *imageView;
@property(weak, nonatomic) IBOutlet UISwitch *flag_process;
@property(weak, nonatomic) IBOutlet UISwitch *flag_video;
@property(weak, nonatomic) IBOutlet UIImageView *preView;
@property(weak, nonatomic) IBOutlet UISwitch *flag_back_cam;
@property(weak, nonatomic) IBOutlet UILabel *result;
@property(nonatomic, strong) CvVideoCamera *videoCamera;
@property(nonatomic, strong) UIImage *image;
@property(nonatomic) bool flag_init;
@property(nonatomic) bool flag_cap_photo;
@property(nonatomic) std::vector<float> scale;
@property(nonatomic) std::vector<float> mean;
@property(nonatomic) NSArray *labels;
@property(nonatomic) cv::Mat cvimg;
@property(nonatomic, strong) UIImage *ui_img_test;
@property(strong, nonatomic) CALayer *boxLayer;
@end
@implementation ViewController
@synthesize imageView;
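// Recognition stage: run the CRNN model (net_ocr2) on a cropped text region, decode the CTC output indices into a string with the label dictionary, and average the per-step confidences of non-blank predictions.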
- (OcrData *)paddleOcrRec:(cv::Mat)image {
OcrData *result = [OcrData new];
std::vector<float> mean = {0.5f, 0.5f, 0.5f};
std::vector<float> scale = {1 / 0.5f, 1 / 0.5f, 1 / 0.5f};
cv::Mat crop_img;
image.copyTo(crop_img);
cv::Mat resize_img;
float wh_ratio = float(crop_img.cols) / float(crop_img.rows);
resize_img = crnn_resize_img(crop_img, wh_ratio);
resize_img.convertTo(resize_img, CV_32FC3, 1 / 255.f);
const float *dimg = reinterpret_cast<const float *>(resize_img.data);
std::unique_ptr<Tensor> input_tensor0(std::move(net_ocr2->GetInput(0)));
input_tensor0->Resize({1, 3, resize_img.rows, resize_img.cols});
auto *data0 = input_tensor0->mutable_data<float>();
neon_mean_scale(dimg, data0, resize_img.rows * resize_img.cols, mean, scale);
// Run the CRNN predictor
net_ocr2->Run();
// Get output and run postprocess
std::unique_ptr<const Tensor> output_tensor0(std::move(net_ocr2->GetOutput(0)));
auto *rec_idx = output_tensor0->data<int>();
auto rec_idx_lod = output_tensor0->lod();
auto shape_out = output_tensor0->shape();
NSMutableString *text = [[NSMutableString alloc] init];
for (int n = int(rec_idx_lod[0][0]); n < int(rec_idx_lod[0][1] * 2); n += 2) {
if (rec_idx[n] >= self.labels.count) {
std::cout << "Index " << rec_idx[n] << " out of text dict range!" << std::endl;
continue;
}
[text appendString:self.labels[rec_idx[n]]];
}
result.label = text;
// get score
std::unique_ptr<const Tensor> output_tensor1(std::move(net_ocr2->GetOutput(1)));
auto *predict_batch = output_tensor1->data<float>();
auto predict_shape = output_tensor1->shape();
auto predict_lod = output_tensor1->lod();
int argmax_idx;
int blank = predict_shape[1];
float score = 0.f;
int count = 0;
float max_value = 0.0f;
for (int n = predict_lod[0][0]; n < predict_lod[0][1] - 1; n++) {
argmax_idx = int(argmax(&predict_batch[n * predict_shape[1]], &predict_batch[(n + 1) * predict_shape[1]]));
max_value = float(*std::max_element(&predict_batch[n * predict_shape[1]], &predict_batch[(n + 1) * predict_shape[1]]));
if (blank - 1 - argmax_idx > 1e-5) {
score += max_value;
count += 1;
}
}
score /= count;
result.accuracy = score;
return result;
}
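// Full OCR pipeline: run the DB detection model (net_ocr1) on the resized image, binarize the probability map, extract and filter candidate boxes, then recognize each rotated crop with paddleOcrRec. Polygon points are returned normalized to the original image size.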
- (NSArray *) ocr_infer:(cv::Mat) originImage{
int max_side_len = 960;
float ratio_h{};
float ratio_w{};
cv::Mat image;
cv::cvtColor(originImage, image, cv::COLOR_RGB2BGR);
cv::Mat img;
image.copyTo(img);
img = resize_img_type0(img, max_side_len, &ratio_h, &ratio_w);
cv::Mat img_fp;
img.convertTo(img_fp, CV_32FC3, 1.0 / 255.f);
std::unique_ptr<Tensor> input_tensor(net_ocr1->GetInput(0));
input_tensor->Resize({1, 3, img_fp.rows, img_fp.cols});
auto *data0 = input_tensor->mutable_data<float>();
const float *dimg = reinterpret_cast<const float *>(img_fp.data);
neon_mean_scale(dimg, data0, img_fp.rows * img_fp.cols, self.mean, self.scale);
tic.clear();
tic.start();
net_ocr1->Run();
std::unique_ptr<const Tensor> output_tensor(std::move(net_ocr1->GetOutput(0)));
auto *outptr = output_tensor->data<float>();
auto shape_out = output_tensor->shape();
int64_t out_numl = 1;
double sum = 0;
for (auto i : shape_out) {
out_numl *= i;
}
int s2 = int(shape_out[2]);
int s3 = int(shape_out[3]);
cv::Mat pred_map = cv::Mat::zeros(s2, s3, CV_32F);
memcpy(pred_map.data, outptr, s2 * s3 * sizeof(float));
cv::Mat cbuf_map;
pred_map.convertTo(cbuf_map, CV_8UC1, 255.0f);
const double threshold = 0.1 * 255;
const double maxvalue = 255;
cv::Mat bit_map;
cv::threshold(cbuf_map, bit_map, threshold, maxvalue, cv::THRESH_BINARY);
auto boxes = boxes_from_bitmap(pred_map, bit_map);
std::vector<std::vector<std::vector<int>>> filter_boxes = filter_tag_det_res(boxes, ratio_h, ratio_w, image);
cv::Point rook_points[filter_boxes.size()][4];
for (int n = 0; n < filter_boxes.size(); n++) {
for (int m = 0; m < filter_boxes[0].size(); m++) {
rook_points[n][m] = cv::Point(int(filter_boxes[n][m][0]), int(filter_boxes[n][m][1]));
}
}
NSMutableArray *result = [[NSMutableArray alloc] init];
for (int i = 0; i < filter_boxes.size(); i++) {
cv::Mat crop_img;
crop_img = get_rotate_crop_image(image, filter_boxes[i]);
OcrData *r = [self paddleOcrRec:crop_img ];
NSMutableArray *points = [NSMutableArray new];
for (int jj = 0; jj < 4; ++jj) {
NSValue *v = [NSValue valueWithCGPoint:CGPointMake(
rook_points[i][jj].x / CGFloat(originImage.cols),
rook_points[i][jj].y / CGFloat(originImage.rows))];
[points addObject:v];
}
r.polygonPoints = points;
[result addObject:r];
}
NSArray* rec_out =[[result reverseObjectEnumerator] allObjects];
tic.end();
std::cout<<"infer time: "<<tic.get_sum_ms()<<"ms"<<std::endl;
return rec_out;
}
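// Load the character dictionary: each non-empty line is either "index:label" or a bare label (indexed in reading order).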
- (NSArray *)readLabelsFromFile:(NSString *)labelFilePath {
NSString *content = [NSString stringWithContentsOfFile:labelFilePath encoding:NSUTF8StringEncoding error:nil];
NSArray *lines = [content componentsSeparatedByCharactersInSet:[NSCharacterSet newlineCharacterSet]];
NSMutableArray *ret = [[NSMutableArray alloc] init];
for (int i = 0; i < lines.count; ++i) {
[ret addObject:@""];
}
NSUInteger cnt = 0;
for (id line in lines) {
NSString *l = [(NSString *) line stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceAndNewlineCharacterSet]];
if ([l length] == 0)
continue;
NSArray *segs = [l componentsSeparatedByString:@":"];
NSUInteger key;
NSString *value;
if ([segs count] != 2) {
key = cnt;
value = [segs[0] stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceAndNewlineCharacterSet]];
} else {
key = [[segs[0] stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceAndNewlineCharacterSet]] integerValue];
value = [segs[1] stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceAndNewlineCharacterSet]];
}
ret[key] = value;
cnt += 1;
}
return [NSArray arrayWithArray:ret];
}
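// Setup on appearance: create the overlay layer, load the label dictionary and the two Paddle Lite models (DB detection and CRNN recognition), then run OCR once on the bundled test image and draw the results.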
- (void)viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
self.boxLayer = [[CALayer alloc] init];
CGRect r = AVMakeRectWithAspectRatioInsideRect(self.imageView.frame.size, self.imageView.bounds);
std::cout<<self.imageView.frame.size.width<<","<<self.imageView.frame.size.height<<std::endl;
self.boxLayer.frame = r;
[self.imageView.layer addSublayer:self.boxLayer];
NSString *label_file_path = [[NSBundle mainBundle] pathForResource:@"label_list" ofType:@"txt"];
self.labels = [self readLabelsFromFile:label_file_path];
self.mean = {0.485f, 0.456f, 0.406f};
self.scale = {1 / 0.229f, 1 / 0.224f, 1 / 0.225f};
NSString *model1_path = [[NSBundle mainBundle] pathForResource:@"ch_det_mv3_db_opt" ofType:@"nb"];
NSString *model2_path = [[NSBundle mainBundle] pathForResource:@"ch_rec_mv3_crnn_opt" ofType:@"nb"];
std::string model1_path_str = std::string([model1_path UTF8String]);
std::string model2_path_str = std::string([model2_path UTF8String]);
MobileConfig config;
config.set_model_from_file(model1_path_str);
net_ocr1 = CreatePaddlePredictor<MobileConfig>(config);
MobileConfig config2;
config2.set_model_from_file(model2_path_str);
net_ocr2 = CreatePaddlePredictor<MobileConfig>(config2);
cv::Mat originImage;
UIImageToMat(self.image, originImage);
NSArray *rec_out = [self ocr_infer:originImage];
[_boxLayer.sublayers makeObjectsPerformSelector:@selector(removeFromSuperlayer)];
std::cout<<self.imageView.image.size.width<<","<<self.imageView.image.size.height<<std::endl;
CGFloat h = _boxLayer.frame.size.height;
CGFloat w = _boxLayer.frame.size.width;
std::ostringstream result2;
NSInteger cnt = 0;
for (id obj in rec_out) {
OcrData *data = obj;
BoxLayer *singleBox = [[BoxLayer alloc] init];
[singleBox renderOcrPolygon:data withHeight:h withWidth:w withLabel:YES];
[_boxLayer addSublayer:singleBox];
result2<<[data.label UTF8String] <<","<<data.accuracy<<"\n";
cnt += 1;
}
self.flag_init = true;
}
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
_flag_process.on = NO;
_flag_back_cam.on = NO;
_flag_video.on = NO;
_flag_cap_photo = false;
_image = [UIImage imageNamed:@"ocr.png"];
if (_image != nil) {
printf("load image successed\n");
imageView.image = _image;
} else {
printf("load image failed\n");
}
[_flag_process addTarget:self action:@selector(PSwitchValueChanged:) forControlEvents:UIControlEventValueChanged];
[_flag_back_cam addTarget:self action:@selector(CSwitchValueChanged:) forControlEvents:UIControlEventValueChanged];
self.videoCamera = [[CvVideoCamera alloc] initWithParentView:self.preView];
self.videoCamera.delegate = self;
self.videoCamera.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
self.videoCamera.defaultAVCaptureSessionPreset = AVCaptureSessionPreset1920x1080;
self.videoCamera.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortrait;
self.videoCamera.rotateVideo = 90;
self.videoCamera.defaultFPS = 30;
[self.view insertSubview:self.imageView atIndex:0];
}
- (IBAction)swith_video_photo:(UISwitch *)sender {
NSLog(@"%@", sender.isOn ? @"video ON" : @"video OFF");
if (sender.isOn) {
self.flag_video.on = YES;
} else {
self.flag_video.on = NO;
}
}
- (IBAction)cap_photo:(id)sender {
if (!self.flag_process.isOn) {
self.result.text = @"please turn on the camera firstly";
} else {
self.flag_cap_photo = true;
}
}
- (void)PSwitchValueChanged:(UISwitch *)sender {
NSLog(@"%@", sender.isOn ? @"process ON" : @"process OFF");
if (sender.isOn) {
[self.videoCamera start];
} else {
[self.videoCamera stop];
}
}
- (void)CSwitchValueChanged:(UISwitch *)sender {
NSLog(@"%@", sender.isOn ? @"back ON" : @"back OFF");
if (sender.isOn) {
if (self.flag_process.isOn) {
[self.videoCamera stop];
}
self.videoCamera.defaultAVCaptureDevicePosition = AVCaptureDevicePositionBack;
if (self.flag_process.isOn) {
[self.videoCamera start];
}
} else {
if (self.flag_process.isOn) {
[self.videoCamera stop];
}
self.videoCamera.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
if (self.flag_process.isOn) {
[self.videoCamera start];
}
}
}
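// CvVideoCameraDelegate callback: when the camera switch is on and initialization has finished, run OCR on the current frame, either continuously in video mode or once after the capture button sets flag_cap_photo.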
- (void)processImage:(cv::Mat &)image {
dispatch_async(dispatch_get_main_queue(), ^{
if (self.flag_process.isOn) {
if (self.flag_init) {
if (self.flag_video.isOn || self.flag_cap_photo) {
self.flag_cap_photo = false;
if (image.channels() == 4) {
cvtColor(image, self->_cvimg, CV_RGBA2RGB);
}
auto rec_out =[self ocr_infer:self->_cvimg];
std::ostringstream result;
NSInteger cnt = 0;
[_boxLayer.sublayers makeObjectsPerformSelector:@selector(removeFromSuperlayer)];
CGFloat h = _boxLayer.frame.size.height;
CGFloat w = _boxLayer.frame.size.width;
for (id obj in rec_out) {
OcrData *data = obj;
BoxLayer *singleBox = [[BoxLayer alloc] init];
[singleBox renderOcrPolygon:data withHeight:h withWidth:w withLabel:YES];
[_boxLayer addSublayer:singleBox];
result<<[data.label UTF8String] <<","<<data.accuracy<<"\n";
cnt += 1;
}
cvtColor(self->_cvimg, self->_cvimg, CV_RGB2BGR);
self.imageView.image = MatToUIImage(self->_cvimg);
}
}
}
});
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end
This diff has been collapsed.
//
// main.m
// seg_demo
//
// Created by Li,Xiaoyang(SYS) on 2018/11/13.
// Copyright © 2018 Li,Xiaoyang(SYS). All rights reserved.
//
#import <UIKit/UIKit.h>
#import "AppDelegate.h"
int main(int argc, char * argv[]) {
@autoreleasepool {
return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
}
}
This diff has been collapsed.
/*******************************************************************************
* *
* Author : Angus Johnson *
* Version : 6.4.2 *
* Date : 27 February 2017 *
* Website : http://www.angusj.com *
* Copyright : Angus Johnson 2010-2017 *
* *
* License: *
* Use, modification & distribution is subject to Boost Software License Ver 1. *
* http://www.boost.org/LICENSE_1_0.txt *
* *
* Attributions: *
* The code in this library is an extension of Bala Vatti's clipping algorithm: *
* "A generic solution to polygon clipping" *
* Communications of the ACM, Vol 35, Issue 7 (July 1992) pp 56-63. *
* http://portal.acm.org/citation.cfm?id=129906 *
* *
* Computer graphics and geometric modeling: implementation and algorithms *
* By Max K. Agoston *
* Springer; 1 edition (January 4, 2005) *
* http://books.google.com/books?q=vatti+clipping+agoston *
* *
* See also: *
* "Polygon Offsetting by Computing Winding Numbers" *
* Paper no. DETC2005-85513 pp. 565-575 *
* ASME 2005 International Design Engineering Technical Conferences *
* and Computers and Information in Engineering Conference (IDETC/CIE2005) *
* September 24-28, 2005 , Long Beach, California, USA *
* http://www.me.berkeley.edu/~mcmains/pubs/DAC05OffsetPolygon.pdf *
* *
*******************************************************************************/
#ifndef clipper_hpp
#define clipper_hpp
#define CLIPPER_VERSION "6.4.2"
//use_int32: When enabled, 32bit ints are used instead of 64bit ints. This
//improves performance but coordinate values are limited to the range +/- 46340
//#define use_int32
//use_xyz: adds a Z member to IntPoint. Adds a minor cost to performance.
//#define use_xyz
//use_lines: Enables line clipping. Adds a very minor cost to performance.
#define use_lines
//use_deprecated: Enables temporary support for the obsolete functions
//#define use_deprecated
#include <vector>
#include <list>
#include <set>
#include <stdexcept>
#include <cstring>
#include <cstdlib>
#include <ostream>
#include <functional>
#include <queue>
namespace ClipperLib {
enum ClipType {
ctIntersection, ctUnion, ctDifference, ctXor
};
enum PolyType {
ptSubject, ptClip
};
//By far the most widely used winding rules for polygon filling are
//EvenOdd & NonZero (GDI, GDI+, XLib, OpenGL, Cairo, AGG, Quartz, SVG, Gr32)
//Others rules include Positive, Negative and ABS_GTR_EQ_TWO (only in OpenGL)
//see http://glprogramming.com/red/chapter11.html
enum PolyFillType {
pftEvenOdd, pftNonZero, pftPositive, pftNegative
};
#ifdef use_int32
typedef int cInt;
static cInt const loRange = 0x7FFF;
static cInt const hiRange = 0x7FFF;
#else
typedef signed long long cInt;
static cInt const loRange = 0x3FFFFFFF;
static cInt const hiRange = 0x3FFFFFFFFFFFFFFFLL;
typedef signed long long long64; //used by Int128 class
typedef unsigned long long ulong64;
#endif
struct IntPoint {
cInt X;
cInt Y;
#ifdef use_xyz
cInt Z;
IntPoint(cInt x = 0, cInt y = 0, cInt z = 0): X(x), Y(y), Z(z) {};
#else
IntPoint(cInt x = 0, cInt y = 0) : X(x), Y(y) {};
#endif
friend inline bool operator==(const IntPoint &a, const IntPoint &b) {
return a.X == b.X && a.Y == b.Y;
}
friend inline bool operator!=(const IntPoint &a, const IntPoint &b) {
return a.X != b.X || a.Y != b.Y;
}
};
//------------------------------------------------------------------------------
typedef std::vector <IntPoint> Path;
typedef std::vector <Path> Paths;
inline Path &operator<<(Path &poly, const IntPoint &p) {
poly.push_back(p);
return poly;
}
inline Paths &operator<<(Paths &polys, const Path &p) {
polys.push_back(p);
return polys;
}
std::ostream &operator<<(std::ostream &s, const IntPoint &p);
std::ostream &operator<<(std::ostream &s, const Path &p);
std::ostream &operator<<(std::ostream &s, const Paths &p);
struct DoublePoint {
double X;
double Y;
DoublePoint(double x = 0, double y = 0) : X(x), Y(y) {}
DoublePoint(IntPoint ip) : X((double) ip.X), Y((double) ip.Y) {}
};
//------------------------------------------------------------------------------
#ifdef use_xyz
typedef void (*ZFillCallback)(IntPoint& e1bot, IntPoint& e1top, IntPoint& e2bot, IntPoint& e2top, IntPoint& pt);
#endif
enum InitOptions {
ioReverseSolution = 1, ioStrictlySimple = 2, ioPreserveCollinear = 4
};
enum JoinType {
jtSquare, jtRound, jtMiter
};
enum EndType {
etClosedPolygon, etClosedLine, etOpenButt, etOpenSquare, etOpenRound
};
class PolyNode;
typedef std::vector<PolyNode *> PolyNodes;
class PolyNode {
public:
PolyNode();
virtual ~PolyNode() {};
Path Contour;
PolyNodes Childs;
PolyNode *Parent;
PolyNode *GetNext() const;
bool IsHole() const;
bool IsOpen() const;
int ChildCount() const;
private:
//PolyNode& operator =(PolyNode& other);
unsigned Index; //node index in Parent.Childs
bool m_IsOpen;
JoinType m_jointype;
EndType m_endtype;
PolyNode *GetNextSiblingUp() const;
void AddChild(PolyNode &child);
friend class Clipper; //to access Index
friend class ClipperOffset;
};
class PolyTree : public PolyNode {
public:
~PolyTree() { Clear(); };
PolyNode *GetFirst() const;
void Clear();
int Total() const;
private:
//PolyTree& operator =(PolyTree& other);
PolyNodes AllNodes;
friend class Clipper; //to access AllNodes
};
bool Orientation(const Path &poly);
double Area(const Path &poly);
int PointInPolygon(const IntPoint &pt, const Path &path);
void SimplifyPolygon(const Path &in_poly, Paths &out_polys, PolyFillType fillType = pftEvenOdd);
void SimplifyPolygons(const Paths &in_polys, Paths &out_polys, PolyFillType fillType = pftEvenOdd);
void SimplifyPolygons(Paths &polys, PolyFillType fillType = pftEvenOdd);
void CleanPolygon(const Path &in_poly, Path &out_poly, double distance = 1.415);
void CleanPolygon(Path &poly, double distance = 1.415);
void CleanPolygons(const Paths &in_polys, Paths &out_polys, double distance = 1.415);
void CleanPolygons(Paths &polys, double distance = 1.415);
void MinkowskiSum(const Path &pattern, const Path &path, Paths &solution, bool pathIsClosed);
void MinkowskiSum(const Path &pattern, const Paths &paths, Paths &solution, bool pathIsClosed);
void MinkowskiDiff(const Path &poly1, const Path &poly2, Paths &solution);
void PolyTreeToPaths(const PolyTree &polytree, Paths &paths);
void ClosedPathsFromPolyTree(const PolyTree &polytree, Paths &paths);
void OpenPathsFromPolyTree(PolyTree &polytree, Paths &paths);
void ReversePath(Path &p);
void ReversePaths(Paths &p);
struct IntRect {
cInt left;
cInt top;
cInt right;
cInt bottom;
};
//enums that are used internally ...
enum EdgeSide {
esLeft = 1, esRight = 2
};
//forward declarations (for stuff used internally) ...
struct TEdge;
struct IntersectNode;
struct LocalMinimum;
struct OutPt;
struct OutRec;
struct Join;
typedef std::vector<OutRec *> PolyOutList;
typedef std::vector<TEdge *> EdgeList;
typedef std::vector<Join *> JoinList;
typedef std::vector<IntersectNode *> IntersectList;
//------------------------------------------------------------------------------
//ClipperBase is the ancestor to the Clipper class. It should not be
//instantiated directly. This class simply abstracts the conversion of sets of
//polygon coordinates into edge objects that are stored in a LocalMinima list.
class ClipperBase {
public:
ClipperBase();
virtual ~ClipperBase();
virtual bool AddPath(const Path &pg, PolyType PolyTyp, bool Closed);
bool AddPaths(const Paths &ppg, PolyType PolyTyp, bool Closed);
virtual void Clear();
IntRect GetBounds();
bool PreserveCollinear() { return m_PreserveCollinear; };
void PreserveCollinear(bool value) { m_PreserveCollinear = value; };
protected:
void DisposeLocalMinimaList();
TEdge *AddBoundsToLML(TEdge *e, bool IsClosed);
virtual void Reset();
TEdge *ProcessBound(TEdge *E, bool IsClockwise);
void InsertScanbeam(const cInt Y);
bool PopScanbeam(cInt &Y);
bool LocalMinimaPending();
bool PopLocalMinima(cInt Y, const LocalMinimum *&locMin);
OutRec *CreateOutRec();
void DisposeAllOutRecs();
void DisposeOutRec(PolyOutList::size_type index);
void SwapPositionsInAEL(TEdge *edge1, TEdge *edge2);
void DeleteFromAEL(TEdge *e);
void UpdateEdgeIntoAEL(TEdge *&e);
typedef std::vector <LocalMinimum> MinimaList;
MinimaList::iterator m_CurrentLM;
MinimaList m_MinimaList;
bool m_UseFullRange;
EdgeList m_edges;
bool m_PreserveCollinear;
bool m_HasOpenPaths;
PolyOutList m_PolyOuts;
TEdge *m_ActiveEdges;
typedef std::priority_queue <cInt> ScanbeamList;
ScanbeamList m_Scanbeam;
};
//------------------------------------------------------------------------------
class Clipper : public virtual ClipperBase {
public:
Clipper(int initOptions = 0);
bool Execute(ClipType clipType,
Paths &solution,
PolyFillType fillType = pftEvenOdd);
bool Execute(ClipType clipType,
Paths &solution,
PolyFillType subjFillType,
PolyFillType clipFillType);
bool Execute(ClipType clipType,
PolyTree &polytree,
PolyFillType fillType = pftEvenOdd);
bool Execute(ClipType clipType,
PolyTree &polytree,
PolyFillType subjFillType,
PolyFillType clipFillType);
bool ReverseSolution() { return m_ReverseOutput; };
void ReverseSolution(bool value) { m_ReverseOutput = value; };
bool StrictlySimple() { return m_StrictSimple; };
void StrictlySimple(bool value) { m_StrictSimple = value; };
//set the callback function for z value filling on intersections (otherwise Z is 0)
#ifdef use_xyz
void ZFillFunction(ZFillCallback zFillFunc);
#endif
protected:
virtual bool ExecuteInternal();
private:
JoinList m_Joins;
JoinList m_GhostJoins;
IntersectList m_IntersectList;
ClipType m_ClipType;
typedef std::list <cInt> MaximaList;
MaximaList m_Maxima;
TEdge *m_SortedEdges;
bool m_ExecuteLocked;
PolyFillType m_ClipFillType;
PolyFillType m_SubjFillType;
bool m_ReverseOutput;
bool m_UsingPolyTree;
bool m_StrictSimple;
#ifdef use_xyz
ZFillCallback m_ZFill; //custom callback
#endif
void SetWindingCount(TEdge &edge);
bool IsEvenOddFillType(const TEdge &edge) const;
bool IsEvenOddAltFillType(const TEdge &edge) const;
void InsertLocalMinimaIntoAEL(const cInt botY);
void InsertEdgeIntoAEL(TEdge *edge, TEdge *startEdge);
void AddEdgeToSEL(TEdge *edge);
bool PopEdgeFromSEL(TEdge *&edge);
void CopyAELToSEL();
void DeleteFromSEL(TEdge *e);
void SwapPositionsInSEL(TEdge *edge1, TEdge *edge2);
bool IsContributing(const TEdge &edge) const;
bool IsTopHorz(const cInt XPos);
void DoMaxima(TEdge *e);
void ProcessHorizontals();
void ProcessHorizontal(TEdge *horzEdge);
void AddLocalMaxPoly(TEdge *e1, TEdge *e2, const IntPoint &pt);
OutPt *AddLocalMinPoly(TEdge *e1, TEdge *e2, const IntPoint &pt);
OutRec *GetOutRec(int idx);
void AppendPolygon(TEdge *e1, TEdge *e2);
void IntersectEdges(TEdge *e1, TEdge *e2, IntPoint &pt);
OutPt *AddOutPt(TEdge *e, const IntPoint &pt);
OutPt *GetLastOutPt(TEdge *e);
bool ProcessIntersections(const cInt topY);
void BuildIntersectList(const cInt topY);
void ProcessIntersectList();
void ProcessEdgesAtTopOfScanbeam(const cInt topY);
void BuildResult(Paths &polys);
void BuildResult2(PolyTree &polytree);
void SetHoleState(TEdge *e, OutRec *outrec);
void DisposeIntersectNodes();
bool FixupIntersectionOrder();
void FixupOutPolygon(OutRec &outrec);
void FixupOutPolyline(OutRec &outrec);
bool IsHole(TEdge *e);
bool FindOwnerFromSplitRecs(OutRec &outRec, OutRec *&currOrfl);
void FixHoleLinkage(OutRec &outrec);
void AddJoin(OutPt *op1, OutPt *op2, const IntPoint offPt);
void ClearJoins();
void ClearGhostJoins();
void AddGhostJoin(OutPt *op, const IntPoint offPt);
bool JoinPoints(Join *j, OutRec *outRec1, OutRec *outRec2);
void JoinCommonEdges();
void DoSimplePolygons();
void FixupFirstLefts1(OutRec *OldOutRec, OutRec *NewOutRec);
void FixupFirstLefts2(OutRec *InnerOutRec, OutRec *OuterOutRec);
void FixupFirstLefts3(OutRec *OldOutRec, OutRec *NewOutRec);
#ifdef use_xyz
void SetZ(IntPoint& pt, TEdge& e1, TEdge& e2);
#endif
};
//------------------------------------------------------------------------------
class ClipperOffset {
public:
ClipperOffset(double miterLimit = 2.0, double roundPrecision = 0.25);
~ClipperOffset();
void AddPath(const Path &path, JoinType joinType, EndType endType);
void AddPaths(const Paths &paths, JoinType joinType, EndType endType);
void Execute(Paths &solution, double delta);
void Execute(PolyTree &solution, double delta);
void Clear();
double MiterLimit;
double ArcTolerance;
private:
Paths m_destPolys;
Path m_srcPoly;
Path m_destPoly;
std::vector <DoublePoint> m_normals;
double m_delta, m_sinA, m_sin, m_cos;
double m_miterLim, m_StepsPerRad;
IntPoint m_lowest;
PolyNode m_polyNodes;
void FixOrientations();
void DoOffset(double delta);
void OffsetPoint(int j, int &k, JoinType jointype);
void DoSquare(int j, int k);
void DoMiter(int j, int k, double r);
void DoRound(int j, int k);
};
//------------------------------------------------------------------------------
class clipperException : public std::exception {
public:
clipperException(const char *description) : m_descr(description) {}
virtual ~clipperException() throw() {}
virtual const char *what() const throw() { return m_descr.c_str(); }
private:
std::string m_descr;
};
//------------------------------------------------------------------------------
} //ClipperLib namespace
#endif //clipper_hpp
// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "ocr_crnn_process.h"
#include <iostream>
#include <vector>
#include <cstring>
#include <fstream>
#include <cmath>
const std::string CHARACTER_TYPE = "ch";
const int MAX_DICT_LENGTH = 6624;
const std::vector<int> REC_IMAGE_SHAPE = {3, 32, 320};
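// Resize a text crop to the CRNN input height (32), normalize pixel values to [-1, 1], and right-pad with zeros up to the target width.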
static cv::Mat crnn_resize_norm_img(cv::Mat img, float wh_ratio) {
int imgC, imgH, imgW;
imgC = REC_IMAGE_SHAPE[0];
imgW = REC_IMAGE_SHAPE[2];
imgH = REC_IMAGE_SHAPE[1];
if (CHARACTER_TYPE == "ch")
imgW = int(32 * wh_ratio);
float ratio = float(img.cols) / float(img.rows);
int resize_w, resize_h;
if (ceilf(imgH * ratio) > imgW)
resize_w = imgW;
else
resize_w = int(ceilf(imgH * ratio));
cv::Mat resize_img;
cv::resize(img, resize_img, cv::Size(resize_w, imgH), 0.f, 0.f, cv::INTER_CUBIC);
resize_img.convertTo(resize_img, CV_32FC3, 1 / 255.f);
for (int h = 0; h < resize_img.rows; h++) {
for (int w = 0; w < resize_img.cols; w++) {
resize_img.at<cv::Vec3f>(h, w)[0] = (resize_img.at<cv::Vec3f>(h, w)[0] - 0.5) * 2;
resize_img.at<cv::Vec3f>(h, w)[1] = (resize_img.at<cv::Vec3f>(h, w)[1] - 0.5) * 2;
resize_img.at<cv::Vec3f>(h, w)[2] = (resize_img.at<cv::Vec3f>(h, w)[2] - 0.5) * 2;
}
}
cv::Mat dist;
cv::copyMakeBorder(resize_img, dist, 0, 0, 0, int(imgW - resize_w), cv::BORDER_CONSTANT,
{0, 0, 0});
return dist;
}
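// Resize a text crop to height 32, scaling the width by the crop's aspect ratio (capped at the configured maximum width).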
cv::Mat crnn_resize_img(const cv::Mat &img, float wh_ratio) {
int imgC = REC_IMAGE_SHAPE[0];
int imgW = REC_IMAGE_SHAPE[2];
int imgH = REC_IMAGE_SHAPE[1];
if (CHARACTER_TYPE == "ch") {
imgW = int(32 * wh_ratio);
}
float ratio = float(img.cols) / float(img.rows);
int resize_w;
if (ceilf(imgH * ratio) > imgW)
resize_w = imgW;
else
resize_w = int(ceilf(imgH * ratio));
cv::Mat resize_img;
cv::resize(img, resize_img, cv::Size(resize_w, imgH));
return resize_img;
}
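// Crop the quadrilateral text region from the source image and warp it to an axis-aligned rectangle; if the result is much taller than it is wide, rotate it 90 degrees.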
cv::Mat get_rotate_crop_image(const cv::Mat &srcimage, const std::vector<std::vector<int>> &box) {
std::vector<std::vector<int>> points = box;
int x_collect[4] = {box[0][0], box[1][0], box[2][0], box[3][0]};
int y_collect[4] = {box[0][1], box[1][1], box[2][1], box[3][1]};
int left = int(*std::min_element(x_collect, x_collect + 4));
int right = int(*std::max_element(x_collect, x_collect + 4));
int top = int(*std::min_element(y_collect, y_collect + 4));
int bottom = int(*std::max_element(y_collect, y_collect + 4));
cv::Mat img_crop;
srcimage(cv::Rect(left, top, right - left, bottom - top)).copyTo(img_crop);
for (int i = 0; i < points.size(); i++) {
points[i][0] -= left;
points[i][1] -= top;
}
int img_crop_width = int(sqrt(pow(points[0][0] - points[1][0], 2) +
pow(points[0][1] - points[1][1], 2)));
int img_crop_height = int(sqrt(pow(points[0][0] - points[3][0], 2) +
pow(points[0][1] - points[3][1], 2)));
cv::Point2f pts_std[4];
pts_std[0] = cv::Point2f(0., 0.);
pts_std[1] = cv::Point2f(img_crop_width, 0.);
pts_std[2] = cv::Point2f(img_crop_width, img_crop_height);
pts_std[3] = cv::Point2f(0.f, img_crop_height);
cv::Point2f pointsf[4];
pointsf[0] = cv::Point2f(points[0][0], points[0][1]);
pointsf[1] = cv::Point2f(points[1][0], points[1][1]);
pointsf[2] = cv::Point2f(points[2][0], points[2][1]);
pointsf[3] = cv::Point2f(points[3][0], points[3][1]);
cv::Mat M = cv::getPerspectiveTransform(pointsf, pts_std);
cv::Mat dst_img;
cv::warpPerspective(img_crop, dst_img, M, cv::Size(img_crop_width, img_crop_height),
cv::BORDER_REPLICATE);
if (float(dst_img.rows) >= float(dst_img.cols) * 1.5) {
/*
cv::Mat srcCopy = cv::Mat(dst_img.rows, dst_img.cols, dst_img.depth());
cv::transpose(dst_img, srcCopy);
cv::flip(srcCopy, srcCopy, 0);
return srcCopy;
*/
cv::transpose(dst_img, dst_img);
cv::flip(dst_img, dst_img, 0);
return dst_img;
} else {
return dst_img;
}
}
//
// Created by fujiayi on 2020/7/3.
//
#pragma once
#include <vector>
#include <opencv2/opencv.hpp>
extern const std::vector<int> REC_IMAGE_SHAPE;
cv::Mat get_rotate_crop_image(const cv::Mat &srcimage, const std::vector<std::vector<int>>& box);
cv::Mat crnn_resize_img(const cv::Mat& img, float wh_ratio);
template<class ForwardIterator>
inline size_t argmax(ForwardIterator first, ForwardIterator last) {
return std::distance(first, std::max_element(first, last));
}
\ No newline at end of file
// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <iostream>
#include <vector>
#include <math.h>
#include "opencv2/core.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/imgproc.hpp"
#include "ocr_clipper.hpp"
static void getcontourarea(float **box, float unclip_ratio, float &distance) {
int pts_num = 4;
float area = 0.0f;
float dist = 0.0f;
for (int i = 0; i < pts_num; i++) {
area += box[i][0] * box[(i + 1) % pts_num][1] - box[i][1] * box[(i + 1) % pts_num][0];
dist += sqrtf(
(box[i][0] - box[(i + 1) % pts_num][0]) * (box[i][0] - box[(i + 1) % pts_num][0]) +
(box[i][1] - box[(i + 1) % pts_num][1]) * (box[i][1] - box[(i + 1) % pts_num][1]));
}
area = fabs(float(area / 2.0));
distance = area * unclip_ratio / dist;
}
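// Expand a detected text box outward via ClipperLib polygon offsetting and return the minimum-area rotated rect of the expanded polygon.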
static cv::RotatedRect unclip(float **box) {
float unclip_ratio = 2.0;
float distance = 1.0;
getcontourarea(box, unclip_ratio, distance);
ClipperLib::ClipperOffset offset;
ClipperLib::Path p;
p << ClipperLib::IntPoint(int(box[0][0]), int(box[0][1]))
<< ClipperLib::IntPoint(int(box[1][0]), int(box[1][1])) <<
ClipperLib::IntPoint(int(box[2][0]), int(box[2][1]))
<< ClipperLib::IntPoint(int(box[3][0]), int(box[3][1]));
offset.AddPath(p, ClipperLib::jtRound, ClipperLib::etClosedPolygon);
ClipperLib::Paths soln;
offset.Execute(soln, distance);
std::vector<cv::Point2f> points;
for (int j = 0; j < soln.size(); j++) {
for (int i = 0; i < soln[j].size(); i++) {
points.emplace_back(soln[j][i].X, soln[j][i].Y);
}
}
cv::RotatedRect res = cv::minAreaRect(points);
return res;
}
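// Copies a CV_32F matrix into a heap-allocated float** of shape rows x cols;
// the callers in this file never free it.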
static float **Mat2Vec(cv::Mat mat) {
auto **array = new float *[mat.rows];
for (int i = 0; i < mat.rows; ++i)
array[i] = new float[mat.cols];
for (int i = 0; i < mat.rows; ++i) {
for (int j = 0; j < mat.cols; ++j) {
array[i][j] = mat.at<float>(i, j);
}
}
return array;
}
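// In-place quicksort of the corner points by x coordinate (column 0).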
static void quickSort(float **s, int l, int r) {
if (l < r) {
int i = l, j = r;
float x = s[l][0];
float *xp = s[l];
while (i < j) {
while (i < j && s[j][0] >= x)
j--;
if (i < j)
std::swap(s[i++], s[j]);
while (i < j && s[i][0] < x)
i++;
if (i < j)
std::swap(s[j--], s[i]);
}
s[i] = xp;
quickSort(s, l, i - 1);
quickSort(s, i + 1, r);
}
}
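// In-place quicksort of box points along the given axis (0 = x, 1 = y).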
static void quickSort_vector(std::vector<std::vector<int>> &box, int l, int r, int axis) {
if (l < r) {
int i = l, j = r;
int x = box[l][axis];
std::vector<int> xp(box[l]);
while (i < j) {
while (i < j && box[j][axis] >= x)
j--;
if (i < j)
std::swap(box[i++], box[j]);
while (i < j && box[i][axis] < x)
i++;
if (i < j)
std::swap(box[j--], box[i]);
}
box[i] = xp;
quickSort_vector(box, l, i - 1, axis);
quickSort_vector(box, i + 1, r, axis);
}
}
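// Reorders the 4 points to top-left, top-right, bottom-right, bottom-left.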
static std::vector<std::vector<int>> order_points_clockwise(std::vector<std::vector<int>> pts) {
std::vector<std::vector<int>> box = pts;
quickSort_vector(box, 0, int(box.size() - 1), 0);
std::vector<std::vector<int>> leftmost = {box[0], box[1]};
std::vector<std::vector<int>> rightmost = {box[2], box[3]};
if (leftmost[0][1] > leftmost[1][1])
std::swap(leftmost[0], leftmost[1]);
if (rightmost[0][1] > rightmost[1][1])
std::swap(rightmost[0], rightmost[1]);
std::vector<std::vector<int>> rect = {leftmost[0], rightmost[0], rightmost[1], leftmost[1]};
return rect;
}
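// Returns the 4 corners of a rotated rect ordered top-left, top-right, bottom-right,
// bottom-left; `ssid` receives the length of the shorter side.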
static float **get_mini_boxes(cv::RotatedRect box, float &ssid) {
ssid = box.size.width >= box.size.height ? box.size.height : box.size.width;
cv::Mat points;
cv::boxPoints(box, points);
// sorted box points
auto array = Mat2Vec(points);
quickSort(array, 0, 3);
float *idx1 = array[0], *idx2 = array[1], *idx3 = array[2], *idx4 = array[3];
if (array[3][1] <= array[2][1]) {
idx2 = array[3];
idx3 = array[2];
} else {
idx2 = array[2];
idx3 = array[3];
}
if (array[1][1] <= array[0][1]) {
idx1 = array[1];
idx4 = array[0];
} else {
idx1 = array[0];
idx4 = array[1];
}
array[0] = idx1;
array[1] = idx2;
array[2] = idx3;
array[3] = idx4;
return array;
}
template <class T> T clamp(T x, T min, T max) {
if (x > max){
return max;
}
if (x < min){
return min;
}
return x;
}
static float clampf(float x, float min, float max) {
if (x > max)
return max;
if (x < min)
return min;
return x;
}
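// Mean of the probability map `pred` inside the box polygon; low-scoring boxes are
// discarded by the caller.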
float box_score_fast(float **box_array, cv::Mat pred) {
auto array = box_array;
int width = pred.cols;
int height = pred.rows;
float box_x[4] = {array[0][0], array[1][0], array[2][0], array[3][0]};
float box_y[4] = {array[0][1], array[1][1], array[2][1], array[3][1]};
int xmin = clamp(int(std::floorf(*(std::min_element(box_x, box_x + 4)))), 0, width - 1);
int xmax = clamp(int(std::ceilf(*(std::max_element(box_x, box_x + 4)))), 0, width - 1);
int ymin = clamp(int(std::floorf(*(std::min_element(box_y, box_y + 4)))), 0, height - 1);
int ymax = clamp(int(std::ceilf(*(std::max_element(box_y, box_y + 4)))), 0, height - 1);
cv::Mat mask;
mask = cv::Mat::zeros(ymax - ymin + 1, xmax - xmin + 1, CV_8UC1);
cv::Point root_point[4];
root_point[0] = cv::Point(int(array[0][0]) - xmin, int(array[0][1]) - ymin);
root_point[1] = cv::Point(int(array[1][0]) - xmin, int(array[1][1]) - ymin);
root_point[2] = cv::Point(int(array[2][0]) - xmin, int(array[2][1]) - ymin);
root_point[3] = cv::Point(int(array[3][0]) - xmin, int(array[3][1]) - ymin);
const cv::Point *ppt[1] = {root_point};
int npt[] = {4};
cv::fillPoly(mask, ppt, npt, 1, cv::Scalar(1));
cv::Mat croppedImg;
pred(cv::Rect(xmin, ymin, xmax - xmin + 1, ymax - ymin + 1)).copyTo(croppedImg);
auto score = cv::mean(croppedImg, mask)[0];
return score;
}
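// DB post-processing: finds contours on the binarized map, keeps boxes that are large
// enough and score above box_thresh on the probability map, expands them with unclip,
// and rescales the corners to the prediction map size.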
std::vector<std::vector<std::vector<int>>>
boxes_from_bitmap(const cv::Mat& pred, const cv::Mat& bitmap) {
const int min_size = 3;
const int max_candidates = 1000;
const float box_thresh = 0.5;
int width = bitmap.cols;
int height = bitmap.rows;
std::vector<std::vector<cv::Point>> contours;
std::vector<cv::Vec4i> hierarchy;
cv::findContours(bitmap, contours, hierarchy, cv::RETR_LIST, cv::CHAIN_APPROX_SIMPLE);
int num_contours = contours.size() >= max_candidates ? max_candidates : contours.size();
std::vector<std::vector<std::vector<int>>> boxes;
for (int _i = 0; _i < num_contours; _i++) {
float ssid;
cv::RotatedRect box = cv::minAreaRect(contours[_i]);
auto array = get_mini_boxes(box, ssid);
auto box_for_unclip = array;
//end get_mini_box
if (ssid < min_size) {
continue;
}
float score;
score = box_score_fast(array, pred);
//end box_score_fast
if (score < box_thresh)
continue;
// start for unclip
cv::RotatedRect points = unclip(box_for_unclip);
// end for unclip
cv::RotatedRect clipbox = points;
auto cliparray = get_mini_boxes(clipbox, ssid);
if (ssid < min_size + 2) continue;
int dest_width = pred.cols;
int dest_height = pred.rows;
std::vector<std::vector<int>> intcliparray;
for (int num_pt = 0; num_pt < 4; num_pt++) {
std::vector<int> a{
int(clampf(roundf(cliparray[num_pt][0] / float(width) * float(dest_width)), 0,
float(dest_width))),
int(clampf(roundf(cliparray[num_pt][1] / float(height) * float(dest_height)), 0,
float(dest_height)))};
intcliparray.push_back(a);
}
boxes.push_back(intcliparray);
}//end for
return boxes;
}
int _max(int a, int b) {
return a >= b ? a : b;
}
int _min(int a, int b) {
return a >= b ? b : a;
}
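// Orders each box clockwise, maps it back to the original image via the resize ratios,
// clamps it to the image bounds, and drops boxes with a side of 10 px or less.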
std::vector<std::vector<std::vector<int>>>
filter_tag_det_res(const std::vector<std::vector<std::vector<int>>>& o_boxes,
float ratio_h, float ratio_w,const cv::Mat& srcimg) {
int oriimg_h = srcimg.rows;
int oriimg_w = srcimg.cols;
std::vector<std::vector<std::vector<int>>> boxes{o_boxes};
std::vector<std::vector<std::vector<int>>> root_points;
for (int n = 0; n < boxes.size(); n++) {
boxes[n] = order_points_clockwise(boxes[n]);
for (int m = 0; m < boxes[n].size(); m++) {
boxes[n][m][0] /= ratio_w;
boxes[n][m][1] /= ratio_h;
boxes[n][m][0] = int(_min(_max(boxes[n][m][0], 0), oriimg_w - 1));
boxes[n][m][1] = int(_min(_max(boxes[n][m][1], 0), oriimg_h - 1));
}
}
for (int n = 0; n < boxes.size(); n++) {
int rect_width, rect_height;
rect_width = int(sqrt(
pow(boxes[n][0][0] - boxes[n][1][0], 2) + pow(boxes[n][0][1] - boxes[n][1][1], 2)));
rect_height = int(sqrt(
pow(boxes[n][0][0] - boxes[n][3][0], 2) + pow(boxes[n][0][1] - boxes[n][3][1], 2)));
if (rect_width <= 10 || rect_height <= 10)
continue;
root_points.push_back(boxes[n]);
}
return root_points;
}
/*
using namespace std;
// read data from txt file
cv::Mat readtxt2(std::string path, int imgw, int imgh, int imgc) {
std::cout << "read data file from txt file! " << std::endl;
ifstream in(path);
string line;
int count = 0;
int i = 0, j = 0;
std::vector<float> img_mean = {0.485, 0.456, 0.406};
std::vector<float> img_std = {0.229, 0.224, 0.225};
float trainData[imgh][imgw*imgc];
while (getline(in, line)) {
stringstream ss(line);
double x;
while (ss >> x) {
// trainData[i][j] = float(x) * img_std[j % 3] + img_mean[j % 3];
trainData[i][j] = float(x);
j++;
}
i++;
j = 0;
}
cv::Mat pred_map(imgh, imgw*imgc, CV_32FC1, (float *) trainData);
cv::Mat reshape_img = pred_map.reshape(imgc, imgh);
return reshape_img;
}
*/
//using namespace std;
//
//void writetxt(vector<vector<float>> data, std::string save_path){
//
// ofstream fout(save_path);
//
// for (int i = 0; i < data.size(); i++) {
// for (int j=0; j< data[0].size(); j++){
// fout << data[i][j] << " ";
// }
// fout << endl;
// }
// fout << endl;
// fout.close();
//}
//
// Created by fujiayi on 2020/7/2.
//
#pragma once
#include <vector>
#include <opencv2/core.hpp>
// Extracts candidate text boxes from the DB model's probability map (`pred`) and its
// binarized map (`bitmap`).
std::vector<std::vector<std::vector<int>>>
boxes_from_bitmap(const cv::Mat& pred, const cv::Mat& bitmap);
// Maps detected boxes back to the original image using the resize ratios and drops
// boxes whose sides are 10 px or smaller.
std::vector<std::vector<std::vector<int>>>
filter_tag_det_res(const std::vector<std::vector<std::vector<int>>>& o_boxes,
float ratio_h, float ratio_w, const cv::Mat& srcimg);
//
// timer.h
// face_demo
//
// Created by Li,Xiaoyang(SYS) on 2019/8/20.
// Copyright © 2019年 Li,Xiaoyang(SYS). All rights reserved.
//
#ifndef timer_h
#define timer_h
#include <chrono>
#include <list>
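// Lightweight wall-clock timer: each start()/end() pair records one interval in
// milliseconds; average, sum, and percentile statistics are computed over all
// recorded intervals.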
class Timer final {
public:
Timer() {}
~Timer() {}
void clear() {
ms_time.clear();
}
void start() {
tstart = std::chrono::system_clock::now();
}
void end() {
tend = std::chrono::system_clock::now();
auto ts = std::chrono::duration_cast<std::chrono::microseconds>(tend - tstart);
float elapse_ms = 1000.f * float(ts.count()) * std::chrono::microseconds::period::num / \
std::chrono::microseconds::period::den;
ms_time.push_back(elapse_ms);
}
float get_average_ms() {
if (ms_time.size() == 0) {
return 0.f;
}
float sum = 0.f;
for (auto i : ms_time){
sum += i;
}
return sum / ms_time.size();
}
float get_sum_ms(){
if (ms_time.size() == 0) {
return 0.f;
}
float sum = 0.f;
for (auto i : ms_time){
sum += i;
}
return sum;
}
// Returns the tile-th percentile (0-100) of the recorded times; negative on error.
float get_tile_time(float tile) {
if (tile <0 || tile > 100) {
return -1.f;
}
int total_items = (int)ms_time.size();
if (total_items <= 0) {
return -2.f;
}
ms_time.sort();
int pos = (int)(tile * total_items / 100);
auto it = ms_time.begin();
for (int i = 0; i < pos; ++i) {
++it;
}
return *it;
}
const std::list<float> get_time_stat() {
return ms_time;
}
private:
std::chrono::time_point<std::chrono::system_clock> tstart;
std::chrono::time_point<std::chrono::system_clock> tend;
std::list<float> ms_time;
};
#endif /* timer_h */