Unverified commit 62709a1a, authored by R Ruilong Liu, committed by GitHub

Merge branch 'develop' into face_op

......@@ -69,3 +69,10 @@ build
# clion building directories
cmake-build-debug
cmake-build-release
#ios demo
demo/ios/PaddleMobileDemo/PaddleMobileDemo/googlenet_combine/
demo/ios/PaddleMobileDemo/PaddleMobileDemo/*.jpg
demo/ios/PaddleMobileDemo/PaddleMobileDemo/PaddleMobile/*.a
*.xcuserstate
\ No newline at end of file
......@@ -11,15 +11,19 @@ option(MALI_GPU "mali gpu" OFF)
option(FPGA "fpga" OFF)
set(DEBUGING ON)
file(GLOB_RECURSE PADDLE_MOBILE_CC src/*.cc src/*.cpp src/*.c)
if (ARM_LINUX)
include("${CMAKE_CURRENT_LIST_DIR}/tools/arm-platform.cmake")
endif ()
file(GLOB_RECURSE PADDLE_MOBILE_CC src/*.cc src/*.cpp src/*.c src/*.mm)
file(GLOB_RECURSE PADDLE_MOBILE_H src/*.h)
if (CPU)
add_definitions(-DPADDLE_MOBILE_CPU)
else()
list(REMOVE_ITEM PADDLE_MOBILE_CC ${CMAKE_CURRENT_SOURCE_DIR}/src/operators/kernel/arm/*.h)
list(REMOVE_ITEM PADDLE_MOBILE_CC ${CMAKE_CURRENT_SOURCE_DIR}/src/operators/kernel/arm/*.cc)
list(REMOVE_ITEM PADDLE_MOBILE_CC ${CMAKE_CURRENT_SOURCE_DIR}/src/operators/kernel/arm/*.cpp)
list(REMOVE_ITEM PADDLE_MOBILE_CC ./src/operators/kernel/arm/*.h)
list(REMOVE_ITEM PADDLE_MOBILE_CC ./src/operators/kernel/arm/*.cc)
list(REMOVE_ITEM PADDLE_MOBILE_CC ./src/operators/kernel/arm/*.cpp)
endif()
......@@ -37,9 +41,15 @@ if (MALI_GPU)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -lOpenCL")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DUSE_ACL=1")
else()
list(REMOVE_ITEM PADDLE_MOBILE_CC ${CMAKE_CURRENT_SOURCE_DIR}/src/operators/kernel/mali/*.h)
list(REMOVE_ITEM PADDLE_MOBILE_CC ${CMAKE_CURRENT_SOURCE_DIR}/src/operators/kernel/mali/*.cc)
list(REMOVE_ITEM PADDLE_MOBILE_CC ${CMAKE_CURRENT_SOURCE_DIR}/src/operators/kernel/mali/*.cpp)
file(GLOB_RECURSE _tmp_list src/operators/kernel/mali/*.cpp src/operators/kernel/mali/*.cc)
foreach(f ${_tmp_list})
list(REMOVE_ITEM PADDLE_MOBILE_CC ${f})
endforeach()
file(GLOB_RECURSE _tmp_list_h src/operators/kernel/mali/*.h)
foreach(f ${_tmp_list_h})
list(REMOVE_ITEM PADDLE_MOBILE_H ${f})
endforeach()
endif()
if(FPGA)
......@@ -88,9 +98,9 @@ endif()
if (NOT ANDROID_NDK_TOOLCHAIN_INCLUDED)
list(REMOVE_ITEM PADDLE_MOBILE_CC ${CMAKE_CURRENT_SOURCE_DIR}/src/jni/*.cpp)
list(REMOVE_ITEM PADDLE_MOBILE_CC ${CMAKE_CURRENT_SOURCE_DIR}/src/jni/*.h)
list(REMOVE_ITEM PADDLE_MOBILE_CC ${CMAKE_CURRENT_SOURCE_DIR}/src/operators/math/math_func_neon.h)
list(REMOVE_ITEM PADDLE_MOBILE_H ${CMAKE_CURRENT_SOURCE_DIR}/src/jni/paddle_mobile_jni.h)
list(REMOVE_ITEM PADDLE_MOBILE_CC ${CMAKE_CURRENT_SOURCE_DIR}/src/jni/paddle_mobile_jni.cpp)
list(REMOVE_ITEM PADDLE_MOBILE_H ${CMAKE_CURRENT_SOURCE_DIR}/src/operators/math/math_func_neon.h)
endif ()
include_directories(src/)
......@@ -107,11 +117,18 @@ set_property(CACHE NET PROPERTY STRINGS "defult" "googlenet" "mobilenet" "yolo"
include("${CMAKE_CURRENT_LIST_DIR}/tools/op.cmake")
# if (IS_IOS)
# add_library(paddle-mobile STATIC ${PADDLE_MOBILE_CC} ${PADDLE_MOBILE_H})
if (IS_IOS)
add_library(paddle-mobile STATIC ${PADDLE_MOBILE_CC} ${PADDLE_MOBILE_H})
else()
list(REMOVE_ITEM PADDLE_MOBILE_H ${CMAKE_CURRENT_SOURCE_DIR}/src/ios_io/PaddleMobile.h)
list(REMOVE_ITEM PADDLE_MOBILE_CC ${CMAKE_CURRENT_SOURCE_DIR}/src/ios_io/PaddleMobile.mm)
list(REMOVE_ITEM PADDLE_MOBILE_H ${CMAKE_CURRENT_SOURCE_DIR}/src/ios_io/op_symbols.h)
endif ()
if (ANDROID_NDK_TOOLCHAIN_INCLUDED)
list(REMOVE_DUPLICATES CMAKE_CXX_FLAGS)
add_library(paddle-mobile SHARED ${PADDLE_MOBILE_CC} ${PADDLE_MOBILE_H})
elseif(IS_IOS)
else ()
add_library(paddle-mobile SHARED ${PADDLE_MOBILE_CC} ${PADDLE_MOBILE_H})
endif ()
......
......@@ -183,7 +183,6 @@ upstream
Next, wait for review. If any changes are requested, update the corresponding branch in origin by following the steps above.
![](http://otkwwi4x8.bkt.clouddn.com/2018-06-20-15294877166787.jpg)
After that, you can submit your code.
......@@ -223,7 +222,6 @@ upstream
- Reason: if only one file is modified but a dozen or more commits are submitted, each making only minor changes, the reviewer is put in a difficult position: every commit has to be inspected one by one to understand what changed, and the changes in different commits may even overwrite one another.
- Suggestion: keep the number of commits as small as possible each time you submit; `git commit --amend` can be used to fold changes into the previous commit. For several commits that have already been pushed to the remote repository, see [squash commits after push](http://stackoverflow.com/questions/5667884/how-to-squash-commits-in-git-after-they-have-been-pushed) (a minimal sketch follows this list).
- Pay attention to the name of each commit: it should reflect the content of that commit and should not be chosen arbitrarily.
3. If the Pull Request fixes an Issue, add `fix #issue_number` to the **first** comment box of the Pull Request; the corresponding Issue will then be closed automatically when the Pull Request is merged. Keywords include close, closes, closed, fix, fixes, fixed, resolve, resolves, and resolved; choose the appropriate one. For details, see [Closing issues via commit messages](https://help.github.com/articles/closing-issues-via-commit-messages).
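As a minimal sketch of the squashing suggestion above (the commit count and branch name are assumptions for illustration), commits that have already been pushed can be combined with an interactive rebase:

```sh
# Squash the last 3 commits into one; mark all but the first as "squash" in the editor.
git rebase -i HEAD~3
# Update the already-pushed branch (hypothetical branch name).
git push --force-with-lease origin my_feature_branch
```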
In addition, when replying to reviewer comments, please follow the conventions below:
......
......@@ -15,7 +15,6 @@ RUN apt-get install -y --no-install-recommends \
unzip \
git \
make \
cmake \
cmake-curses-gui \
python \
python-pip \
......@@ -25,9 +24,14 @@ RUN apt-get install -y --no-install-recommends \
g++-arm-linux-gnueabi \
gcc-arm-linux-gnueabi
RUN apt-get autoremove -y && apt-get clean
RUN pip install --upgrade pip
RUN pip install wheel && pip install pre-commit
RUN ln -s clang-format-5.0 /usr/bin/clang-format
RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple --upgrade pip
RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple wheel
RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pre-commit
RUN cd /tmp && curl -O http://mirrors.neusoft.edu.cn/android/repository/android-ndk-r17b-linux-x86_64.zip
RUN curl -O https://mms-res.cdn.bcebos.com/cmake-3.10.3-Linux-x86_64.tar.gz && \
tar xzf cmake-3.10.3-Linux-x86_64.tar.gz && \
mv cmake-3.10.3-Linux-x86_64 /opt/cmake-3.10 && \
mv /usr/bin/cmake /usr/bin/cmake.bak && ln -s /opt/cmake-3.10/bin/cmake /usr/bin/cmake
RUN cd /opt && unzip /tmp/android-ndk-r17b-linux-x86_64.zip
ENV NDK_ROOT /opt/android-ndk-r17b
......@@ -8,11 +8,20 @@
[![License](https://img.shields.io/badge/license-Apache%202-blue.svg)](LICENSE)-->
Welcome to the Paddle-Mobile GitHub project.
Paddle-Mobile is a project under the PaddlePaddle organization: a deep learning framework dedicated to embedded platforms. Its design is kept highly consistent with the latest Fluid version of PaddlePaddle, while being heavily optimized for embedded use; performance, binary size, energy consumption, and hardware-platform coverage were considered from the very beginning of the design.
## Live results in the 简单搜索 (Simple Search) app
The GIF below shows the online main-subject detection feature running in the 简单搜索 (Simple Search) app:
![ezgif-1-050a733dfb](http://otkwwi4x8.bkt.clouddn.com/2018-07-05-ezgif-1-050a733dfb.gif)
## Demo directory
[Click here](https://github.com/PaddlePaddle/paddle-mobile/tree/develop/demo)
## Features
- **ARM CPU**
......@@ -23,7 +32,6 @@ Paddle-Moible是PaddlePaddle组织下的项目,是一个致力于嵌入式平
- **Mali GPU**
Mali GPU support is developed jointly by Baidu and ARM, and both teams have recently been working on running Paddle ops seamlessly on ACL (ARM Compute Library). Several network models such as SqueezeNet, GoogLeNet, and ResNet are already supported, and the effort will continue so that all mobile Paddle ops can run efficiently on Mali GPUs.
- **GPU Metal implementation for Apple devices**
......@@ -45,7 +53,7 @@ Paddle-Moible是PaddlePaddle组织下的项目,是一个致力于嵌入式平
- **Binary size**
paddle-mobile has taken the mobile package size into account from the very beginning of its design, and the CPU implementation has no external dependencies. During compilation, ops that the selected network does not need are not compiled in at all, and build-option tuning further helps shrink the binary (a configuration sketch follows below).
Besides the binary size, we also strive to keep the code size small: the whole repository is under 5 MB of code.
Besides the binary size, we also strive to keep the code size small: the overall code footprint of the repository is very small.
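As a minimal sketch of how this op selection works in practice (the option names come from the project's CMakeLists.txt; the out-of-source build directory is an assumption), the target network can be chosen at configure time so that only its ops are compiled in:

```sh
# Configure a CPU-only build that compiles only the ops needed by GoogLeNet.
mkdir -p build && cd build
cmake .. -DCPU=ON -DNET=googlenet
make
```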
## Documentation
......@@ -78,14 +86,7 @@ ONNX全称为“Open Neural Network Exchange”,即“开放的神经网络切
Baidu is also working on ONNX support. The related conversion project is here: [paddle-onnx](https://github.com/PaddlePaddle/paddle-onnx)
```flow
st=>start: Other models
op1=>operation: ONNX model
op2=>operation: paddle-onnx
op3=>operation: Paddle Fluid model
e=>end: Run with paddle-mobile
st->op1->op2->op3->e
```
![](http://otkwwi4x8.bkt.clouddn.com/2018-07-03-15305875853057.jpg)
### 4. Download links for some test models
[Download link](https://mms-mis.cdn.bcebos.com/paddle-mobile/models.zip)
......
*.iml
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures
.externalNativeBuild
apply plugin: 'com.android.application'
android {
compileSdkVersion 21
defaultConfig {
applicationId "com.baidu.paddle"
minSdkVersion 15
targetSdkVersion 21
versionCode 1
versionName "1.0"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'com.android.support:appcompat-v7:21.0.3'
}
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.baidu.paddle">
<!-- Permission to write data to the SD card -->
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.CAMERA" />
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:supportsRtl="true"
android:theme="@style/AppTheme" >
<activity
android:name=".MainActivity"
android:label="@string/app_name">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
/*
* Copyright (c) 2016 Baidu, Inc. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of
* the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
* THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
package com.baidu.paddle;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import static android.graphics.Color.blue;
import static android.graphics.Color.green;
import static android.graphics.Color.red;
import static com.baidu.paddle.MainActivity.TYPE.googlenet;
public class MainActivity extends Activity {
public static final int TAKE_PHOTO_REQUEST_CODE = 1001;
private Context mContext = null;
private int inputSize = 224;
enum TYPE {
googlenet
}
private TYPE type = googlenet;
private ImageView imageView;
private TextView tvSpeed;
private Button button;
private Bitmap bmp;
static {
try {
System.loadLibrary("paddle-mobile");
} catch (SecurityException e) {
e.printStackTrace();
} catch (UnsatisfiedLinkError e) {
e.printStackTrace();
} catch (NullPointerException e) {
e.printStackTrace();
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mContext = this;
setContentView(R.layout.main_activity);
init();
}
private void init() {
imageView = (ImageView) findViewById(R.id.imageView);
tvSpeed = (TextView) findViewById(R.id.tv_speed);
button = (Button) findViewById(R.id.button);
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (!isHasSdCard()) {
Toast.makeText(mContext, R.string.sdcard_not_available,
Toast.LENGTH_LONG).show();
return;
}
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
// save pic in sdcard
Uri imageUri = Uri.fromFile(getTempImage());
intent.putExtra(MediaStore.EXTRA_OUTPUT, imageUri);
startActivityForResult(intent, TAKE_PHOTO_REQUEST_CODE);
}
});
Button bt_load = (Button) findViewById(R.id.bt_load);
bt_load.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
String assetPath = "pml_demo";
String sdcardPath = Environment.getExternalStorageDirectory()
+ File.separator + assetPath + File.separator + type;
PML.load(sdcardPath);
}
});
Button bt_clear = (Button) findViewById(R.id.bt_clear);
bt_clear.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
PML.clear();
}
});
String assetPath = "pml_demo";
String sdcardPath = Environment.getExternalStorageDirectory()
+ File.separator + assetPath;
copyFilesFromAssets(this, assetPath, sdcardPath);
}
public void copyFilesFromAssets(Context context, String oldPath, String newPath) {
try {
String[] fileNames = context.getAssets().list(oldPath);
if (fileNames.length > 0) {
// directory
File file = new File(newPath);
file.mkdirs();
// copy recursively
for (String fileName : fileNames) {
copyFilesFromAssets(context, oldPath + "/" + fileName,
newPath + "/" + fileName);
}
} else {
// file
InputStream is = context.getAssets().open(oldPath);
FileOutputStream fos = new FileOutputStream(new File(newPath));
byte[] buffer = new byte[1024];
int byteCount;
while ((byteCount = is.read(buffer)) != -1) {
fos.write(buffer, 0, byteCount);
}
fos.flush();
is.close();
fos.close();
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public File getTempImage() {
if (Environment.getExternalStorageState().equals(
Environment.MEDIA_MOUNTED)) {
File tempFile = new File(Environment.getExternalStorageDirectory(), "temp.jpg");
try {
tempFile.createNewFile();
} catch (IOException e) {
e.printStackTrace();
}
return tempFile;
}
return null;
}
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
switch (requestCode) {
case TAKE_PHOTO_REQUEST_CODE:
if (resultCode == RESULT_OK) {
DetectionTask detectionTask = new DetectionTask();
detectionTask.execute(getTempImage().getPath());
}
break;
default:
break;
}
}
/**
* draw rect on imageView
*
* @param bitmap
* @param predicted
* @param viewWidth
* @param viewHeight
*/
private void drawRect(Bitmap bitmap, float[] predicted, int viewWidth, int viewHeight) {
Canvas canvas = new Canvas(bitmap);
canvas.drawBitmap(bitmap, 0, 0, null);
if (type == googlenet) {
Paint paint = new Paint();
paint.setColor(Color.RED);
paint.setStyle(Paint.Style.STROKE);
paint.setStrokeWidth(3.0f);
float x1 = 0;
float x2 = 0;
float y1 = 0;
float y2 = 0;
// the googlenet result sequence is (left, top, right, bottom)
x1 = (predicted[0] * viewWidth / 224);
y1 = (predicted[1] * viewHeight / 224);
x2 = (predicted[2] * viewWidth / 224);
y2 = (predicted[3] * viewHeight / 224);
canvas.drawRect(x1, y1, x2, y2, paint);
}
imageView.setImageBitmap(bitmap);
}
float getMaxIndex(float[] predicted) {
float max = 0;
int index = 0;
for (int i = 0; i < predicted.length; i++) {
if (predicted[i] > max) {
max = predicted[i];
index = i;
}
}
return index;
}
public float[] getScaledMatrix(Bitmap bitmap, int desWidth,
int desHeight) {
float[] dataBuf = new float[3 * desWidth * desHeight];
int rIndex;
int gIndex;
int bIndex;
int[] pixels = new int[desWidth * desHeight];
Bitmap bm = Bitmap.createScaledBitmap(bitmap, desWidth, desHeight, false);
bm.getPixels(pixels, 0, desWidth, 0, 0, desWidth, desHeight);
int j = 0;
int k = 0;
for (int i = 0; i < pixels.length; i ++) {
int clr = pixels[i];
j = i / desHeight;
k = i % desWidth;
rIndex = j * desWidth + k;
gIndex = rIndex + desHeight * desWidth;
bIndex = gIndex + desHeight * desWidth;
dataBuf[rIndex] = (float)((clr & 0x00ff0000)>> 16) -148;
dataBuf[gIndex] = (float)((clr & 0x0000ff00)>> 8) - 148;
dataBuf[bIndex] = (float)((clr & 0x000000ff)) -148;
}
if (bm.isRecycled()) {
bm.recycle();
}
return dataBuf;
}
/**
* check whether sdcard is mounted
*
* @return
*/
public boolean isHasSdCard() {
if (Environment.getExternalStorageState().equals(
Environment.MEDIA_MOUNTED)) {
return true;
} else {
return false;
}
}
public void dumpData(float[] results, String filename) {
try {
File writename = new File(filename);
writename.createNewFile();
BufferedWriter out = new BufferedWriter(new FileWriter(writename));
for (float result : results) {
out.write(result + " ");
}
out.flush();
out.close();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* scale bitmap in case of OOM
*
* @param ctx
* @param filePath
* @return
*/
public Bitmap getScaleBitmap(Context ctx, String filePath) {
BitmapFactory.Options opt = new BitmapFactory.Options();
opt.inJustDecodeBounds = true;
BitmapFactory.decodeFile(filePath, opt);
int bmpWidth = opt.outWidth;
int bmpHeight = opt.outHeight;
int maxSize = 500;
opt.inSampleSize = 1;
while (true) {
if (bmpWidth / opt.inSampleSize < maxSize || bmpHeight / opt.inSampleSize < maxSize) {
break;
}
opt.inSampleSize *= 2;
}
opt.inJustDecodeBounds = false;
Bitmap bmp = BitmapFactory.decodeFile(filePath, opt);
return bmp;
}
@Override
public void onBackPressed() {
super.onBackPressed();
Log.d("mdl", "mdl clear");
// clear mdl
PML.clear();
}
class DetectionTask extends AsyncTask<String, Void, float[]> {
private long time;
public DetectionTask() {
super();
}
@Override
protected void onPreExecute() {
super.onPreExecute();
if (type == googlenet) {
inputSize = 224;
}
}
@Override
protected void onPostExecute(float[] result) {
super.onPostExecute(result);
try {
Bitmap src = Bitmap.createScaledBitmap(bmp, imageView.getWidth(),
imageView.getHeight(), false);
drawRect(src, result, imageView.getWidth(), imageView.getHeight());
tvSpeed.setText("detection cost:" + time + "ms");
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
protected void onProgressUpdate(Void... values) {
super.onProgressUpdate(values);
}
@Override
protected void onCancelled() {
super.onCancelled();
}
@Override
protected float[] doInBackground(String... strings) {
bmp = getScaleBitmap(mContext, strings[0]);
float[] inputData = getScaledMatrix(bmp, inputSize, inputSize);
float[] result = null;
try {
long start = System.currentTimeMillis();
result = PML.predict(inputData);
long end = System.currentTimeMillis();
time = end - start;
} catch (Exception e) {
e.printStackTrace();
}
return result;
}
}
}
package com.baidu.paddle;
public class PML {
/**
* Load the model from the given path.
*
* @param modelPath absolute path of the model directory on the device
* @return true if the model was loaded successfully
*/
public static native boolean load(String modelPath);
/**
* Run object detection on the preprocessed input.
*
* @param buf preprocessed input image data as a float array (as produced by getScaledMatrix in the demo)
* @return raw prediction values
*/
public static native float[] predict(float[] buf);
public static native void clear();
}
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillColor="#26A69A"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical" android:layout_width="match_parent"
android:layout_height="match_parent">
<LinearLayout
android:id="@+id/ll_bottom"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_alignParentBottom="true"
android:background="@android:color/background_light">
<TextView
android:id="@+id/tv_speed"
android:layout_width="match_parent"
android:layout_weight="1"
android:layout_height="wrap_content"
android:textColor="@android:color/background_dark"
android:text="@string/time_cost"/>
<Button
android:id="@+id/button"
android:layout_width="match_parent"
android:layout_height="50dp"
android:text="take photo"
android:layout_weight="1"
/>
</LinearLayout>
<LinearLayout
android:id="@+id/ll_test"
android:layout_above="@id/ll_bottom"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<Button
android:id="@+id/bt_load"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:text="load"/>
<Button
android:layout_width="0dp"
android:layout_height="wrap_content"
android:id="@+id/bt_clear"
android:layout_weight="1"
android:text="clear"/>
</LinearLayout>
<ImageView
android:id="@+id/imageView"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:scaleType="centerInside"
android:layout_above="@id/ll_test"
android:background="@android:color/background_light"/>
</RelativeLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#3F51B5</color>
<color name="colorPrimaryDark">#303F9F</color>
<color name="colorAccent">#FF4081</color>
</resources>
<resources>
<string name="app_name">PaddleMobile_Android</string>
<string name="sdcard_not_available">sdcard not available</string>
<string name="time_cost">detection cost:</string>
</resources>
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
</style>
</resources>
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.1.3'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx1536m
android.injected.testOnly=false
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
#Mon Jul 02 13:58:58 CST 2018
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.4-all.zip
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn ( ) {
echo "$*"
}
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windowz variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
goto execute
:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
......@@ -7,80 +7,123 @@
objects = {
/* Begin PBXBuildFile section */
FC086BC920E783AF00D85EF7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = FC086BC820E783AF00D85EF7 /* AppDelegate.m */; };
FC086BCC20E783AF00D85EF7 /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = FC086BCB20E783AF00D85EF7 /* ViewController.m */; };
FC086BCF20E783AF00D85EF7 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = FC086BCD20E783AF00D85EF7 /* Main.storyboard */; };
FC086BD120E783B100D85EF7 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = FC086BD020E783B100D85EF7 /* Assets.xcassets */; };
FC086BD420E783B100D85EF7 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = FC086BD220E783B100D85EF7 /* LaunchScreen.storyboard */; };
FC086BD720E783B100D85EF7 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = FC086BD620E783B100D85EF7 /* main.m */; };
FC12E93320EB6B2800807EF4 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = FC12E93220EB6B2800807EF4 /* AppDelegate.m */; };
FC12E93620EB6B2800807EF4 /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = FC12E93520EB6B2800807EF4 /* ViewController.m */; };
FC12E93920EB6B2800807EF4 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = FC12E93720EB6B2800807EF4 /* Main.storyboard */; };
FC12E93B20EB6B2900807EF4 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = FC12E93A20EB6B2900807EF4 /* Assets.xcassets */; };
FC12E93E20EB6B2900807EF4 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = FC12E93C20EB6B2900807EF4 /* LaunchScreen.storyboard */; };
FC12E94120EB6B2900807EF4 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = FC12E94020EB6B2900807EF4 /* main.m */; };
FC12E94A20EB6B6800807EF4 /* libpaddle-mobile.a in Frameworks */ = {isa = PBXBuildFile; fileRef = FC12E94820EB6B6800807EF4 /* libpaddle-mobile.a */; };
FC12E94D20EB6BBB00807EF4 /* libstdc++.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = FC12E94C20EB6BBB00807EF4 /* libstdc++.tbd */; };
FC12E95120EB6BED00807EF4 /* params in Resources */ = {isa = PBXBuildFile; fileRef = FC12E94F20EB6BED00807EF4 /* params */; };
FC12E95220EB6BED00807EF4 /* model in Resources */ = {isa = PBXBuildFile; fileRef = FC12E95020EB6BED00807EF4 /* model */; };
FC12E95420EB6C0D00807EF4 /* apple.jpg in Resources */ = {isa = PBXBuildFile; fileRef = FC12E95320EB6C0D00807EF4 /* apple.jpg */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
FC086BC420E783AF00D85EF7 /* PaddleMobileDemo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = PaddleMobileDemo.app; sourceTree = BUILT_PRODUCTS_DIR; };
FC086BC720E783AF00D85EF7 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
FC086BC820E783AF00D85EF7 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
FC086BCA20E783AF00D85EF7 /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = "<group>"; };
FC086BCB20E783AF00D85EF7 /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = "<group>"; };
FC086BCE20E783AF00D85EF7 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
FC086BD020E783B100D85EF7 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
FC086BD320E783B100D85EF7 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
FC086BD520E783B100D85EF7 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
FC086BD620E783B100D85EF7 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
FC12E92E20EB6B2800807EF4 /* PaddleMobileDemo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = PaddleMobileDemo.app; sourceTree = BUILT_PRODUCTS_DIR; };
FC12E93120EB6B2800807EF4 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
FC12E93220EB6B2800807EF4 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
FC12E93420EB6B2800807EF4 /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = "<group>"; };
FC12E93520EB6B2800807EF4 /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = "<group>"; };
FC12E93820EB6B2800807EF4 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
FC12E93A20EB6B2900807EF4 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
FC12E93D20EB6B2900807EF4 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
FC12E93F20EB6B2900807EF4 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
FC12E94020EB6B2900807EF4 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
FC12E94820EB6B6800807EF4 /* libpaddle-mobile.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; path = "libpaddle-mobile.a"; sourceTree = "<group>"; };
FC12E94920EB6B6800807EF4 /* PaddleMobile.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PaddleMobile.h; sourceTree = "<group>"; };
FC12E94C20EB6BBB00807EF4 /* libstdc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libstdc++.tbd"; path = "usr/lib/libstdc++.tbd"; sourceTree = SDKROOT; };
FC12E94F20EB6BED00807EF4 /* params */ = {isa = PBXFileReference; lastKnownFileType = file; path = params; sourceTree = "<group>"; };
FC12E95020EB6BED00807EF4 /* model */ = {isa = PBXFileReference; lastKnownFileType = file; path = model; sourceTree = "<group>"; };
FC12E95320EB6C0D00807EF4 /* apple.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = apple.jpg; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
FC086BC120E783AF00D85EF7 /* Frameworks */ = {
FC12E92B20EB6B2800807EF4 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
FC12E94D20EB6BBB00807EF4 /* libstdc++.tbd in Frameworks */,
FC12E94A20EB6B6800807EF4 /* libpaddle-mobile.a in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
FC086BBB20E783AF00D85EF7 = {
FC12E92520EB6B2800807EF4 = {
isa = PBXGroup;
children = (
FC086BC620E783AF00D85EF7 /* PaddleMobileDemo */,
FC086BC520E783AF00D85EF7 /* Products */,
FC12E93020EB6B2800807EF4 /* PaddleMobileDemo */,
FC12E92F20EB6B2800807EF4 /* Products */,
FC12E94B20EB6BBB00807EF4 /* Frameworks */,
);
sourceTree = "<group>";
};
FC086BC520E783AF00D85EF7 /* Products */ = {
FC12E92F20EB6B2800807EF4 /* Products */ = {
isa = PBXGroup;
children = (
FC086BC420E783AF00D85EF7 /* PaddleMobileDemo.app */,
FC12E92E20EB6B2800807EF4 /* PaddleMobileDemo.app */,
);
name = Products;
sourceTree = "<group>";
};
FC086BC620E783AF00D85EF7 /* PaddleMobileDemo */ = {
FC12E93020EB6B2800807EF4 /* PaddleMobileDemo */ = {
isa = PBXGroup;
children = (
FC086BC720E783AF00D85EF7 /* AppDelegate.h */,
FC086BC820E783AF00D85EF7 /* AppDelegate.m */,
FC086BCA20E783AF00D85EF7 /* ViewController.h */,
FC086BCB20E783AF00D85EF7 /* ViewController.m */,
FC086BCD20E783AF00D85EF7 /* Main.storyboard */,
FC086BD020E783B100D85EF7 /* Assets.xcassets */,
FC086BD220E783B100D85EF7 /* LaunchScreen.storyboard */,
FC086BD520E783B100D85EF7 /* Info.plist */,
FC086BD620E783B100D85EF7 /* main.m */,
FC12E95320EB6C0D00807EF4 /* apple.jpg */,
FC12E94E20EB6BED00807EF4 /* googlenet_combine */,
FC12E94720EB6B6800807EF4 /* PaddleMobile */,
FC12E93120EB6B2800807EF4 /* AppDelegate.h */,
FC12E93220EB6B2800807EF4 /* AppDelegate.m */,
FC12E93420EB6B2800807EF4 /* ViewController.h */,
FC12E93520EB6B2800807EF4 /* ViewController.m */,
FC12E93720EB6B2800807EF4 /* Main.storyboard */,
FC12E93A20EB6B2900807EF4 /* Assets.xcassets */,
FC12E93C20EB6B2900807EF4 /* LaunchScreen.storyboard */,
FC12E93F20EB6B2900807EF4 /* Info.plist */,
FC12E94020EB6B2900807EF4 /* main.m */,
);
path = PaddleMobileDemo;
sourceTree = "<group>";
};
FC12E94720EB6B6800807EF4 /* PaddleMobile */ = {
isa = PBXGroup;
children = (
FC12E94820EB6B6800807EF4 /* libpaddle-mobile.a */,
FC12E94920EB6B6800807EF4 /* PaddleMobile.h */,
);
path = PaddleMobile;
sourceTree = "<group>";
};
FC12E94B20EB6BBB00807EF4 /* Frameworks */ = {
isa = PBXGroup;
children = (
FC12E94C20EB6BBB00807EF4 /* libstdc++.tbd */,
);
name = Frameworks;
sourceTree = "<group>";
};
FC12E94E20EB6BED00807EF4 /* googlenet_combine */ = {
isa = PBXGroup;
children = (
FC12E94F20EB6BED00807EF4 /* params */,
FC12E95020EB6BED00807EF4 /* model */,
);
path = googlenet_combine;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
FC086BC320E783AF00D85EF7 /* PaddleMobileDemo */ = {
FC12E92D20EB6B2800807EF4 /* PaddleMobileDemo */ = {
isa = PBXNativeTarget;
buildConfigurationList = FC086BDA20E783B100D85EF7 /* Build configuration list for PBXNativeTarget "PaddleMobileDemo" */;
buildConfigurationList = FC12E94420EB6B2900807EF4 /* Build configuration list for PBXNativeTarget "PaddleMobileDemo" */;
buildPhases = (
FC086BC020E783AF00D85EF7 /* Sources */,
FC086BC120E783AF00D85EF7 /* Frameworks */,
FC086BC220E783AF00D85EF7 /* Resources */,
FC12E92A20EB6B2800807EF4 /* Sources */,
FC12E92B20EB6B2800807EF4 /* Frameworks */,
FC12E92C20EB6B2800807EF4 /* Resources */,
);
buildRules = (
);
......@@ -88,24 +131,24 @@
);
name = PaddleMobileDemo;
productName = PaddleMobileDemo;
productReference = FC086BC420E783AF00D85EF7 /* PaddleMobileDemo.app */;
productReference = FC12E92E20EB6B2800807EF4 /* PaddleMobileDemo.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
FC086BBC20E783AF00D85EF7 /* Project object */ = {
FC12E92620EB6B2800807EF4 /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 0930;
LastUpgradeCheck = 0940;
ORGANIZATIONNAME = orange;
TargetAttributes = {
FC086BC320E783AF00D85EF7 = {
CreatedOnToolsVersion = 9.3.1;
FC12E92D20EB6B2800807EF4 = {
CreatedOnToolsVersion = 9.4.1;
};
};
};
buildConfigurationList = FC086BBF20E783AF00D85EF7 /* Build configuration list for PBXProject "PaddleMobileDemo" */;
buildConfigurationList = FC12E92920EB6B2800807EF4 /* Build configuration list for PBXProject "PaddleMobileDemo" */;
compatibilityVersion = "Xcode 9.3";
developmentRegion = en;
hasScannedForEncodings = 0;
......@@ -113,55 +156,58 @@
en,
Base,
);
mainGroup = FC086BBB20E783AF00D85EF7;
productRefGroup = FC086BC520E783AF00D85EF7 /* Products */;
mainGroup = FC12E92520EB6B2800807EF4;
productRefGroup = FC12E92F20EB6B2800807EF4 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
FC086BC320E783AF00D85EF7 /* PaddleMobileDemo */,
FC12E92D20EB6B2800807EF4 /* PaddleMobileDemo */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
FC086BC220E783AF00D85EF7 /* Resources */ = {
FC12E92C20EB6B2800807EF4 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
FC086BD420E783B100D85EF7 /* LaunchScreen.storyboard in Resources */,
FC086BD120E783B100D85EF7 /* Assets.xcassets in Resources */,
FC086BCF20E783AF00D85EF7 /* Main.storyboard in Resources */,
FC12E93E20EB6B2900807EF4 /* LaunchScreen.storyboard in Resources */,
FC12E95220EB6BED00807EF4 /* model in Resources */,
FC12E93B20EB6B2900807EF4 /* Assets.xcassets in Resources */,
FC12E95120EB6BED00807EF4 /* params in Resources */,
FC12E95420EB6C0D00807EF4 /* apple.jpg in Resources */,
FC12E93920EB6B2800807EF4 /* Main.storyboard in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
FC086BC020E783AF00D85EF7 /* Sources */ = {
FC12E92A20EB6B2800807EF4 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
FC086BCC20E783AF00D85EF7 /* ViewController.m in Sources */,
FC086BD720E783B100D85EF7 /* main.m in Sources */,
FC086BC920E783AF00D85EF7 /* AppDelegate.m in Sources */,
FC12E93620EB6B2800807EF4 /* ViewController.m in Sources */,
FC12E94120EB6B2900807EF4 /* main.m in Sources */,
FC12E93320EB6B2800807EF4 /* AppDelegate.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
FC086BCD20E783AF00D85EF7 /* Main.storyboard */ = {
FC12E93720EB6B2800807EF4 /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
FC086BCE20E783AF00D85EF7 /* Base */,
FC12E93820EB6B2800807EF4 /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
FC086BD220E783B100D85EF7 /* LaunchScreen.storyboard */ = {
FC12E93C20EB6B2900807EF4 /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
FC086BD320E783B100D85EF7 /* Base */,
FC12E93D20EB6B2900807EF4 /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
......@@ -169,7 +215,7 @@
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
FC086BD820E783B100D85EF7 /* Debug */ = {
FC12E94220EB6B2900807EF4 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
......@@ -220,14 +266,14 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 11.3;
IPHONEOS_DEPLOYMENT_TARGET = 11.4;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
};
name = Debug;
};
FC086BD920E783B100D85EF7 /* Release */ = {
FC12E94320EB6B2900807EF4 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
......@@ -272,41 +318,53 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 11.3;
IPHONEOS_DEPLOYMENT_TARGET = 11.4;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
VALIDATE_PRODUCT = YES;
};
name = Release;
};
FC086BDB20E783B100D85EF7 /* Debug */ = {
FC12E94520EB6B2900807EF4 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_STYLE = Automatic;
DEVELOPMENT_TEAM = Z5M2UUN5YV;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = PaddleMobileDemo/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/PaddleMobileDemo/PaddleMobile",
);
PRODUCT_BUNDLE_IDENTIFIER = orange.PaddleMobileDemo;
PRODUCT_NAME = "$(TARGET_NAME)";
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
FC086BDC20E783B100D85EF7 /* Release */ = {
FC12E94620EB6B2900807EF4 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_STYLE = Automatic;
DEVELOPMENT_TEAM = Z5M2UUN5YV;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = PaddleMobileDemo/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/PaddleMobileDemo/PaddleMobile",
);
PRODUCT_BUNDLE_IDENTIFIER = orange.PaddleMobileDemo;
PRODUCT_NAME = "$(TARGET_NAME)";
TARGETED_DEVICE_FAMILY = "1,2";
......@@ -316,25 +374,25 @@
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
FC086BBF20E783AF00D85EF7 /* Build configuration list for PBXProject "PaddleMobileDemo" */ = {
FC12E92920EB6B2800807EF4 /* Build configuration list for PBXProject "PaddleMobileDemo" */ = {
isa = XCConfigurationList;
buildConfigurations = (
FC086BD820E783B100D85EF7 /* Debug */,
FC086BD920E783B100D85EF7 /* Release */,
FC12E94220EB6B2900807EF4 /* Debug */,
FC12E94320EB6B2900807EF4 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
FC086BDA20E783B100D85EF7 /* Build configuration list for PBXNativeTarget "PaddleMobileDemo" */ = {
FC12E94420EB6B2900807EF4 /* Build configuration list for PBXNativeTarget "PaddleMobileDemo" */ = {
isa = XCConfigurationList;
buildConfigurations = (
FC086BDB20E783B100D85EF7 /* Debug */,
FC086BDC20E783B100D85EF7 /* Release */,
FC12E94520EB6B2900807EF4 /* Debug */,
FC12E94620EB6B2900807EF4 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = FC086BBC20E783AF00D85EF7 /* Project object */;
rootObject = FC12E92620EB6B2800807EF4 /* Project object */;
}
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#import <UIKit/UIKit.h>
@interface AppDelegate : UIResponder <UIApplicationDelegate>
@property (strong, nonatomic) UIWindow *window;
@end
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#import "AppDelegate.h"
......
......@@ -17,10 +17,10 @@
@interface PaddleMobile : NSObject
+ (instancetype)sharedInstance;
- (instancetype)init;
- (BOOL)load:(NSString *)modelPath andWeightsPath:(NSString *)weighsPath;
- (NSArray *)predict:(CGImageRef)image means:(NSArray<NSNumber *> *)means scale:(float)scale;
- (NSArray *)predict:(CGImageRef)image;
- (NSArray *)predict:(CGImageRef)image dim:(NSArray<NSNumber *> *)dim means:(NSArray<NSNumber *> *)means scale:(float)scale;
- (NSArray *)predict:(CGImageRef)image dim:(NSArray<NSNumber *> *)dim;
- (void)clear;
@end
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#import <UIKit/UIKit.h>
@interface ViewController : UIViewController
@end
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#import "PaddleMobile.h"
#import "ViewController.h"
@interface ViewController ()
@end
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
PaddleMobile *pam = [[PaddleMobile alloc] init];
NSString *modelPath = [[NSBundle mainBundle] pathForResource:@"model" ofType:nil];
NSString *paramPath = [[NSBundle mainBundle] pathForResource:@"params" ofType:nil];
if (modelPath.length == 0 || paramPath.length == 0) {
NSLog(@" need model and param");
return;
}
if ([pam load:modelPath andWeightsPath:paramPath]) {
NSLog(@"load success");
UIImage *inputImage = [UIImage imageNamed:@"apple.jpg"];
if (!inputImage) {
NSLog(@" input image is nil");
return;
}
NSDate *beforeDate = [NSDate date];
NSArray *res = [pam predict:inputImage.CGImage dim:@[@1, @3, @224, @224] means:@[@148, @148, @148] scale:1.0];
NSLog(@"res: %@", res);
NSLog(@"elapsed time: %f", [[NSDate date] timeIntervalSinceDate:beforeDate]);
}
}
@end
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#import <UIKit/UIKit.h>
#import "AppDelegate.h"
......
......@@ -3,7 +3,6 @@
#### The following is the execution flow chart of the paddle-mobile code:
![execution flow chart](http://otkwwi4x8.bkt.clouddn.com/2018-07-02-15305189473720.png)
......@@ -15,7 +14,6 @@
Let's look at the model first. Models come in two layouts:
In the first layout the parameter files are separate, as shown below; the red box marks the protobuf file describing the model structure, and the remaining files are parameter files.
![model description](http://otkwwi4x8.bkt.clouddn.com/2018-07-02-15305190629577.png)
......@@ -23,6 +21,7 @@
![model description combined](http://otkwwi4x8.bkt.clouddn.com/2018-07-02-15305191057130.png)
The loader module loads the model structure information into memory: it loads the protobuf file in the red box and optimizes the model structure, for example by fusing several fine-grained ops into a coarse-grained op (such as fusing conv, add, batchnorm and relu into conv\_add\_batchnorm\_relu), which makes the subsequent algorithm optimizations easier.
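As a rough illustration of this fusion idea only (a toy sketch, not the actual paddle-mobile pass, which matches sub-graphs through `FusionOpMatcher` / `FusionOpRegister` on the graph representation), the snippet below replaces the fine-grained op sequence in a flat op list with one coarse-grained op:

```c++
#include <iostream>
#include <string>
#include <vector>

// Replace every occurrence of `pattern` in a flat op list with `fused_name`.
std::vector<std::string> FusePattern(const std::vector<std::string> &ops,
                                     const std::vector<std::string> &pattern,
                                     const std::string &fused_name) {
  std::vector<std::string> result;
  for (size_t i = 0; i < ops.size();) {
    bool match = i + pattern.size() <= ops.size();
    for (size_t j = 0; match && j < pattern.size(); ++j) {
      match = (ops[i + j] == pattern[j]);
    }
    if (match) {
      result.push_back(fused_name);  // one coarse-grained op instead of four
      i += pattern.size();
    } else {
      result.push_back(ops[i]);
      ++i;
    }
  }
  return result;
}

int main() {
  std::vector<std::string> ops = {"feed", "conv", "add", "batchnorm",
                                  "relu", "pool", "fetch"};
  ops = FusePattern(ops, {"conv", "add", "batchnorm", "relu"},
                    "conv_add_batchnorm_relu");
  for (const auto &op : ops) std::cout << op << " ";
  std::cout << std::endl;  // feed conv_add_batchnorm_relu pool fetch
}
```

The real pass works on the graph rather than on a flat list and also rewires the inputs and outputs of the fused node, but the replacement idea is the same.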
......
### iOS & Android Development Documentation
# iOS Development Documentation
## Building
### 1. Building with build.sh
```sh
sh build.sh ios
# To build only the ops of one specific model, run the following command instead
......@@ -15,39 +17,24 @@ cd ../build/release/ios/build
```
### 2. Building with Xcode
We also provide the Xcode build environment that iOS developers are more familiar with:
open PaddleMobile.xcworkspace in the ios/ directory to build PaddleMobile or run the Demo.
### 3. Integration
#### Using the C++ interface
Drag the build products
```
libpaddle-mobile.a
io.h
program.h
types.h
lod_tensor.h
tensor.h
```
into your project. io.h is the interface header; the interface comments can be viewed on [github](https://github.com/PaddlePaddle/paddle-mobile/blob/develop/src/io/io.h).
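A minimal usage sketch of the C++ interface is shown below. It assumes the `PaddleMobile` facade from `io/paddle_mobile.h` introduced in this change, with the `Load`, `Predict` and `Clear` members that appear elsewhere in this diff; the include path, the exact headers you drag in, and the model/params file names are placeholders to adapt to your project:

```c++
#include <vector>

#include "io/paddle_mobile.h"  // adjust to wherever the headers were copied

int main() {
  paddle_mobile::PaddleMobile<paddle_mobile::CPU, paddle_mobile::Precision::FP32> pm;

  // Combined model layout: one structure file plus one parameter file
  // (same call as the iOS wrapper in this change).
  if (!pm.Load("model", "params", false)) {
    return -1;
  }

  // A 1 x 3 x 224 x 224 input filled with zeros, just to show the call shape.
  std::vector<int64_t> dims = {1, 3, 224, 224};
  std::vector<float> input(1 * 3 * 224 * 224, 0.f);

  std::vector<float> output = pm.Predict(input, dims);

  pm.Clear();
  return output.empty() ? -1 : 0;
}
```

For the separate-parameter layout there is also a single-path `Load(model_dir, optimize)` style overload, as used by the Android JNI wrapper later in this document; check io.h / io/paddle_mobile.h for the signatures your revision actually exposes.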
#### Using the Objective-C interface
Drag the Xcode build products
```
libPaddleMobile.a
PaddleMobile.h (under /src/ios_io/)
```
into your project. The interface is as follows:
```
/*
    Create an instance
*/
- (instancetype)init;

/*
    Load the model and allocate memory
......@@ -57,12 +44,12 @@ PaddleMobile.h
/*
    Run prediction. means and scale are the preprocessing parameters used when the model was trained; if no such preprocessing was done at training time, call predict without them.
*/
- (NSArray *)predict:(CGImageRef)image dim:(NSArray<NSNumber *> *)dim means:(NSArray<NSNumber *> *)means scale:(float)scale;

/*
    Run prediction
*/
- (NSArray *)predict:(CGImageRef)image dim:(NSArray<NSNumber *> *)dim;
/*
    Clean up memory
......@@ -72,14 +59,155 @@ PaddleMobile.h
```
# Android Development Documentation
Users can cross-compile the paddle-mobile library for the Android platform in either of the following two ways:
- Docker-based build
- Linux cross compilation
## Docker-Based Build
### 1. Install Docker
To install Docker, refer to the official documentation: [https://docs.docker.com/install/](https://docs.docker.com/install/)
### 2. Set up the build environment with Docker
First change into the paddle-mobile directory and run `docker build`.
Using Linux/Mac as an example (on Windows it is recommended to run this in the 'Docker Quickstart Terminal'):
```
$ docker build -t paddle-mobile:dev - < Dockerfile
```
Run `docker images` to see the newly created image:
```
$ docker images
REPOSITORY TAG IMAGE ID CREATED SIZE
paddle-mobile dev 33b146787711 45 hours ago 372MB
```
### 3. Build with Docker
Change into the paddle-mobile directory and run `docker run`:
```
$ docker run -it --mount type=bind,source=$PWD,target=/paddle-mobile paddle-mobile:dev
root@5affd29d4fc5:/ # cd /paddle-mobile
# Generate the Makefile for the Android build
root@5affd29d4fc5:/ # rm CMakeCache.txt
root@5affd29d4fc5:/ # cmake -DCMAKE_TOOLCHAIN_FILE=tools/toolchains/arm-android-neon.cmake
# Generate the Makefile for the Linux build
root@5affd29d4fc5:/ # rm CMakeCache.txt
root@5affd29d4fc5:/ # cmake -DCMAKE_TOOLCHAIN_FILE=tools/toolchains/arm-linux-gnueabi.cmake
```
### 4. Configure build options
Build options can be configured with ccmake:
```
root@5affd29d4fc5:/ # ccmake .
Page 1 of 1
CMAKE_ASM_FLAGS
CMAKE_ASM_FLAGS_DEBUG
CMAKE_ASM_FLAGS_RELEASE
CMAKE_BUILD_TYPE
CMAKE_INSTALL_PREFIX /usr/local
CMAKE_TOOLCHAIN_FILE /paddle-mobile/tools/toolchains/arm-android-neon.cmake
CPU ON
DEBUGING ON
FPGA OFF
LOG_PROFILE ON
MALI_GPU OFF
NET googlenet
USE_EXCEPTION ON
USE_OPENMP OFF
```
After changing the options, press `c` and then `g` to regenerate the Makefile.
### 5. Build
Build with the make command:
```
root@5affd29d4fc5:/ # make
```
### 6. Inspect the build output
The build output can be inspected on the host machine under the build and test/build directories of paddle-mobile; use adb or scp to push it to the device and run it there.
## Linux Cross Compilation
### Preparing the cross-compilation environment
##### Download the Android NDK
To cross-compile paddle-mobile from source, the cross-compilation environment has to be prepared in advance. The C/C++ cross-compilation toolchain used for the Android platform is the [Android NDK](https://developer.android.com/ndk/); you can download it yourself or fetch it with the following commands:
```
wget https://dl.google.com/android/repository/android-ndk-r17b-darwin-x86_64.zip
unzip android-ndk-r17b-darwin-x86_64.zip
```
##### Set the environment variable
The standalone toolchain shipped with the project locates the NDK through the NDK_ROOT environment variable, so it must be configured:
```
export NDK_ROOT="path to ndk"
```
### Run the build
In the paddle-mobile root directory, run the following commands:
```
cd tools
sh build.sh android
```
After it finishes, the generated .so is located in the build directory and the unit-test executables are located in the test/build directory.
##### Tips:
To get a smaller library, you can build a library that only supports a specific model structure.
For example, running:
```
sh build.sh android googlenet
```
produces a smaller library that only supports googlenet.
## Testing
After the build finishes, we provide automated test scripts that push the models and library files needed to run the unit tests onto an Android device. Run the following commands:
```
cd tools/android-debug-script
sh run_on_android.sh (npm)   # the optional argument npm selects whether to also push the model files to the phone
```
When the following prompt appears:
```
**** choose OP or NET to test ****
which to test :
```
enter a name to run the corresponding test file.
## Deployment
Android applications call the underlying C/C++ code through JNI. The JNI interfaces exposed by paddle-mobile are as follows:
##### 1. load: load the model and parameters
```
/*
 *@param modelPath path of the model file
 *@return jboolean
 */
JNIEXPORT jboolean JNICALL Java_com_baidu_paddle_PML_load(JNIEnv *env,
jclass thiz,
jstring modelPath);
```
##### 2. predict: run prediction
```
/**
 *@param buf input data
 *@return output data
 */
JNIEXPORT jfloatArray JNICALL Java_com_baidu_paddle_PML_predict(
    JNIEnv *env, jclass thiz, jfloatArray buf);
```
##### 3. clear: destroy the instance and clean up memory
```
JNIEXPORT void JNICALL Java_com_baidu_paddle_PML_clear(JNIEnv *env,
jclass thiz);
```
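For reference, the sketch below shows how a JNI entry point like `predict` typically wraps the C++ `PaddleMobile` object. It is a simplified illustration, not the actual `src/jni/paddle_mobile_jni.cpp` (which, among other things, derives the input shape from the model); the fixed 1 x 3 x 224 x 224 shape is only an assumption made for brevity. On the Java side these entry points correspond to `native` methods declared on the `com.baidu.paddle.PML` class.

```c++
#include <jni.h>

#include <vector>

#include "io/paddle_mobile.h"

// One process-wide engine instance, mirroring the singleton used by the real JNI code.
static paddle_mobile::PaddleMobile<paddle_mobile::CPU, paddle_mobile::Precision::FP32> pm;

extern "C" JNIEXPORT jfloatArray JNICALL
Java_com_baidu_paddle_PML_predict(JNIEnv *env, jclass thiz, jfloatArray buf) {
  // Copy the Java float[] into a std::vector<float>.
  jsize count = env->GetArrayLength(buf);
  jfloat *data = env->GetFloatArrayElements(buf, NULL);
  std::vector<float> input(data, data + count);
  env->ReleaseFloatArrayElements(buf, data, 0);

  // Run the network. The dims vector must match what the model expects;
  // the fixed shape here is an assumption for this sketch.
  std::vector<int64_t> dims = {1, 3, 224, 224};
  std::vector<float> output = pm.Predict(input, dims);

  // Copy the result back into a Java float[].
  jfloatArray result = env->NewFloatArray(static_cast<jsize>(output.size()));
  env->SetFloatArrayRegion(result, 0, static_cast<jsize>(output.size()), output.data());
  return result;
}
```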
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:PaddleMobileDemo/PaddleMobileDemo.xcodeproj">
</FileRef>
<FileRef
location = "group:PaddleMobile/PaddleMobile.xcodeproj">
</FileRef>
</Workspace>
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
type = "0"
version = "2.0">
</Bucket>
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:PaddleMobile.xcodeproj">
</FileRef>
</Workspace>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>PaddleMobile.xcscheme</key>
<dict>
<key>orderHint</key>
<integer>1</integer>
</dict>
</dict>
</dict>
</plist>
//
// MacroDefine.h
// PaddleMobile
//
// Created by liuRuiLong on 2018/6/30.
//  Copyright © 2018 orange. All rights reserved.
//
#ifndef MacroDefine_h
#define MacroDefine_h
#endif /* MacroDefine_h */
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#import <UIKit/UIKit.h>
@interface AppDelegate : UIResponder <UIApplicationDelegate>
@property (strong, nonatomic) UIWindow *window;
@end
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#import <UIKit/UIKit.h>
@interface ViewController : UIViewController
@end
......@@ -34,6 +34,10 @@ class FusionOpRegister {
}
void regist(FusionOpMatcher* matcher) {
if (matchers_.find(matcher->Type()) != matchers_.end()) {
return;
}
std::shared_ptr<FusionOpMatcher> shared_matcher(matcher);
matchers_[matcher->Type()] = shared_matcher;
}
......
......@@ -23,7 +23,17 @@ namespace framework {
class Scope {
public:
Scope() = default;
~Scope() = default;
~Scope() {
for (auto &var : vars_) {
delete var.second;
}
vars_.clear();
for (auto kid : kids_) {
delete kid;
}
kids_.clear();
}
Scope &NewScope() const;
......
......@@ -348,16 +348,19 @@ std::shared_ptr<framework::Tensor> Executor<Dtype, P>::Predict(
fprintf(df, "}\n");
fclose(df);
#endif
FILE *pf = fopen("profile.out", "w");
// FILE *pf = fopen("profile.out", "w");
std::unordered_map<std::string, uint64_t> _tp;
for (int i = 0; i < profile.size(); i++) {
const auto &pInfo = profile[i];
uint64_t timeCost = pInfo.runEnd - pInfo.runBegin;
_tp[ops[i]->Type()] += timeCost;
fprintf(pf, "%d\t%s\t%d\t%llu\t%llu\t%llu\n", i, ops[i]->Type().c_str(),
pInfo.tid, pInfo.runBegin, pInfo.runEnd, timeCost);
// fprintf(pf, "%d\t%s\t%d\t%llu\t%llu\t%llu\n", i,
// ops[i]->Type().c_str(),
// pInfo.tid, pInfo.runBegin, pInfo.runEnd, timeCost);
}
fclose(pf);
// fclose(pf);
printf("====================[ profile ]======================\n");
using prof_t = std::pair<std::string, uint64_t>;
std::vector<prof_t> _tv(_tp.begin(), _tp.end());
......
......@@ -21,7 +21,6 @@ namespace paddle_mobile {
using framework::Variable;
static size_t ReadBuffer(const char *file_name, uint8_t **out) {
printf("%s \n", file_name);
FILE *fp;
fp = fopen(file_name, "rb");
PADDLE_MOBILE_ENFORCE(fp != NULL, " %s open failed !", file_name);
......
......@@ -12,10 +12,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
//
// Created by liuRuiLong on 2018/7/2.
//
#include "io/paddle_mobile.h"
namespace paddle_mobile {
......@@ -78,6 +74,12 @@ void PaddleMobile<Dtype, P>::Clear() {
loader_ = nullptr;
}
template <typename Dtype, Precision P>
PaddleMobile<Dtype, P>::~PaddleMobile() {
executor_ = nullptr;
loader_ = nullptr;
}
template class PaddleMobile<CPU, Precision::FP32>;
template class PaddleMobile<FPGA, Precision::FP32>;
template class PaddleMobile<GPU_MALI, Precision::FP32>;
......
......@@ -60,6 +60,8 @@ class PaddleMobile {
void Clear();
~PaddleMobile();
private:
std::shared_ptr<Loader<Dtype, P>> loader_;
std::shared_ptr<Executor<Dtype, P>> executor_;
......
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#import "PaddleMobile.h"
@implementation PaddleMobile
+ (instancetype)sharedInstance{
//TODO: imp
exit(0);
}
#pragma once
- (BOOL)load:(NSString *)modelPath andWeightsPath:(NSString *)weighsPath{
//TODO: imp
exit(0);
}
#import <CoreImage/CoreImage.h>
#import <Foundation/Foundation.h>
- (NSArray *)predict:(CGImageRef)image means:(NSArray<NSNumber *> *)means scale:(float)scale{
//TODO: imp
exit(0);
}
@interface PaddleMobile : NSObject
- (NSArray *)predict:(CGImageRef)image{
//TODO: imp
exit(0);
}
- (instancetype)init;
- (BOOL)load:(NSString *)modelPath andWeightsPath:(NSString *)weighsPath;
- (NSArray *)predict:(CGImageRef)image dim:(NSArray<NSNumber *> *)dim means:(NSArray<NSNumber *> *)means scale:(float)scale;
- (NSArray *)predict:(CGImageRef)image dim:(NSArray<NSNumber *> *)dim;
- (void)clear;
- (void)clear{
//TODO: imp
exit(0);
}
@end
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#import "PaddleMobile.h"
#import "op_symbols.h"
#import "io/paddle_mobile.h"
#import <memory>
#import <mutex>
#import <vector>
@interface PaddleMobile()
{
paddle_mobile::PaddleMobile<paddle_mobile::CPU, paddle_mobile::Precision::FP32> *pam_;
BOOL loaded_;
}
@end
@implementation PaddleMobile
static std::mutex shared_mutex;
- (instancetype)init {
if (self = [super init]) {
pam_ = new paddle_mobile::PaddleMobile<paddle_mobile::CPU, paddle_mobile::Precision::FP32>();
}
return self;
}
- (void)dealloc {
if (pam_) {
delete pam_;
}
}
+ (instancetype)sharedInstance{
static dispatch_once_t onceToken;
static id sharedManager = nil;
dispatch_once(&onceToken, ^{
sharedManager = [[[self class] alloc] init];
});
return sharedManager;
}
- (BOOL)load:(NSString *)modelPath andWeightsPath:(NSString *)weighsPath{
std::string model_path_str = std::string([modelPath UTF8String]);
std::string weights_path_str = std::string([weighsPath UTF8String]);
if (loaded_ = pam_->Load(model_path_str, weights_path_str, false)) {
return YES;
} else {
return NO;
}
}
-(void)preprocess:(const UInt8 *)input output:(float *)output imageWidth:(int)imageWidth imageHeight:(int)imageHeight imageChannels:(int)imageChannels means:(NSArray<NSNumber *> *)means scale:(float)scale dim:(std::vector<int64_t>)dim{
if (means == nil) {
means = @[@0, @0, @0];
}
int wanted_input_width = dim[3];
int wanted_input_height = dim[2];
int wanted_input_channels = dim[1];
for (int c = 0; c < wanted_input_channels; ++c) {
float *out_channel = output + c * wanted_input_height * wanted_input_width;
for (int y = 0; y < wanted_input_height; ++y) {
float *out_row = out_channel + y * wanted_input_width;
for (int x = 0; x < wanted_input_width; ++x) {
int in_row = (y * imageHeight) / wanted_input_height;
int in_col = (x * imageWidth) / wanted_input_width;
const UInt8 *in_pixel = input + (in_row * imageWidth * imageChannels) + (in_col * imageChannels);
float *out_pos = out_row + x;
        // Each channel is handled the same way: subtract the per-channel mean, then scale.
        if (c == 0 || c == 1 || c == 2) {
          *out_pos = (in_pixel[c] - means[c].floatValue) * scale;
        }
}
}
}
}
- (NSArray *)predict:(CGImageRef)image dim:(NSArray<NSNumber *> *)dim means:(NSArray<NSNumber *> *)means scale:(float)scale{
std::lock_guard<std::mutex> lock(shared_mutex);
if (!loaded_) {
printf("PaddleMobile doesn't be loaded yet");
return nil;
}
if (dim.count != 4) {
printf("dim must have 4 elements");
return nil;
}
// dim to c++ vector, get numel
std::vector<int64_t > dim_vec;
int numel = 1;
for (int k = 0; k < dim.count; ++k) {
int d = dim[k].intValue;
numel *= d;
dim_vec.push_back(d);
}
const int sourceRowBytes = CGImageGetBytesPerRow(image);
const int image_width = CGImageGetWidth(image);
const int image_height = CGImageGetHeight(image);
const int image_channels = 4;
CGDataProviderRef provider = CGImageGetDataProvider(image);
CFDataRef cfData = CGDataProviderCopyData(provider);
const UInt8 *input = CFDataGetBytePtr(cfData);
// sample image
float *output = (float *)malloc(numel*sizeof(float));
[self preprocess:input output:output imageWidth:image_width imageHeight:image_height imageChannels:image_channels means:means scale:scale dim:dim_vec];
float *dataPointer = nullptr;
if (nullptr != output) {
dataPointer = output;
} else {
return nil;
}
// input
std::vector<float> predict_input;
for (int j = 0; j < numel; ++j) {
predict_input.push_back(dataPointer[j]);
}
// predict
std::vector<float> cpp_result = pam_->Predict(predict_input, dim_vec);
// result
long count = 0;
count = cpp_result.size();
NSMutableArray *result = [[NSMutableArray alloc] init];
for (int i = 0; i < count; i++) {
[result addObject:[NSNumber numberWithFloat:cpp_result[i]]];
}
free(output);
  // To be verified
// if ([UIDevice currentDevice].systemVersion.doubleValue < 11.0) {
CFRelease(cfData);
cfData = NULL;
// }
return result;
}
- (NSArray *)predict:(CGImageRef)image dim:(NSArray<NSNumber *> *)dim {
  return [self predict:image dim:dim means:nil scale:1];
}
- (void)clear{
pam_->Clear();
}
@end
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#import "ViewController.h"
@interface ViewController ()
@end
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
}
@end
#pragma once
#include "operators/batchnorm_op.h"
#include "operators/box_coder_op.h"
#include "operators/concat_op.h"
#include "operators/conv_op.h"
#include "operators/depthwise_conv_op.h"
#include "operators/dropout_op.h"
#include "operators/elementwise_add_op.h"
#include "operators/feed_op.h"
#include "operators/fetch_op.h"
#include "operators/fusion_conv_add.h"
#include "operators/fusion_conv_add_bn_relu_op.h"
#include "operators/fusion_fc_op.h"
#include "operators/im2sequence_op.h"
#include "operators/lrn_op.h"
#include "operators/mul_op.h"
#include "operators/multiclass_nms_op.h"
#include "operators/pool_op.h"
#include "operators/prior_box_op.h"
#include "operators/relu_op.h"
#include "operators/reshape_op.h"
#include "operators/sigmoid_op.h"
#include "operators/softmax_op.h"
#include "operators/transpose_op.h"
......@@ -54,13 +54,14 @@ string jstring2cppstring(JNIEnv *env, jstring jstr) {
JNIEXPORT jboolean JNICALL Java_com_baidu_paddle_PML_load(JNIEnv *env,
jclass thiz,
jstring modelPath) {
ANDROIDLOGI("load invoked");
bool optimize = true;
return getPaddleMobileInstance()->Load(jstring2cppstring(env, modelPath),
optimize);
}
JNIEXPORT jfloatArray JNICALL Java_com_baidu_paddle_PML_predictImage(
JNIEnv *env, jclass thiz, jfloatArray buf) {
JNIEXPORT jfloatArray JNICALL
Java_com_baidu_paddle_PML_predict(JNIEnv *env, jclass thiz, jfloatArray buf) {
jfloatArray result = NULL;
int count = 0;
float *dataPointer = nullptr;
......@@ -78,6 +79,7 @@ JNIEXPORT jfloatArray JNICALL Java_com_baidu_paddle_PML_predictImage(
count = output->numel();
result = env->NewFloatArray(count);
env->SetFloatArrayRegion(result, 0, count, output->data<float>());
ANDROIDLOGI("predict finished");
return result;
}
......
......@@ -31,8 +31,8 @@ JNIEXPORT jboolean JNICALL Java_com_baidu_paddle_PML_load(JNIEnv *env,
/**
 * object detection for android
*/
JNIEXPORT jfloatArray JNICALL Java_com_baidu_paddle_PML_predictImage(
JNIEnv *env, jclass thiz, jfloatArray buf);
JNIEXPORT jfloatArray JNICALL
Java_com_baidu_paddle_PML_predict(JNIEnv *env, jclass thiz, jfloatArray buf);
/**
* clear data of the net when destroy for android
......
......@@ -32,11 +32,9 @@ template class BatchNormOp<CPU, float>;
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(batch_norm);
REGISTER_OPERATOR_CPU(batch_norm, ops::BatchNormOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(batch_norm);
REGISTER_OPERATOR_MALI_GPU(batch_norm, ops::BatchNormOp);
#endif
#ifdef PADDLE_MOBILE_FPGA
......
......@@ -45,4 +45,13 @@ class BatchNormOp
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(batch_norm);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(batch_norm);
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
#endif
......@@ -53,7 +53,6 @@ template class BoxCoderOp<CPU, float>;
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(box_coder);
REGISTER_OPERATOR_CPU(box_coder, ops::BoxCoderOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
......
......@@ -51,4 +51,12 @@ class BoxCoderOp
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(box_coder);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
#endif
......@@ -63,11 +63,9 @@ template class ConcatOp<CPU, float>;
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(concat);
REGISTER_OPERATOR_CPU(concat, ops::ConcatOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(concat);
REGISTER_OPERATOR_MALI_GPU(concat, ops::ConcatOp);
#endif
#ifdef PADDLE_MOBILE_FPGA
......
......@@ -46,4 +46,13 @@ class ConcatOp
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(concat);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(concat);
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
#endif
......@@ -55,15 +55,12 @@ template class ConvOp<CPU, float>;
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(conv2d);
REGISTER_OPERATOR_CPU(conv2d, ops::ConvOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(conv2d);
REGISTER_OPERATOR_MALI_GPU(conv2d, ops::ConvOp);
#endif
#ifdef PADDLE_MOBILE_FPGA
USE_OP_FPGA(conv2d);
REGISTER_OPERATOR_FPGA(conv2d, ops::ConvOp);
#endif
......
......@@ -46,4 +46,14 @@ class ConvOp
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(conv2d);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(conv2d);
#endif
#ifdef PADDLE_MOBILE_FPGA
USE_OP_FPGA(conv2d);
#endif
#endif
......@@ -56,7 +56,6 @@ template class DepthwiseConvOp<CPU, float>;
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(depthwise_conv2d);
REGISTER_OPERATOR_CPU(depthwise_conv2d, ops::DepthwiseConvOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
......
......@@ -48,4 +48,12 @@ class DepthwiseConvOp : public framework::OperatorWithKernel<
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(depthwise_conv2d);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
#endif
......@@ -28,7 +28,6 @@ template class DropoutOp<CPU, float>;
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(dropout);
REGISTER_OPERATOR_CPU(dropout, ops::DropoutOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
......
......@@ -50,4 +50,12 @@ class DropoutOp
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(dropout);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
#endif
......@@ -30,11 +30,9 @@ template class ElementwiseAddOp<CPU, float>;
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(elementwise_add);
REGISTER_OPERATOR_CPU(elementwise_add, ops::ElementwiseAddOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(elementwise_add);
REGISTER_OPERATOR_MALI_GPU(elementwise_add, ops::ElementwiseAddOp);
#endif
#ifdef PADDLE_MOBILE_FPGA
......
......@@ -48,4 +48,13 @@ class ElementwiseAddOp : public framework::OperatorWithKernel<
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(elementwise_add);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(elementwise_add);
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
#endif
......@@ -19,3 +19,14 @@ namespace operators {
template class FeedOp<CPU, float>;
}
} // namespace paddle_mobile
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
REGISTER_OPERATOR_CPU(feed, ops::FeedOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
REGISTER_OPERATOR_MALI_GPU(feed, ops::FeedOp);
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
......@@ -44,17 +44,14 @@ class FeedOp : public framework::OperatorBase<DeviceType> {
FeedParam param_;
};
namespace ops = paddle_mobile::operators;
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(feed);
REGISTER_OPERATOR_CPU(feed, ops::FeedOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(feed);
REGISTER_OPERATOR_MALI_GPU(feed, ops::FeedOp);
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
} // namespace operators
} // namespace paddle_mobile
......@@ -19,3 +19,13 @@ namespace operators {
template class FetchOp<CPU, float>;
}
} // namespace paddle_mobile
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
REGISTER_OPERATOR_CPU(fetch, ops::FetchOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
REGISTER_OPERATOR_MALI_GPU(fetch, ops::FetchOp);
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
......@@ -44,17 +44,14 @@ class FetchOp : public framework::OperatorBase<DeviceType> {
FetchParam param_;
};
namespace ops = paddle_mobile::operators;
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(fetch);
REGISTER_OPERATOR_CPU(fetch, ops::FetchOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(fetch);
REGISTER_OPERATOR_MALI_GPU(fetch, ops::FetchOp);
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
} // namespace operators
} // namespace paddle_mobile
......@@ -44,18 +44,39 @@ void FusionConvAddOp<Dtype, T>::InferShape() const {
framework::DDim ddim = framework::make_ddim(output_shape);
this->param_.Output()->Resize(ddim);
}
#ifdef PADDLE_MOBILE_CPU
#ifndef CONV_ADD_REGISTER
framework::FusionOpRegistrar convadd_registrar(new FusionConvAddMatcher());
#define CONV_ADD_REGISTER
#endif
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
#ifndef CONV_ADD_REGISTER
static framework::FusionOpRegistrar convadd_registrar(
new FusionConvAddMatcher());
#define CONV_ADD_REGISTER
#endif
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
template class FusionConvAddOp<CPU, float>;
} // namespace operators
} // namespace paddle_mobile
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(fusion_conv_add);
REGISTER_OPERATOR_CPU(fusion_conv_add, ops::FusionConvAddOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(conv_add);
REGISTER_OPERATOR_MALI_GPU(conv_add, ops::FusionConvAddOp);
REGISTER_OPERATOR_MALI_GPU(fusion_conv_add, ops::FusionConvAddOp);
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
......
......@@ -69,7 +69,7 @@ class FusionConvAddOp : public framework::OperatorWithKernel<
#ifdef PADDLE_MOBILE_CPU
#ifndef CONV_ADD_REGISTER
static framework::FusionOpRegistrar convadd_registrar(
extern framework::FusionOpRegistrar convadd_registrar(
new FusionConvAddMatcher());
#define CONV_ADD_REGISTER
#endif
......@@ -92,4 +92,13 @@ static framework::FusionOpRegistrar convadd_registrar(
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(fusion_conv_add);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(fusion_conv_add);
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
#endif
......@@ -50,7 +50,6 @@ template class FusionConvAddBNReluOp<CPU, float>;
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(fusion_conv_add_bn_relu);
REGISTER_OPERATOR_CPU(fusion_conv_add_bn_relu, ops::FusionConvAddBNReluOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
......
......@@ -103,4 +103,12 @@ static framework::FusionOpRegistrar fusion_conv_add_bn_relu_registrar(
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(fusion_conv_add_bn_relu);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
#endif
......@@ -49,7 +49,6 @@ void FusionConvAddReluOp<Dtype, T>::InferShape() const {
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(fusion_conv_add_relu);
REGISTER_OPERATOR_CPU(fusion_conv_add_relu, ops::FusionConvAddReluOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
......
......@@ -80,4 +80,12 @@ class FusionConvAddReluOp : public framework::OperatorWithKernel<
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(fusion_conv_add_relu);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
#endif
......@@ -49,18 +49,38 @@ void FusionFcOp<Dtype, T>::InferShape() const {
framework::DDim ddim = framework::make_ddim(output_dims);
this->param_.Out()->Resize(ddim);
}
#ifdef PADDLE_MOBILE_CPU
#ifndef CONV_CPU_REGISTER
#define CONV_CPU_REGISTER
framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());
#endif
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
#ifndef CONV_CPU_REGISTER
#define CONV_CPU_REGISTER
framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());
#endif
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
template class FusionFcOp<CPU, float>;
} // namespace operators
} // namespace paddle_mobile
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(fusion_fc);
REGISTER_OPERATOR_CPU(fusion_fc, ops::FusionFcOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(fc);
REGISTER_OPERATOR_MALI_GPU(fc, ops::FusionFcOp);
REGISTER_OPERATOR_MALI_GPU(fusion_fc, ops::FusionFcOp);
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
......
......@@ -69,7 +69,7 @@ class FusionFcOp
#ifndef CONV_CPU_REGISTER
#define CONV_CPU_REGISTER
static framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());
extern framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());
#endif
#endif
......@@ -78,7 +78,7 @@ static framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());
#ifndef CONV_CPU_REGISTER
#define CONV_CPU_REGISTER
static framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());
extern framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());
#endif
#endif
......@@ -89,4 +89,13 @@ static framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(fusion_fc);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
USE_OP_MALI_GPU(fusion_fc);
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
#endif
......@@ -54,7 +54,6 @@ template class Im2SequenceOp<CPU, float>;
namespace ops = paddle_mobile::operators;
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(im2sequence);
REGISTER_OPERATOR_CPU(im2sequence, ops::Im2SequenceOp);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
......
......@@ -50,4 +50,12 @@ class Im2SequenceOp : public framework::OperatorWithKernel<
} // namespace operators
} // namespace paddle_mobile
#ifdef PADDLE_MOBILE_CPU
USE_OP_CPU(im2sequence);
#endif
#ifdef PADDLE_MOBILE_MALI_GPU
#endif
#ifdef PADDLE_MOBILE_FPGA
#endif
#endif
......@@ -14,6 +14,7 @@ limitations under the License. */
#ifdef FUSION_CONVADD_OP
#include "operators/kernel/conv_add_kernel.h"
#include "../central-arm-func/conv_add_arm_func.h"
namespace paddle_mobile {
namespace operators {
......@@ -23,111 +24,9 @@ bool ConvAddKernel<CPU, float>::Init(FusionConvAddParam *param) {
return true;
}
void ConvAddBasic(const FusionConvAddParam &param) {
const Tensor *input = param.Input();
Tensor filter = *param.Filter();
Tensor bias = *param.Bias();
int axis = param.Axis();
Tensor *output = param.Output();
math::expand_bias(bias, axis, output->dims());
output->ShareDataWith(bias);
int groups = param.Groups();
std::vector<int> strides = param.Strides();
std::vector<int> paddings = param.Paddings();
std::vector<int> dilations = param.Dilations();
const int batch_size = static_cast<int>(input->dims()[0]);
std::vector<int64_t> filter_shape_vec(framework::vectorize(filter.dims()));
std::vector<int64_t> output_shape_vec(framework::vectorize(output->dims()));
size_t data_dim = filter_shape_vec.size() - 2;
std::vector<int64_t> col_shape_vec(1 + 2 * data_dim);
col_shape_vec[0] = input->dims()[1] / groups;
for (size_t j = 0; j < data_dim; ++j) {
col_shape_vec[j + 1] = filter_shape_vec[j + 2];
col_shape_vec[j + 1 + data_dim] = output_shape_vec[j + 2];
}
framework::DDim col_shape(framework::make_ddim(col_shape_vec));
framework::DDim col_matrix_shape =
framework::flatten_to_2d(col_shape, data_dim + 1);
bool is_expand =
math::IsExpand(filter_shape_vec, strides, paddings, dilations);
Tensor col;
Tensor col_matrix;
if (is_expand) {
col.mutable_data<float>(col_shape);
col_matrix.ShareDataWith(col);
col_matrix.Resize(col_matrix_shape);
}
framework::DDim input_shape = framework::slice_ddim(
input->dims(), 1, static_cast<int>(input->dims().size()));
framework::DDim filter_matrix_shape = {filter.dims()[0],
filter.numel() / filter.dims()[0]};
filter.Resize(filter_matrix_shape);
framework::DDim output_matrix_shape = {
output->dims()[1],
output->numel() / (output->dims()[0] * output->dims()[1])};
// convolution operator: im2col(or vol2col) + gemm
int in_step = static_cast<int>(input->dims()[1]) / groups;
int out_step = static_cast<int>(output->dims()[1]) / groups;
math::Vol2ColFunctor<CPU, float> vol2col;
math::Im2ColFunctor<math::ColFormat::kCFO, CPU, float> im2col;
for (int i = 0; i < batch_size; i++) {
Tensor in_batch = input->Slice(i, i + 1).Resize(input_shape);
Tensor out_batch = output->Slice(i, i + 1).Resize(output_matrix_shape);
for (int g = 0; g < groups; g++) {
Tensor in_slice = in_batch.Slice(g * in_step, (g + 1) * in_step);
if (!is_expand) {
col.ShareDataWith(in_slice);
col_matrix.ShareDataWith(col);
col_matrix.Resize(col_matrix_shape);
} else if (data_dim == 2U) {
// im2col
im2col(in_slice, dilations, strides,
std::vector<int>{paddings[0], paddings[1], paddings[0],
paddings[1]},
&col);
} else if (data_dim == 3U) {
// vol2col
vol2col(in_slice, dilations, strides, paddings, &col);
}
// gemm
Tensor out_slice = out_batch.Slice(g * out_step, (g + 1) * out_step);
Tensor filter_slice = filter.Slice(g * out_step, (g + 1) * out_step);
math::matmul<float>(filter_slice, false, col_matrix, false,
static_cast<float>(1), &out_slice,
static_cast<float>(1));
}
}
}
template <>
void ConvAddKernel<CPU, float>::Compute(const FusionConvAddParam &param) const {
if (param.Groups() == param.Input()->dims()[1] &&
param.Input()->dims()[1] == param.Output()->dims()[1] &&
param.Filter()->dims()[2] == param.Filter()->dims()[3] &&
param.Filter()->dims()[2] == 3 && param.Strides()[0] == 1) {
math::DepthwiseConv3x3s1p1(param.Input(), param.Filter(), param.Output(),
param.Bias(), true);
} else if (param.Groups() == param.Input()->dims()[1] &&
param.Input()->dims()[1] == param.Output()->dims()[1] &&
param.Filter()->dims()[2] == param.Filter()->dims()[3] &&
param.Filter()->dims()[2] == 3) {
math::DepthwiseConv3x3(param.Input(), param.Strides(), param.Paddings(),
param.Filter(), param.Bias(), param.Output(), true);
} else {
ConvAddBasic(param);
}
ConvAddCompute<float>(param);
}
template class ConvAddKernel<CPU, float>;
......
......@@ -14,27 +14,11 @@ limitations under the License. */
#ifdef POOL_OP
#include <operators/kernel/pool_kernel.h>
#include "common/log.h"
#include "operators/kernel/pool_kernel.h"
#include "../central-arm-func/pool_arm_func.h"
namespace paddle_mobile {
namespace operators {
inline void PoolBasic(std::string pooling_type, std::vector<int> ksize,
std::vector<int> strides, std::vector<int> paddings,
const Tensor *in_x, Tensor *out) {
if (pooling_type == "max") {
math::PoolFunctor<CPU, math::MaxPool<float>, float> pool2d_forward;
math::MaxPool<float> pool_process;
pool2d_forward(*in_x, ksize, strides, paddings, pool_process, out);
} else if (pooling_type == "avg") {
math::PoolFunctor<CPU, math::AvgPool<float>, float> pool2d_forward;
math::AvgPool<float> pool_process;
pool2d_forward(*in_x, ksize, strides, paddings, pool_process, out);
}
}
template <>
bool PoolKernel<CPU, float>::Init(PoolParam *param) {
return true;
......@@ -42,42 +26,7 @@ bool PoolKernel<CPU, float>::Init(PoolParam *param) {
template <>
void PoolKernel<CPU, float>::Compute(const PoolParam &param) const {
const Tensor *in_x = param.Input();
Tensor *out = param.Output();
std::string pooling_type = param.PoolingType();
std::vector<int> ksize = param.Ksize();
std::vector<int> strides = param.Strides();
std::vector<int> paddings = param.Paddings();
if (ksize.size() != 2) {
LOG(paddle_mobile::LogLevel::kLOG_ERROR)
<< "Pool op only supports 2D and 3D input.";
}
if (param.isGlobalPooling()) {
for (size_t i = 0; i < ksize.size(); ++i) {
paddings[i] = 0;
ksize[i] = static_cast<int>(in_x->dims()[i + 2]);
}
} else if (ksize[0] == 3 && ksize[0] == ksize[1]) {
if (pooling_type == "max") {
math::Pool3x3Max(strides, paddings, in_x, out);
} else if (pooling_type == "avg") {
math::Pool3x3Avg(strides, paddings, in_x, out);
}
} else if (ksize[0] == 2 && ksize[0] == ksize[1]) {
if (pooling_type == "max") {
math::Pool2x2Max(strides, paddings, in_x, out);
} else if (pooling_type == "avg") {
math::Pool2x2Avg(strides, paddings, in_x, out);
}
} else {
PoolBasic(pooling_type, ksize, strides, paddings, in_x, out);
}
PoolCompute<float>(param);
}
} // namespace operators
} // namespace paddle_mobile
......
......@@ -15,6 +15,7 @@ limitations under the License. */
#ifdef SIGMOID_OP
#include "../sigmoid_kernel.h"
#include "../central-arm-func/sigmoid_arm_func.h"
#if __ARM_NEON
#include "../../math/math_func_neon.h"
#endif
......@@ -25,52 +26,6 @@ namespace operators {
using framework::DDim;
using framework::Tensor;
void sigmoid(const Tensor *X, Tensor *Y) {
#if __ARM_NEON
const float *input = X->data<float>();
float *output = Y->mutable_data<float>();
const DDim &dDim = X->dims();
int axis_index = 1;
if (dDim.size() < 4) {
axis_index = 0;
}
DDim outer_ddim =
paddle_mobile::framework::slice_ddim(dDim, 0, axis_index + 1);
DDim inner_ddim =
paddle_mobile::framework::slice_ddim(dDim, axis_index + 1, dDim.size());
int out_size = paddle_mobile::framework::product(outer_ddim);
int inner_size = paddle_mobile::framework::product(inner_ddim);
DLOG << "outsize=" << out_size;
DLOG << "innersize=" << inner_size;
#pragma omp parallel for
for (int i = 0; i < out_size; ++i) {
const float *input_outer_ptr = input + i * inner_size;
float *output_outer_ptr = output + i * inner_size;
int nn = inner_size >> 2;
int remain = inner_size - (nn << 2);
float32x4_t _one = vdupq_n_f32(1.f);
for (; nn > 0; nn--) {
float32x4_t data = vld1q_f32(input_outer_ptr);
data = vnegq_f32(data);
data = exp_ps(data);
data = vaddq_f32(data, _one);
float32x4_t out_data = vrecpeq_f32(data);
out_data = vmulq_f32(vrecpsq_f32(data, out_data), out_data);
vst1q_f32(output_outer_ptr, out_data);
input_outer_ptr += 4;
output_outer_ptr += 4;
}
for (; remain > 0; remain--) {
*output_outer_ptr = 1.f / (1.f + exp(-*input_outer_ptr));
output_outer_ptr++;
input_outer_ptr++;
}
}
#endif
}
template <>
bool SigmoidKernel<CPU, float>::Init(SigmoidParam *param) {
return true;
......@@ -78,11 +33,7 @@ bool SigmoidKernel<CPU, float>::Init(SigmoidParam *param) {
template <>
void SigmoidKernel<CPU, float>::Compute(const SigmoidParam &param) const {
const Tensor *in_x = param.InputX();
Tensor *out = param.Out();
auto x_dims = in_x->dims();
out->Resize(x_dims);
sigmoid(in_x, out);
SigmoidCompute<float>(param);
}
template class SigmoidKernel<CPU, float>;
......
......@@ -15,7 +15,8 @@ limitations under the License. */
#ifdef SOFTMAX_OP
#include "../softmax_kernel.h"
#include "../../math/softmax.h"
#include "../central-arm-func/softmax_arm_func.h"
#include "operators/math/softmax.h"
namespace paddle_mobile {
namespace operators {
......@@ -26,11 +27,7 @@ bool SoftmaxKernel<CPU, float>::Init(SoftmaxParam *param) {
template <>
void SoftmaxKernel<CPU, float>::Compute(const SoftmaxParam &param) const {
const Tensor *in_x = param.InputX();
Tensor *out = param.Out();
auto x_dims = in_x->dims();
out->Resize(x_dims);
math::SoftmaxFuntor<CPU, float>()(in_x, out);
SoftmaxCompute<float>(param);
}
template class SoftmaxKernel<CPU, float>;
......