Commit 00d4df47, authored by yunyaoXYY, committed by Wei Shengyu

create shoukou

Parent 19ef98f4
../../docs/zh_CN/fastdeploy/README.md
\ No newline at end of file
../../../../../docs/zh_CN/fastdeploy/amlogic/a311d/README.md
\ No newline at end of file
PROJECT(infer_demo C CXX)
CMAKE_MINIMUM_REQUIRED (VERSION 3.10)
# Specify the path of the downloaded and extracted FastDeploy library
option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.")
include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
# Add the FastDeploy dependency headers
include_directories(${FASTDEPLOY_INCS})
include_directories(${FastDeploy_INCLUDE_DIRS})
add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc)
# Link against the FastDeploy libraries
target_link_libraries(infer_demo ${FASTDEPLOY_LIBS})
set(CMAKE_INSTALL_PREFIX ${CMAKE_SOURCE_DIR}/build/install)
install(TARGETS infer_demo DESTINATION ./)
install(DIRECTORY models DESTINATION ./)
install(DIRECTORY images DESTINATION ./)
file(GLOB_RECURSE FASTDEPLOY_LIBS ${FASTDEPLOY_INSTALL_DIR}/lib/lib*.so*)
file(GLOB_RECURSE ALL_LIBS ${FASTDEPLOY_INSTALL_DIR}/third_libs/install/lib*.so*)
list(APPEND ALL_LIBS ${FASTDEPLOY_LIBS})
install(PROGRAMS ${ALL_LIBS} DESTINATION lib)
file(GLOB ADB_TOOLS run_with_adb.sh)
install(PROGRAMS ${ADB_TOOLS} DESTINATION ./)
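A typical out-of-source build against this CMakeLists might look as follows. This is a minimal sketch, not part of the commit: the SDK location fastdeploy-sdk is an illustrative assumption, and the cross-compilation toolchain for the target board is assumed to be configured already.
# Configure against the downloaded SDK; the path here is a hypothetical example.
mkdir -p build && cd build
cmake .. -DFASTDEPLOY_INSTALL_DIR=$PWD/../fastdeploy-sdk
make -j8
# "make install" stages infer_demo, models/, images/, the shared libraries
# and run_with_adb.sh into build/install/, per the install() rules above.
make install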
../../../../../docs/zh_CN/fastdeploy/amlogic/a311d/cpp/README.md
\ No newline at end of file
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy/vision.h"
#include <string>
#ifdef WIN32
const char sep = '\\';
#else
const char sep = '/';
#endif
void InitAndInfer(const std::string &model_dir, const std::string &image_file) {
  auto model_file = model_dir + sep + "inference.pdmodel";
  auto params_file = model_dir + sep + "inference.pdiparams";
  auto config_file = model_dir + sep + "inference_cls.yaml";

  fastdeploy::vision::EnableFlyCV();
  fastdeploy::RuntimeOption option;
  option.UseTimVX();

  auto model = fastdeploy::vision::classification::PaddleClasModel(
      model_file, params_file, config_file, option);
  assert(model.Initialized());

  auto im = cv::imread(image_file);

  fastdeploy::vision::ClassifyResult res;
  if (!model.Predict(im, &res)) {
    std::cerr << "Failed to predict." << std::endl;
    return;
  }
  std::cout << res.Str() << std::endl;
}
int main(int argc, char *argv[]) {
  if (argc < 3) {
    std::cout << "Usage: infer_demo path/to/quant_model "
                 "path/to/image "
                 "e.g. ./infer_demo ./ResNet50_vd_quant ./test.jpeg"
              << std::endl;
    return -1;
  }

  std::string model_dir = argv[1];
  std::string test_image = argv[2];
  InitAndInfer(model_dir, test_image);
  return 0;
}
#!/bin/bash
HOST_SPACE=${PWD}
echo ${HOST_SPACE}
WORK_SPACE=/data/local/tmp/test
# The first parameter represents the demo name
DEMO_NAME=image_classification_demo
if [ -n "$1" ]; then
DEMO_NAME=$1
fi
# The second parameter represents the model name
MODEL_NAME=mobilenet_v1_fp32_224
if [ -n "$2" ]; then
MODEL_NAME=$2
fi
# The third parameter indicates the name of the image to be tested
IMAGE_NAME=0001.jpg
if [ -n "$3" ]; then
IMAGE_NAME=$3
fi
# The fourth parameter represents the ID of the device
ADB_DEVICE_NAME=
if [ -n "$4" ]; then
ADB_DEVICE_NAME="-s $4"
fi
# Set the environment variables required during the running process
EXPORT_ENVIRONMENT_VARIABLES="export GLOG_v=5; export VIV_VX_ENABLE_GRAPH_TRANSFORM=-pcq:1; export VIV_VX_SET_PER_CHANNEL_ENTROPY=100; export TIMVX_BATCHNORM_FUSION_MAX_ALLOWED_QUANT_SCALE_DEVIATION=300000; export VSI_NN_LOG_LEVEL=5;"
EXPORT_ENVIRONMENT_VARIABLES="${EXPORT_ENVIRONMENT_VARIABLES}export LD_LIBRARY_PATH=${WORK_SPACE}/lib:\$LD_LIBRARY_PATH;"
# Please install adb, and DON'T run this inside a Docker container.
set -e
adb $ADB_DEVICE_NAME shell "rm -rf $WORK_SPACE"
adb $ADB_DEVICE_NAME shell "mkdir -p $WORK_SPACE"
# Upload the demo, libraries, models and test images to the device
adb $ADB_DEVICE_NAME push ${HOST_SPACE}/lib $WORK_SPACE
adb $ADB_DEVICE_NAME push ${HOST_SPACE}/${DEMO_NAME} $WORK_SPACE
adb $ADB_DEVICE_NAME push models $WORK_SPACE
adb $ADB_DEVICE_NAME push images $WORK_SPACE
# Execute the deployment demo
adb $ADB_DEVICE_NAME shell "cd $WORK_SPACE; ${EXPORT_ENVIRONMENT_VARIABLES} chmod +x ./${DEMO_NAME}; ./${DEMO_NAME} ./models/${MODEL_NAME} ./images/$IMAGE_NAME"
.DS_Store
.idea
.gradle
.cxx
cache
build
app/cache
app/libs/fastdeploy*
app/.cxx
app/build
app/src/main/assets/models/*
app/.gradle
app/.idea
fastdeploy/cache
fastdeploy/libs/fastdeploy*
fastdeploy/.cxx
fastdeploy/build
fastdeploy/src/main/assets/models/*
fastdeploy/.gradle
fastdeploy/.idea
../../../docs/zh_CN/fastdeploy/android/README.md
\ No newline at end of file
import java.security.MessageDigest
apply plugin: 'com.android.application'
android {
compileSdk 28
defaultConfig {
applicationId 'com.baidu.paddle.fastdeploy.app.examples'
minSdkVersion 15
//noinspection ExpiredTargetSdkVersion
targetSdkVersion 28
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
}
dependencies {
implementation fileTree(include: ['*.aar'], dir: 'libs')
implementation 'com.android.support:appcompat-v7:28.0.0'
//noinspection GradleDependency
implementation 'com.android.support.constraint:constraint-layout:1.1.3'
implementation 'com.android.support:design:28.0.0'
implementation 'org.jetbrains:annotations:15.0'
//noinspection GradleDependency
testImplementation 'junit:junit:4.12'
androidTestImplementation 'com.android.support.test:runner:1.0.2'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
}
def FD_MODEL = [
[
'src' : 'https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV1_x0_25_infer.tgz',
'dest': 'src/main/assets/models'
]
]
def FD_JAVA_SDK = [
[
'src' : 'https://bj.bcebos.com/fastdeploy/test/fastdeploy-android-sdk-latest-dev.aar',
'dest': 'libs'
]
]
task downloadAndExtractModels(type: DefaultTask) {
doFirst {
println "Downloading and extracting fastdeploy models ..."
}
doLast {
String cachePath = "cache"
if (!file("${cachePath}").exists()) {
mkdir "${cachePath}"
}
FD_MODEL.eachWithIndex { model, index ->
MessageDigest messageDigest = MessageDigest.getInstance('MD5')
messageDigest.update(model.src.bytes)
String[] modelPaths = model.src.split("/")
String modelName = modelPaths[modelPaths.length - 1]
// Download the target model if it does not exist
boolean copyFiles = !file("${model.dest}").exists()
if (!file("${cachePath}/${modelName}").exists()) {
println "Downloading ${model.src} -> ${cachePath}/${modelName}"
ant.get(src: model.src, dest: file("${cachePath}/${modelName}"))
copyFiles = true
}
if (copyFiles) {
println "Coping ${cachePath}/${modelName} -> ${model.dest}"
copy {
from tarTree("${cachePath}/${modelName}")
into "${model.dest}"
}
}
}
}
}
task downloadAndExtractSDKs(type: DefaultTask) {
doFirst {
println "Downloading and extracting fastdeploy android java sdk ..."
}
doLast {
String cachePath = "cache"
if (!file("${cachePath}").exists()) {
mkdir "${cachePath}"
}
FD_JAVA_SDK.eachWithIndex { sdk, index ->
String[] sdkPaths = sdk.src.split("/")
String sdkName = sdkPaths[sdkPaths.length - 1]
// Download the target SDK if it does not exist
boolean copyFiles = !file("${sdk.dest}/${sdkName}").exists()
if (!file("${cachePath}/${sdkName}").exists()) {
println "Downloading ${sdk.src} -> ${cachePath}/${sdkName}"
ant.get(src: sdk.src, dest: file("${cachePath}/${sdkName}"))
copyFiles = true
}
if (copyFiles) {
println "Coping ${cachePath}/${sdkName} -> ${sdk.dest}/${sdkName}"
copy {
from "${cachePath}/${sdkName}"
into "${sdk.dest}"
}
}
}
}
}
preBuild.dependsOn downloadAndExtractSDKs
preBuild.dependsOn downloadAndExtractModels
\ No newline at end of file
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
\ No newline at end of file
package com.baidu.paddle.fastdeploy;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
@Test
public void useAppContext() {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
assertEquals("com.baidu.paddle.fastdeploy", appContext.getPackageName());
}
}
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.baidu.paddle.fastdeploy.app.examples">
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.CAMERA"/>
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity android:name=".classification.ClassificationMainActivity">
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
</activity>
<activity
android:name=".classification.ClassificationSettingsActivity"
android:label="Settings">
</activity>
</application>
</manifest>
\ No newline at end of file
0 tench, Tinca tinca
1 goldfish, Carassius auratus
2 great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias
3 tiger shark, Galeocerdo cuvieri
4 hammerhead, hammerhead shark
5 electric ray, crampfish, numbfish, torpedo
6 stingray
7 cock
8 hen
9 ostrich, Struthio camelus
10 brambling, Fringilla montifringilla
11 goldfinch, Carduelis carduelis
12 house finch, linnet, Carpodacus mexicanus
13 junco, snowbird
14 indigo bunting, indigo finch, indigo bird, Passerina cyanea
15 robin, American robin, Turdus migratorius
16 bulbul
17 jay
18 magpie
19 chickadee
20 water ouzel, dipper
21 kite
22 bald eagle, American eagle, Haliaeetus leucocephalus
23 vulture
24 great grey owl, great gray owl, Strix nebulosa
25 European fire salamander, Salamandra salamandra
26 common newt, Triturus vulgaris
27 eft
28 spotted salamander, Ambystoma maculatum
29 axolotl, mud puppy, Ambystoma mexicanum
30 bullfrog, Rana catesbeiana
31 tree frog, tree-frog
32 tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui
33 loggerhead, loggerhead turtle, Caretta caretta
34 leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea
35 mud turtle
36 terrapin
37 box turtle, box tortoise
38 banded gecko
39 common iguana, iguana, Iguana iguana
40 American chameleon, anole, Anolis carolinensis
41 whiptail, whiptail lizard
42 agama
43 frilled lizard, Chlamydosaurus kingi
44 alligator lizard
45 Gila monster, Heloderma suspectum
46 green lizard, Lacerta viridis
47 African chameleon, Chamaeleo chamaeleon
48 Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis
49 African crocodile, Nile crocodile, Crocodylus niloticus
50 American alligator, Alligator mississipiensis
51 triceratops
52 thunder snake, worm snake, Carphophis amoenus
53 ringneck snake, ring-necked snake, ring snake
54 hognose snake, puff adder, sand viper
55 green snake, grass snake
56 king snake, kingsnake
57 garter snake, grass snake
58 water snake
59 vine snake
60 night snake, Hypsiglena torquata
61 boa constrictor, Constrictor constrictor
62 rock python, rock snake, Python sebae
63 Indian cobra, Naja naja
64 green mamba
65 sea snake
66 horned viper, cerastes, sand viper, horned asp, Cerastes cornutus
67 diamondback, diamondback rattlesnake, Crotalus adamanteus
68 sidewinder, horned rattlesnake, Crotalus cerastes
69 trilobite
70 harvestman, daddy longlegs, Phalangium opilio
71 scorpion
72 black and gold garden spider, Argiope aurantia
73 barn spider, Araneus cavaticus
74 garden spider, Aranea diademata
75 black widow, Latrodectus mactans
76 tarantula
77 wolf spider, hunting spider
78 tick
79 centipede
80 black grouse
81 ptarmigan
82 ruffed grouse, partridge, Bonasa umbellus
83 prairie chicken, prairie grouse, prairie fowl
84 peacock
85 quail
86 partridge
87 African grey, African gray, Psittacus erithacus
88 macaw
89 sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita
90 lorikeet
91 coucal
92 bee eater
93 hornbill
94 hummingbird
95 jacamar
96 toucan
97 drake
98 red-breasted merganser, Mergus serrator
99 goose
100 black swan, Cygnus atratus
101 tusker
102 echidna, spiny anteater, anteater
103 platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus
104 wallaby, brush kangaroo
105 koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus
106 wombat
107 jellyfish
108 sea anemone, anemone
109 brain coral
110 flatworm, platyhelminth
111 nematode, nematode worm, roundworm
112 conch
113 snail
114 slug
115 sea slug, nudibranch
116 chiton, coat-of-mail shell, sea cradle, polyplacophore
117 chambered nautilus, pearly nautilus, nautilus
118 Dungeness crab, Cancer magister
119 rock crab, Cancer irroratus
120 fiddler crab
121 king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica
122 American lobster, Northern lobster, Maine lobster, Homarus americanus
123 spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish
124 crayfish, crawfish, crawdad, crawdaddy
125 hermit crab
126 isopod
127 white stork, Ciconia ciconia
128 black stork, Ciconia nigra
129 spoonbill
130 flamingo
131 little blue heron, Egretta caerulea
132 American egret, great white heron, Egretta albus
133 bittern
134 crane
135 limpkin, Aramus pictus
136 European gallinule, Porphyrio porphyrio
137 American coot, marsh hen, mud hen, water hen, Fulica americana
138 bustard
139 ruddy turnstone, Arenaria interpres
140 red-backed sandpiper, dunlin, Erolia alpina
141 redshank, Tringa totanus
142 dowitcher
143 oystercatcher, oyster catcher
144 pelican
145 king penguin, Aptenodytes patagonica
146 albatross, mollymawk
147 grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus
148 killer whale, killer, orca, grampus, sea wolf, Orcinus orca
149 dugong, Dugong dugon
150 sea lion
151 Chihuahua
152 Japanese spaniel
153 Maltese dog, Maltese terrier, Maltese
154 Pekinese, Pekingese, Peke
155 Shih-Tzu
156 Blenheim spaniel
157 papillon
158 toy terrier
159 Rhodesian ridgeback
160 Afghan hound, Afghan
161 basset, basset hound
162 beagle
163 bloodhound, sleuthhound
164 bluetick
165 black-and-tan coonhound
166 Walker hound, Walker foxhound
167 English foxhound
168 redbone
169 borzoi, Russian wolfhound
170 Irish wolfhound
171 Italian greyhound
172 whippet
173 Ibizan hound, Ibizan Podenco
174 Norwegian elkhound, elkhound
175 otterhound, otter hound
176 Saluki, gazelle hound
177 Scottish deerhound, deerhound
178 Weimaraner
179 Staffordshire bullterrier, Staffordshire bull terrier
180 American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier
181 Bedlington terrier
182 Border terrier
183 Kerry blue terrier
184 Irish terrier
185 Norfolk terrier
186 Norwich terrier
187 Yorkshire terrier
188 wire-haired fox terrier
189 Lakeland terrier
190 Sealyham terrier, Sealyham
191 Airedale, Airedale terrier
192 cairn, cairn terrier
193 Australian terrier
194 Dandie Dinmont, Dandie Dinmont terrier
195 Boston bull, Boston terrier
196 miniature schnauzer
197 giant schnauzer
198 standard schnauzer
199 Scotch terrier, Scottish terrier, Scottie
200 Tibetan terrier, chrysanthemum dog
201 silky terrier, Sydney silky
202 soft-coated wheaten terrier
203 West Highland white terrier
204 Lhasa, Lhasa apso
205 flat-coated retriever
206 curly-coated retriever
207 golden retriever
208 Labrador retriever
209 Chesapeake Bay retriever
210 German short-haired pointer
211 vizsla, Hungarian pointer
212 English setter
213 Irish setter, red setter
214 Gordon setter
215 Brittany spaniel
216 clumber, clumber spaniel
217 English springer, English springer spaniel
218 Welsh springer spaniel
219 cocker spaniel, English cocker spaniel, cocker
220 Sussex spaniel
221 Irish water spaniel
222 kuvasz
223 schipperke
224 groenendael
225 malinois
226 briard
227 kelpie
228 komondor
229 Old English sheepdog, bobtail
230 Shetland sheepdog, Shetland sheep dog, Shetland
231 collie
232 Border collie
233 Bouvier des Flandres, Bouviers des Flandres
234 Rottweiler
235 German shepherd, German shepherd dog, German police dog, alsatian
236 Doberman, Doberman pinscher
237 miniature pinscher
238 Greater Swiss Mountain dog
239 Bernese mountain dog
240 Appenzeller
241 EntleBucher
242 boxer
243 bull mastiff
244 Tibetan mastiff
245 French bulldog
246 Great Dane
247 Saint Bernard, St Bernard
248 Eskimo dog, husky
249 malamute, malemute, Alaskan malamute
250 Siberian husky
251 dalmatian, coach dog, carriage dog
252 affenpinscher, monkey pinscher, monkey dog
253 basenji
254 pug, pug-dog
255 Leonberg
256 Newfoundland, Newfoundland dog
257 Great Pyrenees
258 Samoyed, Samoyede
259 Pomeranian
260 chow, chow chow
261 keeshond
262 Brabancon griffon
263 Pembroke, Pembroke Welsh corgi
264 Cardigan, Cardigan Welsh corgi
265 toy poodle
266 miniature poodle
267 standard poodle
268 Mexican hairless
269 timber wolf, grey wolf, gray wolf, Canis lupus
270 white wolf, Arctic wolf, Canis lupus tundrarum
271 red wolf, maned wolf, Canis rufus, Canis niger
272 coyote, prairie wolf, brush wolf, Canis latrans
273 dingo, warrigal, warragal, Canis dingo
274 dhole, Cuon alpinus
275 African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus
276 hyena, hyaena
277 red fox, Vulpes vulpes
278 kit fox, Vulpes macrotis
279 Arctic fox, white fox, Alopex lagopus
280 grey fox, gray fox, Urocyon cinereoargenteus
281 tabby, tabby cat
282 tiger cat
283 Persian cat
284 Siamese cat, Siamese
285 Egyptian cat
286 cougar, puma, catamount, mountain lion, painter, panther, Felis concolor
287 lynx, catamount
288 leopard, Panthera pardus
289 snow leopard, ounce, Panthera uncia
290 jaguar, panther, Panthera onca, Felis onca
291 lion, king of beasts, Panthera leo
292 tiger, Panthera tigris
293 cheetah, chetah, Acinonyx jubatus
294 brown bear, bruin, Ursus arctos
295 American black bear, black bear, Ursus americanus, Euarctos americanus
296 ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus
297 sloth bear, Melursus ursinus, Ursus ursinus
298 mongoose
299 meerkat, mierkat
300 tiger beetle
301 ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle
302 ground beetle, carabid beetle
303 long-horned beetle, longicorn, longicorn beetle
304 leaf beetle, chrysomelid
305 dung beetle
306 rhinoceros beetle
307 weevil
308 fly
309 bee
310 ant, emmet, pismire
311 grasshopper, hopper
312 cricket
313 walking stick, walkingstick, stick insect
314 cockroach, roach
315 mantis, mantid
316 cicada, cicala
317 leafhopper
318 lacewing, lacewing fly
319 dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk
320 damselfly
321 admiral
322 ringlet, ringlet butterfly
323 monarch, monarch butterfly, milkweed butterfly, Danaus plexippus
324 cabbage butterfly
325 sulphur butterfly, sulfur butterfly
326 lycaenid, lycaenid butterfly
327 starfish, sea star
328 sea urchin
329 sea cucumber, holothurian
330 wood rabbit, cottontail, cottontail rabbit
331 hare
332 Angora, Angora rabbit
333 hamster
334 porcupine, hedgehog
335 fox squirrel, eastern fox squirrel, Sciurus niger
336 marmot
337 beaver
338 guinea pig, Cavia cobaya
339 sorrel
340 zebra
341 hog, pig, grunter, squealer, Sus scrofa
342 wild boar, boar, Sus scrofa
343 warthog
344 hippopotamus, hippo, river horse, Hippopotamus amphibius
345 ox
346 water buffalo, water ox, Asiatic buffalo, Bubalus bubalis
347 bison
348 ram, tup
349 bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis
350 ibex, Capra ibex
351 hartebeest
352 impala, Aepyceros melampus
353 gazelle
354 Arabian camel, dromedary, Camelus dromedarius
355 llama
356 weasel
357 mink
358 polecat, fitch, foulmart, foumart, Mustela putorius
359 black-footed ferret, ferret, Mustela nigripes
360 otter
361 skunk, polecat, wood pussy
362 badger
363 armadillo
364 three-toed sloth, ai, Bradypus tridactylus
365 orangutan, orang, orangutang, Pongo pygmaeus
366 gorilla, Gorilla gorilla
367 chimpanzee, chimp, Pan troglodytes
368 gibbon, Hylobates lar
369 siamang, Hylobates syndactylus, Symphalangus syndactylus
370 guenon, guenon monkey
371 patas, hussar monkey, Erythrocebus patas
372 baboon
373 macaque
374 langur
375 colobus, colobus monkey
376 proboscis monkey, Nasalis larvatus
377 marmoset
378 capuchin, ringtail, Cebus capucinus
379 howler monkey, howler
380 titi, titi monkey
381 spider monkey, Ateles geoffroyi
382 squirrel monkey, Saimiri sciureus
383 Madagascar cat, ring-tailed lemur, Lemur catta
384 indri, indris, Indri indri, Indri brevicaudatus
385 Indian elephant, Elephas maximus
386 African elephant, Loxodonta africana
387 lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens
388 giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca
389 barracouta, snoek
390 eel
391 coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch
392 rock beauty, Holocanthus tricolor
393 anemone fish
394 sturgeon
395 gar, garfish, garpike, billfish, Lepisosteus osseus
396 lionfish
397 puffer, pufferfish, blowfish, globefish
398 abacus
399 abaya
400 academic gown, academic robe, judge's robe
401 accordion, piano accordion, squeeze box
402 acoustic guitar
403 aircraft carrier, carrier, flattop, attack aircraft carrier
404 airliner
405 airship, dirigible
406 altar
407 ambulance
408 amphibian, amphibious vehicle
409 analog clock
410 apiary, bee house
411 apron
412 ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin
413 assault rifle, assault gun
414 backpack, back pack, knapsack, packsack, rucksack, haversack
415 bakery, bakeshop, bakehouse
416 balance beam, beam
417 balloon
418 ballpoint, ballpoint pen, ballpen, Biro
419 Band Aid
420 banjo
421 bannister, banister, balustrade, balusters, handrail
422 barbell
423 barber chair
424 barbershop
425 barn
426 barometer
427 barrel, cask
428 barrow, garden cart, lawn cart, wheelbarrow
429 baseball
430 basketball
431 bassinet
432 bassoon
433 bathing cap, swimming cap
434 bath towel
435 bathtub, bathing tub, bath, tub
436 beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon
437 beacon, lighthouse, beacon light, pharos
438 beaker
439 bearskin, busby, shako
440 beer bottle
441 beer glass
442 bell cote, bell cot
443 bib
444 bicycle-built-for-two, tandem bicycle, tandem
445 bikini, two-piece
446 binder, ring-binder
447 binoculars, field glasses, opera glasses
448 birdhouse
449 boathouse
450 bobsled, bobsleigh, bob
451 bolo tie, bolo, bola tie, bola
452 bonnet, poke bonnet
453 bookcase
454 bookshop, bookstore, bookstall
455 bottlecap
456 bow
457 bow tie, bow-tie, bowtie
458 brass, memorial tablet, plaque
459 brassiere, bra, bandeau
460 breakwater, groin, groyne, mole, bulwark, seawall, jetty
461 breastplate, aegis, egis
462 broom
463 bucket, pail
464 buckle
465 bulletproof vest
466 bullet train, bullet
467 butcher shop, meat market
468 cab, hack, taxi, taxicab
469 caldron, cauldron
470 candle, taper, wax light
471 cannon
472 canoe
473 can opener, tin opener
474 cardigan
475 car mirror
476 carousel, carrousel, merry-go-round, roundabout, whirligig
477 carpenter's kit, tool kit
478 carton
479 car wheel
480 cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM
481 cassette
482 cassette player
483 castle
484 catamaran
485 CD player
486 cello, violoncello
487 cellular telephone, cellular phone, cellphone, cell, mobile phone
488 chain
489 chainlink fence
490 chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour
491 chain saw, chainsaw
492 chest
493 chiffonier, commode
494 chime, bell, gong
495 china cabinet, china closet
496 Christmas stocking
497 church, church building
498 cinema, movie theater, movie theatre, movie house, picture palace
499 cleaver, meat cleaver, chopper
500 cliff dwelling
501 cloak
502 clog, geta, patten, sabot
503 cocktail shaker
504 coffee mug
505 coffeepot
506 coil, spiral, volute, whorl, helix
507 combination lock
508 computer keyboard, keypad
509 confectionery, confectionary, candy store
510 container ship, containership, container vessel
511 convertible
512 corkscrew, bottle screw
513 cornet, horn, trumpet, trump
514 cowboy boot
515 cowboy hat, ten-gallon hat
516 cradle
517 crane
518 crash helmet
519 crate
520 crib, cot
521 Crock Pot
522 croquet ball
523 crutch
524 cuirass
525 dam, dike, dyke
526 desk
527 desktop computer
528 dial telephone, dial phone
529 diaper, nappy, napkin
530 digital clock
531 digital watch
532 dining table, board
533 dishrag, dishcloth
534 dishwasher, dish washer, dishwashing machine
535 disk brake, disc brake
536 dock, dockage, docking facility
537 dogsled, dog sled, dog sleigh
538 dome
539 doormat, welcome mat
540 drilling platform, offshore rig
541 drum, membranophone, tympan
542 drumstick
543 dumbbell
544 Dutch oven
545 electric fan, blower
546 electric guitar
547 electric locomotive
548 entertainment center
549 envelope
550 espresso maker
551 face powder
552 feather boa, boa
553 file, file cabinet, filing cabinet
554 fireboat
555 fire engine, fire truck
556 fire screen, fireguard
557 flagpole, flagstaff
558 flute, transverse flute
559 folding chair
560 football helmet
561 forklift
562 fountain
563 fountain pen
564 four-poster
565 freight car
566 French horn, horn
567 frying pan, frypan, skillet
568 fur coat
569 garbage truck, dustcart
570 gasmask, respirator, gas helmet
571 gas pump, gasoline pump, petrol pump, island dispenser
572 goblet
573 go-kart
574 golf ball
575 golfcart, golf cart
576 gondola
577 gong, tam-tam
578 gown
579 grand piano, grand
580 greenhouse, nursery, glasshouse
581 grille, radiator grille
582 grocery store, grocery, food market, market
583 guillotine
584 hair slide
585 hair spray
586 half track
587 hammer
588 hamper
589 hand blower, blow dryer, blow drier, hair dryer, hair drier
590 hand-held computer, hand-held microcomputer
591 handkerchief, hankie, hanky, hankey
592 hard disc, hard disk, fixed disk
593 harmonica, mouth organ, harp, mouth harp
594 harp
595 harvester, reaper
596 hatchet
597 holster
598 home theater, home theatre
599 honeycomb
600 hook, claw
601 hoopskirt, crinoline
602 horizontal bar, high bar
603 horse cart, horse-cart
604 hourglass
605 iPod
606 iron, smoothing iron
607 jack-o'-lantern
608 jean, blue jean, denim
609 jeep, landrover
610 jersey, T-shirt, tee shirt
611 jigsaw puzzle
612 jinrikisha, ricksha, rickshaw
613 joystick
614 kimono
615 knee pad
616 knot
617 lab coat, laboratory coat
618 ladle
619 lampshade, lamp shade
620 laptop, laptop computer
621 lawn mower, mower
622 lens cap, lens cover
623 letter opener, paper knife, paperknife
624 library
625 lifeboat
626 lighter, light, igniter, ignitor
627 limousine, limo
628 liner, ocean liner
629 lipstick, lip rouge
630 Loafer
631 lotion
632 loudspeaker, speaker, speaker unit, loudspeaker system, speaker system
633 loupe, jeweler's loupe
634 lumbermill, sawmill
635 magnetic compass
636 mailbag, postbag
637 mailbox, letter box
638 maillot
639 maillot, tank suit
640 manhole cover
641 maraca
642 marimba, xylophone
643 mask
644 matchstick
645 maypole
646 maze, labyrinth
647 measuring cup
648 medicine chest, medicine cabinet
649 megalith, megalithic structure
650 microphone, mike
651 microwave, microwave oven
652 military uniform
653 milk can
654 minibus
655 miniskirt, mini
656 minivan
657 missile
658 mitten
659 mixing bowl
660 mobile home, manufactured home
661 Model T
662 modem
663 monastery
664 monitor
665 moped
666 mortar
667 mortarboard
668 mosque
669 mosquito net
670 motor scooter, scooter
671 mountain bike, all-terrain bike, off-roader
672 mountain tent
673 mouse, computer mouse
674 mousetrap
675 moving van
676 muzzle
677 nail
678 neck brace
679 necklace
680 nipple
681 notebook, notebook computer
682 obelisk
683 oboe, hautboy, hautbois
684 ocarina, sweet potato
685 odometer, hodometer, mileometer, milometer
686 oil filter
687 organ, pipe organ
688 oscilloscope, scope, cathode-ray oscilloscope, CRO
689 overskirt
690 oxcart
691 oxygen mask
692 packet
693 paddle, boat paddle
694 paddlewheel, paddle wheel
695 padlock
696 paintbrush
697 pajama, pyjama, pj's, jammies
698 palace
699 panpipe, pandean pipe, syrinx
700 paper towel
701 parachute, chute
702 parallel bars, bars
703 park bench
704 parking meter
705 passenger car, coach, carriage
706 patio, terrace
707 pay-phone, pay-station
708 pedestal, plinth, footstall
709 pencil box, pencil case
710 pencil sharpener
711 perfume, essence
712 Petri dish
713 photocopier
714 pick, plectrum, plectron
715 pickelhaube
716 picket fence, paling
717 pickup, pickup truck
718 pier
719 piggy bank, penny bank
720 pill bottle
721 pillow
722 ping-pong ball
723 pinwheel
724 pirate, pirate ship
725 pitcher, ewer
726 plane, carpenter's plane, woodworking plane
727 planetarium
728 plastic bag
729 plate rack
730 plow, plough
731 plunger, plumber's helper
732 Polaroid camera, Polaroid Land camera
733 pole
734 police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria
735 poncho
736 pool table, billiard table, snooker table
737 pop bottle, soda bottle
738 pot, flowerpot
739 potter's wheel
740 power drill
741 prayer rug, prayer mat
742 printer
743 prison, prison house
744 projectile, missile
745 projector
746 puck, hockey puck
747 punching bag, punch bag, punching ball, punchball
748 purse
749 quill, quill pen
750 quilt, comforter, comfort, puff
751 racer, race car, racing car
752 racket, racquet
753 radiator
754 radio, wireless
755 radio telescope, radio reflector
756 rain barrel
757 recreational vehicle, RV, R.V.
758 reel
759 reflex camera
760 refrigerator, icebox
761 remote control, remote
762 restaurant, eating house, eating place, eatery
763 revolver, six-gun, six-shooter
764 rifle
765 rocking chair, rocker
766 rotisserie
767 rubber eraser, rubber, pencil eraser
768 rugby ball
769 rule, ruler
770 running shoe
771 safe
772 safety pin
773 saltshaker, salt shaker
774 sandal
775 sarong
776 sax, saxophone
777 scabbard
778 scale, weighing machine
779 school bus
780 schooner
781 scoreboard
782 screen, CRT screen
783 screw
784 screwdriver
785 seat belt, seatbelt
786 sewing machine
787 shield, buckler
788 shoe shop, shoe-shop, shoe store
789 shoji
790 shopping basket
791 shopping cart
792 shovel
793 shower cap
794 shower curtain
795 ski
796 ski mask
797 sleeping bag
798 slide rule, slipstick
799 sliding door
800 slot, one-armed bandit
801 snorkel
802 snowmobile
803 snowplow, snowplough
804 soap dispenser
805 soccer ball
806 sock
807 solar dish, solar collector, solar furnace
808 sombrero
809 soup bowl
810 space bar
811 space heater
812 space shuttle
813 spatula
814 speedboat
815 spider web, spider's web
816 spindle
817 sports car, sport car
818 spotlight, spot
819 stage
820 steam locomotive
821 steel arch bridge
822 steel drum
823 stethoscope
824 stole
825 stone wall
826 stopwatch, stop watch
827 stove
828 strainer
829 streetcar, tram, tramcar, trolley, trolley car
830 stretcher
831 studio couch, day bed
832 stupa, tope
833 submarine, pigboat, sub, U-boat
834 suit, suit of clothes
835 sundial
836 sunglass
837 sunglasses, dark glasses, shades
838 sunscreen, sunblock, sun blocker
839 suspension bridge
840 swab, swob, mop
841 sweatshirt
842 swimming trunks, bathing trunks
843 swing
844 switch, electric switch, electrical switch
845 syringe
846 table lamp
847 tank, army tank, armored combat vehicle, armoured combat vehicle
848 tape player
849 teapot
850 teddy, teddy bear
851 television, television system
852 tennis ball
853 thatch, thatched roof
854 theater curtain, theatre curtain
855 thimble
856 thresher, thrasher, threshing machine
857 throne
858 tile roof
859 toaster
860 tobacco shop, tobacconist shop, tobacconist
861 toilet seat
862 torch
863 totem pole
864 tow truck, tow car, wrecker
865 toyshop
866 tractor
867 trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi
868 tray
869 trench coat
870 tricycle, trike, velocipede
871 trimaran
872 tripod
873 triumphal arch
874 trolleybus, trolley coach, trackless trolley
875 trombone
876 tub, vat
877 turnstile
878 typewriter keyboard
879 umbrella
880 unicycle, monocycle
881 upright, upright piano
882 vacuum, vacuum cleaner
883 vase
884 vault
885 velvet
886 vending machine
887 vestment
888 viaduct
889 violin, fiddle
890 volleyball
891 waffle iron
892 wall clock
893 wallet, billfold, notecase, pocketbook
894 wardrobe, closet, press
895 warplane, military plane
896 washbasin, handbasin, washbowl, lavabo, wash-hand basin
897 washer, automatic washer, washing machine
898 water bottle
899 water jug
900 water tower
901 whiskey jug
902 whistle
903 wig
904 window screen
905 window shade
906 Windsor tie
907 wine bottle
908 wing
909 wok
910 wooden spoon
911 wool, woolen, woollen
912 worm fence, snake fence, snake-rail fence, Virginia fence
913 wreck
914 yawl
915 yurt
916 web site, website, internet site, site
917 comic book
918 crossword puzzle, crossword
919 street sign
920 traffic light, traffic signal, stoplight
921 book jacket, dust cover, dust jacket, dust wrapper
922 menu
923 plate
924 guacamole
925 consomme
926 hot pot, hotpot
927 trifle
928 ice cream, icecream
929 ice lolly, lolly, lollipop, popsicle
930 French loaf
931 bagel, beigel
932 pretzel
933 cheeseburger
934 hotdog, hot dog, red hot
935 mashed potato
936 head cabbage
937 broccoli
938 cauliflower
939 zucchini, courgette
940 spaghetti squash
941 acorn squash
942 butternut squash
943 cucumber, cuke
944 artichoke, globe artichoke
945 bell pepper
946 cardoon
947 mushroom
948 Granny Smith
949 strawberry
950 orange
951 lemon
952 fig
953 pineapple, ananas
954 banana
955 jackfruit, jak, jack
956 custard apple
957 pomegranate
958 hay
959 carbonara
960 chocolate sauce, chocolate syrup
961 dough
962 meat loaf, meatloaf
963 pizza, pizza pie
964 potpie
965 burrito
966 red wine
967 espresso
968 cup
969 eggnog
970 alp
971 bubble
972 cliff, drop, drop-off
973 coral reef
974 geyser
975 lakeside, lakeshore
976 promontory, headland, head, foreland
977 sandbar, sand bar
978 seashore, coast, seacoast, sea-coast
979 valley, vale
980 volcano
981 ballplayer, baseball player
982 groom, bridegroom
983 scuba diver
984 rapeseed
985 daisy
986 yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum
987 corn
988 acorn
989 hip, rose hip, rosehip
990 buckeye, horse chestnut, conker
991 coral fungus
992 agaric
993 gyromitra
994 stinkhorn, carrion fungus
995 earthstar
996 hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa
997 bolete
998 ear, spike, capitulum
999 toilet tissue, toilet paper, bathroom tissue
\ No newline at end of file
package com.baidu.paddle.fastdeploy.app.examples.classification;
import static com.baidu.paddle.fastdeploy.app.ui.Utils.decodeBitmap;
import static com.baidu.paddle.fastdeploy.app.ui.Utils.getRealPathFromURI;
import static com.baidu.paddle.fastdeploy.app.ui.Utils.readTxt;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Bundle;
import android.os.SystemClock;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.SeekBar;
import android.widget.TextView;
import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.app.examples.R;
import com.baidu.paddle.fastdeploy.app.ui.view.CameraSurfaceView;
import com.baidu.paddle.fastdeploy.app.ui.view.ResultListView;
import com.baidu.paddle.fastdeploy.app.ui.Utils;
import com.baidu.paddle.fastdeploy.app.ui.view.adapter.BaseResultAdapter;
import com.baidu.paddle.fastdeploy.app.ui.view.model.BaseResultModel;
import com.baidu.paddle.fastdeploy.vision.ClassifyResult;
import com.baidu.paddle.fastdeploy.vision.Visualize;
import com.baidu.paddle.fastdeploy.vision.classification.PaddleClasModel;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
public class ClassificationMainActivity extends Activity implements View.OnClickListener, CameraSurfaceView.OnTextureChangedListener {
private static final String TAG = ClassificationMainActivity.class.getSimpleName();
CameraSurfaceView svPreview;
TextView tvStatus;
ImageButton btnSwitch;
ImageButton btnShutter;
ImageButton btnSettings;
ImageView realtimeToggleButton;
boolean isRealtimeStatusRunning = false;
ImageView backInPreview;
private ImageView albumSelectButton;
private View cameraPageView;
private ViewGroup resultPageView;
private ImageView resultImage;
private ImageView backInResult;
private SeekBar confidenceSeekbar;
private TextView seekbarText;
private float resultNum = 1.0f;
private ResultListView resultView;
private Bitmap picBitmap;
private Bitmap shutterBitmap;
private Bitmap originPicBitmap;
private Bitmap originShutterBitmap;
private boolean isShutterBitmapCopied = false;
public static final int TYPE_UNKNOWN = -1;
public static final int BTN_SHUTTER = 0;
public static final int ALBUM_SELECT = 1;
public static final int REALTIME_DETECT = 2;
private static int TYPE = REALTIME_DETECT;
private static final int REQUEST_PERMISSION_CODE_STORAGE = 101;
private static final int INTENT_CODE_PICK_IMAGE = 100;
private static final int TIME_SLEEP_INTERVAL = 50; // ms
long timeElapsed = 0;
long frameCounter = 0;
// Call 'init' and 'release' manually later
PaddleClasModel predictor = new PaddleClasModel();
private float[] scores;
private int[] labelId;
private boolean initialized;
private List<String> labelText;
private List<BaseResultModel> results = new ArrayList<>();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Fullscreen
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.classification_activity_main);
// Clear all settings to avoid the app crashing due to incorrect settings
initSettings();
// Check and request CAMERA and WRITE_EXTERNAL_STORAGE permissions
if (!checkAllPermissions()) {
requestAllPermissions();
}
// Init the camera preview and UI components
initView();
}
@SuppressLint("NonConstantResourceId")
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.btn_switch:
svPreview.switchCamera();
break;
case R.id.btn_shutter:
TYPE = BTN_SHUTTER;
shutterAndPauseCamera();
resultView.setAdapter(null);
break;
case R.id.btn_settings:
startActivity(new Intent(ClassificationMainActivity.this, ClassificationSettingsActivity.class));
break;
case R.id.realtime_toggle_btn:
toggleRealtimeStyle();
break;
case R.id.back_in_preview:
finish();
break;
case R.id.iv_select:
TYPE = ALBUM_SELECT;
// Check whether the storage permission has been granted.
if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
// Request the storage permission before opening the album.
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_PERMISSION_CODE_STORAGE);
} else {
Intent intent = new Intent(Intent.ACTION_PICK);
intent.setType("image/*");
startActivityForResult(intent, INTENT_CODE_PICK_IMAGE);
}
resultView.setAdapter(null);
break;
case R.id.back_in_result:
back();
break;
}
}
@Override
public void onBackPressed() {
super.onBackPressed();
back();
}
private void back() {
resultPageView.setVisibility(View.GONE);
cameraPageView.setVisibility(View.VISIBLE);
TYPE = REALTIME_DETECT;
isShutterBitmapCopied = false;
svPreview.onResume();
results.clear();
if (scores != null) {
scores = null;
}
if (labelId != null) {
labelId = null;
}
}
private void shutterAndPauseCamera() {
new Thread(new Runnable() {
@Override
public void run() {
try {
// Sleep for a while to make sure the picture has been captured.
Thread.sleep(TIME_SLEEP_INTERVAL * 10); // 500ms
} catch (InterruptedException e) {
e.printStackTrace();
}
runOnUiThread(new Runnable() {
@SuppressLint("SetTextI18n")
public void run() {
// This code runs on the main thread.
svPreview.onPause();
cameraPageView.setVisibility(View.GONE);
resultPageView.setVisibility(View.VISIBLE);
seekbarText.setText(resultNum + "");
confidenceSeekbar.setProgress((int) (resultNum * 100));
if (shutterBitmap != null && !shutterBitmap.isRecycled()) {
resultImage.setImageBitmap(shutterBitmap);
} else {
new AlertDialog.Builder(ClassificationMainActivity.this)
.setTitle("Empty Result!")
.setMessage("Current picture is empty, please shutting it again!")
.setCancelable(true)
.show();
}
}
});
}
}).start();
}
private void copyBitmapFromCamera(Bitmap ARGB8888ImageBitmap) {
if (isShutterBitmapCopied || ARGB8888ImageBitmap == null) {
return;
}
if (!ARGB8888ImageBitmap.isRecycled()) {
synchronized (this) {
shutterBitmap = ARGB8888ImageBitmap.copy(Bitmap.Config.ARGB_8888, true);
originShutterBitmap = ARGB8888ImageBitmap.copy(Bitmap.Config.ARGB_8888, true);
}
SystemClock.sleep(TIME_SLEEP_INTERVAL);
isShutterBitmapCopied = true;
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == INTENT_CODE_PICK_IMAGE) {
if (resultCode == Activity.RESULT_OK) {
cameraPageView.setVisibility(View.GONE);
resultPageView.setVisibility(View.VISIBLE);
seekbarText.setText(resultNum + "");
confidenceSeekbar.setProgress((int) (resultNum * 100));
Uri uri = data.getData();
String path = getRealPathFromURI(this, uri);
picBitmap = decodeBitmap(path, 720, 1280);
originPicBitmap = picBitmap.copy(Bitmap.Config.ARGB_8888, true);
resultImage.setImageBitmap(picBitmap);
}
}
}
private void toggleRealtimeStyle() {
if (isRealtimeStatusRunning) {
isRealtimeStatusRunning = false;
realtimeToggleButton.setImageResource(R.drawable.realtime_stop_btn);
svPreview.setOnTextureChangedListener(this);
tvStatus.setVisibility(View.VISIBLE);
} else {
isRealtimeStatusRunning = true;
realtimeToggleButton.setImageResource(R.drawable.realtime_start_btn);
tvStatus.setVisibility(View.GONE);
isShutterBitmapCopied = false;
svPreview.setOnTextureChangedListener(new CameraSurfaceView.OnTextureChangedListener() {
@Override
public boolean onTextureChanged(Bitmap ARGB8888ImageBitmap) {
if (TYPE == BTN_SHUTTER) {
copyBitmapFromCamera(ARGB8888ImageBitmap);
}
return false;
}
});
}
}
@Override
public boolean onTextureChanged(Bitmap ARGB8888ImageBitmap) {
if (TYPE == BTN_SHUTTER) {
copyBitmapFromCamera(ARGB8888ImageBitmap);
return false;
}
boolean modified = false;
long tc = System.currentTimeMillis();
ClassifyResult result = predictor.predict(ARGB8888ImageBitmap,
true, ClassificationSettingsActivity.scoreThreshold);
timeElapsed += (System.currentTimeMillis() - tc);
modified = result.initialized();
frameCounter++;
if (frameCounter >= 30) {
final int fps = (int) (1000 / (timeElapsed / 30));
runOnUiThread(new Runnable() {
@SuppressLint("SetTextI18n")
public void run() {
tvStatus.setText(Integer.toString(fps) + "fps");
}
});
frameCounter = 0;
timeElapsed = 0;
}
return modified;
}
@Override
protected void onResume() {
super.onResume();
// Reload settings and re-initialize the predictor
checkAndUpdateSettings();
// Only enable the camera after the permissions have been granted
if (!checkAllPermissions()) {
svPreview.disableCamera();
} else {
svPreview.enableCamera();
}
svPreview.onResume();
}
@Override
protected void onPause() {
super.onPause();
svPreview.onPause();
}
@Override
protected void onDestroy() {
if (predictor != null) {
predictor.release();
}
super.onDestroy();
}
public void initView() {
TYPE = REALTIME_DETECT;
svPreview = (CameraSurfaceView) findViewById(R.id.sv_preview);
svPreview.setOnTextureChangedListener(this);
tvStatus = (TextView) findViewById(R.id.tv_status);
btnSwitch = (ImageButton) findViewById(R.id.btn_switch);
btnSwitch.setOnClickListener(this);
btnShutter = (ImageButton) findViewById(R.id.btn_shutter);
btnShutter.setOnClickListener(this);
btnSettings = (ImageButton) findViewById(R.id.btn_settings);
btnSettings.setOnClickListener(this);
realtimeToggleButton = findViewById(R.id.realtime_toggle_btn);
realtimeToggleButton.setOnClickListener(this);
backInPreview = findViewById(R.id.back_in_preview);
backInPreview.setOnClickListener(this);
albumSelectButton = findViewById(R.id.iv_select);
albumSelectButton.setOnClickListener(this);
cameraPageView = findViewById(R.id.camera_page);
resultPageView = findViewById(R.id.result_page);
resultImage = findViewById(R.id.result_image);
backInResult = findViewById(R.id.back_in_result);
backInResult.setOnClickListener(this);
confidenceSeekbar = findViewById(R.id.confidence_seekbar);
seekbarText = findViewById(R.id.seekbar_text);
resultView = findViewById(R.id.result_list_view);
confidenceSeekbar.setMax(100);
confidenceSeekbar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
float resultConfidence = seekBar.getProgress() / 100f;
BigDecimal bd = new BigDecimal(resultConfidence);
resultNum = bd.setScale(1, BigDecimal.ROUND_HALF_UP).floatValue();
seekbarText.setText(resultNum + "");
confidenceSeekbar.setProgress((int) (resultNum * 100));
results.clear();
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (TYPE == ALBUM_SELECT) {
SystemClock.sleep(500);
detail(picBitmap);
picBitmap = originPicBitmap.copy(Bitmap.Config.ARGB_8888, true);
} else {
SystemClock.sleep(500);
detail(shutterBitmap);
shutterBitmap = originShutterBitmap.copy(Bitmap.Config.ARGB_8888, true);
}
}
});
}
});
}
private void detail(Bitmap bitmap) {
ClassifyResult result = predictor.predict(bitmap, true, resultNum);
scores = result.mScores;
labelId = result.mLabelIds;
initialized = result.initialized();
if (initialized) {
for (int i = 0; i < labelId.length; i++) {
if (scores[i] > resultNum) {
int idx = labelId[i];
String text = labelText.get(idx);
text = text.substring(text.indexOf(" "));
results.add(new BaseResultModel(idx, text, scores[i]));
}
}
}
BaseResultAdapter adapter = new BaseResultAdapter(getBaseContext(), R.layout.classification_result_page_item, results);
resultView.setAdapter(adapter);
resultView.invalidate();
resultImage.setImageBitmap(bitmap);
resultNum = 1.0f;
}
@SuppressLint("ApplySharedPref")
public void initSettings() {
SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.clear();
editor.commit();
ClassificationSettingsActivity.resetSettings();
}
public void checkAndUpdateSettings() {
if (ClassificationSettingsActivity.checkAndUpdateSettings(this)) {
String realModelDir = getCacheDir() + "/" + ClassificationSettingsActivity.modelDir;
Utils.copyDirectoryFromAssets(this, ClassificationSettingsActivity.modelDir, realModelDir);
String realLabelPath = getCacheDir() + "/" + ClassificationSettingsActivity.labelPath;
Utils.copyFileFromAssets(this, ClassificationSettingsActivity.labelPath, realLabelPath);
String modelFile = realModelDir + "/" + "inference.pdmodel";
String paramsFile = realModelDir + "/" + "inference.pdiparams";
String configFile = realModelDir + "/" + "inference_cls.yaml";
String labelFile = realLabelPath;
labelText = readTxt(labelFile);
RuntimeOption option = new RuntimeOption();
option.setCpuThreadNum(ClassificationSettingsActivity.cpuThreadNum);
option.setLitePowerMode(ClassificationSettingsActivity.cpuPowerMode);
if (Boolean.parseBoolean(ClassificationSettingsActivity.enableLiteFp16)) {
option.enableLiteFp16();
}
predictor.init(modelFile, paramsFile, configFile, labelFile, option);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
@NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (grantResults[0] != PackageManager.PERMISSION_GRANTED || grantResults[1] != PackageManager.PERMISSION_GRANTED) {
new AlertDialog.Builder(ClassificationMainActivity.this)
.setTitle("Permission denied")
.setMessage("Click to force quit the app, then open Settings->Apps & notifications->Target " +
"App->Permissions to grant all of the permissions.")
.setCancelable(false)
.setPositiveButton("Exit", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
ClassificationMainActivity.this.finish();
}
}).show();
}
}
private void requestAllPermissions() {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.CAMERA}, 0);
}
private boolean checkAllPermissions() {
return ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED
&& ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
}
}
package com.baidu.paddle.fastdeploy.app.examples.classification;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.EditTextPreference;
import android.preference.ListPreference;
import android.preference.PreferenceManager;
import android.support.v7.app.ActionBar;
import com.baidu.paddle.fastdeploy.app.examples.R;
import com.baidu.paddle.fastdeploy.app.ui.Utils;
import com.baidu.paddle.fastdeploy.app.ui.view.AppCompatPreferenceActivity;
import java.util.ArrayList;
import java.util.List;
public class ClassificationSettingsActivity extends AppCompatPreferenceActivity implements
SharedPreferences.OnSharedPreferenceChangeListener {
private static final String TAG = ClassificationSettingsActivity.class.getSimpleName();
static public int selectedModelIdx = -1;
static public String modelDir = "";
static public String labelPath = "";
static public int cpuThreadNum = 2;
static public String cpuPowerMode = "";
static public float scoreThreshold = 0.1f;
static public String enableLiteFp16 = "true";
ListPreference lpChoosePreInstalledModel = null;
EditTextPreference etModelDir = null;
EditTextPreference etLabelPath = null;
ListPreference lpCPUThreadNum = null;
ListPreference lpCPUPowerMode = null;
EditTextPreference etScoreThreshold = null;
ListPreference lpEnableLiteFp16 = null;
List<String> preInstalledModelDirs = null;
List<String> preInstalledLabelPaths = null;
List<String> preInstalledCPUThreadNums = null;
List<String> preInstalledCPUPowerModes = null;
List<String> preInstalledScoreThresholds = null;
List<String> preInstalledEnableLiteFp16s = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
addPreferencesFromResource(R.xml.classification_settings);
ActionBar supportActionBar = getSupportActionBar();
if (supportActionBar != null) {
supportActionBar.setDisplayHomeAsUpEnabled(true);
}
// Initialize pre-installed models
preInstalledModelDirs = new ArrayList<String>();
preInstalledLabelPaths = new ArrayList<String>();
preInstalledCPUThreadNums = new ArrayList<String>();
preInstalledCPUPowerModes = new ArrayList<String>();
preInstalledScoreThresholds = new ArrayList<String>();
preInstalledEnableLiteFp16s = new ArrayList<String>();
preInstalledModelDirs.add(getString(R.string.CLASSIFICATION_MODEL_DIR_DEFAULT));
preInstalledLabelPaths.add(getString(R.string.CLASSIFICATION_LABEL_PATH_DEFAULT));
preInstalledCPUThreadNums.add(getString(R.string.CPU_THREAD_NUM_DEFAULT));
preInstalledCPUPowerModes.add(getString(R.string.CPU_POWER_MODE_DEFAULT));
preInstalledScoreThresholds.add(getString(R.string.SCORE_THRESHOLD_CLASSIFICATION));
preInstalledEnableLiteFp16s.add(getString(R.string.ENABLE_LITE_FP16_MODE_DEFAULT));
// Setup UI components
lpChoosePreInstalledModel =
(ListPreference) findPreference(getString(R.string.CHOOSE_PRE_INSTALLED_MODEL_KEY));
String[] preInstalledModelNames = new String[preInstalledModelDirs.size()];
for (int i = 0; i < preInstalledModelDirs.size(); i++) {
preInstalledModelNames[i] = preInstalledModelDirs.get(i).substring(preInstalledModelDirs.get(i).lastIndexOf("/") + 1);
}
lpChoosePreInstalledModel.setEntries(preInstalledModelNames);
lpChoosePreInstalledModel.setEntryValues(preInstalledModelDirs.toArray(new String[preInstalledModelDirs.size()]));
lpCPUThreadNum = (ListPreference) findPreference(getString(R.string.CPU_THREAD_NUM_KEY));
lpCPUPowerMode = (ListPreference) findPreference(getString(R.string.CPU_POWER_MODE_KEY));
etModelDir = (EditTextPreference) findPreference(getString(R.string.MODEL_DIR_KEY));
etModelDir.setTitle("Model dir (SDCard: " + Utils.getSDCardDirectory() + ")");
etLabelPath = (EditTextPreference) findPreference(getString(R.string.LABEL_PATH_KEY));
etLabelPath.setTitle("Label path (SDCard: " + Utils.getSDCardDirectory() + ")");
etScoreThreshold = (EditTextPreference) findPreference(getString(R.string.SCORE_THRESHOLD_KEY));
lpEnableLiteFp16 = (ListPreference) findPreference(getString(R.string.ENABLE_LITE_FP16_MODE_KEY));
}
@SuppressLint("ApplySharedPref")
private void reloadSettingsAndUpdateUI() {
SharedPreferences sharedPreferences = getPreferenceScreen().getSharedPreferences();
String selected_model_dir = sharedPreferences.getString(getString(R.string.CHOOSE_PRE_INSTALLED_MODEL_KEY),
getString(R.string.CLASSIFICATION_MODEL_DIR_DEFAULT));
int selected_model_idx = lpChoosePreInstalledModel.findIndexOfValue(selected_model_dir);
if (selected_model_idx >= 0 && selected_model_idx < preInstalledModelDirs.size() && selected_model_idx != selectedModelIdx) {
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.putString(getString(R.string.MODEL_DIR_KEY), preInstalledModelDirs.get(selected_model_idx));
editor.putString(getString(R.string.LABEL_PATH_KEY), preInstalledLabelPaths.get(selected_model_idx));
editor.putString(getString(R.string.CPU_THREAD_NUM_KEY), preInstalledCPUThreadNums.get(selected_model_idx));
editor.putString(getString(R.string.CPU_POWER_MODE_KEY), preInstalledCPUPowerModes.get(selected_model_idx));
editor.putString(getString(R.string.SCORE_THRESHOLD_KEY), preInstalledScoreThresholds.get(selected_model_idx));
editor.putString(getString(R.string.ENABLE_LITE_FP16_MODE_KEY), preInstalledEnableLiteFp16s.get(selected_model_idx));
editor.commit();
lpChoosePreInstalledModel.setSummary(selected_model_dir);
selectedModelIdx = selected_model_idx;
}
String model_dir = sharedPreferences.getString(getString(R.string.MODEL_DIR_KEY),
getString(R.string.CLASSIFICATION_MODEL_DIR_DEFAULT));
String label_path = sharedPreferences.getString(getString(R.string.LABEL_PATH_KEY),
getString(R.string.CLASSIFICATION_LABEL_PATH_DEFAULT));
String cpu_thread_num = sharedPreferences.getString(getString(R.string.CPU_THREAD_NUM_KEY),
getString(R.string.CPU_THREAD_NUM_DEFAULT));
String cpu_power_mode = sharedPreferences.getString(getString(R.string.CPU_POWER_MODE_KEY),
getString(R.string.CPU_POWER_MODE_DEFAULT));
String score_threshold = sharedPreferences.getString(getString(R.string.SCORE_THRESHOLD_KEY),
getString(R.string.SCORE_THRESHOLD_CLASSIFICATION));
String enable_lite_fp16 = sharedPreferences.getString(getString(R.string.ENABLE_LITE_FP16_MODE_KEY),
getString(R.string.ENABLE_LITE_FP16_MODE_DEFAULT));
etModelDir.setSummary(model_dir);
etLabelPath.setSummary(label_path);
lpCPUThreadNum.setValue(cpu_thread_num);
lpCPUThreadNum.setSummary(cpu_thread_num);
lpCPUPowerMode.setValue(cpu_power_mode);
lpCPUPowerMode.setSummary(cpu_power_mode);
etScoreThreshold.setSummary(score_threshold);
etScoreThreshold.setText(score_threshold);
lpEnableLiteFp16.setValue(enable_lite_fp16);
lpEnableLiteFp16.setSummary(enable_lite_fp16);
}
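// Pull the current values out of SharedPreferences into the static fields and report
// whether any of them changed since the previous call.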
static boolean checkAndUpdateSettings(Context ctx) {
boolean settingsChanged = false;
SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(ctx);
String model_dir = sharedPreferences.getString(ctx.getString(R.string.MODEL_DIR_KEY),
ctx.getString(R.string.CLASSIFICATION_MODEL_DIR_DEFAULT));
settingsChanged |= !modelDir.equalsIgnoreCase(model_dir);
modelDir = model_dir;
String label_path = sharedPreferences.getString(ctx.getString(R.string.LABEL_PATH_KEY),
ctx.getString(R.string.CLASSIFICATION_LABEL_PATH_DEFAULT));
settingsChanged |= !labelPath.equalsIgnoreCase(label_path);
labelPath = label_path;
String cpu_thread_num = sharedPreferences.getString(ctx.getString(R.string.CPU_THREAD_NUM_KEY),
ctx.getString(R.string.CPU_THREAD_NUM_DEFAULT));
settingsChanged |= cpuThreadNum != Integer.parseInt(cpu_thread_num);
cpuThreadNum = Integer.parseInt(cpu_thread_num);
String cpu_power_mode = sharedPreferences.getString(ctx.getString(R.string.CPU_POWER_MODE_KEY),
ctx.getString(R.string.CPU_POWER_MODE_DEFAULT));
settingsChanged |= !cpuPowerMode.equalsIgnoreCase(cpu_power_mode);
cpuPowerMode = cpu_power_mode;
String score_threshold = sharedPreferences.getString(ctx.getString(R.string.SCORE_THRESHOLD_KEY),
ctx.getString(R.string.SCORE_THRESHOLD_CLASSIFICATION));
settingsChanged |= scoreThreshold != Float.parseFloat(score_threshold);
scoreThreshold = Float.parseFloat(score_threshold);
String enable_lite_fp16 = sharedPreferences.getString(ctx.getString(R.string.ENABLE_LITE_FP16_MODE_KEY),
ctx.getString(R.string.ENABLE_LITE_FP16_MODE_DEFAULT));
settingsChanged |= !enableLiteFp16.equalsIgnoreCase(enable_lite_fp16);
enableLiteFp16 = enable_lite_fp16;
return settingsChanged;
}
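// Reset the cached settings so the next checkAndUpdateSettings() call reports a change.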
static void resetSettings() {
selectedModelIdx = -1;
modelDir = "";
labelPath = "";
cpuThreadNum = 2;
cpuPowerMode = "";
scoreThreshold = 0.1f;
enableLiteFp16 = "true";
}
@Override
protected void onResume() {
super.onResume();
getPreferenceScreen().getSharedPreferences().registerOnSharedPreferenceChangeListener(this);
reloadSettingsAndUpdateUI();
}
@Override
protected void onPause() {
super.onPause();
getPreferenceScreen().getSharedPreferences().unregisterOnSharedPreferenceChangeListener(this);
}
@Override
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
reloadSettingsAndUpdateUI();
}
}
package com.baidu.paddle.fastdeploy.app.ui;
import android.content.Context;
import android.content.res.Resources;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;
import android.net.Uri;
import android.opengl.GLES20;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.Surface;
import android.view.WindowManager;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
public class Utils {
private static final String TAG = Utils.class.getSimpleName();
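// Create every missing directory along the given absolute path.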
public static void RecursiveCreateDirectories(String fileDir) {
String[] fileDirs = fileDir.split("\\/");
String topPath = "";
for (int i = 0; i < fileDirs.length; i++) {
topPath += "/" + fileDirs[i];
File file = new File(topPath);
if (!file.exists()) {
file.mkdir();
}
}
}
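// Copy a single asset file to dstPath, creating any missing parent directories first.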
public static void copyFileFromAssets(Context appCtx, String srcPath, String dstPath) {
if (srcPath.isEmpty() || dstPath.isEmpty()) {
return;
}
String dstDir = dstPath.substring(0, dstPath.lastIndexOf('/'));
if (dstDir.length() > 0) {
RecursiveCreateDirectories(dstDir);
}
InputStream is = null;
OutputStream os = null;
try {
is = new BufferedInputStream(appCtx.getAssets().open(srcPath));
os = new BufferedOutputStream(new FileOutputStream(new File(dstPath)));
byte[] buffer = new byte[1024];
int length = 0;
while ((length = is.read(buffer)) != -1) {
os.write(buffer, 0, length);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
// Guard against NPEs when open() or the copy failed before both streams existed.
if (os != null) {
os.close();
}
if (is != null) {
is.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
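// Recursively copy an asset directory tree into dstDir.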
public static void copyDirectoryFromAssets(Context appCtx, String srcDir, String dstDir) {
if (srcDir.isEmpty() || dstDir.isEmpty()) {
return;
}
try {
if (!new File(dstDir).exists()) {
new File(dstDir).mkdirs();
}
for (String fileName : appCtx.getAssets().list(srcDir)) {
String srcSubPath = srcDir + File.separator + fileName;
String dstSubPath = dstDir + File.separator + fileName;
if (new File(srcSubPath).isDirectory()) {
copyDirectoryFromAssets(appCtx, srcSubPath, dstSubPath);
} else {
copyFileFromAssets(appCtx, srcSubPath, dstSubPath);
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
public static float[] parseFloatsFromString(String string, String delimiter) {
String[] pieces = string.trim().toLowerCase().split(delimiter);
float[] floats = new float[pieces.length];
for (int i = 0; i < pieces.length; i++) {
floats[i] = Float.parseFloat(pieces[i].trim());
}
return floats;
}
public static long[] parseLongsFromString(String string, String delimiter) {
String[] pieces = string.trim().toLowerCase().split(delimiter);
long[] longs = new long[pieces.length];
for (int i = 0; i < pieces.length; i++) {
longs[i] = Long.parseLong(pieces[i].trim());
}
return longs;
}
public static String getSDCardDirectory() {
return Environment.getExternalStorageDirectory().getAbsolutePath();
}
public static String getDCIMDirectory() {
return Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM).getAbsolutePath();
}
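// Pick the supported preview size closest in height to the request, preferring sizes
// whose aspect ratio is within ASPECT_TOLERANCE of w/h.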
public static Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.3;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
// Try to find a size that matches both the target aspect ratio and height
for (Camera.Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
// No size matches the aspect ratio; ignore that requirement
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public static int getScreenWidth() {
return Resources.getSystem().getDisplayMetrics().widthPixels;
}
public static int getScreenHeight() {
return Resources.getSystem().getDisplayMetrics().heightPixels;
}
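// Compute the clockwise rotation that aligns the camera preview with the current
// display orientation; front cameras are additionally mirror-compensated.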
public static int getCameraDisplayOrientation(Context context, int cameraId) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
int rotation = wm.getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate for the front camera mirror
} else {
// back-facing
result = (info.orientation - degrees + 360) % 360;
}
return result;
}
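// Compile a vertex/fragment shader pair and link them into a program; returns the
// program id, or 0 if compilation, linking, or validation fails.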
public static int createShaderProgram(String vss, String fss) {
int vshader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vshader, vss);
GLES20.glCompileShader(vshader);
int[] status = new int[1];
GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, status, 0);
if (status[0] == 0) {
Log.e(TAG, GLES20.glGetShaderInfoLog(vshader));
GLES20.glDeleteShader(vshader);
vshader = 0;
return 0;
}
int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fshader, fss);
GLES20.glCompileShader(fshader);
GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, status, 0);
if (status[0] == 0) {
Log.e(TAG, GLES20.glGetShaderInfoLog(fshader));
GLES20.glDeleteShader(vshader);
GLES20.glDeleteShader(fshader);
fshader = 0;
return 0;
}
int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vshader);
GLES20.glAttachShader(program, fshader);
GLES20.glLinkProgram(program);
GLES20.glDeleteShader(vshader);
GLES20.glDeleteShader(fshader);
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
if (status[0] == 0) {
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
program = 0;
return 0;
}
GLES20.glValidateProgram(program);
GLES20.glGetProgramiv(program, GLES20.GL_VALIDATE_STATUS, status, 0);
if (status[0] == 0) {
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
return 0;
}
return program;
}
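// Whether the device SoC has a supported NPU (currently Kirin 810/990 only).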
public static boolean isSupportedNPU() {
String hardware = android.os.Build.HARDWARE;
return hardware.equalsIgnoreCase("kirin810") || hardware.equalsIgnoreCase("kirin990");
}
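// Decode a bitmap from disk, downsampling so it is no larger than needed, then scale
// it to exactly displayWidth x displayHeight.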
public static Bitmap decodeBitmap(String path, int displayWidth, int displayHeight) {
BitmapFactory.Options op = new BitmapFactory.Options();
op.inJustDecodeBounds = true; // Read only the bitmap bounds (width/height), not the pixels.
Bitmap bmp = BitmapFactory.decodeFile(path, op); // Fetch the size information.
int wRatio = (int) Math.ceil(op.outWidth / (float) displayWidth); // Compute the scale ratios.
int hRatio = (int) Math.ceil(op.outHeight / (float) displayHeight);
// If the image exceeds the requested display size, downsample by the larger ratio.
if (wRatio > 1 && hRatio > 1) {
if (wRatio > hRatio) {
// Width overshoots more: sample by the width ratio, so the height ends up below the target.
op.inSampleSize = wRatio;
} else {
op.inSampleSize = hRatio;
}
}
op.inJustDecodeBounds = false;
bmp = BitmapFactory.decodeFile(path, op);
// Create a Bitmap with a given width and height from the original Bitmap.
return Bitmap.createScaledBitmap(bmp, displayWidth, displayHeight, true);
}
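// Resolve a content:// URI to a filesystem path via the MediaStore cursor, falling
// back to Uri.getPath() when no cursor is available.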
public static String getRealPathFromURI(Context context, Uri contentURI) {
String result;
Cursor cursor = null;
try {
cursor = context.getContentResolver().query(contentURI, null, null, null, null);
} catch (Throwable e) {
e.printStackTrace();
}
if (cursor == null) {
result = contentURI.getPath();
} else {
cursor.moveToFirst();
int idx = cursor.getColumnIndex(MediaStore.Images.ImageColumns.DATA);
result = cursor.getString(idx);
cursor.close();
}
return result;
}
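// Read a text file line by line into a list; returns null if the file is missing or unreadable.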
public static List<String> readTxt(String txtPath) {
File file = new File(txtPath);
if (file.isFile() && file.exists()) {
// try-with-resources closes the reader (and the underlying streams) on every path.
try (BufferedReader bufferedReader = new BufferedReader(
new InputStreamReader(new FileInputStream(file)))) {
String text;
List<String> labels = new ArrayList<>();
while ((text = bufferedReader.readLine()) != null) {
labels.add(text);
}
return labels;
} catch (Exception e) {
e.printStackTrace();
}
}
return null;
}
}
package com.baidu.paddle.fastdeploy.app.ui.layout;
import android.content.Context;
import android.graphics.Color;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.widget.RelativeLayout;
public class ActionBarLayout extends RelativeLayout {
private int layoutHeight = 150;
public ActionBarLayout(Context context) {
super(context);
}
public ActionBarLayout(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
}
public ActionBarLayout(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
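// Fixed-height bar: onMeasure forces the height to layoutHeight pixels and applies a
// translucent black background.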
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int width = MeasureSpec.getSize(widthMeasureSpec);
setMeasuredDimension(width, layoutHeight);
setBackgroundColor(Color.BLACK);
setAlpha(0.9f);
}
}
\ No newline at end of file
package com.baidu.paddle.fastdeploy.app.ui.view;
import android.content.res.Configuration;
import android.os.Bundle;
import android.preference.PreferenceActivity;
import android.support.annotation.LayoutRes;
import android.support.annotation.Nullable;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatDelegate;
import android.support.v7.widget.Toolbar;
import android.view.MenuInflater;
import android.view.View;
import android.view.ViewGroup;
/**
* A {@link PreferenceActivity} which implements and proxies the necessary calls
* to be used with AppCompat.
* <p>
* This technique can be used with an {@link android.app.Activity} class, not just
* {@link PreferenceActivity}.
*/
public abstract class AppCompatPreferenceActivity extends PreferenceActivity {
private AppCompatDelegate mDelegate;
@Override
protected void onCreate(Bundle savedInstanceState) {
getDelegate().installViewFactory();
getDelegate().onCreate(savedInstanceState);
super.onCreate(savedInstanceState);
}
@Override
protected void onPostCreate(Bundle savedInstanceState) {
super.onPostCreate(savedInstanceState);
getDelegate().onPostCreate(savedInstanceState);
}
public ActionBar getSupportActionBar() {
return getDelegate().getSupportActionBar();
}
public void setSupportActionBar(@Nullable Toolbar toolbar) {
getDelegate().setSupportActionBar(toolbar);
}
@Override
public MenuInflater getMenuInflater() {
return getDelegate().getMenuInflater();
}
@Override
public void setContentView(@LayoutRes int layoutResID) {
getDelegate().setContentView(layoutResID);
}
@Override
public void setContentView(View view) {
getDelegate().setContentView(view);
}
@Override
public void setContentView(View view, ViewGroup.LayoutParams params) {
getDelegate().setContentView(view, params);
}
@Override
public void addContentView(View view, ViewGroup.LayoutParams params) {
getDelegate().addContentView(view, params);
}
@Override
protected void onPostResume() {
super.onPostResume();
getDelegate().onPostResume();
}
@Override
protected void onTitleChanged(CharSequence title, int color) {
super.onTitleChanged(title, color);
getDelegate().setTitle(title);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
getDelegate().onConfigurationChanged(newConfig);
}
@Override
protected void onStop() {
super.onStop();
getDelegate().onStop();
}
@Override
protected void onDestroy() {
super.onDestroy();
getDelegate().onDestroy();
}
public void invalidateOptionsMenu() {
getDelegate().invalidateOptionsMenu();
}
private AppCompatDelegate getDelegate() {
if (mDelegate == null) {
mDelegate = AppCompatDelegate.create(this, null);
}
return mDelegate;
}
}
package com.baidu.paddle.fastdeploy.app.ui.view;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Size;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.GLSurfaceView.Renderer;
import android.opengl.GLUtils;
import android.opengl.Matrix;
import android.util.AttributeSet;
import android.util.Log;
import com.baidu.paddle.fastdeploy.app.ui.Utils;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.List;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
public class CameraSurfaceView extends GLSurfaceView implements Renderer,
SurfaceTexture.OnFrameAvailableListener {
private static final String TAG = CameraSurfaceView.class.getSimpleName();
public static int EXPECTED_PREVIEW_WIDTH = 1280; // 1920
public static int EXPECTED_PREVIEW_HEIGHT = 720; // 960
protected int numberOfCameras;
protected int selectedCameraId;
protected boolean disableCamera = false;
protected Camera camera;
protected Context context;
protected SurfaceTexture surfaceTexture;
protected int surfaceWidth = 0;
protected int surfaceHeight = 0;
protected int textureWidth = 0;
protected int textureHeight = 0;
protected Bitmap ARGB8888ImageBitmap;
protected boolean bitmapReleaseMode = true;
// In order to manipulate the camera preview data and render the modified
// frame to the screen, three textures are created; the data flows as follows:
// preview data -> camTextureId -> fboTexureId -> drawTexureId -> framebuffer
protected int[] fbo = {0};
protected int[] camTextureId = {0};
protected int[] fboTexureId = {0};
protected int[] drawTexureId = {0};
private final String vss = ""
+ "attribute vec2 vPosition;\n"
+ "attribute vec2 vTexCoord;\n" + "varying vec2 texCoord;\n"
+ "void main() {\n" + " texCoord = vTexCoord;\n"
+ " gl_Position = vec4 (vPosition.x, vPosition.y, 0.0, 1.0);\n"
+ "}";
private final String fssCam2FBO = ""
+ "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "varying vec2 texCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";
private final String fssTex2Screen = ""
+ "precision mediump float;\n"
+ "uniform sampler2D sTexture;\n"
+ "varying vec2 texCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";
private final float[] vertexCoords = {
-1, -1,
-1, 1,
1, -1,
1, 1};
private float[] textureCoords = {
0, 1,
0, 0,
1, 1,
1, 0};
private FloatBuffer vertexCoordsBuffer;
private FloatBuffer textureCoordsBuffer;
private int progCam2FBO = -1;
private int progTex2Screen = -1;
private int vcCam2FBO;
private int tcCam2FBO;
private int vcTex2Screen;
private int tcTex2Screen;
public void setBitmapReleaseMode(boolean mode) {
synchronized (this) {
bitmapReleaseMode = mode;
}
}
public Bitmap getBitmap() {
return ARGB8888ImageBitmap; // May be null or already recycled.
}
public interface OnTextureChangedListener {
boolean onTextureChanged(Bitmap ARGB8888ImageBitmap);
}
private OnTextureChangedListener onTextureChangedListener = null;
public void setOnTextureChangedListener(OnTextureChangedListener listener) {
onTextureChangedListener = listener;
}
public CameraSurfaceView(Context ctx, AttributeSet attrs) {
super(ctx, attrs);
context = ctx;
setEGLContextClientVersion(2);
setRenderer(this);
setRenderMode(RENDERMODE_WHEN_DIRTY);
// Find the total number of available cameras and the ID of the default camera
numberOfCameras = Camera.getNumberOfCameras();
CameraInfo cameraInfo = new CameraInfo();
for (int i = 0; i < numberOfCameras; i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == CameraInfo.CAMERA_FACING_BACK) {
selectedCameraId = i;
}
}
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// Create an OES texture for storing the camera preview data (YUV format)
GLES20.glGenTextures(1, camTextureId, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, camTextureId[0]);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
surfaceTexture = new SurfaceTexture(camTextureId[0]);
surfaceTexture.setOnFrameAvailableListener(this);
// Prepare vertex and texture coordinates
int bytes = vertexCoords.length * Float.SIZE / Byte.SIZE;
vertexCoordsBuffer = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
textureCoordsBuffer = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
vertexCoordsBuffer.put(vertexCoords).position(0);
textureCoordsBuffer.put(textureCoords).position(0);
// Create vertex and fragment shaders
// camTextureId->fboTexureId
progCam2FBO = Utils.createShaderProgram(vss, fssCam2FBO);
vcCam2FBO = GLES20.glGetAttribLocation(progCam2FBO, "vPosition");
tcCam2FBO = GLES20.glGetAttribLocation(progCam2FBO, "vTexCoord");
GLES20.glEnableVertexAttribArray(vcCam2FBO);
GLES20.glEnableVertexAttribArray(tcCam2FBO);
// fboTexureId/drawTexureId -> screen
progTex2Screen = Utils.createShaderProgram(vss, fssTex2Screen);
vcTex2Screen = GLES20.glGetAttribLocation(progTex2Screen, "vPosition");
tcTex2Screen = GLES20.glGetAttribLocation(progTex2Screen, "vTexCoord");
GLES20.glEnableVertexAttribArray(vcTex2Screen);
GLES20.glEnableVertexAttribArray(tcTex2Screen);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
surfaceWidth = width;
surfaceHeight = height;
openCamera();
}
@Override
public void onDrawFrame(GL10 gl) {
if (surfaceTexture == null) return;
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
surfaceTexture.updateTexImage();
float[] matrix = new float[16];
surfaceTexture.getTransformMatrix(matrix);
// camTextureId->fboTexureId
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo[0]);
GLES20.glViewport(0, 0, textureWidth, textureHeight);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(progCam2FBO);
GLES20.glVertexAttribPointer(vcCam2FBO, 2, GLES20.GL_FLOAT, false, 4 * 2, vertexCoordsBuffer);
textureCoordsBuffer.clear();
textureCoordsBuffer.put(transformTextureCoordinates(textureCoords, matrix));
textureCoordsBuffer.position(0);
GLES20.glVertexAttribPointer(tcCam2FBO, 2, GLES20.GL_FLOAT, false, 4 * 2, textureCoordsBuffer);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, camTextureId[0]);
GLES20.glUniform1i(GLES20.glGetUniformLocation(progCam2FBO, "sTexture"), 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glFlush();
// If a texture-changed listener is registered, read the FBO back into a bitmap and let it process the frame
int targetTexureId = fboTexureId[0];
if (onTextureChangedListener != null) {
// Read pixels of FBO to a bitmap
ByteBuffer pixelBuffer = ByteBuffer.allocate(textureWidth * textureHeight * 4);
GLES20.glReadPixels(0, 0, textureWidth, textureHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer);
ARGB8888ImageBitmap = Bitmap.createBitmap(textureWidth, textureHeight, Bitmap.Config.ARGB_8888);
ARGB8888ImageBitmap.copyPixelsFromBuffer(pixelBuffer);
boolean modified = onTextureChangedListener.onTextureChanged(ARGB8888ImageBitmap);
if (modified) {
targetTexureId = drawTexureId[0];
// Update a bitmap to the GL texture if modified
// glActiveTexture takes a texture unit (GL_TEXTUREn), not a texture name, and the
// draw texture was created as an ordinary 2D texture, so bind it on unit 0 as such.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, targetTexureId);
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, ARGB8888ImageBitmap, 0);
}
if (bitmapReleaseMode) {
ARGB8888ImageBitmap.recycle();
}
}
// fboTexureId/drawTexureId->Screen
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glViewport(0, 0, surfaceWidth, surfaceHeight);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(progTex2Screen);
GLES20.glVertexAttribPointer(vcTex2Screen, 2, GLES20.GL_FLOAT, false, 4 * 2, vertexCoordsBuffer);
textureCoordsBuffer.clear();
textureCoordsBuffer.put(textureCoords);
textureCoordsBuffer.position(0);
GLES20.glVertexAttribPointer(tcTex2Screen, 2, GLES20.GL_FLOAT, false, 4 * 2, textureCoordsBuffer);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, targetTexureId);
GLES20.glUniform1i(GLES20.glGetUniformLocation(progTex2Screen, "sTexture"), 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glFlush();
}
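// Apply the SurfaceTexture transform matrix to each (u, v) texture coordinate pair.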
private float[] transformTextureCoordinates(float[] coords, float[] matrix) {
float[] result = new float[coords.length];
float[] vt = new float[4];
for (int i = 0; i < coords.length; i += 2) {
float[] v = {coords[i], coords[i + 1], 0, 1};
Matrix.multiplyMV(vt, 0, matrix, 0, v, 0);
result[i] = vt[0];
result[i + 1] = vt[1];
}
return result;
}
@Override
public void onResume() {
super.onResume();
}
@Override
public void onPause() {
super.onPause();
releaseCamera();
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
requestRender();
}
public void disableCamera() {
disableCamera = true;
}
public void enableCamera() {
disableCamera = false;
}
public void switchCamera() {
releaseCamera();
selectedCameraId = (selectedCameraId + 1) % numberOfCameras;
openCamera();
}
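// Open the selected camera, pick a preview size, and rebuild the FBO and textures to
// match the (possibly rotated) preview dimensions.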
public void openCamera() {
if (disableCamera) return;
camera = Camera.open(selectedCameraId);
List<Size> supportedPreviewSizes = camera.getParameters().getSupportedPreviewSizes();
Size previewSize = Utils.getOptimalPreviewSize(supportedPreviewSizes, EXPECTED_PREVIEW_WIDTH,
EXPECTED_PREVIEW_HEIGHT);
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(previewSize.width, previewSize.height);
if (parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
camera.setParameters(parameters);
int degree = Utils.getCameraDisplayOrientation(context, selectedCameraId);
camera.setDisplayOrientation(degree);
boolean rotate = degree == 90 || degree == 270;
textureWidth = rotate ? previewSize.height : previewSize.width;
textureHeight = rotate ? previewSize.width : previewSize.height;
// Destroy FBO and draw textures
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glDeleteFramebuffers(1, fbo, 0);
GLES20.glDeleteTextures(1, drawTexureId, 0);
GLES20.glDeleteTextures(1, fboTexureId, 0);
// Normal texture for storing the modified camera preview data (RGBA format)
GLES20.glGenTextures(1, drawTexureId, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, drawTexureId[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, textureWidth, textureHeight, 0,
GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
// FBO texture for storing the camera preview data (RGBA format)
GLES20.glGenTextures(1, fboTexureId, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, fboTexureId[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, textureWidth, textureHeight, 0,
GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
// Generate FBO and bind to FBO texture
GLES20.glGenFramebuffers(1, fbo, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D,
fboTexureId[0], 0);
try {
camera.setPreviewTexture(surfaceTexture);
} catch (IOException exception) {
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
camera.startPreview();
}
public void releaseCamera() {
if (camera != null) {
camera.setPreviewCallback(null);
camera.stopPreview();
camera.release();
camera = null;
}
}
}
package com.baidu.paddle.fastdeploy.app.ui.view;
import android.content.Context;
import android.os.Handler;
import android.util.AttributeSet;
import android.widget.ListView;
public class ResultListView extends ListView {
public ResultListView(Context context) {
super(context);
}
public ResultListView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public ResultListView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
private Handler handler;
public void setHandler(Handler mHandler) {
handler = mHandler;
}
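// Clear the list on the UI thread via the injected handler.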
public void clear() {
handler.post(new Runnable() {
@Override
public void run() {
removeAllViewsInLayout();
invalidate();
}
});
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
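// Measure with a huge AT_MOST height so the list expands to show all items (useful inside a ScrollView).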
int expandSpec = MeasureSpec.makeMeasureSpec(Integer.MAX_VALUE >> 2,
MeasureSpec.AT_MOST);
super.onMeasure(widthMeasureSpec, expandSpec);
}
}
package com.baidu.paddle.fastdeploy.app.ui.view.adapter;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import com.baidu.paddle.fastdeploy.app.examples.R;
import com.baidu.paddle.fastdeploy.app.ui.view.model.BaseResultModel;
import java.text.DecimalFormat;
import java.util.List;
public class BaseResultAdapter extends ArrayAdapter<BaseResultModel> {
private int resourceId;
public BaseResultAdapter(@NonNull Context context, int resource) {
super(context, resource);
}
public BaseResultAdapter(@NonNull Context context, int resource, @NonNull List<BaseResultModel> objects) {
super(context, resource, objects);
resourceId = resource;
}
@NonNull
@Override
public View getView(int position, @Nullable View convertView, @NonNull ViewGroup parent) {
BaseResultModel model = getItem(position);
View view = LayoutInflater.from(getContext()).inflate(resourceId, null);
TextView indexText = (TextView) view.findViewById(R.id.index);
TextView nameText = (TextView) view.findViewById(R.id.name);
TextView confidenceText = (TextView) view.findViewById(R.id.confidence);
indexText.setText(String.valueOf(model.getIndex()));
nameText.setText(String.valueOf(model.getName()));
confidenceText.setText(formatFloatString(model.getConfidence()));
return view;
}
public static String formatFloatString(float number) {
DecimalFormat df = new DecimalFormat("0.00");
return df.format(number);
}
}
package com.baidu.paddle.fastdeploy.app.ui.view.model;
public class BaseResultModel {
private int index;
private String name;
private float confidence;
public BaseResultModel() {
}
public BaseResultModel(int index, String name, float confidence) {
this.index = index;
this.name = name;
this.confidence = confidence;
}
public float getConfidence() {
return confidence;
}
public void setConfidence(float confidence) {
this.confidence = confidence;
}
public int getIndex() {
return index;
}
public void setIndex(int index) {
this.index = index;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
<?xml version="1.0" encoding="utf-8"?>
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item>
<shape>
<solid android:color="@color/textColorHighlight" />
</shape>
</item>
<!--这里的right和bottom表示的是这一层图片的右边距和下边距,当然还有left和top-->
<item android:bottom="5px">
<shape>
<solid android:color="@color/bk_black" />
</shape>
</item>
</layer-list>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_selected="true" android:drawable="@drawable/album" />
<item android:state_focused="true" android:drawable="@drawable/album" />
<item android:state_pressed="true" android:drawable="@drawable/album" />
<item android:drawable="@drawable/album_pressed" />
</selector>
\ No newline at end of file
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillType="evenOdd"
android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
android:strokeWidth="1"
android:strokeColor="#00000000">
<aapt:attr name="android:fillColor">
<gradient
android:endX="78.5885"
android:endY="90.9159"
android:startX="48.7653"
android:startY="61.0927"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_selected="true" android:drawable="@drawable/realtime_start_pressed" />
<item android:state_focused="true" android:drawable="@drawable/realtime_start_pressed" />
<item android:state_pressed="true" android:drawable="@drawable/realtime_start_pressed" />
<item android:drawable="@drawable/realtime_start" />
</selector>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_selected="true" android:drawable="@drawable/realtime_stop_pressed" />
<item android:state_focused="true" android:drawable="@drawable/realtime_stop_pressed" />
<item android:state_pressed="true" android:drawable="@drawable/realtime_stop_pressed" />
<item android:drawable="@drawable/realtime_stop" />
</selector>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item>
<shape android:shape="rectangle">
<solid android:color="#FFFFFF" />
<stroke
android:width="1px"
android:color="#E5E5E5" />
</shape>
</item>
</layer-list>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android"
android:shape="rectangle">
<corners
android:bottomLeftRadius="25dp"
android:bottomRightRadius="25dp"
android:topLeftRadius="25dp"
android:topRightRadius="25dp"></corners>
<solid android:color="#3B85F5"></solid>
</shape>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<!--定义seekbar滑动条的底色-->
<item android:id="@android:id/background">
<bitmap
android:src="@drawable/seekbar_progress_dotted"
android:tileMode="mirror"></bitmap>
</item>
<!--定义seekbar滑动条进度颜色-->
<item android:id="@android:id/progress">
<clip>
<shape>
<solid android:color="#F5A623" />
</shape>
</clip>
</item>
</layer-list>
<?xml version="1.0" encoding="utf-8"?>
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<!--定义seekbar滑动条的底色-->
<item android:id="@android:id/background">
<shape>
<solid android:color="#E5E5E5" />
</shape>
</item>
<!--定义seekbar滑动条进度颜色-->
<item android:id="@android:id/progress">
<clip>
<shape>
<solid android:color="#3B85F5" />
</shape>
</clip>
</item>
</layer-list>
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<selector>
<item android:state_focused="true" android:state_pressed="false" android:drawable="@drawable/seekbar_thumb_shape" />
<item android:state_focused="true" android:state_pressed="true" android:drawable="@drawable/seekbar_thumb_shape" />
<item android:state_focused="false" android:state_pressed="true" android:drawable="@drawable/seekbar_thumb_shape" />
<item android:drawable="@drawable/seekbar_thumb_shape" />
</selector>
</selector>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<!-- 白色前景 -->
<item
android:gravity="center"
android:bottom="20px"
android:top="20px"
android:right="20px"
android:left="20px">
<shape android:shape="oval">
<size
android:width="20px"
android:height="20px" />
<solid android:color="#ffffffff" />
</shape>
</item>
<!-- 透明阴影 -->
<item android:gravity="center">
<shape android:shape="oval">
<size
android:height="30px"
android:width="30px" />
<solid android:color="#96ffffff" />
</shape>
</item>
</layer-list>
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_selected="true" android:drawable="@drawable/switch_side_pressed" />
<item android:state_focused="true" android:drawable="@drawable/switch_side_pressed" />
<item android:state_pressed="true" android:drawable="@drawable/switch_side_pressed" />
<item android:drawable="@drawable/switch_side" />
</selector>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_selected="true" android:drawable="@drawable/take_picture_pressed" />
<item android:state_focused="true" android:drawable="@drawable/take_picture_pressed" />
<item android:state_pressed="true" android:drawable="@drawable/take_picture_pressed" />
<item android:drawable="@drawable/take_picture" />
</selector>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_pressed="true" android:drawable="@drawable/btn_settings_pressed"/>
<item android:drawable="@drawable/btn_settings_default"/>
</selector>
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="24dp"
android:height="24dp"
android:viewportWidth="24"
android:viewportHeight="24">
<path
android:fillColor="#FFFFFF"
android:fillType="evenOdd"
android:pathData="M10.4696,3.5L9.8539,5.2714C9.7605,5.5401 9.5784,5.7688 9.3375,5.9199L8.0854,6.7054C7.8421,6.8581 7.5537,6.9223 7.2686,6.8872L5.3518,6.6517L3.9124,9.0649L4.9862,10.3888C5.1668,10.6114 5.2654,10.8894 5.2654,11.1762L5.2654,12.9051C5.2654,13.2057 5.157,13.4963 4.9602,13.7235L3.9165,14.9283L5.3573,17.4236L7.264,17.1741C7.5472,17.137 7.8344,17.198 8.0781,17.3469L9.401,18.1555C9.655,18.3107 9.8452,18.5515 9.9375,18.8345L10.4806,20.5L13.5194,20.5L14.0625,18.8345C14.1548,18.5515 14.345,18.3107 14.599,18.1555L15.9219,17.3469C16.1656,17.198 16.4528,17.137 16.736,17.1741L18.6427,17.4236L20.0835,14.9283L19.0398,13.7235C18.843,13.4963 18.7346,13.2057 18.7346,12.9051L18.7346,11.1762C18.7346,10.8894 18.8332,10.6114 19.0138,10.3888L20.0876,9.0649L18.6482,6.6517L16.7314,6.8872C16.4463,6.9223 16.1579,6.8581 15.9146,6.7054L14.6629,5.9202C14.4221,5.7691 14.2399,5.5404 14.1466,5.2718L13.5305,3.5L10.4696,3.5ZM8.4659,4.696L9.1111,2.8396C9.2858,2.3369 9.7596,2 10.2918,2L13.7083,2C14.2404,2 14.7142,2.3369 14.8889,2.8395L15.5345,4.6962L16.6366,5.3876L18.6269,5.143C19.1184,5.0826 19.5993,5.318 19.8529,5.7433L21.4653,8.4465C21.7339,8.8968 21.6928,9.4669 21.3625,9.8742L20.2346,11.2648L20.2346,12.8118L21.3338,14.0807C21.6826,14.4833 21.7379,15.0628 21.4715,15.5241L19.8583,18.3182C19.6057,18.7557 19.1145,18.9982 18.6136,18.9326L16.6288,18.6728L15.46,19.3872L14.8893,21.1375C14.7216,21.6519 14.2419,22 13.7009,22L10.2991,22C9.7581,22 9.2784,21.6519 9.1107,21.1375L8.54,19.3872L7.3712,18.6728L5.3864,18.9326C4.8855,18.9982 4.3943,18.7557 4.1417,18.3182L2.5285,15.5241C2.2621,15.0628 2.3174,14.4833 2.6662,14.0807L3.7654,12.8118L3.7654,11.2648L2.6375,9.8742C2.3072,9.4669 2.2661,8.8968 2.5347,8.4465L4.1471,5.7433C4.4007,5.318 4.8816,5.0826 5.3731,5.143L7.3634,5.3876L8.4659,4.696ZM12,15.75C9.9289,15.75 8.25,14.0711 8.25,12C8.25,9.9289 9.9289,8.25 12,8.25C14.0711,8.25 15.75,9.9289 15.75,12C15.75,14.0711 14.0711,15.75 12,15.75ZM12,14.75C13.5188,14.75 14.75,13.5188 14.75,12C14.75,10.4812 13.5188,9.25 12,9.25C10.4812,9.25 9.25,10.4812 9.25,12C9.25,13.5188 10.4812,14.75 12,14.75Z"
android:strokeWidth="0"
android:strokeColor="#00000000" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="24dp"
android:height="24dp"
android:viewportWidth="24"
android:viewportHeight="24">
<path
android:fillColor="#bcbcbc"
android:fillType="evenOdd"
android:pathData="M10.4696,3.5L9.8539,5.2714C9.7605,5.5401 9.5784,5.7688 9.3375,5.9199L8.0854,6.7054C7.8421,6.8581 7.5537,6.9223 7.2686,6.8872L5.3518,6.6517L3.9124,9.0649L4.9862,10.3888C5.1668,10.6114 5.2654,10.8894 5.2654,11.1762L5.2654,12.9051C5.2654,13.2057 5.157,13.4963 4.9602,13.7235L3.9165,14.9283L5.3573,17.4236L7.264,17.1741C7.5472,17.137 7.8344,17.198 8.0781,17.3469L9.401,18.1555C9.655,18.3107 9.8452,18.5515 9.9375,18.8345L10.4806,20.5L13.5194,20.5L14.0625,18.8345C14.1548,18.5515 14.345,18.3107 14.599,18.1555L15.9219,17.3469C16.1656,17.198 16.4528,17.137 16.736,17.1741L18.6427,17.4236L20.0835,14.9283L19.0398,13.7235C18.843,13.4963 18.7346,13.2057 18.7346,12.9051L18.7346,11.1762C18.7346,10.8894 18.8332,10.6114 19.0138,10.3888L20.0876,9.0649L18.6482,6.6517L16.7314,6.8872C16.4463,6.9223 16.1579,6.8581 15.9146,6.7054L14.6629,5.9202C14.4221,5.7691 14.2399,5.5404 14.1466,5.2718L13.5305,3.5L10.4696,3.5ZM8.4659,4.696L9.1111,2.8396C9.2858,2.3369 9.7596,2 10.2918,2L13.7083,2C14.2404,2 14.7142,2.3369 14.8889,2.8395L15.5345,4.6962L16.6366,5.3876L18.6269,5.143C19.1184,5.0826 19.5993,5.318 19.8529,5.7433L21.4653,8.4465C21.7339,8.8968 21.6928,9.4669 21.3625,9.8742L20.2346,11.2648L20.2346,12.8118L21.3338,14.0807C21.6826,14.4833 21.7379,15.0628 21.4715,15.5241L19.8583,18.3182C19.6057,18.7557 19.1145,18.9982 18.6136,18.9326L16.6288,18.6728L15.46,19.3872L14.8893,21.1375C14.7216,21.6519 14.2419,22 13.7009,22L10.2991,22C9.7581,22 9.2784,21.6519 9.1107,21.1375L8.54,19.3872L7.3712,18.6728L5.3864,18.9326C4.8855,18.9982 4.3943,18.7557 4.1417,18.3182L2.5285,15.5241C2.2621,15.0628 2.3174,14.4833 2.6662,14.0807L3.7654,12.8118L3.7654,11.2648L2.6375,9.8742C2.3072,9.4669 2.2661,8.8968 2.5347,8.4465L4.1471,5.7433C4.4007,5.318 4.8816,5.0826 5.3731,5.143L7.3634,5.3876L8.4659,4.696ZM12,15.75C9.9289,15.75 8.25,14.0711 8.25,12C8.25,9.9289 9.9289,8.25 12,8.25C14.0711,8.25 15.75,9.9289 15.75,12C15.75,14.0711 14.0711,15.75 12,15.75ZM12,14.75C13.5188,14.75 14.75,13.5188 14.75,12C14.75,10.4812 13.5188,9.25 12,9.25C10.4812,9.25 9.25,10.4812 9.25,12C9.25,13.5188 10.4812,14.75 12,14.75Z"
android:strokeWidth="0"
android:strokeColor="#00000000" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_pressed="true" android:drawable="@drawable/btn_shutter_pressed"/>
<item android:drawable="@drawable/btn_shutter_default"/>
</selector>
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:height="201dp"
android:width="201dp"
android:viewportWidth="201"
android:viewportHeight="201">
<path
android:fillColor="#ffffff"
android:pathData="M 100.5 0 C 156.004617358 0 201 44.995382642 201 100.5 C 201 156.004617358 156.004617358 201 100.5 201 C 44.995382642 201 0 156.004617358 0 100.5 C 0 44.995382642 44.995382642 0 100.5 0 Z"
android:strokeWidth="1"
android:fillType="evenOdd"/>
<path
android:fillColor="#dfe7eb"
android:pathData="M 100.5 17.2285714 C 146.489540112 17.2285714 183.7714286 54.5104598876 183.7714286 100.5 C 183.7714286 146.489540112 146.489540112 183.7714286 100.5 183.7714286 C 54.5104598876 183.7714286 17.2285714 146.489540112 17.2285714 100.5 C 17.2285714 54.5104598876 54.5104598876 17.2285714 100.5 17.2285714 Z"
android:strokeWidth="1"
android:fillType="evenOdd"/>
</vector>
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:height="201dp"
android:width="201dp"
android:viewportWidth="201"
android:viewportHeight="201">
<path
android:fillColor="#ffffff"
android:pathData="M 100.5 0 C 156.004617358 0 201 44.995382642 201 100.5 C 201 156.004617358 156.004617358 201 100.5 201 C 44.995382642 201 0 156.004617358 0 100.5 C 0 44.995382642 44.995382642 0 100.5 0 Z"
android:strokeWidth="1"
android:fillType="evenOdd"/>
<path
android:fillColor="#cfd7db"
android:pathData="M 100.5 17.2285714 C 146.489540112 17.2285714 183.7714286 54.5104598876 183.7714286 100.5 C 183.7714286 146.489540112 146.489540112 183.7714286 100.5 183.7714286 C 54.5104598876 183.7714286 17.2285714 146.489540112 17.2285714 100.5 C 17.2285714 54.5104598876 54.5104598876 17.2285714 100.5 17.2285714 Z"
android:strokeWidth="1"
android:fillType="evenOdd"/>
</vector>
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_pressed="true" android:drawable="@drawable/btn_switch_pressed"/>
<item android:drawable="@drawable/btn_switch_default"/>
</selector>
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#008577"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<include
layout="@layout/classification_camera_page"
android:id="@+id/camera_page"></include>
<include
layout="@layout/classification_result_page"
android:id="@+id/result_page"
android:visibility="gone"></include>
</FrameLayout>
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:keepScreenOn="true"
tools:context=".detection.DetectionMainActivity">
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/colorWindow">
<com.baidu.paddle.fastdeploy.app.ui.layout.ActionBarLayout
android:id="@+id/action_bar_main"
android:layout_width="match_parent"
android:layout_height="wrap_content">
<ImageView
android:id="@+id/back_in_preview"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:cropToPadding="true"
android:paddingLeft="40px"
android:paddingTop="60px"
android:paddingRight="60px"
android:paddingBottom="40px"
android:src="@drawable/back_btn" />
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerHorizontal="true"
android:layout_marginTop="50px"
android:orientation="horizontal">
<TextView
android:id="@+id/action_takepicture_btn"
style="@style/action_btn_selected"
android:layout_width="300px"
android:layout_height="wrap_content"
android:text="@string/action_bar_take_photo"
android:textAlignment="center"
android:visibility="gone"/>
<TextView
android:id="@+id/action_realtime_btn"
style="@style/action_btn"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/action_bar_realtime"
android:textAlignment="center"
android:textSize="15sp"/>
</LinearLayout>
</com.baidu.paddle.fastdeploy.app.ui.layout.ActionBarLayout>
<com.baidu.paddle.fastdeploy.app.ui.view.CameraSurfaceView
android:id="@+id/sv_preview"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_above="@+id/contral"
android:layout_below="@+id/action_bar_main"
android:layout_centerInParent="true" />
<ImageView
android:id="@+id/iv_select"
android:layout_width="40dp"
android:layout_height="40dp"
android:layout_alignParentRight="true"
android:layout_alignParentBottom="true"
android:layout_marginRight="20dp"
android:layout_marginBottom="145dp"
android:background="@drawable/album_btn"
android:scaleType="fitXY" />
<TextView
android:id="@+id/tv_status"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentRight="true"
android:layout_marginTop="60dp"
android:layout_marginRight="30dp"
android:textColor="@color/colorText"
android:textSize="@dimen/small_font_size" />
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="@dimen/top_bar_height"
android:layout_alignParentTop="true"
android:background="@color/colorTopBar">
<ImageButton
android:id="@+id/btn_settings"
android:layout_width="30dp"
android:layout_height="30dp"
android:layout_alignParentRight="true"
android:layout_centerVertical="true"
android:layout_marginRight="10dp"
android:background="@null"
android:scaleType="fitXY"
android:src="@drawable/btn_settings" />
</RelativeLayout>
<LinearLayout
android:id="@+id/contral"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:background="@color/colorBottomBar"
android:orientation="vertical">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="@dimen/bottom_bar_top_margin"
android:orientation="vertical"></LinearLayout>
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="@dimen/large_button_height">
<ImageButton
android:id="@+id/btn_switch"
android:layout_width="60dp"
android:layout_height="60dp"
android:layout_alignParentLeft="true"
android:layout_centerVertical="true"
android:layout_marginLeft="60dp"
android:background="#00000000"
android:scaleType="fitXY"
android:src="@drawable/switch_side_btn" />
<ImageButton
android:id="@+id/btn_shutter"
android:layout_width="@dimen/large_button_width"
android:layout_height="@dimen/large_button_height"
android:layout_centerInParent="true"
android:background="@null"
android:scaleType="fitXY"
android:src="@drawable/take_picture_btn" />
<ImageView
android:id="@+id/realtime_toggle_btn"
android:layout_width="60dp"
android:layout_height="60dp"
android:layout_alignParentRight="true"
android:layout_centerVertical="true"
android:layout_marginRight="60dp"
android:scaleType="fitXY"
android:src="@drawable/realtime_stop_btn" />
</RelativeLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="@dimen/bottom_bar_bottom_margin"
android:orientation="vertical"></LinearLayout>
</LinearLayout>
</RelativeLayout>
</android.support.constraint.ConstraintLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#FFFFFF"
android:orientation="vertical">
<com.baidu.paddle.fastdeploy.app.ui.layout.ActionBarLayout
android:id="@+id/action_bar_result"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<ImageView
android:id="@+id/back_in_result"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:cropToPadding="true"
android:paddingLeft="40px"
android:paddingTop="60px"
android:paddingRight="60px"
android:paddingBottom="40px"
android:src="@drawable/back_btn" />
<TextView
android:id="@+id/model_name"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerHorizontal="true"
android:layout_marginTop="50px"
android:textColor="@color/textColor"
android:textSize="@dimen/action_btn_text_size" />
</com.baidu.paddle.fastdeploy.app.ui.layout.ActionBarLayout>
<FrameLayout
android:layout_width="match_parent"
android:layout_height="700px">
<ImageView
android:id="@+id/result_image"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/bk_result_image_padding" />
</FrameLayout>
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginLeft="40px"
android:layout_marginTop="26px"
android:layout_marginBottom="20px"
android:text="@string/result_label"
android:textColor="@color/bk_black"
android:textSize="56px"
android:visibility="visible" />
<LinearLayout
android:id="@+id/result_seekbar_section"
android:layout_width="match_parent"
android:layout_height="130px"
android:layout_marginLeft="@dimen/result_list_padding_lr"
android:layout_marginRight="@dimen/result_list_padding_lr"
android:layout_marginBottom="@dimen/result_list_gap_width"
android:background="@drawable/result_page_border_section_bk"
android:visibility="visible">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_vertical"
android:layout_weight="2"
android:paddingLeft="30px"
android:text="@string/result_table_header_confidence"
android:textColor="@color/table_result_tableheader_text_color"
android:textSize="@dimen/result_list_view_text_size" />
<SeekBar
android:id="@+id/confidence_seekbar"
android:layout_width="220dp"
android:layout_height="wrap_content"
android:layout_gravity="center_vertical"
android:layout_weight="6"
android:focusable="false"
android:maxHeight="8px"
android:progressDrawable="@drawable/seekbar_progress_result"
android:splitTrack="false"
android:thumb="@drawable/seekbar_handle" />
<TextView
android:id="@+id/seekbar_text"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_vertical"
android:layout_weight="1"
android:paddingRight="30px"
android:textSize="@dimen/result_list_view_text_size"
/>
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginLeft="@dimen/result_list_padding_lr"
android:layout_marginRight="@dimen/result_list_padding_lr"
android:layout_marginBottom="@dimen/result_list_gap_width"
android:background="@drawable/result_page_border_section_bk"
android:visibility="visible">
<TextView
style="@style/list_result_view_tablehead_style"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/result_table_header_index"
android:textColor="@color/table_result_tableheader_text_color" />
<TextView
style="@style/list_result_view_tablehead_style"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/result_table_header_name"
android:textColor="@color/table_result_tableheader_text_color" />
<TextView
style="@style/list_result_view_tablehead_style"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_weight="0.4"
android:gravity="right"
android:text="@string/result_table_header_confidence"
android:textColor="@color/table_result_tableheader_text_color" />
</LinearLayout>
<FrameLayout
android:layout_width="match_parent"
android:layout_height="wrap_content">
<ScrollView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginBottom="15px"
android:paddingLeft="@dimen/result_list_padding_lr"
android:paddingRight="@dimen/result_list_padding_lr">
<com.baidu.paddle.fastdeploy.app.ui.view.ResultListView
android:id="@+id/result_list_view"
android:layout_width="match_parent"
android:layout_height="700px"
android:divider="#FFFFFF"
android:dividerHeight="@dimen/result_list_gap_width"></com.baidu.paddle.fastdeploy.app.ui.view.ResultListView>
</ScrollView>
</FrameLayout>
</LinearLayout>
</FrameLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="horizontal"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:background="@drawable/result_page_border_section_bk">
<TextView
android:id="@+id/index"
style="@style/list_result_view_item_style"
android:layout_width="wrap_content"
android:layout_weight="0.2" />
<TextView
android:id="@+id/name"
style="@style/list_result_view_item_style"
android:layout_width="wrap_content"
android:layout_weight="0.6"
android:maxWidth="300px" />
<TextView
android:id="@+id/confidence"
style="@style/list_result_view_item_style"
android:layout_weight="0.2"
android:layout_width="wrap_content" />
</LinearLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string-array name="cpu_thread_num_entries">
<item>1 thread</item>
<item>2 threads</item>
<item>4 threads</item>
<item>8 threads</item>
</string-array>
<string-array name="cpu_thread_num_values">
<item>1</item>
<item>2</item>
<item>4</item>
<item>8</item>
</string-array>
<string-array name="cpu_power_mode_entries">
<item>HIGH(only big cores)</item>
<item>LOW(only LITTLE cores)</item>
<item>FULL(all cores)</item>
<item>NO_BIND(depends on system)</item>
<item>RAND_HIGH</item>
<item>RAND_LOW</item>
</string-array>
<string-array name="cpu_power_mode_values">
<item>LITE_POWER_HIGH</item>
<item>LITE_POWER_LOW</item>
<item>LITE_POWER_FULL</item>
<item>LITE_POWER_NO_BIND</item>
<item>LITE_POWER_RAND_HIGH</item>
<item>LITE_POWER_RAND_LOW</item>
</string-array>
<string-array name="enable_lite_fp16_mode_entries">
<item>true</item>
<item>false</item>
</string-array>
<string-array name="enable_lite_fp16_mode_values">
<item>true</item>
<item>false</item>
</string-array>
</resources>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#008577</color>
<color name="colorPrimaryDark">#00574B</color>
<color name="colorAccent">#D81B60</color>
<color name="colorWindow">#FF000000</color>
<color name="colorTopBar">#00000000</color>
<color name="colorBottomBar">#00000000</color>
<color name="colorText">#FFFFFFFF</color>
<color name="bk_black">#000000</color>
<color name="bk_blue">#3B85F5</color>
<color name="textColorHighlight">#F5A623</color>
<color name="textColor">#FFFFFF</color>
<color name="bk_result_image_padding">#EEEEEE</color>
<color name="table_result_item_text_color">#3B85F5</color>
<color name="table_result_tableheader_text_color">#333333</color>
<color name="result_section_border_color">#E5E5E5</color>
<color name="result_popview_tablebody_bk">#3b85f5</color>
</resources>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<dimen name="bottom_bar_top_margin">26dp</dimen>
<dimen name="bottom_bar_bottom_margin">36dp</dimen>
<dimen name="bottom_bar_left_right_margin">34dp</dimen>
<dimen name="top_bar_height">60dp</dimen>
<dimen name="top_bar_left_right_margin">16dp</dimen>
<dimen name="large_button_width">67dp</dimen>
<dimen name="large_button_height">67dp</dimen>
<dimen name="medium_button_width">56dp</dimen>
<dimen name="medium_button_height">56dp</dimen>
<dimen name="small_button_width">46dp</dimen>
<dimen name="small_button_height">46dp</dimen>
<dimen name="large_font_size">32dp</dimen>
<dimen name="medium_font_size">24dp</dimen>
<dimen name="small_font_size">16dp</dimen>
</resources>
<resources>
<!-- Default App name -->
<string name="app_name">EasyEdge</string>
<!-- Other App name -->
<string name="detection_app_name">EasyEdge</string>
<string name="ocr_app_name">EasyEdge</string>
<string name="classification_app_name">EasyEdge</string>
<string name="facedet_app_name">EasyEdge</string>
<string name="segmentation_app_name">EasyEdge</string>
<!-- Keys for PreferenceScreen -->
<string name="CHOOSE_PRE_INSTALLED_MODEL_KEY">CHOOSE_INSTALLED_MODEL_KEY</string>
<string name="MODEL_DIR_KEY">MODEL_DIR_KEY</string>
<string name="LABEL_PATH_KEY">LABEL_PATH_KEY</string>
<string name="CPU_THREAD_NUM_KEY">CPU_THREAD_NUM_KEY</string>
<string name="CPU_POWER_MODE_KEY">CPU_POWER_MODE_KEY</string>
<string name="SCORE_THRESHOLD_KEY">SCORE_THRESHOLD_KEY</string>
<string name="ENABLE_LITE_FP16_MODE_KEY">ENABLE_LITE_FP16_MODE_KEY</string>
<!-- Common default values ... -->
<string name="CPU_THREAD_NUM_DEFAULT">2</string>
<string name="CPU_POWER_MODE_DEFAULT">LITE_POWER_HIGH</string>
<string name="SCORE_THRESHOLD_DEFAULT">0.4</string>
<string name="SCORE_THRESHOLD_CLASSIFICATION">0.1</string>
<string name="SCORE_THRESHOLD_FACEDET">0.25</string>
<string name="ENABLE_LITE_FP16_MODE_DEFAULT">true</string>
<!--Other values-->
<!-- Detection model & Label paths & other values ... -->
<string name="DETECTION_MODEL_DIR_DEFAULT">models/picodet_s_320_coco_lcnet</string>
<string name="DETECTION_LABEL_PATH_DEFAULT">labels/coco_label_list.txt</string>
<!-- PP-OCRv2 & PP-OCRv3 values ... -->
<string name="OCR_MODEL_DIR_DEFAULT">models</string>
<string name="OCR_REC_LABEL_DEFAULT">labels/ppocr_keys_v1.txt</string>
<!-- classification values ... -->
<string name="CLASSIFICATION_MODEL_DIR_DEFAULT">models/MobileNetV1_x0_25_infer</string>
<string name="CLASSIFICATION_LABEL_PATH_DEFAULT">labels/imagenet1k_label_list.txt</string>
<!-- facedet values ... -->
<string name="FACEDET_MODEL_DIR_DEFAULT">models/scrfd_500m_bnkps_shape320x320_pd</string>
<!-- segmentation values ... -->
<string name="SEGMENTATION_MODEL_DIR_DEFAULT">models/portrait_pp_humansegv2_lite_256x144_inference_model</string>
<!-- Other resources values-->
<string name="action_bar_take_photo">拍照识别</string>
<string name="action_bar_realtime">实时识别</string>
<string name="action_bar_back">&lt;</string>
<string name="action_bar_model_name">模型名称</string>
<string name="result_label">识别结果</string>
<string name="result_table_header_index">序号</string>
<string name="result_table_header_name">名称</string>
<string name="result_table_header_confidence">置信度</string>
<string name="operation_confidence_control">阈值控制</string>
<string name="operation_retry">重新识别</string>
<string name="operation_save">保存结果</string>
</resources>
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
<item name="actionOverflowMenuStyle">@style/OverflowMenuStyle</item>
</style>
<style name="OverflowMenuStyle" parent="Widget.AppCompat.Light.PopupMenu.Overflow">
<item name="overlapAnchor">false</item>
</style>
<style name="AppTheme.NoActionBar">
<item name="windowActionBar">false</item>
<item name="windowNoTitle">true</item>
</style>
<style name="AppTheme.AppBarOverlay" parent="ThemeOverlay.AppCompat.Dark.ActionBar"/>
<style name="AppTheme.PopupOverlay" parent="ThemeOverlay.AppCompat.Light"/>
<style name="list_result_view_item_style">
<item name="android:textColor">@color/table_result_item_text_color</item>
<item name="android:layout_height">wrap_content</item>
<item name="android:layout_weight">1</item>
<item name="android:gravity">left</item>
<item name="android:padding">30px</item>
</style>
<style name="list_result_popview_item_style">
<item name="android:textColor">@color/textColor</item>
<item name="android:layout_height">wrap_content</item>
<item name="android:gravity">left</item>
<item name="android:padding">15px</item>
<item name="android:background">@color/result_popview_tablebody_bk</item>
<item name="android:layout_width">wrap_content</item>
<item name="android:alpha">0.5</item>
</style>
<style name="list_result_view_tablehead_style">
<item name="android:textColor">@color/table_result_item_text_color</item>
<item name="android:layout_height">wrap_content</item>
<item name="android:layout_weight">1</item>
<item name="android:gravity">left</item>
<item name="android:padding">15px</item>
</style>
<style name="list_result_popview_tablehead_style">
<item name="android:textColor">@color/textColor</item>
<item name="android:layout_height">wrap_content</item>
<item name="android:layout_weight">1</item>
<item name="android:gravity">left</item>
<item name="android:padding">20px</item>
</style>
<style name="action_btn">
<item name="android:textColor">@color/textColor</item>
<item name="android:background">@color/bk_black</item>
</style>
<style name="action_btn_selected">
<item name="android:textColor">@color/textColorHighlight</item>
<item name="android:background">@color/bk_black</item>
</style>
</resources>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<dimen name="action_btn_size">120dp</dimen>
<dimen name="action_btn_text_size">46px</dimen>
<dimen name="operation_btn_margin_top_take_picture">126px</dimen>
<dimen name="operation_btn_margin_top">136px</dimen>
<dimen name="result_list_view_text_size">46px</dimen>
<dimen name="result_list_popview_text_size">36px</dimen>
<dimen name="result_list_padding_lr">15dp</dimen>
<dimen name="result_list_gap_width">15dp</dimen>
</resources>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android">
<ListPreference
android:defaultValue="@string/CLASSIFICATION_MODEL_DIR_DEFAULT"
android:key="@string/CHOOSE_PRE_INSTALLED_MODEL_KEY"
android:negativeButtonText="@null"
android:positiveButtonText="@null"
android:title="Choose Pre-Installed Models" />
<EditTextPreference
android:defaultValue="@string/CLASSIFICATION_MODEL_DIR_DEFAULT"
android:key="@string/MODEL_DIR_KEY"
android:title="Model Dir" />
<EditTextPreference
android:defaultValue="@string/CLASSIFICATION_LABEL_PATH_DEFAULT"
android:key="@string/LABEL_PATH_KEY"
android:title="Label Path" />
<ListPreference
android:defaultValue="@string/CPU_THREAD_NUM_DEFAULT"
android:entries="@array/cpu_thread_num_entries"
android:entryValues="@array/cpu_thread_num_values"
android:key="@string/CPU_THREAD_NUM_KEY"
android:negativeButtonText="@null"
android:positiveButtonText="@null"
android:title="CPU Thread Num" />
<ListPreference
android:defaultValue="@string/CPU_POWER_MODE_DEFAULT"
android:entries="@array/cpu_power_mode_entries"
android:entryValues="@array/cpu_power_mode_values"
android:key="@string/CPU_POWER_MODE_KEY"
android:negativeButtonText="@null"
android:positiveButtonText="@null"
android:title="CPU Power Mode" />
<EditTextPreference
android:key="@string/SCORE_THRESHOLD_KEY"
android:defaultValue="@string/SCORE_THRESHOLD_CLASSIFICATION"
android:title="Score Threshold: (0.0, 1.0)" />
<ListPreference
android:defaultValue="@string/ENABLE_LITE_FP16_MODE_DEFAULT"
android:entries="@array/enable_lite_fp16_mode_entries"
android:entryValues="@array/enable_lite_fp16_mode_values"
android:key="@string/ENABLE_LITE_FP16_MODE_KEY"
android:negativeButtonText="@null"
android:positiveButtonText="@null"
android:title="Enable Lite FP16" />
</PreferenceScreen>
\ No newline at end of file
// Top-level build file where you can add configuration options common to all sub-projects/modules.
//plugins {
// id 'com.android.application' version '7.2.2' apply false
// id 'com.android.library' version '7.2.2' apply false
//}
//
//task clean(type: Delete) {
// delete rootProject.buildDir
//}
buildscript {
repositories {
google()
jcenter()
// mavenCentral()
}
dependencies {
classpath 'com.android.tools.build:gradle:7.2.2'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
// mavenCentral()
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx3096m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
#Sat Oct 08 17:24:34 CST 2022
distributionBase=GRADLE_USER_HOME
distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-bin.zip
distributionPath=wrapper/dists
zipStorePath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
exec "$JAVACMD" "$@"
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
## This file must *NOT* be checked into Version Control Systems,
# as it contains information specific to your local configuration.
#
# Location of the SDK. This is only used by Gradle.
# For customization when using a Version Control System, please read the
# header note.
#Wed Nov 23 11:20:41 CST 2022
sdk.dir=D\:\\androidsdk
../../../docs/zh_CN/fastdeploy/ascend/README.md
\ No newline at end of file
PROJECT(infer_demo C CXX)
CMAKE_MINIMUM_REQUIRED (VERSION 3.10)
# Path of the downloaded and extracted FastDeploy SDK
option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.")
include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
# Add FastDeploy header search paths
include_directories(${FASTDEPLOY_INCS})
add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc)
# Link the FastDeploy libraries
target_link_libraries(infer_demo ${FASTDEPLOY_LIBS})
../../../../docs/zh_CN/fastdeploy/ascend/cpp/README.md
\ No newline at end of file
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy/vision.h"
#ifdef WIN32
const char sep = '\\';
#else
const char sep = '/';
#endif
void AscendInfer(const std::string &model_dir, const std::string &image_file) {
auto model_file = model_dir + sep + "inference.pdmodel";
auto params_file = model_dir + sep + "inference.pdiparams";
auto config_file = model_dir + sep + "inference_cls.yaml";
auto option = fastdeploy::RuntimeOption();
option.UseAscend();
auto model = fastdeploy::vision::classification::PaddleClasModel(
model_file, params_file, config_file, option);
assert(model.Initialized());
auto im = cv::imread(image_file);
fastdeploy::vision::ClassifyResult res;
if (!model.Predict(&im, &res)) {
std::cerr << "Failed to predict." << std::endl;
return;
}
std::cout << res.Str() << std::endl;
}
int main(int argc, char *argv[]) {
if (argc < 3) {
std::cout << "Usage: infer_demo path/to/model path/to/image " << std::endl;
return -1;
}
std::string model_dir = argv[1];
std::string test_image = argv[2];
AscendInfer(model_dir, test_image);
return 0;
}
../../../../docs/zh_CN/fastdeploy/ascend/python/README.md
\ No newline at end of file
import fastdeploy as fd
import cv2
import os
def parse_arguments():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
"--model", required=True, help="Path of PaddleClas model.")
parser.add_argument(
"--image", type=str, required=True, help="Path of test image file.")
parser.add_argument(
"--topk", type=int, default=1, help="Return topk results.")
return parser.parse_args()
def build_option(args):
option = fd.RuntimeOption()
option.use_ascend()
return option
args = parse_arguments()
# Configure the runtime and load the model
runtime_option = build_option(args)
model_file = os.path.join(args.model, "inference.pdmodel")
params_file = os.path.join(args.model, "inference.pdiparams")
config_file = os.path.join(args.model, "inference_cls.yaml")
model = fd.vision.classification.PaddleClasModel(
model_file, params_file, config_file, runtime_option=runtime_option)
# Predict the image classification result
im = cv2.imread(args.image)
result = model.predict(im, args.topk)
print(result)
../../../docs/zh_CN/fastdeploy/cpu-gpu/README.md
\ No newline at end of file
PROJECT(infer_demo C)
CMAKE_MINIMUM_REQUIRED (VERSION 3.10)
# Path of the downloaded and extracted FastDeploy SDK
option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.")
include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
# Add FastDeploy header search paths
include_directories(${FASTDEPLOY_INCS})
add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.c)
target_link_libraries(infer_demo ${FASTDEPLOY_LIBS})
../../../../docs/zh_CN/fastdeploy/cpu-gpu/c/README.md
\ No newline at end of file
// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <stdlib.h>
#include "fastdeploy_capi/vision.h"
#ifdef WIN32
const char sep = '\\';
#else
const char sep = '/';
#endif
void CpuInfer(const char *model_dir, const char *image_file) {
char model_file[100];
char params_file[100];
char config_file[100];
int max_size = 99;
snprintf(model_file, max_size, "%s%c%s", model_dir, sep, "inference.pdmodel");
snprintf(params_file, max_size, "%s%c%s", model_dir, sep,
"inference.pdiparams");
snprintf(config_file, max_size, "%s%c%s", model_dir, sep,
"inference_cls.yaml");
FD_C_RuntimeOptionWrapper *option = FD_C_CreateRuntimeOptionWrapper();
FD_C_RuntimeOptionWrapperUseCpu(option);
FD_C_PaddleClasModelWrapper *model = FD_C_CreatePaddleClasModelWrapper(
model_file, params_file, config_file, option, FD_C_ModelFormat_PADDLE);
if (!FD_C_PaddleClasModelWrapperInitialized(model)) {
printf("Failed to initialize.\n");
FD_C_DestroyRuntimeOptionWrapper(option);
FD_C_DestroyPaddleClasModelWrapper(model);
return;
}
FD_C_Mat im = FD_C_Imread(image_file);
FD_C_ClassifyResult *result =
(FD_C_ClassifyResult *)malloc(sizeof(FD_C_ClassifyResult));
if (!FD_C_PaddleClasModelWrapperPredict(model, im, result)) {
printf("Failed to predict.\n");
FD_C_DestroyRuntimeOptionWrapper(option);
FD_C_DestroyPaddleClasModelWrapper(model);
FD_C_DestroyMat(im);
free(result);
return;
}
// print res
char res[2000];
FD_C_ClassifyResultStr(result, res);
printf("%s", res);
FD_C_DestroyRuntimeOptionWrapper(option);
FD_C_DestroyPaddleClasModelWrapper(model);
FD_C_DestroyClassifyResult(result);
FD_C_DestroyMat(im);
}
void GpuInfer(const char *model_dir, const char *image_file) {
char model_file[100];
char params_file[100];
char config_file[100];
int max_size = 99;
snprintf(model_file, max_size, "%s%c%s", model_dir, sep, "inference.pdmodel");
snprintf(params_file, max_size, "%s%c%s", model_dir, sep,
"inference.pdiparams");
snprintf(config_file, max_size, "%s%c%s", model_dir, sep,
"inference_cls.yaml");
FD_C_RuntimeOptionWrapper *option = FD_C_CreateRuntimeOptionWrapper();
FD_C_RuntimeOptionWrapperUseGpu(option, 0);
FD_C_PaddleClasModelWrapper *model = FD_C_CreatePaddleClasModelWrapper(
model_file, params_file, config_file, option, FD_C_ModelFormat_PADDLE);
if (!FD_C_PaddleClasModelWrapperInitialized(model)) {
printf("Failed to initialize.\n");
FD_C_DestroyRuntimeOptionWrapper(option);
FD_C_DestroyPaddleClasModelWrapper(model);
return;
}
FD_C_Mat im = FD_C_Imread(image_file);
FD_C_ClassifyResult *result =
(FD_C_ClassifyResult *)malloc(sizeof(FD_C_ClassifyResult));
if (!FD_C_PaddleClasModelWrapperPredict(model, im, result)) {
printf("Failed to predict.\n");
FD_C_DestroyRuntimeOptionWrapper(option);
FD_C_DestroyPaddleClasModelWrapper(model);
FD_C_DestroyMat(im);
free(result);
return;
}
// print res
char res[2000];
FD_C_ClassifyResultStr(result, res);
printf("%s", res);
FD_C_DestroyRuntimeOptionWrapper(option);
FD_C_DestroyPaddleClasModelWrapper(model);
FD_C_DestroyClassifyResult(result);
FD_C_DestroyMat(im);
}
int main(int argc, char *argv[]) {
if (argc < 4) {
printf("Usage: infer_demo path/to/model_dir path/to/image run_option, "
"e.g ./infer_model ./ppyoloe_model_dir ./test.jpeg 0"
"\n");
printf(
"The data type of run_option is int, 0: run with cpu; 1: run with gpu"
"\n");
return -1;
}
if (atoi(argv[3]) == 0) {
CpuInfer(argv[1], argv[2]);
} else if (atoi(argv[3]) == 1) {
GpuInfer(argv[1], argv[2]);
}
return 0;
}
PROJECT(infer_demo C CXX)
CMAKE_MINIMUM_REQUIRED (VERSION 3.10)
# Path of the downloaded and extracted FastDeploy SDK
option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.")
include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
# Add FastDeploy header search paths
include_directories(${FASTDEPLOY_INCS})
add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc)
# Link the FastDeploy libraries
target_link_libraries(infer_demo ${FASTDEPLOY_LIBS})
../../../../docs/zh_CN/fastdeploy/cpu-gpu/cpp/README.md
\ No newline at end of file
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy/vision.h"
#ifdef WIN32
const char sep = '\\';
#else
const char sep = '/';
#endif
void InitAndInfer(const std::string &model_dir, const std::string &image_file,
const fastdeploy::RuntimeOption &option) {
auto model_file = model_dir + sep + "inference.pdmodel";
auto params_file = model_dir + sep + "inference.pdiparams";
auto config_file = model_dir + sep + "inference_cls.yaml";
auto model = fastdeploy::vision::classification::PaddleClasModel(
model_file, params_file, config_file, option);
if (!model.Initialized()) {
std::cerr << "Failed to initialize." << std::endl;
return;
}
auto im = cv::imread(image_file);
fastdeploy::vision::ClassifyResult res;
if (!model.Predict(im, &res)) {
std::cerr << "Failed to predict." << std::endl;
return;
}
// print res
std::cout << res.Str() << std::endl;
}
int main(int argc, char *argv[]) {
if (argc < 4) {
std::cout << "Usage: infer_demo path/to/model path/to/image run_option, "
"e.g ./infer_demo ./ResNet50_vd ./test.jpeg 0"
<< std::endl;
return -1;
}
fastdeploy::RuntimeOption option;
int flag = std::atoi(argv[3]);
if (flag == 0) {
option.UseCpu();
option.UsePaddleBackend(); // Paddle Inference
} else if (flag == 1) {
option.UseCpu();
option.UseOpenVINOBackend(); // OpenVINO
} else if (flag == 2) {
option.UseCpu();
option.UseOrtBackend(); // ONNX Runtime
} else if (flag == 3) {
option.UseCpu();
option.UseLiteBackend(); // Paddle Lite
} else if (flag == 4) {
option.UseGpu();
option.UsePaddleBackend(); // Paddle Inference
} else if (flag == 5) {
option.UseGpu();
option.UsePaddleInferBackend();
option.paddle_infer_option.enable_trt = true;
} else if (flag == 6) {
option.UseGpu();
option.UseOrtBackend(); // ONNX Runtime
} else if (flag == 7) {
option.UseGpu();
option.UseTrtBackend(); // TensorRT
}
std::string model_dir = argv[1];
std::string image_dir = argv[2];
InitAndInfer(model_dir, image_dir, option);
}
rm -rf build
mkdir build
cd build
# Point FASTDEPLOY_INSTALL_DIR at your local, extracted FastDeploy SDK
cmake .. -DFASTDEPLOY_INSTALL_DIR=/xieyunyao/project/fastdeploy-linux-x64-gpu-0.0.0
make -j
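A usage sketch for the resulting binary (the model directory and image names are assumptions; any exported PaddleClas inference model directory works). The last argument selects the run option handled in main() above:
./infer_demo ./ResNet50_vd_infer ./ILSVRC2012_val_00000010.jpeg 0  # CPU + Paddle Inference
./infer_demo ./ResNet50_vd_infer ./ILSVRC2012_val_00000010.jpeg 7  # GPU + TensorRT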
PROJECT(infer_demo CSharp)
CMAKE_MINIMUM_REQUIRED (VERSION 3.10)
# Set the C# language version (defaults to 3.0 if not set).
set(CMAKE_CSharp_FLAGS "/langversion:10")
set(CMAKE_DOTNET_TARGET_FRAMEWORK "net6.0")
set(CMAKE_DOTNET_SDK "Microsoft.NET.Sdk")
# Path of the downloaded and extracted FastDeploy SDK
option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.")
include(${FASTDEPLOY_INSTALL_DIR}/FastDeployCSharp.cmake)
add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cs)
set_property(TARGET infer_demo PROPERTY VS_DOTNET_REFERENCES
${FASTDEPLOY_DOTNET_REFERENCES}
)
set_property(TARGET infer_demo
PROPERTY VS_PACKAGE_REFERENCES ${FASTDEPLOY_PACKAGE_REFERENCES})
../../../../docs/zh_CN/fastdeploy/cpu-gpu/csharp/README.md
\ No newline at end of file
// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.IO;
using System.Runtime.InteropServices;
using OpenCvSharp;
using fastdeploy;
namespace Test
{
public class TestPaddleClas
{
public static void Main(string[] args)
{
if (args.Length < 3) {
Console.WriteLine(
"Usage: infer_demo path/to/model_dir path/to/image run_option, " +
"e.g ./infer_model ./ppyolo_dirname ./test.jpeg 0"
);
Console.WriteLine( "The data type of run_option is int, 0: run with cpu; 1: run with gpu");
return;
}
string model_dir = args[0];
string image_path = args[1];
string model_file = model_dir + "\\" + "inference.pdmodel";
string params_file = model_dir + "\\" + "inference.pdiparams";
string config_file = model_dir + "\\" + "inference_cls.yaml";
RuntimeOption runtimeoption = new RuntimeOption();
int device_option = Int32.Parse(args[2]);
if(device_option==0){
runtimeoption.UseCpu();
}else{
runtimeoption.UseGpu();
}
fastdeploy.vision.classification.PaddleClasModel model = new fastdeploy.vision.classification.PaddleClasModel(model_file, params_file, config_file, runtimeoption, ModelFormat.PADDLE);
if(!model.Initialized()){
Console.WriteLine("Failed to initialize.\n");
return;
}
Mat image = Cv2.ImRead(image_path);
fastdeploy.vision.ClassifyResult res = model.Predict(image);
Console.WriteLine(res.ToString());
}
}
}
\ No newline at end of file
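A usage sketch, assuming the built executable is named infer_demo.exe and the paths below exist (the demo hard-codes "\\" path separators, so run it on Windows):
infer_demo.exe D:\ResNet50_vd_infer D:\test.jpeg 0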
../../../../docs/zh_CN/fastdeploy/cpu-gpu/python/README.md
\ No newline at end of file
import fastdeploy as fd
import cv2
import os
def parse_arguments():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
"--model", required=True, help="Path of PaddleClas model.")
parser.add_argument(
"--image", type=str, required=True, help="Path of test image file.")
parser.add_argument(
"--topk", type=int, default=1, help="Return topk results.")
parser.add_argument(
"--device",
type=str,
default='cpu',
help="Type of inference device, support 'cpu' or 'gpu' or 'ipu' or 'kunlunxin' or 'ascend' ."
)
parser.add_argument(
"--device_id",
type=int,
default=0,
help="Define which GPU card used to run model.")
parser.add_argument(
"--backend",
type=str,
default="default",
help="Type of inference backend, support ort/trt/paddle/openvino, default 'openvino' for cpu, 'tensorrt' for gpu"
)
return parser.parse_args()
def build_option(args):
option = fd.RuntimeOption()
if args.device.lower() == "gpu":
option.use_gpu(args.device_id)
if args.backend.lower() == "trt":
assert args.device.lower(
) == "gpu", "TensorRT backend require inference on device GPU."
option.use_trt_backend()
elif args.backend.lower() == "pptrt":
assert args.device.lower(
) == "gpu", "Paddle-TensorRT backend require inference on device GPU."
option.use_paddle_infer_backend()
option.paddle_infer_option.enable_trt = True
elif args.backend.lower() == "ort":
option.use_ort_backend()
elif args.backend.lower() == "paddle":
option.use_paddle_infer_backend()
elif args.backend.lower() == "openvino":
assert args.device.lower(
) == "cpu", "OpenVINO backend require inference on device CPU."
option.use_openvino_backend()
elif args.backend.lower() == "pplite":
assert args.device.lower(
) == "cpu", "Paddle Lite backend require inference on device CPU."
option.use_lite_backend()
return option
args = parse_arguments()
# Configure the runtime and load the model
runtime_option = build_option(args)
model_file = os.path.join(args.model, "inference.pdmodel")
params_file = os.path.join(args.model, "inference.pdiparams")
config_file = os.path.join(args.model, "inference_cls.yaml")
model = fd.vision.classification.PaddleClasModel(
model_file, params_file, config_file, runtime_option=runtime_option)
# Predict the image classification result
im = cv2.imread(args.image)
result = model.predict(im, args.topk)
print(result)
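A hypothetical invocation, assuming the script above is saved as infer.py and a ResNet50_vd_infer model directory has been downloaded:
python infer.py --model ResNet50_vd_infer --image ILSVRC2012_val_00000010.jpeg --device gpu --backend pptrt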
../../../docs/zh_CN/fastdeploy/graphcore/README.md
\ No newline at end of file
PROJECT(infer_demo C CXX)
CMAKE_MINIMUM_REQUIRED (VERSION 3.10)
# Path of the downloaded and extracted FastDeploy SDK
option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.")
include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
# Add FastDeploy header search paths
include_directories(${FASTDEPLOY_INCS})
add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc)
# Link the FastDeploy libraries
target_link_libraries(infer_demo ${FASTDEPLOY_LIBS})
../../../../docs/zh_CN/fastdeploy/graphcore/cpp/README.md
\ No newline at end of file
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy/vision.h"
#ifdef WIN32
const char sep = '\\';
#else
const char sep = '/';
#endif
void IPUInfer(const std::string &model_dir, const std::string &image_file) {
auto model_file = model_dir + sep + "inference.pdmodel";
auto params_file = model_dir + sep + "inference.pdiparams";
auto config_file = model_dir + sep + "inference_cls.yaml";
auto option = fastdeploy::RuntimeOption();
option.UseIPU();
auto model = fastdeploy::vision::classification::PaddleClasModel(
model_file, params_file, config_file, option);
assert(model.Initialized());
auto im = cv::imread(image_file);
fastdeploy::vision::ClassifyResult res;
if (!model.Predict(&im, &res)) {
std::cerr << "Failed to predict." << std::endl;
return;
}
std::cout << res.Str() << std::endl;
}
int main(int argc, char *argv[]) {
if (argc < 3) {
std::cout << "Usage: infer_demo path/to/model path/to/image " << std::endl;
return -1;
}
std::string model_dir = argv[1];
std::string test_image = argv[2];
IPUInfer(model_dir, test_image);
return 0;
}
../../../../docs/zh_CN/fastdeploy/graphcore/python/README.md
\ No newline at end of file
import fastdeploy as fd
import cv2
import os
def parse_arguments():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
"--model", required=True, help="Path of PaddleClas model.")
parser.add_argument(
"--image", type=str, required=True, help="Path of test image file.")
parser.add_argument(
"--topk", type=int, default=1, help="Return topk results.")
return parser.parse_args()
def build_option(args):
option = fd.RuntimeOption()
option.use_ipu()
return option
args = parse_arguments()
# Configure the runtime and load the model
runtime_option = build_option(args)
model_file = os.path.join(args.model, "inference.pdmodel")
params_file = os.path.join(args.model, "inference.pdiparams")
config_file = os.path.join(args.model, "inference_cls.yaml")
model = fd.vision.classification.PaddleClasModel(
model_file, params_file, config_file, runtime_option=runtime_option)
# Predict the image classification result
im = cv2.imread(args.image)
result = model.predict(im, args.topk)
print(result)
../../../docs/zh_CN/fastdeploy/kunlun/README.md
\ No newline at end of file
PROJECT(infer_demo C CXX)
CMAKE_MINIMUM_REQUIRED (VERSION 3.10)
# Path of the downloaded and extracted FastDeploy SDK
option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.")
include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
# Add FastDeploy header search paths
include_directories(${FASTDEPLOY_INCS})
add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc)
# Link the FastDeploy libraries
target_link_libraries(infer_demo ${FASTDEPLOY_LIBS})
../../../../docs/zh_CN/fastdeploy/kunlun/cpp/README.md
\ No newline at end of file
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy/vision.h"
#ifdef WIN32
const char sep = '\\';
#else
const char sep = '/';
#endif
void KunlunInfer(const std::string &model_dir, const std::string &image_file) {
auto model_file = model_dir + sep + "inference.pdmodel";
auto params_file = model_dir + sep + "inference.pdiparams";
auto config_file = model_dir + sep + "inference_cls.yaml";
auto option = fastdeploy::RuntimeOption();
option.UseKunlunXin();
auto model = fastdeploy::vision::classification::PaddleClasModel(
model_file, params_file, config_file, option);
assert(model.Initialized());
auto im = cv::imread(image_file);
fastdeploy::vision::ClassifyResult res;
if (!model.Predict(&im, &res)) {
std::cerr << "Failed to predict." << std::endl;
return;
}
std::cout << res.Str() << std::endl;
}
int main(int argc, char *argv[]) {
if (argc < 3) {
std::cout << "Usage: infer_demo path/to/model path/to/image " << std::endl;
return -1;
}
std::string model_dir = argv[1];
std::string test_image = argv[2];
KunlunInfer(model_dir, test_image);
return 0;
}
../../../../docs/zh_CN/fastdeploy/kunlun/python/README.md
\ No newline at end of file
import fastdeploy as fd
import cv2
import os
def parse_arguments():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
"--model", required=True, help="Path of PaddleClas model.")
parser.add_argument(
"--image", type=str, required=True, help="Path of test image file.")
parser.add_argument(
"--topk", type=int, default=1, help="Return topk results.")
return parser.parse_args()
def build_option(args):
option = fd.RuntimeOption()
option.use_kunlunxin()
return option
args = parse_arguments()
# Configure the runtime and load the model
runtime_option = build_option(args)
model_file = os.path.join(args.model, "inference.pdmodel")
params_file = os.path.join(args.model, "inference.pdiparams")
config_file = os.path.join(args.model, "inference_cls.yaml")
model = fd.vision.classification.PaddleClasModel(
model_file, params_file, config_file, runtime_option=runtime_option)
# Predict the image classification result
im = cv2.imread(args.image)
result = model.predict(im, args.topk)
print(result)
../../../docs/zh_CN/fastdeploy/quantize/README.md
\ No newline at end of file
../../../../../docs/zh_CN/fastdeploy/rockchip/rknpu2/README.md
\ No newline at end of file
CMAKE_MINIMUM_REQUIRED(VERSION 3.10)
project(rknpu_test)
set(CMAKE_CXX_STANDARD 14)
# Path of the downloaded and extracted FastDeploy SDK
set(FASTDEPLOY_INSTALL_DIR "thirdpartys/fastdeploy-0.0.3")
include(${FASTDEPLOY_INSTALL_DIR}/FastDeployConfig.cmake)
include_directories(${FastDeploy_INCLUDE_DIRS})
add_executable(rknpu_test infer.cc)
target_link_libraries(rknpu_test
${FastDeploy_LIBS}
)
set(CMAKE_INSTALL_PREFIX ${CMAKE_SOURCE_DIR}/build/install)
install(TARGETS rknpu_test DESTINATION ./)
install(DIRECTORY ppclas_model_dir DESTINATION ./)
install(DIRECTORY images DESTINATION ./)
file(GLOB FASTDEPLOY_LIBS ${FASTDEPLOY_INSTALL_DIR}/lib/*)
message("${FASTDEPLOY_LIBS}")
install(PROGRAMS ${FASTDEPLOY_LIBS} DESTINATION lib)
file(GLOB ONNXRUNTIME_LIBS ${FASTDEPLOY_INSTALL_DIR}/third_libs/install/onnxruntime/lib/*)
install(PROGRAMS ${ONNXRUNTIME_LIBS} DESTINATION lib)
install(DIRECTORY ${FASTDEPLOY_INSTALL_DIR}/third_libs/install/opencv/lib DESTINATION ./)
file(GLOB PADDLETOONNX_LIBS ${FASTDEPLOY_INSTALL_DIR}/third_libs/install/paddle2onnx/lib/*)
install(PROGRAMS ${PADDLETOONNX_LIBS} DESTINATION lib)
file(GLOB RKNPU2_LIBS ${FASTDEPLOY_INSTALL_DIR}/third_libs/install/rknpu2_runtime/RK3588/lib/*)
install(PROGRAMS ${RKNPU2_LIBS} DESTINATION lib)
\ No newline at end of file
../../../../../docs/zh_CN/fastdeploy/rockchip/rknpu2/cpp/README.md
\ No newline at end of file
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy/vision.h"
void RKNPU2Infer(const std::string &model_dir, const std::string &image_file) {
auto model_file = model_dir + "/ResNet50_vd_infer_rk3588.rknn";
auto params_file = "";
auto config_file = model_dir + "/inference_cls.yaml";
auto option = fastdeploy::RuntimeOption();
option.UseRKNPU2();
auto format = fastdeploy::ModelFormat::RKNN;
auto model = fastdeploy::vision::classification::PaddleClasModel(
model_file, params_file, config_file, option, format);
if (!model.Initialized()) {
std::cerr << "Failed to initialize." << std::endl;
return;
}
model.GetPreprocessor().DisablePermute();
fastdeploy::TimeCounter tc;
tc.Start();
auto im = cv::imread(image_file);
fastdeploy::vision::ClassifyResult res;
if (!model.Predict(im, &res)) {
std::cerr << "Failed to predict." << std::endl;
return;
}
// print res
std::cout << res.Str() << std::endl;
tc.End();
tc.PrintInfo("PPClas in RKNPU2");
}
int main(int argc, char *argv[]) {
if (argc < 3) {
std::cout
<< "Usage: rknpu_test path/to/model_dir path/to/image run_option, "
"e.g ./rknpu_test ./ppclas_model_dir "
"./images/ILSVRC2012_val_00000010.jpeg"
<< std::endl;
return -1;
}
RKNPU2Infer(argv[1], argv[2]);
return 0;
}
../../../../../docs/zh_CN/fastdeploy/rockchip/rknpu2/python/README.md
\ No newline at end of file
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import fastdeploy as fd
import cv2
import os
def parse_arguments():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
"--model_file", required=True, help="Path of rknn model.")
parser.add_argument("--config_file", required=True, help="Path of config.")
parser.add_argument(
"--image", type=str, required=True, help="Path of test image file.")
return parser.parse_args()
if __name__ == "__main__":
args = parse_arguments()
model_file = args.model_file
params_file = ""
config_file = args.config_file
# Configure the runtime and load the model
runtime_option = fd.RuntimeOption()
runtime_option.use_rknpu2()
model = fd.vision.classification.ResNet50vd(
model_file,
params_file,
config_file,
runtime_option=runtime_option,
model_format=fd.ModelFormat.RKNN)
# Disable the channel permute (HWC to CHW) in preprocessing
model.preprocessor.disable_permute()
im = cv2.imread(args.image)
result = model.predict(im, topk=1)
print(result)
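A usage sketch, assuming the script above is saved as infer.py (the .rknn model name follows the C++ demo above):
python infer.py --model_file ./ResNet50_vd_infer_rk3588.rknn --config_file ./inference_cls.yaml --image ./ILSVRC2012_val_00000010.jpeg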
model_path: ./ResNet50_vd_infer/ResNet50_vd_infer.onnx
output_folder: ./ResNet50_vd_infer
mean:
-
- 123.675
- 116.28
- 103.53
std:
-
- 58.395
- 57.12
- 57.375
outputs_nodes:
do_quantization: False
dataset: "./ResNet50_vd_infer/dataset.txt"
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
import argparse
from rknn.api import RKNN
def get_config():
parser = argparse.ArgumentParser()
parser.add_argument("--verbose", default=True, help="rknntoolkit verbose")
parser.add_argument("--config_path")
parser.add_argument("--target_platform")
args = parser.parse_args()
return args
if __name__ == "__main__":
config = get_config()
with open(config.config_path) as file:
file_data = file.read()
yaml_config = yaml.safe_load(file_data)
print(yaml_config)
model = RKNN(config.verbose)
# Config
mean_values = yaml_config["mean"]
std_values = yaml_config["std"]
model.config(
mean_values=mean_values,
std_values=std_values,
target_platform=config.target_platform)
# Load ONNX model
if yaml_config["outputs_nodes"] is None:
ret = model.load_onnx(model=yaml_config["model_path"])
else:
ret = model.load_onnx(
model=yaml_config["model_path"],
outputs=yaml_config["outputs_nodes"])
assert ret == 0, "Load model failed!"
# Build model
ret = model.build(
do_quantization=yaml_config["do_quantization"],
dataset=yaml_config["dataset"])
assert ret == 0, "Build model failed!"
# Init Runtime
ret = model.init_runtime()
assert ret == 0, "Init runtime environment failed!"
# Export
if not os.path.exists(yaml_config["output_folder"]):
os.mkdir(yaml_config["output_folder"])
name_list = os.path.basename(yaml_config["model_path"]).split(".")
model_base_name = ""
for name in name_list[0:-1]:
model_base_name += name
model_device_name = config.target_platform.lower()
if yaml_config["do_quantization"]:
model_save_name = model_base_name + "_" + model_device_name + "_quantized" + ".rknn"
else:
model_save_name = model_base_name + "_" + model_device_name + "_unquantized" + ".rknn"
ret = model.export_rknn(
os.path.join(yaml_config["output_folder"], model_save_name))
assert ret == 0, "Export rknn model failed!"
print("Export OK!")
../../../../docs/zh_CN/fastdeploy/rockchip/rv1126/README.md
\ No newline at end of file
PROJECT(infer_demo C CXX)
CMAKE_MINIMUM_REQUIRED (VERSION 3.10)
# Path of the downloaded and extracted FastDeploy SDK
option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.")
include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
# Add FastDeploy header search paths
include_directories(${FASTDEPLOY_INCS})
include_directories(${FastDeploy_INCLUDE_DIRS})
add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc)
# Link the FastDeploy libraries
target_link_libraries(infer_demo ${FASTDEPLOY_LIBS})
set(CMAKE_INSTALL_PREFIX ${CMAKE_SOURCE_DIR}/build/install)
install(TARGETS infer_demo DESTINATION ./)
install(DIRECTORY models DESTINATION ./)
install(DIRECTORY images DESTINATION ./)
file(GLOB_RECURSE FASTDEPLOY_LIBS ${FASTDEPLOY_INSTALL_DIR}/lib/lib*.so*)
file(GLOB_RECURSE ALL_LIBS ${FASTDEPLOY_INSTALL_DIR}/third_libs/install/lib*.so*)
list(APPEND ALL_LIBS ${FASTDEPLOY_LIBS})
install(PROGRAMS ${ALL_LIBS} DESTINATION lib)
file(GLOB ADB_TOOLS run_with_adb.sh)
install(PROGRAMS ${ADB_TOOLS} DESTINATION ./)
../../../../../docs/zh_CN/fastdeploy/rockchip/rv1126/cpp/README.md
\ No newline at end of file
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy/vision.h"
#include <string>
#ifdef WIN32
const char sep = '\\';
#else
const char sep = '/';
#endif
void InitAndInfer(const std::string &model_dir, const std::string &image_file) {
auto model_file = model_dir + sep + "inference.pdmodel";
auto params_file = model_dir + sep + "inference.pdiparams";
auto config_file = model_dir + sep + "inference_cls.yaml";
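  // EnableFlyCV switches image preprocessing to FlyCV, an ARM-optimized vision
  // library; UseTimVX below selects the VeriSilicon NPU backend via TIM-VX.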
fastdeploy::vision::EnableFlyCV();
fastdeploy::RuntimeOption option;
option.UseTimVX();
auto model = fastdeploy::vision::classification::PaddleClasModel(
model_file, params_file, config_file, option);
assert(model.Initialized());
auto im = cv::imread(image_file);
fastdeploy::vision::ClassifyResult res;
if (!model.Predict(im, &res)) {
std::cerr << "Failed to predict." << std::endl;
return;
}
std::cout << res.Str() << std::endl;
}
int main(int argc, char *argv[]) {
if (argc < 3) {
std::cout << "Usage: infer_demo path/to/quant_model "
"path/to/image "
"e.g ./infer_demo ./ResNet50_vd_quant ./test.jpeg"
<< std::endl;
return -1;
}
std::string model_dir = argv[1];
std::string test_image = argv[2];
InitAndInfer(model_dir, test_image);
return 0;
}
#!/bin/bash
HOST_SPACE=${PWD}
echo ${HOST_SPACE}
WORK_SPACE=/data/local/tmp/test
# The first parameter represents the demo name
DEMO_NAME=image_classification_demo
if [ -n "$1" ]; then
DEMO_NAME=$1
fi
# The second parameter represents the model name
MODEL_NAME=mobilenet_v1_fp32_224
if [ -n "$2" ]; then
MODEL_NAME=$2
fi
# The third parameter indicates the name of the image to be tested
IMAGE_NAME=0001.jpg
if [ -n "$3" ]; then
IMAGE_NAME=$3
fi
# The fourth parameter represents the ID of the device
ADB_DEVICE_NAME=
if [ -n "$4" ]; then
ADB_DEVICE_NAME="-s $4"
fi
# Set the environment variables required during the running process
EXPORT_ENVIRONMENT_VARIABLES="export GLOG_v=5; export VIV_VX_ENABLE_GRAPH_TRANSFORM=-pcq:1; export VIV_VX_SET_PER_CHANNEL_ENTROPY=100; export TIMVX_BATCHNORM_FUSION_MAX_ALLOWED_QUANT_SCALE_DEVIATION=300000; export VSI_NN_LOG_LEVEL=5;"
EXPORT_ENVIRONMENT_VARIABLES="${EXPORT_ENVIRONMENT_VARIABLES}export LD_LIBRARY_PATH=${WORK_SPACE}/lib:\$LD_LIBRARY_PATH;"
# Please install adb, and DON'T run this inside a Docker container.
set -e
adb $ADB_DEVICE_NAME shell "rm -rf $WORK_SPACE"
adb $ADB_DEVICE_NAME shell "mkdir -p $WORK_SPACE"
# Upload the demo, libraries, model and test images to the device
adb $ADB_DEVICE_NAME push ${HOST_SPACE}/lib $WORK_SPACE
adb $ADB_DEVICE_NAME push ${HOST_SPACE}/${DEMO_NAME} $WORK_SPACE
adb $ADB_DEVICE_NAME push models $WORK_SPACE
adb $ADB_DEVICE_NAME push images $WORK_SPACE
# Execute the deployment demo
adb $ADB_DEVICE_NAME shell "cd $WORK_SPACE; ${EXPORT_ENVIRONMENT_VARIABLES} chmod +x ./${DEMO_NAME}; ./${DEMO_NAME} ./models/${MODEL_NAME} ./images/$IMAGE_NAME"
../../../docs/zh_CN/fastdeploy/serving/README.md
\ No newline at end of file
# PaddleCls Pipeline
The pipeline directory contains no model files, but a version-number subdirectory must still be maintained.
The ensemble config below wires the three sub-models together: the client's INPUT image flows through preprocess into RUNTIME_INPUT, the runtime model produces RUNTIME_OUTPUT, and postprocess turns it into the final CLAS_RESULT string.
name: "paddlecls"
platform: "ensemble"
max_batch_size: 16
input [
{
name: "INPUT"
data_type: TYPE_UINT8
dims: [ -1, -1, 3 ]
}
]
output [
{
name: "CLAS_RESULT"
data_type: TYPE_STRING
dims: [ -1 ]
}
]
ensemble_scheduling {
step [
{
model_name: "preprocess"
model_version: 1
input_map {
key: "preprocess_input"
value: "INPUT"
}
output_map {
key: "preprocess_output"
value: "RUNTIME_INPUT"
}
},
{
model_name: "runtime"
model_version: 1
input_map {
key: "inputs"
value: "RUNTIME_INPUT"
}
output_map {
key: "save_infer_model/scale_0.tmp_1"
value: "RUNTIME_OUTPUT"
}
},
{
model_name: "postprocess"
model_version: 1
input_map {
key: "post_input"
value: "RUNTIME_OUTPUT"
}
output_map {
key: "post_output"
value: "CLAS_RESULT"
}
}
]
}
\ No newline at end of file
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import numpy as np
import time
import fastdeploy as fd
# triton_python_backend_utils is available in every Triton Python model. You
# need to use this module to create inference requests and responses. It also
# contains some utility functions for extracting information from model_config
# and converting Triton input/output types to numpy types.
import triton_python_backend_utils as pb_utils
class TritonPythonModel:
"""Your Python model must use the same class name. Every Python model
that is created must have "TritonPythonModel" as the class name.
"""
def initialize(self, args):
"""`initialize` is called only once when the model is being loaded.
Implementing `initialize` function is optional. This function allows
        the model to initialize any state associated with this model.
Parameters
----------
args : dict
Both keys and values are strings. The dictionary keys and values are:
* model_config: A JSON string containing the model configuration
* model_instance_kind: A string containing model instance kind
* model_instance_device_id: A string containing model instance device ID
* model_repository: Model repository path
* model_version: Model version
* model_name: Model name
"""
        # model_config is passed as a JSON string and must be parsed here
self.model_config = json.loads(args['model_config'])
print("model_config:", self.model_config)
self.input_names = []
for input_config in self.model_config["input"]:
self.input_names.append(input_config["name"])
print("postprocess input names:", self.input_names)
self.output_names = []
self.output_dtype = []
for output_config in self.model_config["output"]:
self.output_names.append(output_config["name"])
dtype = pb_utils.triton_string_to_numpy(output_config["data_type"])
self.output_dtype.append(dtype)
print("postprocess output names:", self.output_names)
self.postprocess_ = fd.vision.classification.PaddleClasPostprocessor()
def execute(self, requests):
"""`execute` must be implemented in every Python model. `execute`
function receives a list of pb_utils.InferenceRequest as the only
argument. This function is called when an inference is requested
for this model. Depending on the batching configuration (e.g. Dynamic
Batching) used, `requests` may contain multiple requests. Every
        Python model must create one pb_utils.InferenceResponse for every
pb_utils.InferenceRequest in `requests`. If there is an error, you can
set the error argument when creating a pb_utils.InferenceResponse.
Parameters
----------
requests : list
A list of pb_utils.InferenceRequest
Returns
-------
list
A list of pb_utils.InferenceResponse. The length of this list must
be the same as `requests`
"""
responses = []
for request in requests:
infer_outputs = pb_utils.get_input_tensor_by_name(
request, self.input_names[0])
infer_outputs = infer_outputs.as_numpy()
results = self.postprocess_.run([infer_outputs, ])
r_str = fd.vision.utils.fd_result_to_json(results)
r_np = np.array(r_str, dtype=np.object_)
out_tensor = pb_utils.Tensor(self.output_names[0], r_np)
inference_response = pb_utils.InferenceResponse(
output_tensors=[out_tensor, ])
responses.append(inference_response)
return responses
def finalize(self):
"""`finalize` is called only once when the model is being unloaded.
Implementing `finalize` function is optional. This function allows
the model to perform any necessary clean ups before exit.
"""
print('Cleaning up...')
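# --- Sketch (not part of the serving model above): the same post-processing
# can be exercised outside Triton. The random logits stand in for the
# "post_input" tensor declared in the config below (dims: [1000]); the calls
# mirror what execute() does above.
import numpy as np
import fastdeploy as fd

logits = np.random.rand(1, 1000).astype(np.float32)
postprocessor = fd.vision.classification.PaddleClasPostprocessor()
results = postprocessor.run([logits])
print(fd.vision.utils.fd_result_to_json(results))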
name: "postprocess"
backend: "python"
max_batch_size: 16
input [
{
name: "post_input"
data_type: TYPE_FP32
dims: [ 1000 ]
}
]
output [
{
name: "post_output"
data_type: TYPE_STRING
dims: [ -1 ]
}
]
instance_group [
{
count: 1
kind: KIND_CPU
}
]
\ No newline at end of file
Global:
infer_imgs: "./images/ImageNet/ILSVRC2012_val_00000010.jpeg"
inference_model_dir: "./models"
batch_size: 1
use_gpu: True
enable_mkldnn: True
cpu_num_threads: 10
enable_benchmark: True
use_fp16: False
ir_optim: True
use_tensorrt: False
gpu_mem: 8000
enable_profile: False
PreProcess:
transform_ops:
- ResizeImage:
resize_short: 256
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
channel_num: 3
- ToCHWImage:
PostProcess:
main_indicator: Topk
Topk:
topk: 5
class_id_map_file: "../ppcls/utils/imagenet1k_label_list.txt"
SavePreLabel:
save_dir: ./pre_label/
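# --- Sketch: the PreProcess.transform_ops above fully determine what happens
# to an image before inference. Below is a rough NumPy/OpenCV equivalent
# (channel-order conversion elided); it is an illustration of the listed ops,
# not FastDeploy's or PaddleClas's actual implementation.
import cv2
import numpy as np

def preprocess(image_path):
    im = cv2.imread(image_path)                        # HWC, uint8
    h, w = im.shape[:2]                                # ResizeImage: short side -> 256
    scale = 256.0 / min(h, w)
    im = cv2.resize(im, (int(round(w * scale)), int(round(h * scale))))
    h, w = im.shape[:2]                                # CropImage: center 224 x 224
    top, left = (h - 224) // 2, (w - 224) // 2
    im = im[top:top + 224, left:left + 224]
    im = im.astype(np.float32) * 0.00392157            # NormalizeImage: scale = 1/255
    im = (im - np.array([0.485, 0.456, 0.406])) / np.array([0.229, 0.224, 0.225])
    return im.transpose((2, 0, 1)).astype(np.float32)  # ToCHWImage: HWC -> CHW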
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import numpy as np
import os
import fastdeploy as fd
# triton_python_backend_utils is available in every Triton Python model. You
# need to use this module to create inference requests and responses. It also
# contains some utility functions for extracting information from model_config
# and converting Triton input/output types to numpy types.
import triton_python_backend_utils as pb_utils
class TritonPythonModel:
"""Your Python model must use the same class name. Every Python model
that is created must have "TritonPythonModel" as the class name.
"""
def initialize(self, args):
"""`initialize` is called only once when the model is being loaded.
Implementing `initialize` function is optional. This function allows
        the model to initialize any state associated with this model.
Parameters
----------
args : dict
Both keys and values are strings. The dictionary keys and values are:
* model_config: A JSON string containing the model configuration
* model_instance_kind: A string containing model instance kind
* model_instance_device_id: A string containing model instance device ID
* model_repository: Model repository path
* model_version: Model version
* model_name: Model name
"""
        # model_config is passed as a JSON string and must be parsed here
self.model_config = json.loads(args['model_config'])
print("model_config:", self.model_config)
self.input_names = []
for input_config in self.model_config["input"]:
self.input_names.append(input_config["name"])
print("preprocess input names:", self.input_names)
self.output_names = []
self.output_dtype = []
for output_config in self.model_config["output"]:
self.output_names.append(output_config["name"])
# dtype = pb_utils.triton_string_to_numpy(output_config["data_type"])
# self.output_dtype.append(dtype)
self.output_dtype.append(output_config["data_type"])
print("preprocess output names:", self.output_names)
# init PaddleClasPreprocess class
yaml_path = os.path.abspath(os.path.dirname(
__file__)) + "/inference_cls.yaml"
self.preprocess_ = fd.vision.classification.PaddleClasPreprocessor(
yaml_path)
if args['model_instance_kind'] == 'GPU':
device_id = int(args['model_instance_device_id'])
self.preprocess_.use_cuda(False, device_id)
def execute(self, requests):
"""`execute` must be implemented in every Python model. `execute`
function receives a list of pb_utils.InferenceRequest as the only
argument. This function is called when an inference is requested
for this model. Depending on the batching configuration (e.g. Dynamic
Batching) used, `requests` may contain multiple requests. Every
        Python model must create one pb_utils.InferenceResponse for every
pb_utils.InferenceRequest in `requests`. If there is an error, you can
set the error argument when creating a pb_utils.InferenceResponse.
Parameters
----------
requests : list
A list of pb_utils.InferenceRequest
Returns
-------
list
A list of pb_utils.InferenceResponse. The length of this list must
be the same as `requests`
"""
responses = []
for request in requests:
data = pb_utils.get_input_tensor_by_name(request,
self.input_names[0])
data = data.as_numpy()
outputs = self.preprocess_.run(data)
# PaddleCls preprocess has only one output
dlpack_tensor = outputs[0].to_dlpack()
output_tensor = pb_utils.Tensor.from_dlpack(self.output_names[0],
dlpack_tensor)
inference_response = pb_utils.InferenceResponse(
output_tensors=[output_tensor, ])
responses.append(inference_response)
return responses
def finalize(self):
"""`finalize` is called only once when the model is being unloaded.
Implementing `finalize` function is optional. This function allows
the model to perform any necessary clean ups before exit.
"""
print('Cleaning up...')
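# --- Sketch (not part of the serving model above): the same preprocessor can
# be run outside Triton. The image path is hypothetical, and inference_cls.yaml
# must sit in the working directory, just as initialize() expects it next to
# model.py.
import cv2
import numpy as np
import fastdeploy as fd

preprocessor = fd.vision.classification.PaddleClasPreprocessor("inference_cls.yaml")
im = cv2.imread("ILSVRC2012_val_00000010.jpeg")  # hypothetical test image
outputs = preprocessor.run(np.array([im]))
# outputs holds one FDTensor laid out like the "preprocess_output" tensor
# declared in the config below: float32, shape (batch, 3, 224, 224).
print(outputs)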
name: "preprocess"
backend: "python"
max_batch_size: 16
input [
{
name: "preprocess_input"
data_type: TYPE_UINT8
dims: [ -1, -1, 3 ]
}
]
output [
{
name: "preprocess_output"
data_type: TYPE_FP32
dims: [ 3, 224, 224 ]
}
]
instance_group [
{
# The number of instances is 1
count: 1
    # Use CPU; to run on the GPU instead, set kind: KIND_GPU
kind: KIND_CPU
    # To pin the instance to GPU 0, uncomment:
    # gpus: [0]
}
]
\ No newline at end of file
# Runtime Directory
This directory holds the model files.
Paddle models must be named model.pdmodel and model.pdiparams.
ONNX models must be named model.onnx.
# Optional: if name is specified, it must match the name of the model repository directory containing the model.
name: "runtime"
backend: "fastdeploy"
max_batch_size: 16
# Input configuration of the model
input [
{
# input name
name: "inputs"
    # Input type: one of TYPE_FP32, TYPE_UINT8, TYPE_INT8, TYPE_INT16, TYPE_INT32, TYPE_INT64, TYPE_FP16, TYPE_STRING
data_type: TYPE_FP32
    # Input shape; the batch dimension is omitted, so the actual shape is [batch, c, h, w]
dims: [ 3, 224, 224 ]
}
]
# The output of the model is configured in the same format as the input
output [
{
name: "save_infer_model/scale_0.tmp_1"
data_type: TYPE_FP32
dims: [ 1000 ]
}
]
# Number of instances of the model
instance_group [
{
# The number of instances is 1
count: 1
    # Use GPU; to run on the CPU instead, set kind: KIND_CPU
    kind: KIND_GPU
    # The instance is deployed on GPU 0
gpus: [0]
}
]
optimization {
execution_accelerators {
gpu_execution_accelerator : [ {
# use TRT engine
name: "tensorrt",
# use fp16 on TRT engine
parameters { key: "precision" value: "trt_fp16" }
},
{
name: "min_shape"
parameters { key: "inputs" value: "1 3 224 224" }
},
{
name: "opt_shape"
parameters { key: "inputs" value: "1 3 224 224" }
},
{
name: "max_shape"
parameters { key: "inputs" value: "16 3 224 224" }
}
]
}}
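# --- Sketch: outside Triton, the TensorRT settings above map onto a FastDeploy
# RuntimeOption. The calls below exist in FastDeploy's Python API, though exact
# availability depends on the installed version; this mirrors, not copies, the
# server config.
import fastdeploy as fd

option = fd.RuntimeOption()
option.use_gpu(0)
option.use_trt_backend()
option.enable_trt_fp16()                # precision: "trt_fp16"
option.set_trt_input_shape(             # min/opt/max shapes for "inputs"
    "inputs",
    min_shape=[1, 3, 224, 224],
    opt_shape=[1, 3, 224, 224],
    max_shape=[16, 3, 224, 224])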
import json
import logging
from typing import Optional

import cv2
import numpy as np
from tritonclient.grpc import InferenceServerClient, InferInput, InferRequestedOutput
LOGGER = logging.getLogger("run_inference_on_triton")
class SyncGRPCTritonRunner:
DEFAULT_MAX_RESP_WAIT_S = 120
def __init__(
self,
server_url: str,
model_name: str,
model_version: str,
*,
verbose=False,
resp_wait_s: Optional[float]=None, ):
self._server_url = server_url
self._model_name = model_name
self._model_version = model_version
self._verbose = verbose
self._response_wait_t = self.DEFAULT_MAX_RESP_WAIT_S if resp_wait_s is None else resp_wait_s
self._client = InferenceServerClient(
self._server_url, verbose=self._verbose)
error = self._verify_triton_state(self._client)
if error:
raise RuntimeError(
f"Could not communicate to Triton Server: {error}")
LOGGER.debug(
f"Triton server {self._server_url} and model {self._model_name}:{self._model_version} "
f"are up and ready!")
model_config = self._client.get_model_config(self._model_name,
self._model_version)
model_metadata = self._client.get_model_metadata(self._model_name,
self._model_version)
LOGGER.info(f"Model config {model_config}")
LOGGER.info(f"Model metadata {model_metadata}")
for tm in model_metadata.inputs:
print("tm:", tm)
self._inputs = {tm.name: tm for tm in model_metadata.inputs}
self._input_names = list(self._inputs)
self._outputs = {tm.name: tm for tm in model_metadata.outputs}
self._output_names = list(self._outputs)
self._outputs_req = [
InferRequestedOutput(name) for name in self._outputs
]
def Run(self, inputs):
"""
Args:
inputs: list, Each value corresponds to an input name of self._input_names
Returns:
results: dict, {name : numpy.array}
"""
infer_inputs = []
for idx, data in enumerate(inputs):
infer_input = InferInput(self._input_names[idx], data.shape,
"UINT8")
infer_input.set_data_from_numpy(data)
infer_inputs.append(infer_input)
results = self._client.infer(
model_name=self._model_name,
model_version=self._model_version,
inputs=infer_inputs,
outputs=self._outputs_req,
client_timeout=self._response_wait_t, )
results = {name: results.as_numpy(name) for name in self._output_names}
return results
def _verify_triton_state(self, triton_client):
if not triton_client.is_server_live():
return f"Triton server {self._server_url} is not live"
elif not triton_client.is_server_ready():
return f"Triton server {self._server_url} is not ready"
elif not triton_client.is_model_ready(self._model_name,
self._model_version):
return f"Model {self._model_name}:{self._model_version} is not ready"
return None
if __name__ == "__main__":
model_name = "paddlecls"
model_version = "1"
url = "localhost:8001"
runner = SyncGRPCTritonRunner(url, model_name, model_version)
im = cv2.imread("ILSVRC2012_val_00000010.jpeg")
im = np.array([im, ])
# batch input
# im = np.array([im, im, im])
for i in range(1):
result = runner.Run([im, ])
for name, values in result.items():
print("output_name:", name)
# values is batch
for value in values:
value = json.loads(value)
print(value)
../../../docs/zh_CN/fastdeploy/sophgo/README.md
\ No newline at end of file
PROJECT(infer_demo C CXX)
CMAKE_MINIMUM_REQUIRED (VERSION 3.10)
# Specify the path of the downloaded and extracted FastDeploy SDK
option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.")
set(ENABLE_LITE_BACKEND OFF)
#set(FDLIB ${FASTDEPLOY_INSTALL_DIR})
include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
# Add the FastDeploy header dependencies
include_directories(${FASTDEPLOY_INCS})
include_directories(${FastDeploy_INCLUDE_DIRS})
add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc)
# Link against the FastDeploy libraries
target_link_libraries(infer_demo ${FASTDEPLOY_LIBS})
../../../../docs/zh_CN/fastdeploy/sophgo/cpp/README.md
\ No newline at end of file
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy/vision.h"
#include <string>
#ifdef WIN32
const char sep = '\\';
#else
const char sep = '/';
#endif
void InitAndInfer(const std::string &model_dir, const std::string &image_file) {
auto model_file = model_dir + sep + "resnet50_1684x_f32.bmodel";
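  // The SOPHGO .bmodel file already packages the weights, so the params file
  // below is left empty.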
auto params_file = model_dir + sep + "";
auto config_file = model_dir + sep + "preprocess_config.yaml";
fastdeploy::RuntimeOption option;
option.UseSophgo();
auto model_format = fastdeploy::ModelFormat::SOPHGO;
auto model = fastdeploy::vision::classification::PaddleClasModel(
model_file, params_file, config_file, option, model_format);
assert(model.Initialized());
auto im = cv::imread(image_file);
fastdeploy::vision::ClassifyResult res;
if (!model.Predict(im, &res)) {
std::cerr << "Failed to predict." << std::endl;
return;
}
std::cout << res.Str() << std::endl;
}
int main(int argc, char *argv[]) {
if (argc < 3) {
std::cout << "Usage: infer_demo path/to/model "
"path/to/image "
"run_option, "
"e.g ./infer_demo ./bmodel ./test.jpeg"
<< std::endl;
return -1;
}
std::string model_dir = argv[1];
std::string test_image = argv[2];
InitAndInfer(model_dir, test_image);
return 0;
}
../../../../docs/zh_CN/fastdeploy/sophgo/python/README.md
\ No newline at end of file
import fastdeploy as fd
import cv2
import os
def parse_arguments():
import argparse
import ast
parser = argparse.ArgumentParser()
parser.add_argument("--model", required=True, help="Path of model.")
parser.add_argument(
"--config_file", required=True, help="Path of config file.")
parser.add_argument(
"--image", type=str, required=True, help="Path of test image file.")
parser.add_argument(
"--topk", type=int, default=1, help="Return topk results.")
return parser.parse_args()
args = parse_arguments()
# Set up the runtime and load the model
runtime_option = fd.RuntimeOption()
runtime_option.use_sophgo()
model_file = args.model
params_file = ""
config_file = args.config_file
model = fd.vision.classification.PaddleClasModel(
model_file,
params_file,
config_file,
runtime_option=runtime_option,
model_format=fd.ModelFormat.SOPHGO)
# Predict the image classification result
im = cv2.imread(args.image)
result = model.predict(im, args.topk)
print(result)
../../../docs/zh_CN/fastdeploy/web/README.md
\ No newline at end of file
# High-Performance All-Scenario Deployment of PaddleClas Models with FastDeploy
## Contents
- [Introduction to FastDeploy](#introduction-to-fastdeploy)
- [Image Classification Model Deployment](#image-classification-model-deployment)
- [FAQ](#faq)
## 1. Introduction to FastDeploy
<div id="introduction-to-fastdeploy"></div>
**[⚡️FastDeploy](https://github.com/PaddlePaddle/FastDeploy)** is an **all-scenario**, **easy-to-use and flexible**, and **highly efficient** AI inference deployment tool that supports **cloud, edge, and device** deployment. With FastDeploy, PaddleClas models can be deployed quickly and easily on more than 10 kinds of hardware, including X86 CPU, NVIDIA GPU, Phytium CPU, ARM CPU, Intel GPU, Graphcore IPU, Kunlunxin, Ascend, Rockchip, Amlogic, and Sophgo, with support for multiple inference backends such as Paddle Inference, Paddle Lite, TensorRT, OpenVINO, ONNXRuntime, RKNPU2, and SOPHGO.
<div align="center">
<img src="https://user-images.githubusercontent.com/31974251/224941235-d5ea4ed0-7626-4c62-8bbd-8e4fad1e72ad.png" >
</div>
## 2. Image Classification Model Deployment
<div id="image-classification-model-deployment"></div>
### 2.1 Hardware Support Matrix
|Hardware|Supported|Guide|Python|C++|
|:---:|:---:|:---:|:---:|:---:|
|X86 CPU|✅|[Link](./cpu-gpu)|✅|✅|
|NVIDIA GPU|✅|[Link](./cpu-gpu)|✅|✅|
|Phytium CPU|✅|[Link](./cpu-gpu)|✅|✅|
|ARM CPU|✅|[Link](./cpu-gpu)|✅|✅|
|Intel GPU (integrated)|✅|[Link](./cpu-gpu)|✅|✅|
|Intel GPU (discrete)|✅|[Link](./cpu-gpu)|✅|✅|
|Graphcore IPU|✅|[Link](./graphcore)|✅|✅|
|Kunlunxin|✅|[Link](./kunlun)|✅|✅|
|Ascend|✅|[Link](./ascend)|✅|✅|
|Rockchip|✅|[Link](./rockchip)|✅|✅|
|Amlogic|✅|[Link](./amlogic)|--|✅|
|Sophgo|✅|[Link](./sophgo)|✅|✅|
### 2.2 Detailed Usage Documents
- X86 CPU
  - [Model preparation](./cpu-gpu)
  - [Python deployment example](./cpu-gpu/python/)
  - [C++ deployment example](./cpu-gpu/cpp/)
- NVIDIA GPU
  - [Model preparation](./cpu-gpu)
  - [Python deployment example](./cpu-gpu/python/)
  - [C++ deployment example](./cpu-gpu/cpp/)
- Phytium CPU
  - [Model preparation](./cpu-gpu)
  - [Python deployment example](./cpu-gpu/python/)
  - [C++ deployment example](./cpu-gpu/cpp/)
- ARM CPU
  - [Model preparation](./cpu-gpu)
  - [Python deployment example](./cpu-gpu/python/)
  - [C++ deployment example](./cpu-gpu/cpp/)
- Intel GPU
  - [Model preparation](./cpu-gpu)
  - [Python deployment example](./cpu-gpu/python/)
  - [C++ deployment example](./cpu-gpu/cpp/)
- Graphcore IPU
  - [Model preparation](./graphcore)
  - [Python deployment example](./graphcore/python/)
  - [C++ deployment example](./graphcore/cpp/)
- Kunlunxin XPU
  - [Model preparation](./kunlun)
  - [Python deployment example](./kunlun/python/)
  - [C++ deployment example](./kunlun/cpp/)
- Ascend
  - [Model preparation](./ascend)
  - [Python deployment example](./ascend/python/)
  - [C++ deployment example](./ascend/cpp/)
- Rockchip
  - [Model preparation](./rockchip/)
  - [Python deployment example](./rockchip/rknpu2/)
  - [C++ deployment example](./rockchip/rknpu2/)
- Amlogic
  - [Model preparation](./amlogic/a311d/)
  - [C++ deployment example](./amlogic/a311d/cpp/)
- Sophgo
  - [Model preparation](./sophgo/)
  - [Python deployment example](./sophgo/python/)
  - [C++ deployment example](./sophgo/cpp/)
### 2.3 More Deployment Options
- [Android ARM CPU deployment](./android)
- [Serving deployment](./serving)
- [Web deployment](./web)
- [Automated model compression tool](./quantize)
## 3. FAQ
<div id="faq"></div>
If you run into problems, check the FAQ collection, search the existing FastDeploy issues, *or file an [issue](https://github.com/PaddlePaddle/FastDeploy/issues) against FastDeploy*:
[FAQ collection](https://github.com/PaddlePaddle/FastDeploy/tree/develop/docs/cn/faq)
[FastDeploy issues](https://github.com/PaddlePaddle/FastDeploy/issues)
# Deploying PaddleClas Models on Amlogic NPUs with FastDeploy
## 1. Overview
The Amlogic A311D is an advanced AI application processor. PaddleClas models can be deployed on the A311D through FastDeploy, based on Paddle Lite. **Note**: as an IP vendor, VeriSilicon does not ship physical SoC products itself; it licenses its IP to chip vendors such as Amlogic and Rockchip. This document therefore applies to chips that license VeriSilicon's NPU IP: as long as a chip does not substantially modify VeriSilicon's underlying libraries, it can use this document as a reference and tutorial for Paddle Lite inference deployment. In this document, the NPUs in Amlogic SoCs and Rockchip SoCs are collectively referred to as VeriSilicon NPUs. Deployment is currently supported on the following chips:
- Amlogic A311D
- Amlogic C308X
- Amlogic S905D3
This example uses the Amlogic A311D to show how to deploy quantized PaddleClas models with FastDeploy.
## 2. Pre-exported Model List
FastDeploy provides pre-quantized models for deployment. For more models, users are welcome to quantize models themselves with the [FastDeploy one-click automated model compression tool](https://github.com/PaddlePaddle/FastDeploy/tree/develop/tools/common_tools/auto_compression) and then deploy them.
| Model | Quantization Method |
|:---------------| :----- |
| [ResNet50_vd](https://bj.bcebos.com/paddlehub/fastdeploy/resnet50_vd_ptq.tar) | Post-training quantization |
| [MobileNetV1_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/mobilenetv1_ssld_ptq.tar) | Post-training quantization |
## 3. Detailed Deployment Examples
Currently, only C++ deployment is supported on the A311D.
- [C++ deployment](cpp)
# PaddleClas A311D Development Board C++ Deployment Example
The `infer.cc` provided in this directory helps users quickly deploy PaddleClas quantized models on the A311D with accelerated inference.
## 1. Deployment Environment Preparation
Before deploying, complete the following preparation:
1. Build the A311D prediction library yourself; see [Building the A311D deployment environment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装)
## 2. Quantized Model Preparation
1. Note that models deployed on the A311D must be quantized. You can directly deploy the quantized models provided by FastDeploy.
2. You can also quantize a model yourself with the FastDeploy [one-click automated model compression tool](https://github.com/PaddlePaddle/FastDeploy/tree/develop/tools/common_tools/auto_compression/) and deploy the resulting quantized model. (Note: inference with a quantized classification model still requires the inference_cls.yaml file from the FP32 model folder; a self-quantized model folder does not contain this yaml file, so copy it from the FP32 model folder into the quantized model folder.)
## 3. Deploying the Quantized ResNet50_vd Classification Model on the A311D
Follow these steps to deploy the quantized ResNet50_vd model on the A311D:
1. Cross-compile the FastDeploy library; see [Cross-compiling FastDeploy](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/a311d.md)
2. Copy the compiled library into the current directory with the following commands:
```bash
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd PaddleClas/deploy/fastdeploy/amlogic/a311d/cpp/
cp -r FastDeploy/build/fastdeploy-timvx/ ./
```
3. Download the model and example image needed for deployment into the current directory:
```bash
mkdir models && mkdir images
wget https://bj.bcebos.com/paddlehub/fastdeploy/resnet50_vd_ptq.tar
tar -xvf resnet50_vd_ptq.tar
cp -r resnet50_vd_ptq models
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
cp -r ILSVRC2012_val_00000010.jpeg images
```
4. Build the deployment example with the following commands:
```bash
mkdir build && cd build
cmake -DCMAKE_TOOLCHAIN_FILE=${PWD}/../fastdeploy-timvx/toolchain.cmake -DFASTDEPLOY_INSTALL_DIR=${PWD}/../fastdeploy-timvx -DTARGET_ABI=arm64 ..
make -j8
make install
# After a successful build, an install folder is generated containing the runnable demo and the libraries needed for deployment
```
5. Deploy the ResNet50 classification model to the Amlogic A311D via adb with the following commands:
```bash
# Enter the install directory
cd build/install/
# The command below reads: bash run_with_adb.sh <demo to run> <model path> <image path> <device DEVICE_ID>
bash run_with_adb.sh infer_demo resnet50_vd_ptq ILSVRC2012_val_00000010.jpeg $DEVICE_ID
```
After successful deployment, the result looks like this:
<img width="640" src="https://user-images.githubusercontent.com/30516196/200767389-26519e50-9e4f-4fe1-8d52-260718f73476.png">
# PaddleClas Android Demo Guide
Real-time PaddleClas image classification on Android. The demo is easy to use and open to extension, e.g. running your own trained models in it.
## Environment Preparation
1. Install Android Studio locally; see the [Android Studio official site](https://developer.android.com/studio) for detailed installation instructions.
2. Prepare an Android phone and enable USB debugging: `Settings -> Find Developer Options -> enable Developer Options and USB Debugging`.
## Deployment Steps
1. Open the paddleclas/android project in Android Studio.
2. Connect the phone to the computer, enable USB debugging and file transfer mode, and connect your device in Android Studio (the phone must allow installing apps over USB).
<p align="center">
<img width="1280" alt="image" src="https://user-images.githubusercontent.com/31974251/197338597-2c9e1cf0-569b-49b9-a7fb-cdec71921af8.png">
</p>
> **Note:**
>> If you hit an NDK configuration error while importing, building, or running the project, open `File > Project Structure > SDK Location` and change `Android SDK location` to the SDK path configured on your machine.
3. Click the Run button to automatically build the APP and install it on the phone. (This step automatically downloads the prebuilt FastDeploy Android library and requires a network connection.)
The result looks as follows. Image 1: the APP installed on the phone; Image 2: the APP in action, automatically recognizing and labeling objects in the image; Image 3: the APP settings, opened via the settings icon in the top-right corner, where different options can be tried.
| APP Icon | APP Result | APP Settings |
| --- | --- | --- |
| ![app_pic ](https://user-images.githubusercontent.com/14995488/203484427-83de2316-fd60-4baf-93b6-3755f9b5559d.jpg) | ![app_res](https://user-images.githubusercontent.com/14995488/203494666-16528cb3-0ce2-48fc-9f9e-37da17b2c2f6.jpg) | ![app_setup](https://user-images.githubusercontent.com/14995488/203484436-57fdd041-7dcc-4e0e-b6cb-43e5ac1e729b.jpg) |
## PaddleClasModel Java API Description
- Model initialization API: the model can be initialized in two ways: directly through a constructor, or by calling the init function at an appropriate point in the program. PaddleClasModel initialization parameters:
  - modelFile: String, path of the Paddle-format model file, e.g. model.pdmodel
  - paramFile: String, path of the Paddle-format parameters file, e.g. model.pdiparams
  - configFile: String, preprocessing config file for model inference, e.g. infer_cfg.yml
  - labelFile: String, optional, path of the label file used for visualization, e.g. imagenet1k_label_list.txt, one label per line
  - option: RuntimeOption, optional, model initialization options. If omitted, the default runtime options are used.
```java
// Constructors: w/ or w/o label file
public PaddleClasModel(); // empty constructor; call init later to initialize
public PaddleClasModel(String modelFile, String paramsFile, String configFile);
public PaddleClasModel(String modelFile, String paramsFile, String configFile, String labelFile);
public PaddleClasModel(String modelFile, String paramsFile, String configFile, RuntimeOption option);
public PaddleClasModel(String modelFile, String paramsFile, String configFile, String labelFile, RuntimeOption option);
// Call init manually to initialize: w/ or w/o label file
public boolean init(String modelFile, String paramsFile, String configFile, RuntimeOption option);
public boolean init(String modelFile, String paramsFile, String configFile, String labelFile, RuntimeOption option);
```
- Model prediction API: includes a direct-prediction API and an API with visualization. Direct prediction runs inference only, without saving images or rendering results onto a Bitmap. Prediction with visualization produces the inference result, saves the visualized image to the specified path, and renders the result onto a Bitmap (currently ARGB8888 format), which can later be displayed in a camera view.
```java
// Direct prediction: does not save the image or render the result onto a Bitmap
public ClassifyResult predict(Bitmap ARGB8888Bitmap)
// Prediction with visualization: saves the visualized image to the given path and renders the result onto the Bitmap
public ClassifyResult predict(Bitmap ARGB8888Bitmap, String savedImagePath, float scoreThreshold)
```
- Model resource release API: call release() to release model resources; it returns true on success and false on failure. Call initialized() to check whether the model was initialized successfully; true means success, false means failure.
```java
public boolean release(); // release native resources
public boolean initialized(); // check whether initialization succeeded
```
- RuntimeOption settings
```java
public void enableLiteFp16(); // enable fp16 inference
public void disableLiteFP16(); // disable fp16 inference
public void setCpuThreadNum(int threadNum); // set the number of threads
public void setLitePowerMode(LitePowerMode mode); // set the power mode
public void setLitePowerMode(String modeStr); // set the power mode by string
public void enableRecordTimeOfRuntime(); // record model runtime cost
```
- ClassifyResult, the model result
```java
public float[] mScores; // [n] scores
public int[] mLabelIds; // [n] class IDs
public boolean initialized(); // whether the result is valid
```
- Example 1: use a constructor with the default RuntimeOption
```java
import java.nio.ByteBuffer;
import android.graphics.Bitmap;
import android.opengl.GLES20;
import com.baidu.paddle.fastdeploy.vision.ClassifyResult;
import com.baidu.paddle.fastdeploy.vision.classification.PaddleClasModel;
// Initialize the model
PaddleClasModel model = new PaddleClasModel("MobileNetV1_x0_25_infer/inference.pdmodel",
                                            "MobileNetV1_x0_25_infer/inference.pdiparams",
                                            "MobileNetV1_x0_25_infer/inference_cls.yml");
// Read an image: the following is pseudocode for obtaining a Bitmap
ByteBuffer pixelBuffer = ByteBuffer.allocate(width * height * 4);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer);
Bitmap ARGB8888ImageBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
ARGB8888ImageBitmap.copyPixelsFromBuffer(pixelBuffer);
// Run inference
ClassifyResult result = model.predict(ARGB8888ImageBitmap);
// Release model resources
model.release();
```
- Example 2: call init manually at an appropriate point and customize the RuntimeOption
```java
// imports as above ...
import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.LitePowerMode;
import com.baidu.paddle.fastdeploy.vision.ClassifyResult;
import com.baidu.paddle.fastdeploy.vision.classification.PaddleClasModel;
// Create an empty model
PaddleClasModel model = new PaddleClasModel();
// Model paths
String modelFile = "MobileNetV1_x0_25_infer/inference.pdmodel";
String paramFile = "MobileNetV1_x0_25_infer/inference.pdiparams";
String configFile = "MobileNetV1_x0_25_infer/inference_cls.yml";
// Configure the RuntimeOption
RuntimeOption option = new RuntimeOption();
option.setCpuThreadNum(2);
option.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH);
option.enableRecordTimeOfRuntime();
option.enableLiteFp16();
// Initialize with init
model.init(modelFile, paramFile, configFile, option);
// Bitmap reading, prediction, and resource release: as above ...
```
For more detailed usage, see [MainActivity](./app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/classification/ClassificationMainActivity.java).
## Replacing the FastDeploy Prediction Library and Models
Replacing the FastDeploy prediction library and models is straightforward. The prediction library is located at `app/libs/fastdeploy-android-xxx-shared`, where `xxx` is the version number of the library you are using. The model is located at `app/src/main/assets/models/MobileNetV1_x0_25_infer`.
- Steps to replace the FastDeploy prediction library:
  - Download or build the latest FastDeploy Android prediction library and unpack it into `app/libs`;
  - Update the library path in `app/src/main/cpp/CMakeLists.txt` to point to your downloaded or built library, e.g.:
```cmake
set(FastDeploy_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../../../libs/fastdeploy-android-xxx-shared")
```
- Steps to replace the PaddleClas model:
  - Put your PaddleClas classification model under `app/src/main/assets/models`;
  - Update the default model path in `app/src/main/res/values/strings.xml`, e.g.:
```xml
<!-- Change this path to your model, e.g. models/MobileNetV2_x0_25_infer -->
<string name="CLASSIFICATION_MODEL_DIR_DEFAULT">models/MobileNetV1_x0_25_infer</string>
<string name="CLASSIFICATION_LABEL_PATH_DEFAULT">labels/imagenet1k_label_list.txt</string>
```
## More Reference Documents
If you want more FastDeploy Java API documentation or are interested in accessing the FastDeploy C++ API through JNI, see:
- [Using the FastDeploy Java SDK on Android](https://github.com/PaddlePaddle/FastDeploy/tree/develop/java/android)
- [Using the FastDeploy C++ SDK on Android](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/use_cpp_sdk_on_android.md)
[English](README.md) | 简体中文
# Deploying PaddleClas Models on Ascend with FastDeploy
## 1. Overview
PaddleClas image classification models can be deployed on Ascend hardware through FastDeploy.
## 2. Model Version Notes
- [PaddleClas Release/2.4](https://github.com/PaddlePaddle/PaddleClas/tree/release/2.4)
FastDeploy currently supports deployment of the following models:
- [PP-LCNet models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-LCNet.md)
- [PP-LCNetV2 models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-LCNetV2.md)
- [EfficientNet models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/EfficientNet_and_ResNeXt101_wsl.md)
- [GhostNet models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Mobile.md)
- [MobileNet models (v1, v2, v3)](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Mobile.md)
- [ShuffleNet models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Mobile.md)
- [SqueezeNet models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Others.md)
- [Inception models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Inception.md)
- [PP-HGNet models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-HGNet.md)
- [ResNet models (including the vd variants)](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/ResNet_and_vd.md)
### 2.1 Preparing PaddleClas Deployment Models
For exporting PaddleClas models, see the PaddleClas documentation on [model export](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/inference_deployment/export_model.md#2-%E5%88%86%E7%B1%BB%E6%A8%A1%E5%9E%8B%E5%AF%BC%E5%87%BA).
Note: an exported PaddleClas model contains only the two files `inference.pdmodel` and `inference.pdiparams`. For deployment you also need the generic [inference_cls.yaml](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/deploy/configs/inference_cls.yaml) file provided by PaddleClas; FastDeploy reads the preprocessing information required at inference time from this yaml file. You can download the file and use it directly, but adjust its configuration parameters to your needs, following the infer section of the corresponding PaddleClas training [config](https://github.com/PaddlePaddle/PaddleClas/tree/release/2.4/ppcls/configs/ImageNet).
### 2.2 Downloading Pretrained Models
For developers' convenience, some models exported by PaddleClas (including the inference_cls.yaml file) are provided below and can be downloaded and used directly.
| Model | Params File Size | Input Shape | Top1 | Top5 |
|:---------------------------------------------------------------- |:----- |:----- | :----- | :----- |
| [PPLCNet_x1_0](https://bj.bcebos.com/paddlehub/fastdeploy/PPLCNet_x1_0_infer.tgz) | 12MB | 224x224 |71.32% | 90.03% |
| [PPLCNetV2_base](https://bj.bcebos.com/paddlehub/fastdeploy/PPLCNetV2_base_infer.tgz) | 26MB | 224x224 |77.04% | 93.27% |
| [EfficientNetB7](https://bj.bcebos.com/paddlehub/fastdeploy/EfficientNetB7_infer.tgz) | 255MB | 600x600 | 84.3% | 96.9% |
| [EfficientNetB0_small](https://bj.bcebos.com/paddlehub/fastdeploy/EfficientNetB0_small_infer.tgz)| 18MB | 224x224 | 75.8% | 75.8% |
| [GhostNet_x1_3_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/GhostNet_x1_3_ssld_infer.tgz) | 29MB | 224x224 | 75.7% | 92.5% |
| [GhostNet_x0_5](https://bj.bcebos.com/paddlehub/fastdeploy/GhostNet_x0_5_infer.tgz) | 10MB | 224x224 | 66.8% | 86.9% |
| [MobileNetV1_x0_25](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV1_x0_25_infer.tgz) | 1.9MB | 224x224 | 51.4% | 75.5% |
| [MobileNetV1_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV1_ssld_infer.tgz) | 17MB | 224x224 | 77.9% | 93.9% |
| [MobileNetV2_x0_25](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV2_x0_25_infer.tgz) | 5.9MB | 224x224 | 53.2% | 76.5% |
| [MobileNetV2_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV2_ssld_infer.tgz) | 14MB | 224x224 | 76.74% | 93.39% |
| [MobileNetV3_small_x0_35_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV3_small_x0_35_ssld_infer.tgz) | 6.4MB | 224x224 | 55.55% | 77.71% |
| [MobileNetV3_large_x1_0_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV3_large_x1_0_ssld_infer.tgz) | 22MB | 224x224 | 78.96% | 94.48% |
| [ShuffleNetV2_x0_25](https://bj.bcebos.com/paddlehub/fastdeploy/ShuffleNetV2_x0_25_infer.tgz) | 2.4MB | 224x224 | 49.9% | 73.79% |
| [ShuffleNetV2_x2_0](https://bj.bcebos.com/paddlehub/fastdeploy/ShuffleNetV2_x2_0_infer.tgz) | 29MB | 224x224 | 73.15% | 91.2% |
| [SqueezeNet1_1](https://bj.bcebos.com/paddlehub/fastdeploy/SqueezeNet1_1_infer.tgz) | 4.8MB | 224x224 | 60.1% | 81.9% |
| [InceptionV3](https://bj.bcebos.com/paddlehub/fastdeploy/InceptionV3_infer.tgz) | 92MB | 299x299 | 79.14% | 94.59% |
| [PPHGNet_tiny_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/PPHGNet_tiny_ssld_infer.tgz) | 57MB | 224x224 | 81.95% | 96.12% |
| [PPHGNet_base_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/PPHGNet_base_ssld_infer.tgz) | 274MB | 224x224 | 85.0% | 97.35% |
| [ResNet50_vd](https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz) | 98MB | 224x224 | 79.12% | 94.44% |
## 3. Detailed Deployment Examples
- [Python deployment](python)
- [C++ deployment](cpp)
# PaddleClas Ascend C++ Deployment Example
This directory provides `infer.cc` for deploying PaddleClas models on Ascend AI processors.
## 1. Deployment Environment Preparation
Before deploying, confirm the following two steps:
1. Build the Ascend AI processor prediction library yourself; see [Building the Ascend deployment environment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装)
2. Initialize the environment at deployment time; see [How to deploy on Ascend AI processors with C++](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/use_sdk_on_ascend.md)
## 2. Deployment Model Preparation
Before deploying, prepare the inference model you want to run; you can download the required model from the [list of PaddleClas models supported by FastDeploy](../README.md).
## 3. Running the Deployment Example
Taking inference on Linux as an example, run the following commands in this directory to build and test. FastDeploy version 1.0.0 or above (x.x.x>=1.0.0) is required for this model.
```bash
# Download the deployment example code
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd PaddleClas/deploy/fastdeploy/ascend/cpp
mkdir build
cd build
# Build infer_demo against the compiled FastDeploy library
cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-ascend
make -j
# Download the ResNet50_vd model files and a test image
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Deploy on Ascend
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg
```
The result looks as follows:
```bash
ClassifyResult(
label_ids: 153,
scores: 0.686229,
)
```
## 4. More Guides
- [PaddleClas C++ API reference](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/cpp/html/namespacefastdeploy_1_1vision_1_1classification.html)
- [Overview of deploying PaddleClas models with FastDeploy](../../)
- [PaddleClas Python deployment](../python)
# PaddleClas Ascend Python Deployment Example
This directory provides `infer.py` for quickly deploying PaddleClas on Ascend AI processors.
## 1. Deployment Environment Preparation
Before deploying, build and install the FastDeploy Python wheel for Ascend AI processors yourself; see [Building the Ascend deployment environment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装)
## 2. Deployment Model Preparation
Before deploying, prepare the inference model you want to run; you can download the required model from the [list of PaddleClas models supported by FastDeploy](../README.md).
## 3. Running the Deployment Example
```bash
# Install the FastDeploy Ascend prediction library Python package (see `Deployment Environment Preparation` for details)
# Download the deployment example code
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd PaddleClas/deploy/fastdeploy/ascend/python
# Download the ResNet50_vd model files and a test image
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Run inference on the Ascend AI processor
python infer.py --model ResNet50_vd_infer --image ILSVRC2012_val_00000010.jpeg --topk 1
```
The result looks as follows:
```bash
ClassifyResult(
label_ids: 153,
scores: 0.686229,
)
```
## 4. More Guides
- [PaddleClas Python API reference](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/python/html/image_classification.html)
- [Overview of deploying PaddleClas models with FastDeploy](../../)
- [PaddleClas C++ deployment](../cpp)
[English](README.md) | 简体中文
# Deploying PaddleClas Models on CPU and GPU with FastDeploy
## 1. Overview
PaddleClas models can be deployed quickly on NVIDIA GPU, X86 CPU, Phytium CPU, ARM CPU, and Intel GPU (discrete/integrated) hardware through FastDeploy.
## 2. Model Version Notes
- [PaddleClas Release/2.4](https://github.com/PaddlePaddle/PaddleClas/tree/release/2.4)
FastDeploy currently supports deployment of the following models:
- [PP-LCNet models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-LCNet.md)
- [PP-LCNetV2 models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-LCNetV2.md)
- [EfficientNet models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/EfficientNet_and_ResNeXt101_wsl.md)
- [GhostNet models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Mobile.md)
- [MobileNet models (v1, v2, v3)](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Mobile.md)
- [ShuffleNet models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Mobile.md)
- [SqueezeNet models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Others.md)
- [Inception models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Inception.md)
- [PP-HGNet models](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-HGNet.md)
- [ResNet models (including the vd variants)](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/ResNet_and_vd.md)
### 2.1 Preparing PaddleClas Deployment Models
For exporting PaddleClas models, see the PaddleClas documentation on [model export](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/inference_deployment/export_model.md#2-%E5%88%86%E7%B1%BB%E6%A8%A1%E5%9E%8B%E5%AF%BC%E5%87%BA).
Note: an exported PaddleClas model contains only the two files `inference.pdmodel` and `inference.pdiparams`. For deployment you also need the generic [inference_cls.yaml](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/deploy/configs/inference_cls.yaml) file provided by PaddleClas; FastDeploy reads the preprocessing information required at inference time from this yaml file. You can download the file and use it directly, but adjust its configuration parameters to your needs, following the infer section of the corresponding PaddleClas training [config](https://github.com/PaddlePaddle/PaddleClas/tree/release/2.4/ppcls/configs/ImageNet).
### 2.2 Downloading Pretrained Models
For developers' convenience, some models exported by PaddleClas (including the inference_cls.yaml file) are provided below and can be downloaded and used directly.
| Model | Params File Size | Input Shape | Top1 | Top5 |
|:---------------------------------------------------------------- |:----- |:----- | :----- | :----- |
| [PPLCNet_x1_0](https://bj.bcebos.com/paddlehub/fastdeploy/PPLCNet_x1_0_infer.tgz) | 12MB | 224x224 |71.32% | 90.03% |
| [PPLCNetV2_base](https://bj.bcebos.com/paddlehub/fastdeploy/PPLCNetV2_base_infer.tgz) | 26MB | 224x224 |77.04% | 93.27% |
| [EfficientNetB7](https://bj.bcebos.com/paddlehub/fastdeploy/EfficientNetB7_infer.tgz) | 255MB | 600x600 | 84.3% | 96.9% |
| [EfficientNetB0_small](https://bj.bcebos.com/paddlehub/fastdeploy/EfficientNetB0_small_infer.tgz)| 18MB | 224x224 | 75.8% | 75.8% |
| [GhostNet_x1_3_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/GhostNet_x1_3_ssld_infer.tgz) | 29MB | 224x224 | 75.7% | 92.5% |
| [GhostNet_x0_5](https://bj.bcebos.com/paddlehub/fastdeploy/GhostNet_x0_5_infer.tgz) | 10MB | 224x224 | 66.8% | 86.9% |
| [MobileNetV1_x0_25](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV1_x0_25_infer.tgz) | 1.9MB | 224x224 | 51.4% | 75.5% |
| [MobileNetV1_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV1_ssld_infer.tgz) | 17MB | 224x224 | 77.9% | 93.9% |
| [MobileNetV2_x0_25](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV2_x0_25_infer.tgz) | 5.9MB | 224x224 | 53.2% | 76.5% |
| [MobileNetV2_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV2_ssld_infer.tgz) | 14MB | 224x224 | 76.74% | 93.39% |
| [MobileNetV3_small_x0_35_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV3_small_x0_35_ssld_infer.tgz) | 6.4MB | 224x224 | 55.55% | 77.71% |
| [MobileNetV3_large_x1_0_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV3_large_x1_0_ssld_infer.tgz) | 22MB | 224x224 | 78.96% | 94.48% |
| [ShuffleNetV2_x0_25](https://bj.bcebos.com/paddlehub/fastdeploy/ShuffleNetV2_x0_25_infer.tgz) | 2.4MB | 224x224 | 49.9% | 73.79% |
| [ShuffleNetV2_x2_0](https://bj.bcebos.com/paddlehub/fastdeploy/ShuffleNetV2_x2_0_infer.tgz) | 29MB | 224x224 | 73.15% | 91.2% |
| [SqueezeNet1_1](https://bj.bcebos.com/paddlehub/fastdeploy/SqueezeNet1_1_infer.tgz) | 4.8MB | 224x224 | 60.1% | 81.9% |
| [InceptionV3](https://bj.bcebos.com/paddlehub/fastdeploy/InceptionV3_infer.tgz) | 92MB | 299x299 | 79.14% | 94.59% |
| [PPHGNet_tiny_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/PPHGNet_tiny_ssld_infer.tgz) | 57MB | 224x224 | 81.95% | 96.12% |
| [PPHGNet_base_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/PPHGNet_base_ssld_infer.tgz) | 274MB | 224x224 | 85.0% | 97.35% |
| [ResNet50_vd](https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz) | 98MB | 224x224 | 79.12% | 94.44% |
## 3. Detailed Deployment Examples
- [Python deployment](python)
- [C++ deployment](cpp)
- [C deployment](c)
- [C# deployment](csharp)
[English](README.md) | 简体中文
# PaddleClas CPU-GPU C Deployment Example
This directory provides `infer.c`, an example that uses the C API to quickly deploy PaddleClas models on CPU/GPU.
## 1. Overview
PaddleClas supports quickly deploying image classification models on NVIDIA GPU, X86 CPU, Phytium CPU, ARM CPU, and Intel GPU (discrete/integrated) hardware through FastDeploy.
## 2. Deployment Environment Preparation
Before deploying, confirm your software and hardware environment and download the prebuilt deployment library; see the [FastDeploy installation documentation](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#FastDeploy预编译库安装) to install the FastDeploy prebuilt library. FastDeploy version 1.0.4 or above (x.x.x>=1.0.4) is required for this model.
## 3. Deployment Model Preparation
Before deploying, prepare the inference model you want to run; you can download the required model from the [list of PaddleClas models supported by FastDeploy](../README.md).
## 4. Running the Deployment Example
Taking inference on Linux as an example, run the following commands in this directory to build and test.
```bash
# Download the deployment example code
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd PaddleClas/deploy/fastdeploy/cpu-gpu/c
mkdir build
cd build
# Download the FastDeploy prebuilt library; choose a suitable version from the `FastDeploy prebuilt libraries` mentioned above
wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz
tar xvf fastdeploy-linux-x64-x.x.x.tgz
cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x
make -j
# Download the ResNet50_vd model files and a test image
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Inference with OpenVINO on CPU
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 0
# Inference with TensorRT on GPU
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 1
```
- Note: the commands above only work on Linux or macOS. For using the SDK on Windows, see [How to use the FastDeploy C++ SDK on Windows](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/use_sdk_on_windows.md)
## 5. Introduction to the PaddleClas C API
A brief overview of the PaddleClas C API follows.
- To switch the deployment backend or perform other customization, see the [C Runtime API](https://baidu-paddle.github.io/fastdeploy-api/c/html/runtime__option_8h.html).
- For more PaddleClas C APIs, see [C PaddleClas API](https://github.com/PaddlePaddle/FastDeploy/blob/develop/c_api/fastdeploy_capi/vision/classification/ppcls/model.h)
### Configuration
```c
FD_C_RuntimeOptionWrapper* FD_C_CreateRuntimeOptionWrapper()
```
> Creates a RuntimeOption configuration object and returns a pointer for operating on it.
>
> **Returns**
>
> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): pointer to the RuntimeOption object
```c
void FD_C_RuntimeOptionWrapperUseCpu(
    FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper)
```
> Enables CPU inference.
>
> **Parameters**
>
> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): pointer to the RuntimeOption object
```c
void FD_C_RuntimeOptionWrapperUseGpu(
    FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper,
    int gpu_id)
```
> Enables GPU inference.
>
> **Parameters**
>
> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): pointer to the RuntimeOption object
> * **gpu_id**(int): GPU device ID
### Model
```c
FD_C_PaddleClasModelWrapper* FD_C_CreatePaddleClasModelWrapper(
    const char* model_file, const char* params_file, const char* config_file,
    FD_C_RuntimeOptionWrapper* runtime_option,
    const FD_C_ModelFormat model_format)
```
> Creates a PaddleClas model and returns a pointer for operating on it.
>
> **Parameters**
>
> * **model_file**(const char*): model file path
> * **params_file**(const char*): parameters file path
> * **config_file**(const char*): config file path, i.e. the deployment yaml file exported by PaddleClas
> * **runtime_option**(FD_C_RuntimeOptionWrapper*): pointer to the RuntimeOption, i.e. the backend inference configuration
> * **model_format**(FD_C_ModelFormat): model format
>
> **Returns**
> * **fd_c_ppclas_wrapper**(FD_C_PaddleClasModelWrapper*): pointer to the PaddleClas model object
### Reading and Writing Images
```c
FD_C_Mat FD_C_Imread(const char* imgpath)
```
> Reads an image and returns a pointer to cv::Mat.
>
> **Parameters**
>
> * **imgpath**(const char*): image file path
>
> **Returns**
>
> * **imgmat**(FD_C_Mat): pointer to the cv::Mat image data
```c
FD_C_Bool FD_C_Imwrite(const char* savepath, FD_C_Mat img);
```
> Writes an image to a file.
>
> **Parameters**
>
> * **savepath**(const char*): path to save the image
> * **img**(FD_C_Mat): pointer to the image data
>
> **Returns**
>
> * **result**(FD_C_Bool): whether the operation succeeded
### Predict Function
```c
FD_C_Bool FD_C_PaddleClasModelWrapperPredict(
    __fd_take FD_C_PaddleClasModelWrapper* fd_c_ppclas_wrapper, FD_C_Mat img,
    FD_C_ClassifyResult* fd_c_ppclas_result)
```
>
> Model prediction interface: takes an input image and produces the classification result directly.
>
> **Parameters**
> * **fd_c_ppclas_wrapper**(FD_C_PaddleClasModelWrapper*): pointer to the PaddleClas model
> * **img**(FD_C_Mat): pointer to the input image, a cv::Mat object, e.g. obtained via FD_C_Imread
> * **fd_c_ppclas_result**(FD_C_ClassifyResult*): classification result, including label_id and the corresponding confidence; see [vision model prediction results](../../../../../docs/api/vision_results/) for a description of ClassifyResult
### Predict Result
```c
void FD_C_ClassifyResultStr(
    FD_C_ClassifyResult* fd_c_classify_result, char* str_buffer);
```
>
> Formats the result as a string.
>
> **Parameters**
> * **fd_c_classify_result**(FD_C_ClassifyResult*): pointer to the FD_C_ClassifyResult object
> * **str_buffer**(char*): buffer that receives the formatted result string
## 6. Other Documents
- [Overview of deploying PaddleClas models with FastDeploy](../../)
- [PaddleClas Python deployment](../python)
- [PaddleClas C++ deployment](../cpp)
- [PaddleClas C# deployment](../csharp)
# PaddleClas CPU-GPU C++ Deployment Example
This directory provides `infer.cc`, an example of quickly deploying PaddleClas models on CPU/GPU, and on GPU with TensorRT acceleration.
## 1. Overview
PaddleClas supports quickly deploying image classification models on NVIDIA GPU, X86 CPU, Phytium CPU, ARM CPU, and Intel GPU (discrete/integrated) hardware through FastDeploy.
## 2. Deployment Environment Preparation
Before deploying, confirm your software and hardware environment and download the prebuilt deployment library; see the [FastDeploy installation documentation](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#FastDeploy预编译库安装) to install the FastDeploy prebuilt library.
## 3. Deployment Model Preparation
Before deploying, prepare the inference model you want to run; you can download the required model from the [list of PaddleClas models supported by FastDeploy](../README.md).
## 4. Running the Deployment Example
Taking inference on Linux as an example, run the following commands in this directory to build and test. FastDeploy version 1.0.0 or above (x.x.x>=1.0.0) is required for this model.
```bash
# Download the deployment example code
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd PaddleClas/deploy/fastdeploy/cpu-gpu/cpp
mkdir build
cd build
# Download the FastDeploy prebuilt library; choose a suitable version from the `FastDeploy prebuilt libraries` mentioned above
wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz
tar xvf fastdeploy-linux-x64-x.x.x.tgz
cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x
make -j
# Download the ResNet50_vd model files and a test image
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Inference with Paddle Inference on CPU
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 0
# Inference with OpenVINO on CPU
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 1
# Inference with ONNX Runtime on CPU
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 2
# Inference with Paddle Lite on CPU
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 3
# Inference with Paddle Inference on GPU
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 4
# Inference with Paddle TensorRT on GPU
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 5
# Inference with ONNX Runtime on GPU
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 6
# Inference with NVIDIA TensorRT on GPU
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 7
```
The result looks as follows:
```bash
ClassifyResult(
label_ids: 153,
scores: 0.686229,
)
```
The commands above only work on Linux or macOS. For using the SDK on Windows, see:
- [How to use the FastDeploy C++ SDK on Windows](../../../../../docs/cn/faq/use_sdk_on_windows.md)
## 5. Deployment Example Options
`infer_demo` takes 3 arguments: the classification model, the test image, and a trailing numeric option.
The table below explains the meaning of the numeric option.
|Option|Meaning|
|:---:|:---:|
|0| Paddle Inference on CPU |
|1| OpenVINO on CPU |
|2| ONNX Runtime on CPU |
|3| Paddle Lite on CPU |
|4| Paddle Inference on GPU |
|5| Paddle TensorRT on GPU |
|6| ONNX Runtime on GPU |
|7| NVIDIA TensorRT on GPU |
- For how to use more inference backends and hardware with FastDeploy, see [How to switch the model inference backend](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/how_to_change_backend.md)
## 6. More Guides
- [PaddleClas C++ API reference](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/cpp/html/namespacefastdeploy_1_1vision_1_1classification.html)
- [PaddleClas Python deployment](../python)
- [PaddleClas C deployment](../c)
- [PaddleClas C# deployment](../csharp)
## 7. FAQ
- PaddleClas can run on multiple backends supported by FastDeploy, as shown in the table below. For switching backends, see [How to switch the model inference backend](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/how_to_change_backend.md)
|Hardware|Supported Backends|
|:---:|:---:|
|X86 CPU| Paddle Inference, ONNX Runtime, OpenVINO |
|ARM CPU| Paddle Lite |
|Phytium CPU| ONNX Runtime |
|NVIDIA GPU| Paddle Inference, ONNX Runtime, TensorRT |
- [Using Intel GPUs (discrete/integrated)](https://github.com/PaddlePaddle/FastDeploy/blob/develop/tutorials/intel_gpu/README.md)
- [Building the CPU deployment library](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/cpu.md)
- [Building the GPU deployment library](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/gpu.md)
- [Building the Jetson deployment library](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/jetson.md)
# PaddleClas CPU-GPU C#部署示例
本目录下提供`infer.cs`来调用C# API快速完成PaddleClas模型在CPU/GPU上部署的示例。
## 1. 说明
PaddleClas支持利用FastDeploy在NVIDIA GPU、X86 CPU、飞腾CPU、ARM CPU、Intel GPU(独立显卡/集成显卡)硬件上快速部署图像分类模型.
## 2. 部署环境准备
在部署前,需确认软硬件环境,同时下载预编译部署库,参考[FastDeploy安装文档](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#FastDeploy预编译库安装)安装FastDeploy预编译库. 在本目录执行如下命令即可在Windows完成编译测试,支持此模型需保证FastDeploy版本1.0.4以上(x.x.x>=1.0.4)
## 3. 部署模型准备
在部署前, 请准备好您所需要运行的推理模型, 您可以在[FastDeploy支持的PaddleClas模型列表](../README.md)中下载所需模型.
## 4. 部署示例
### 4.1 下载C#包管理程序nuget客户端
> https://dist.nuget.org/win-x86-commandline/v6.4.0/nuget.exe
下载完成后将该程序添加到环境变量**PATH**
### 4.2 下载模型文件和测试图片
> https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz # (下载后解压缩)
> https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
### 4.3 编译示例代码
本文档编译的示例代码的编译工具依赖VS 2019,**Windows打开x64 Native Tools Command Prompt for VS 2019命令工具**,通过如下命令开始编译
```shell
## 下载FastDeploy预编译库,用户可在上文提到的`FastDeploy预编译库`中自行选择合适的版本使用
https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz
# 下载部署示例代码
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd D:\PaddleClas\deploy\fastdeploy\cpu-gpu\csharp
mkdir build && cd build
cmake .. -G "Visual Studio 16 2019" -A x64 -DFASTDEPLOY_INSTALL_DIR=D:\fastdeploy-win-x64-gpu-x.x.x -DCUDA_DIRECTORY="C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.2"
nuget restore
msbuild infer_demo.sln /m:4 /p:Configuration=Release /p:Platform=x64
```
关于使用Visual Studio 2019创建sln工程,或者CMake工程等方式编译的更详细信息,可参考如下文档
- [在 Windows 使用 FastDeploy C++ SDK](https://github.com/PaddlePaddle/FastDeploy/tree/develop/docs/cn/faq/use_sdk_on_windows.md)
- [FastDeploy C++库在Windows上的多种使用方式](https://github.com/PaddlePaddle/FastDeploy/tree/develop/docs/cn/faq/use_sdk_on_windows_build.md)
## 4.4 运行可执行程序
注意Windows上运行时,需要将FastDeploy依赖的库拷贝至可执行程序所在目录, 或者配置环境变量。FastDeploy提供了工具帮助我们快速将所有依赖库拷贝至可执行程序所在目录,通过如下命令将所有依赖的dll文件拷贝至可执行程序所在的目录(可能生成的可执行文件在Release下还有一层目录,这里假设生成的可执行文件在Release处)
```shell
cd D:\Download\fastdeploy-win-x64-gpu-x.x.x
fastdeploy_init.bat install %cd% D:\PaddleClas\deploy\fastdeploy\cpu-gpu\csharp\build\Release
```
将dll拷贝到当前路径后,准备好模型和图片,使用如下命令运行可执行程序即可
```shell
cd Release
# CPU推理
infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 0
# GPU推理
infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 1
```
## 5. PaddleClas C#接口简介
下面提供了PaddleClas的C# API简介
- 如果用户想要更换部署后端或进行其他定制化操作, 请查看[C# Runtime API](https://github.com/PaddlePaddle/FastDeploy/blob/develop/csharp/fastdeploy/runtime_option.cs).
- 更多 PaddleClas C# API 请查看 [C# PaddleClas API](https://github.com/PaddlePaddle/FastDeploy/blob/develop/csharp/fastdeploy/vision/classification/ppcls/model.cs)
### 模型
```c#
fastdeploy.vision.classification.PaddleClasModel(
string model_file,
string params_file,
string config_file,
fastdeploy.RuntimeOption runtime_option = null,
fastdeploy.ModelFormat model_format = ModelFormat.PADDLE)
```
> Loads and initializes a PaddleClasModel.
> **Parameters**
>> * **model_file**(str): Path to the model file
>> * **params_file**(str): Path to the parameters file
>> * **config_file**(str): Path to the configuration file, i.e. the deployment yaml exported by PaddleClas
>> * **runtime_option**(RuntimeOption): Backend inference configuration; null by default, which uses the default configuration
>> * **model_format**(ModelFormat): Model format; PADDLE by default
### Predict
```c#
fastdeploy.ClassifyResult Predict(OpenCvSharp.Mat im)
```
> Prediction interface: takes an image as input and returns the classification result directly.
>
> **Parameters**
>
>> * **im**(Mat): Input image; note that it must be in HWC, BGR format
>>
> **Returns**
>
>> * **result**: Classification result, including label_id and the corresponding confidence; see [Vision model prediction results](../../../../../docs/api/vision_results/) for the description of ClassifyResult
## 6. Other Documents
- [Overview of deploying PaddleClas models with FastDeploy](../../)
- [PaddleClas Python deployment](../python)
- [PaddleClas C++ deployment](../cpp)
- [PaddleClas C deployment](../c)
# PaddleClas CPU-GPU Python Deployment Example
This directory provides `infer.py` to quickly deploy PaddleClas on CPU/GPU.
## 1. Description
With FastDeploy, PaddleClas image classification models can be quickly deployed on NVIDIA GPU, X86 CPU, Phytium CPU, ARM CPU, and Intel GPU (discrete/integrated graphics) hardware.
## 2. Preparing the Deployment Environment
Before deploying, confirm your hardware and software environment and download the prebuilt deployment library; see the [FastDeploy installation guide](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#FastDeploy预编译库安装) to install the prebuilt FastDeploy library.
## 3. Preparing the Deployment Model
Before deploying, prepare the inference model you want to run. You can download the required model from the [list of PaddleClas models supported by FastDeploy](../README.md).
## 4. Running the Deployment Example
```bash
# Install the FastDeploy python package (see `Preparing the Deployment Environment` for details)
pip install fastdeploy-gpu-python -f https://www.paddlepaddle.org.cn/whl/fastdeploy.html
conda config --add channels conda-forge && conda install cudatoolkit=11.2 cudnn=8.2
# Download the deployment example code
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd PaddleClas/deploy/fastdeploy/cpu-gpu/python
# Download the ResNet50_vd model files and a test image
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Inference on CPU with Paddle Inference
python infer.py --model ResNet50_vd_infer --image ILSVRC2012_val_00000010.jpeg --device cpu --backend paddle --topk 1
# Inference on CPU with OpenVINO
python infer.py --model ResNet50_vd_infer --image ILSVRC2012_val_00000010.jpeg --device cpu --backend openvino --topk 1
# Inference on CPU with ONNX Runtime
python infer.py --model ResNet50_vd_infer --image ILSVRC2012_val_00000010.jpeg --device cpu --backend ort --topk 1
# Inference on CPU with Paddle Lite
python infer.py --model ResNet50_vd_infer --image ILSVRC2012_val_00000010.jpeg --device cpu --backend pplite --topk 1
# Inference on GPU with Paddle Inference
python infer.py --model ResNet50_vd_infer --image ILSVRC2012_val_00000010.jpeg --device gpu --backend paddle --topk 1
# Inference on GPU with Paddle TensorRT
python infer.py --model ResNet50_vd_infer --image ILSVRC2012_val_00000010.jpeg --device gpu --backend pptrt --topk 1
# Inference on GPU with ONNX Runtime
python infer.py --model ResNet50_vd_infer --image ILSVRC2012_val_00000010.jpeg --device gpu --backend ort --topk 1
# Inference on GPU with NVIDIA TensorRT
python infer.py --model ResNet50_vd_infer --image ILSVRC2012_val_00000010.jpeg --device gpu --backend trt --topk 1
```
After the run completes, the output looks like this:
```bash
ClassifyResult(
label_ids: 153,
scores: 0.686229,
)
```
## 5. Deployment Example Options
|Option|Description|Default|
|---|---|---|
|--model|Path to the model directory|None|
|--image|Path to the test image|None|
|--device|Hardware to run on, one of `[cpu, gpu]`; with cpu, the example runs on x86 CPUs, ARM CPUs, etc.|cpu|
|--device_id|Device id when using a GPU|0|
|--backend|Inference backend, one of `[paddle, pptrt, pplite, ort, openvino, trt]`|openvino|
|--topk|Return the top-k predictions; supported values are `1` and `5`|1|
For how to use other inference backends and other hardware with FastDeploy, see: [How to switch the model inference backend](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/how_to_change_backend.md)
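Under the hood, these switches just configure a `RuntimeOption`. Below is a rough sketch of what `infer.py` does internally; the method names follow the FastDeploy Python API as commonly documented, so verify them against your installed version:
```python
import cv2
import fastdeploy as fd

option = fd.RuntimeOption()
option.use_gpu(0)         # --device gpu --device_id 0; use option.use_cpu() for --device cpu
option.use_trt_backend()  # --backend trt; similarly use_ort_backend(), use_openvino_backend(),
                          # use_paddle_infer_backend(), ... for the other backends

model = fd.vision.classification.PaddleClasModel(
    "ResNet50_vd_infer/inference.pdmodel",
    "ResNet50_vd_infer/inference.pdiparams",
    "ResNet50_vd_infer/inference_cls.yaml",
    runtime_option=option)

im = cv2.imread("ILSVRC2012_val_00000010.jpeg")
print(model.predict(im))  # prints a ClassifyResult like the one shown above
```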
## 6. More Guides
- [PaddleClas Python API reference](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/python/html/image_classification.html)
- [PaddleClas C++ deployment](../cpp)
- [PaddleClas C deployment](../c)
- [PaddleClas C# deployment](../csharp)
## 7. FAQ
- PaddleClas can run on several backends supported by FastDeploy, as shown in the table below. For how to switch backends, see [How to switch the model inference backend](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/how_to_change_backend.md)
|Hardware|Supported backends|
|:---:|:---:|
|X86 CPU| Paddle Inference, ONNX Runtime, OpenVINO |
|ARM CPU| Paddle Lite |
|Phytium CPU| ONNX Runtime |
|NVIDIA GPU| Paddle Inference, ONNX Runtime, TensorRT |
- [Using Intel GPUs (discrete/integrated graphics)](https://github.com/PaddlePaddle/FastDeploy/blob/develop/tutorials/intel_gpu/README.md)
- [Building the CPU deployment library](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/cpu.md)
- [Building the GPU deployment library](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/gpu.md)
- [Building the Jetson deployment library](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/jetson.md)
# Deploying PaddleClas Models on Graphcore IPU with FastDeploy
## 1. Description
PaddleClas image classification models can be deployed on Graphcore IPU through FastDeploy.
## 2. Model Versions
- [PaddleClas Release/2.4](https://github.com/PaddlePaddle/PaddleClas/tree/release/2.4)
FastDeploy currently supports deploying the following models:
- [PP-LCNet series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-LCNet.md)
- [PP-LCNetV2 series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-LCNetV2.md)
- [EfficientNet series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/EfficientNet_and_ResNeXt101_wsl.md)
- [GhostNet series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Mobile.md)
- [MobileNet series (v1, v2, v3)](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Mobile.md)
- [ShuffleNet series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Mobile.md)
- [SqueezeNet series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Others.md)
- [Inception series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Inception.md)
- [PP-HGNet series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-HGNet.md)
- [ResNet series (including the vd variants)](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/ResNet_and_vd.md)
### 2.1 Preparing a PaddleClas Deployment Model
To export a PaddleClas model, see the PaddleClas documentation on [model export](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/inference_deployment/export_model.md#2-%E5%88%86%E7%B1%BB%E6%A8%A1%E5%9E%8B%E5%AF%BC%E5%87%BA)
Note: an exported PaddleClas model contains only the two files `inference.pdmodel` and `inference.pdiparams`. For deployment you also need the generic [inference_cls.yaml](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/deploy/configs/inference_cls.yaml) file provided by PaddleClas; FastDeploy reads the preprocessing information needed at inference time from this yaml. You can download and use the file directly, but adjust its parameters to your needs, following the `infer` part of the corresponding PaddleClas training [config](https://github.com/PaddlePaddle/PaddleClas/tree/release/2.4/ppcls/configs/ImageNet).
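To make the role of these three files concrete, here is a minimal sketch of how they are typically passed to the FastDeploy Python API (the directory name assumes the downloads listed below):
```python
import fastdeploy as fd

model_dir = "ResNet50_vd_infer"
model = fd.vision.classification.PaddleClasModel(
    model_dir + "/inference.pdmodel",    # network structure
    model_dir + "/inference.pdiparams",  # weights
    model_dir + "/inference_cls.yaml")   # preprocessing configuration read by FastDeploy
```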
### 2.2 Download Pretrained Models
For convenience, some models exported by PaddleClas (each bundled with the inference_cls.yaml file) are provided below for direct download.
| Model | Parameter file size | Input shape | Top1 | Top5 |
|:---------------------------------------------------------------- |:----- |:----- | :----- | :----- |
| [PPLCNet_x1_0](https://bj.bcebos.com/paddlehub/fastdeploy/PPLCNet_x1_0_infer.tgz) | 12MB | 224x224 |71.32% | 90.03% |
| [PPLCNetV2_base](https://bj.bcebos.com/paddlehub/fastdeploy/PPLCNetV2_base_infer.tgz) | 26MB | 224x224 |77.04% | 93.27% |
| [EfficientNetB7](https://bj.bcebos.com/paddlehub/fastdeploy/EfficientNetB7_infer.tgz) | 255MB | 600x600 | 84.3% | 96.9% |
| [EfficientNetB0_small](https://bj.bcebos.com/paddlehub/fastdeploy/EfficientNetB0_small_infer.tgz)| 18MB | 224x224 | 75.8% | 75.8% |
| [GhostNet_x1_3_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/GhostNet_x1_3_ssld_infer.tgz) | 29MB | 224x224 | 75.7% | 92.5% |
| [GhostNet_x0_5](https://bj.bcebos.com/paddlehub/fastdeploy/GhostNet_x0_5_infer.tgz) | 10MB | 224x224 | 66.8% | 86.9% |
| [MobileNetV1_x0_25](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV1_x0_25_infer.tgz) | 1.9MB | 224x224 | 51.4% | 75.5% |
| [MobileNetV1_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV1_ssld_infer.tgz) | 17MB | 224x224 | 77.9% | 93.9% |
| [MobileNetV2_x0_25](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV2_x0_25_infer.tgz) | 5.9MB | 224x224 | 53.2% | 76.5% |
| [MobileNetV2_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV2_ssld_infer.tgz) | 14MB | 224x224 | 76.74% | 93.39% |
| [MobileNetV3_small_x0_35_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV3_small_x0_35_ssld_infer.tgz) | 6.4MB | 224x224 | 55.55% | 77.71% |
| [MobileNetV3_large_x1_0_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV3_large_x1_0_ssld_infer.tgz) | 22MB | 224x224 | 78.96% | 94.48% |
| [ShuffleNetV2_x0_25](https://bj.bcebos.com/paddlehub/fastdeploy/ShuffleNetV2_x0_25_infer.tgz) | 2.4MB | 224x224 | 49.9% | 73.79% |
| [ShuffleNetV2_x2_0](https://bj.bcebos.com/paddlehub/fastdeploy/ShuffleNetV2_x2_0_infer.tgz) | 29MB | 224x224 | 73.15% | 91.2% |
| [SqueezeNet1_1](https://bj.bcebos.com/paddlehub/fastdeploy/SqueezeNet1_1_infer.tgz) | 4.8MB | 224x224 | 60.1% | 81.9% |
| [InceptionV3](https://bj.bcebos.com/paddlehub/fastdeploy/InceptionV3_infer.tgz) | 92MB | 299x299 | 79.14% | 94.59% |
| [PPHGNet_tiny_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/PPHGNet_tiny_ssld_infer.tgz) | 57MB | 224x224 | 81.95% | 96.12% |
| [PPHGNet_base_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/PPHGNet_base_ssld_infer.tgz) | 274MB | 224x224 | 85.0% | 97.35% |
| [ResNet50_vd](https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz) | 98MB | 224x224 | 79.12% | 94.44% |
## 3. Detailed Deployment Examples
- [Python deployment](python)
- [C++ deployment](cpp)
# PaddleClas Graphcore IPU C++ Deployment Example
This directory provides `infer.cc` for deploying PaddleClas models on Graphcore IPU.
## 1. Preparing the Deployment Environment
- Before deploying, build the Graphcore IPU prediction library yourself; see [Building the Graphcore IPU deployment environment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装)
## 2. Preparing the Deployment Model
Before deploying, prepare the inference model you want to run. You can download the required model from the [list of PaddleClas models supported by FastDeploy](../README.md).
## 3. Running the Deployment Example
Taking inference on Linux as an example, the following commands in this directory complete the build and test. This model requires FastDeploy 1.0.0 or newer (x.x.x>=1.0.0).
```bash
# Download the deployment example code
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd PaddleClas/deploy/fastdeploy/graphcore/cpp
mkdir build
cd build
# Build infer_demo against the self-built FastDeploy library
cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-graphcore
make -j
# Download the ResNet50_vd model files and a test image
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Deploy on the IPU
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg
```
After the run completes, the output looks like this:
```bash
ClassifyResult(
label_ids: 153,
scores: 0.686229,
)
```
## 4. More Guides
- [PaddleClas C++ API reference](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/cpp/html/namespacefastdeploy_1_1vision_1_1classification.html)
- [Overview of deploying PaddleClas models with FastDeploy](../../)
- [PaddleClas Python deployment](../python)
# PaddleClas Graphcore IPU Python Deployment Example
This directory provides `infer.py` to quickly deploy PaddleClas on Graphcore IPU.
## 1. Preparing the Deployment Environment
Before deploying, build and install the Graphcore IPU FastDeploy Python wheel yourself; see [Building the Graphcore IPU deployment environment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装)
## 2. Preparing the Deployment Model
Before deploying, prepare the inference model you want to run. You can download the required model from the [list of PaddleClas models supported by FastDeploy](../README.md).
## 3. Running the Deployment Example
```bash
# Install the FastDeploy Graphcore IPU python package (see `Preparing the Deployment Environment` for details)
# Download the deployment example code
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd PaddleClas/deploy/fastdeploy/graphcore/python
# Download the ResNet50_vd model files and a test image
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Inference on the Graphcore AI processor
python infer.py --model ResNet50_vd_infer --image ILSVRC2012_val_00000010.jpeg --topk 1
```
After the run completes, the output looks like this:
```bash
ClassifyResult(
label_ids: 153,
scores: 0.686229,
)
```
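For reference, a minimal sketch of what the IPU path in `infer.py` might look like, assuming the self-built wheel exposes a `RuntimeOption.use_ipu()` helper (the exact option name is an assumption; check your build):
```python
import cv2
import fastdeploy as fd

option = fd.RuntimeOption()
option.use_ipu()  # assumed helper that routes inference to the Graphcore IPU

model = fd.vision.classification.PaddleClasModel(
    "ResNet50_vd_infer/inference.pdmodel",
    "ResNet50_vd_infer/inference.pdiparams",
    "ResNet50_vd_infer/inference_cls.yaml",
    runtime_option=option)
print(model.predict(cv2.imread("ILSVRC2012_val_00000010.jpeg")))
```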
## 4. More Guides
- [PaddleClas Python API reference](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/python/html/image_classification.html)
- [Overview of deploying PaddleClas models with FastDeploy](../../)
- [PaddleClas C++ deployment](../cpp)
# Deploying PaddleClas Models on Kunlunxin XPU with FastDeploy
## 1. Description
PaddleClas image classification models can be deployed on Kunlunxin chips through FastDeploy.
## 2. Model Versions
- [PaddleClas Release/2.4](https://github.com/PaddlePaddle/PaddleClas/tree/release/2.4)
FastDeploy currently supports deploying the following models:
- [PP-LCNet series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-LCNet.md)
- [PP-LCNetV2 series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-LCNetV2.md)
- [EfficientNet series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/EfficientNet_and_ResNeXt101_wsl.md)
- [GhostNet series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Mobile.md)
- [MobileNet series (v1, v2, v3)](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Mobile.md)
- [ShuffleNet series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Mobile.md)
- [SqueezeNet series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Others.md)
- [Inception series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/Inception.md)
- [PP-HGNet series](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-HGNet.md)
- [ResNet series (including the vd variants)](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/ResNet_and_vd.md)
### 2.1 Preparing a PaddleClas Deployment Model
To export a PaddleClas model, see the PaddleClas documentation on [model export](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/inference_deployment/export_model.md#2-%E5%88%86%E7%B1%BB%E6%A8%A1%E5%9E%8B%E5%AF%BC%E5%87%BA)
Note: an exported PaddleClas model contains only the two files `inference.pdmodel` and `inference.pdiparams`. For deployment you also need the generic [inference_cls.yaml](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/deploy/configs/inference_cls.yaml) file provided by PaddleClas; FastDeploy reads the preprocessing information needed at inference time from this yaml. You can download and use the file directly, but adjust its parameters to your needs, following the `infer` part of the corresponding PaddleClas training [config](https://github.com/PaddlePaddle/PaddleClas/tree/release/2.4/ppcls/configs/ImageNet).
### 2.2 Download Pretrained Models
For convenience, some models exported by PaddleClas (each bundled with the inference_cls.yaml file) are provided below for direct download.
| Model | Parameter file size | Input shape | Top1 | Top5 |
|:---------------------------------------------------------------- |:----- |:----- | :----- | :----- |
| [PPLCNet_x1_0](https://bj.bcebos.com/paddlehub/fastdeploy/PPLCNet_x1_0_infer.tgz) | 12MB | 224x224 |71.32% | 90.03% |
| [PPLCNetV2_base](https://bj.bcebos.com/paddlehub/fastdeploy/PPLCNetV2_base_infer.tgz) | 26MB | 224x224 |77.04% | 93.27% |
| [EfficientNetB7](https://bj.bcebos.com/paddlehub/fastdeploy/EfficientNetB7_infer.tgz) | 255MB | 600x600 | 84.3% | 96.9% |
| [EfficientNetB0_small](https://bj.bcebos.com/paddlehub/fastdeploy/EfficientNetB0_small_infer.tgz)| 18MB | 224x224 | 75.8% | 75.8% |
| [GhostNet_x1_3_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/GhostNet_x1_3_ssld_infer.tgz) | 29MB | 224x224 | 75.7% | 92.5% |
| [GhostNet_x0_5](https://bj.bcebos.com/paddlehub/fastdeploy/GhostNet_x0_5_infer.tgz) | 10MB | 224x224 | 66.8% | 86.9% |
| [MobileNetV1_x0_25](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV1_x0_25_infer.tgz) | 1.9MB | 224x224 | 51.4% | 75.5% |
| [MobileNetV1_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV1_ssld_infer.tgz) | 17MB | 224x224 | 77.9% | 93.9% |
| [MobileNetV2_x0_25](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV2_x0_25_infer.tgz) | 5.9MB | 224x224 | 53.2% | 76.5% |
| [MobileNetV2_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV2_ssld_infer.tgz) | 14MB | 224x224 | 76.74% | 93.39% |
| [MobileNetV3_small_x0_35_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV3_small_x0_35_ssld_infer.tgz) | 6.4MB | 224x224 | 55.55% | 77.71% |
| [MobileNetV3_large_x1_0_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/MobileNetV3_large_x1_0_ssld_infer.tgz) | 22MB | 224x224 | 78.96% | 94.48% |
| [ShuffleNetV2_x0_25](https://bj.bcebos.com/paddlehub/fastdeploy/ShuffleNetV2_x0_25_infer.tgz) | 2.4MB | 224x224 | 49.9% | 73.79% |
| [ShuffleNetV2_x2_0](https://bj.bcebos.com/paddlehub/fastdeploy/ShuffleNetV2_x2_0_infer.tgz) | 29MB | 224x224 | 73.15% | 91.2% |
| [SqueezeNet1_1](https://bj.bcebos.com/paddlehub/fastdeploy/SqueezeNet1_1_infer.tgz) | 4.8MB | 224x224 | 60.1% | 81.9% |
| [InceptionV3](https://bj.bcebos.com/paddlehub/fastdeploy/InceptionV3_infer.tgz) | 92MB | 299x299 | 79.14% | 94.59% |
| [PPHGNet_tiny_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/PPHGNet_tiny_ssld_infer.tgz) | 57MB | 224x224 | 81.95% | 96.12% |
| [PPHGNet_base_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/PPHGNet_base_ssld_infer.tgz) | 274MB | 224x224 | 85.0% | 97.35% |
| [ResNet50_vd](https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz) | 98MB | 224x224 | 79.12% | 94.44% |
## 3. Detailed Deployment Examples
- [Python deployment](python)
- [C++ deployment](cpp)
# PaddleClas Kunlunxin XPU C++ Deployment Example
This directory provides `infer.cc` for deploying PaddleClas models on Kunlunxin XPU.
## 1. Preparing the Deployment Environment
Before deploying, build the Kunlunxin XPU prediction library yourself; see [Building the Kunlunxin XPU deployment environment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装)
## 2. Preparing the Deployment Model
Before deploying, prepare the inference model you want to run. You can download the required model from the [list of PaddleClas models supported by FastDeploy](../README.md).
## 3. Running the Deployment Example
Taking inference on Linux as an example, the following commands in this directory complete the build and test. This model requires FastDeploy 1.0.0 or newer (x.x.x>=1.0.0).
```bash
# Download the deployment example code
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd PaddleClas/deploy/fastdeploy/kunlun/cpp
mkdir build
cd build
# Build infer_demo against the self-built FastDeploy library
cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-kunlun
make -j
# Download the ResNet50_vd model files and a test image
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Deploy on the Kunlunxin XPU
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg
```
After the run completes, the output looks like this:
```bash
ClassifyResult(
label_ids: 153,
scores: 0.686229,
)
```
## 4. More Guides
- [PaddleClas C++ API reference](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/cpp/html/namespacefastdeploy_1_1vision_1_1classification.html)
- [Overview of deploying PaddleClas models with FastDeploy](../../)
- [PaddleClas Python deployment](../python)
# PaddleClas Kunlunxin XPU Python Deployment Example
This directory provides `infer.py` to quickly deploy PaddleClas on Kunlunxin XPU.
## 1. Preparing the Deployment Environment
Before deploying, build and install the Kunlunxin XPU FastDeploy Python wheel yourself; see [Building the Kunlunxin XPU deployment environment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装)
## 2. Preparing the Deployment Model
Before deploying, prepare the inference model you want to run. You can download the required model from the [list of PaddleClas models supported by FastDeploy](../README.md).
## 3. Running the Deployment Example
```bash
# Install the FastDeploy prediction library python package (see `Preparing the Deployment Environment` for details)
# Download the deployment example code
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd PaddleClas/deploy/fastdeploy/kunlun/python
# Download the ResNet50_vd model files and a test image
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Inference on the Kunlunxin XPU AI processor
python infer.py --model ResNet50_vd_infer --image ILSVRC2012_val_00000010.jpeg --topk 1
```
After the run completes, the output looks like this:
```bash
ClassifyResult(
label_ids: 153,
scores: 0.686229,
)
```
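For reference, a minimal sketch of what the XPU path in `infer.py` might look like, assuming the self-built wheel exposes a `RuntimeOption.use_kunlunxin()` helper (the exact option name is an assumption; check your build):
```python
import cv2
import fastdeploy as fd

option = fd.RuntimeOption()
option.use_kunlunxin()  # assumed helper that routes inference to the Kunlunxin XPU

model = fd.vision.classification.PaddleClasModel(
    "ResNet50_vd_infer/inference.pdmodel",
    "ResNet50_vd_infer/inference.pdiparams",
    "ResNet50_vd_infer/inference_cls.yaml",
    runtime_option=option)
print(model.predict(cv2.imread("ILSVRC2012_val_00000010.jpeg")))
```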
## 4. More Guides
- [PaddleClas Python API reference](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/python/html/image_classification.html)
- [Overview of deploying PaddleClas models with FastDeploy](../../)
- [PaddleClas C++ deployment](../cpp)
# PaddleClas Quantized Model Deployment with FastDeploy
FastDeploy supports deploying quantized models and provides a one-click automatic model compression tool.
You can quantize a model yourself with this tool and deploy the result, or directly download and deploy a quantized model provided by FastDeploy.
## 1. FastDeploy One-Click Automatic Model Compression Tool
FastDeploy provides a one-click automatic model compression tool that quantizes a model given just a configuration file.
For a detailed tutorial, see the [one-click automatic model compression tool](https://github.com/PaddlePaddle/FastDeploy/tree/develop/tools/common_tools/auto_compression). **Note**: inference with a quantized classification model still needs the inference_cls.yaml file from the FP32 model directory; a model directory you quantize yourself does not contain this yaml, so copy it over from the FP32 model directory.
## 2. Download Quantized PaddleClas Models
You can also directly download and deploy the quantized models in the table below (click the model name to download).
| Model | Quantization method |
|:--- | :----- |
| [ResNet50_vd](https://bj.bcebos.com/paddlehub/fastdeploy/resnet50_vd_ptq.tar) | Post-training quantization |
| [MobileNetV1_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/mobilenetv1_ssld_ptq.tar) | Post-training quantization |
For a benchmark comparison of the quantized models, see [Quantized model benchmarks](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/quantize.md)
## 3. Deploying Quantized Models
### 3.1 Deployment Code
Deploying a quantized model with FastDeploy is exactly the same as deploying the FP32 model; just point the input model at the quantized one.
If a given hardware requires special handling for quantized models, the corresponding document will say so explicitly.
This directory therefore contains no code files; to deploy a quantized model, follow the guide for the corresponding hardware linked in the next subsection.
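As a sketch of this point: loading a quantized model looks exactly like loading the FP32 one, only the directory changes (the file names below assume the quantized archive keeps the standard names, plus the copied-in yaml):
```python
import cv2
import fastdeploy as fd

# Quantized model directory, with inference_cls.yaml copied in from the FP32 model
model_dir = "resnet50_vd_ptq"
model = fd.vision.classification.PaddleClasModel(
    model_dir + "/inference.pdmodel",
    model_dir + "/inference.pdiparams",
    model_dir + "/inference_cls.yaml")
print(model.predict(cv2.imread("ILSVRC2012_val_00000010.jpeg")))
```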
### 3.2 Hardware That Supports Quantized Model Deployment
|Hardware|Supported|Guide|Python|C++|
|:---:|:---:|:---:|:---:|:---:|
|X86 CPU|✅|[link](cpu-gpu)|✅|✅|
|NVIDIA GPU|✅|[link](cpu-gpu)|✅|✅|
|Phytium CPU|✅|[link](cpu-gpu)|✅|✅|
|ARM CPU|✅|[link](cpu-gpu)|✅|✅|
|Intel GPU (integrated)|✅|[link](cpu-gpu)|✅|✅|
|Intel GPU (discrete)|✅|[link](cpu-gpu)|✅|✅|
|Kunlunxin|✅|[link](kunlun)|✅|✅|
|Ascend|✅|[link](ascend)|✅|✅|
|Rockchip|✅|[link](rockchip)|✅|✅|
|Amlogic|✅|[link](amlogic)|--|✅|
|Sophgo|✅|[link](sophgo)|✅|✅|
# Deploying PaddleClas Models on RKNPU2 with FastDeploy
## 1. Description
PaddleClas models can be deployed on RKNPU2 through FastDeploy.
## 2. Converting the Model
Using ResNet50_vd as an example, this section shows how to convert a classification model to an RKNN model.
So far FastDeploy has only tested the ResNet50_vd PaddleClas model; you are welcome to try deploying other PaddleClas models.
### 2.1 Export the ONNX Model
```bash
# Install paddle2onnx
pip install paddle2onnx
# Download the ResNet50_vd model files and a test image
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
# Convert the static graph to an ONNX model; note that save_file must match the archive name
paddle2onnx --model_dir ResNet50_vd_infer \
            --model_filename inference.pdmodel \
            --params_filename inference.pdiparams \
            --save_file ResNet50_vd_infer/ResNet50_vd_infer.onnx \
            --enable_dev_version True \
            --opset_version 10 \
            --enable_onnx_checker True
# Fix the input shape; 'inputs' must match the input name shown by netron.app, which may also be 'image' or 'x'
python -m paddle2onnx.optimize --input_model ResNet50_vd_infer/ResNet50_vd_infer.onnx \
--output_model ResNet50_vd_infer/ResNet50_vd_infer.onnx \
--input_shape_dict "{'inputs':[1,3,224,224]}"
```
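If you prefer not to open netron.app, a short script like the following prints the graph's input names (standard `onnx` Python API):
```python
import onnx

model = onnx.load("ResNet50_vd_infer/ResNet50_vd_infer.onnx")
# Some exporters also list weight initializers under graph.input; filter them out
init_names = {t.name for t in model.graph.initializer}
print([i.name for i in model.graph.input if i.name not in init_names])
```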
### 2.2 Write the Model Export Configuration File
Taking the RK3588 RKNN model as an example, edit ./rknpu2_tools/config/ResNet50_vd_infer_rknn.yaml to convert the ONNX model to an RKNN model.
If you want to run the normalize step on the NPU, configure the normalize parameters according to your model, for example:
```yaml
model_path: ./ResNet50_vd_infer/ResNet50_vd_infer.onnx
output_folder: ./ResNet50_vd_infer
mean:
-
- 123.675
- 116.28
- 103.53
std:
-
- 58.395
- 57.12
- 57.375
outputs_nodes:
do_quantization: False
dataset: "./ResNet50_vd_infer/dataset.txt"
```
To run **normalize on the CPU**, use a yaml like the following:
```yaml
model_path: ./ResNet50_vd_infer/ResNet50_vd_infer.onnx
output_folder: ./ResNet50_vd_infer
mean:
-
- 0
- 0
- 0
std:
-
- 1
- 1
- 1
outputs_nodes:
do_quantization: False
dataset: "./ResNet50_vd_infer/dataset.txt"
```
Here we choose to run normalize on the NPU.
### 2.3 Convert the ONNX Model to an RKNN Model
```shell
python ./rknpu2_tools/export.py \
--config_path ./rknpu2_tools/config/ResNet50_vd_infer_rknn.yaml \
--target_platform rk3588
```
## 3. Other Links
- [C++ deployment](./cpp)
- [Python deployment](./python)
- [Vision model prediction results](../../../../../docs/api/vision_results/)
# PaddleClas RKNPU2 C++ Deployment Example
This directory provides `infer.cc` for deploying PaddleClas models on RKNPU2.
## 1. Preparing the Deployment Environment
Before deploying, confirm the following two steps:
- 1. Build the RKNPU2 prediction library yourself; see [Building the RKNPU2 deployment environment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装)
- 2. Also consult the [FastDeploy RKNPU2 resource guide](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/rknpu2.md)
## 2. Preparing the Deployment Model
Before deploying, prepare the inference model you want to run; see [RKNPU2 model conversion](../README.md) to prepare the model.
## 3. Deployment Example
### 3.1 Create the Basic Directory Layout
This example consists of the following parts:
```text
.
├── CMakeLists.txt
├── build              # build directory
├── images             # test images
├── infer.cc
├── ppclas_model_dir   # model files
└── thirdpartys        # third-party SDK
```
First create the directory structure:
```bash
mkdir build
mkdir images
mkdir ppclas_model_dir
mkdir thirdpartys
```
### 3.2 Build
#### 3.2.1 Build the SDK and copy it into the thirdpartys directory
Build the SDK as described in [Building the 2nd-gen Rockchip NPU deployment library](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/rknpu2.md). After the build, a fastdeploy-x.x.x directory is generated under build; move it into the thirdpartys directory.
#### 3.2.2 Copy the model and configuration files into the model directory
The Paddle dynamic graph model -> Paddle static graph model -> ONNX model pipeline produces an ONNX file and the corresponding yaml configuration file; put the configuration file into the model directory.
The RKNN model file produced by the conversion also needs to be copied into the model directory; for the conversion steps, see ([ResNet50_vd RKNN model](../README.md)).
#### 3.2.3 Put a test image into the images directory
```bash
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
```
#### 3.2.4 Build the example
```bash
cd build
cmake ..
make -j8
make install
```
#### 3.2.5 Run the example
```bash
cd ./build/install
./rknpu_test ./ppclas_model_dir ./images/ILSVRC2012_val_00000010.jpeg
```
#### 3.2.6 Output
ClassifyResult(
label_ids: 153,
scores: 0.684570,
)
#### 3.2.7 Notes
RKNPU expects model input in NHWC format, and the image normalization step is embedded into the model during RKNN conversion. When deploying with FastDeploy, therefore, call `DisablePermute` (C++) or `disable_permute` (Python) first to disable the layout conversion in the preprocessing stage.
## 4. Other Documents
- [ResNet50_vd Python deployment](../python)
- [ResNet50_vd RKNN model conversion](../README.md)
# PaddleClas Python Deployment Example
## 1. Preparing the Deployment Environment
Before deploying, confirm the following two steps:
- 1. Build the RKNPU2 Python prediction library yourself; see [Building the RKNPU2 deployment environment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装)
- 2. Also consult the [FastDeploy RKNPU2 resource guide](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/rknpu2.md)
## 2. Preparing the Deployment Model
Before deploying, prepare the inference model you want to run; see [RKNPU2 model conversion](../README.md) to prepare the model.
## 3. Deployment Example
This directory provides `infer.py` to quickly deploy ResNet50_vd on RKNPU.
```bash
# Install the FastDeploy RKNPU2 python package (see `Preparing the Deployment Environment` for details)
# Download the deployment example code
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd PaddleClas/deploy/fastdeploy/rockchip/rknpu2/python
# Download a test image
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Run inference
python3 infer.py --model_file ./ResNet50_vd_infer/ResNet50_vd_infer_rk3588.rknn --config_file ResNet50_vd_infer/inference_cls.yaml --image ILSVRC2012_val_00000010.jpeg
# After the run completes, the output looks like this:
ClassifyResult(
label_ids: 153,
scores: 0.684570,
)
```
## 4. Notes
RKNPU expects model input in NHWC format, and the image normalization step is embedded into the model during RKNN conversion. When deploying with FastDeploy, therefore, call `DisablePermute` (C++) or `disable_permute` (Python) first to disable the layout conversion in the preprocessing stage.
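A minimal sketch of how this fits into `infer.py` (the `use_rknpu2()` option and the `preprocessor.disable_permute()` accessor are assumptions based on the note above; check them against your FastDeploy build):
```python
import cv2
import fastdeploy as fd

option = fd.RuntimeOption()
option.use_rknpu2()  # assumed helper for the RKNPU2 backend

model = fd.vision.classification.PaddleClasModel(
    "./ResNet50_vd_infer/ResNet50_vd_infer_rk3588.rknn",
    "",  # the RKNN model has no separate params file
    "./ResNet50_vd_infer/inference_cls.yaml",
    runtime_option=option,
    model_format=fd.ModelFormat.RKNN)
# RKNPU expects NHWC input, so skip the HWC->CHW layout conversion in preprocessing
model.preprocessor.disable_permute()

print(model.predict(cv2.imread("ILSVRC2012_val_00000010.jpeg")))
```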
## 5. Other Documents
- [ResNet50_vd C++ deployment](../cpp)
- [ResNet50_vd RKNN model conversion](../README.md)
# Deploying PaddleClas Image Classification Models on Rockchip NPU with FastDeploy
## 1. Description
This example uses the RV1126 to show how to deploy quantized PaddleClas models with FastDeploy. The following chips are supported:
- Rockchip RV1109
- Rockchip RV1126
- Rockchip RK1808
## 2. Pre-exported Models
FastDeploy provides pre-quantized models ready for deployment. For more models, use the [FastDeploy one-click automatic model compression tool](https://github.com/PaddlePaddle/FastDeploy/tree/develop/tools/common_tools/auto_compression) to quantize models yourself, then deploy them.
| Model | Quantization method |
|:---------------| :----- |
| [ResNet50_vd](https://bj.bcebos.com/paddlehub/fastdeploy/resnet50_vd_ptq.tar) | Post-training quantization |
| [MobileNetV1_ssld](https://bj.bcebos.com/paddlehub/fastdeploy/mobilenetv1_ssld_ptq.tar) | Post-training quantization |
## 3. Detailed Deployment Examples
Only C++ deployment is supported on the RV1126.
- [C++ deployment](cpp)
# PaddleClas C++ Deployment Example for the RV1126 Board
The `infer.cc` in this directory helps you quickly deploy quantized PaddleClas models on the RV1126 with inference acceleration.
## 1. Preparing the Deployment Environment
Before deploying, confirm the following step:
- 1. Build the RV1126 prediction library yourself; see [Building the RV1126 deployment environment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装)
## 2. Preparing the Quantized Model
1. Note that a model deployed on the RV1126 must be quantized. You can directly use the quantized models provided by FastDeploy.
2. You can also quantize a model yourself with the FastDeploy [one-click automatic model compression tool](https://github.com/PaddlePaddle/FastDeploy/tree/develop/tools/common_tools/auto_compression/) and deploy the result. (Note: inference with a quantized classification model still needs the inference_cls.yaml file from the FP32 model directory; a model directory you quantize yourself does not contain this yaml, so copy it over from the FP32 model directory.)
## 3. Deploying the Quantized ResNet50_vd Classification Model on the RV1126
Follow these steps to deploy the quantized ResNet50_vd model on the RV1126:
1. Cross-compile the FastDeploy library; see [Cross-compiling FastDeploy](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/rv1126.md#基于-paddlelite-的-fastdeploy-交叉编译库编译)
2. Copy the compiled library into the current directory:
```bash
cp -r FastDeploy/build/fastdeploy-timvx/ PaddleClas/deploy/fastdeploy/rockchip/rv1126/cpp/
```
3. Download the model and example image needed for deployment into the current directory:
```bash
cd PaddleClas/deploy/fastdeploy/rockchip/rv1126/cpp/
mkdir models && mkdir images
wget https://bj.bcebos.com/paddlehub/fastdeploy/resnet50_vd_ptq.tar
tar -xvf resnet50_vd_ptq.tar
cp -r resnet50_vd_ptq models
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
cp -r ILSVRC2012_val_00000010.jpeg images
```
4. Build the deployment example with the following commands:
```bash
cd PaddleClas/deploy/fastdeploy/rockchip/rv1126/cpp/
mkdir build && cd build
cmake -DCMAKE_TOOLCHAIN_FILE=${PWD}/../fastdeploy-timvx/toolchain.cmake -DFASTDEPLOY_INSTALL_DIR=${PWD}/../fastdeploy-timvx -DTARGET_ABI=armhf ..
make -j8
make install
# After a successful build, an install directory is generated, containing the demo binary and the libraries needed for deployment
```
5. Deploy the ResNet50 classification model to the Rockchip RV1126 with the adb tool:
```bash
# Enter the install directory
cd PaddleClas/deploy/fastdeploy/rockchip/rv1126/cpp/build/install/
# The command below means: bash run_with_adb.sh <demo to run> <model path> <image path> <device DEVICE_ID>
bash run_with_adb.sh infer_demo resnet50_vd_ptq ILSVRC2012_val_00000010.jpeg $DEVICE_ID
```
After a successful deployment, the output looks like this:
<img width="640" src="https://user-images.githubusercontent.com/30516196/200767389-26519e50-9e4f-4fe1-8d52-260718f73476.png">
# PaddleClas Serving Deployment Example
This PaddleClas serving example is built with FastDeploy Serving. FastDeploy Serving is a service deployment framework built on Triton Inference Server for high-concurrency, high-throughput workloads; it is a complete, production-ready serving framework with excellent performance.
## 1. Preparing the Deployment Environment
Before deploying the service, check the hardware and software requirements of the serving image and the image pull command; see [FastDeploy serving deployment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/serving/README_CN.md)
## 2. Start the Service
```bash
# Download the deployment example code
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd PaddleClas/deploy/fastdeploy/serving
# Download the ResNet50_vd model files and a test image
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Put the configuration file into the preprocess directory
mv ResNet50_vd_infer/inference_cls.yaml models/preprocess/1/inference_cls.yaml
# Put the model into the models/runtime/1 directory and rename it to model.pdmodel and model.pdiparams
mv ResNet50_vd_infer/inference.pdmodel models/runtime/1/model.pdmodel
mv ResNet50_vd_infer/inference.pdiparams models/runtime/1/model.pdiparams
# Pull the fastdeploy image (x.y.z is the image version; replace it with a number per the serving docs)
# GPU image
docker pull registry.baidubce.com/paddlepaddle/fastdeploy:x.y.z-gpu-cuda11.4-trt8.4-21.10
# CPU image
docker pull registry.baidubce.com/paddlepaddle/fastdeploy:x.y.z-cpu-only-21.10
# Run the container. It is named fd_serving, and the current directory is mounted as /serving inside the container
nvidia-docker run -it --net=host --name fd_serving -v `pwd`/:/serving registry.baidubce.com/paddlepaddle/fastdeploy:x.y.z-gpu-cuda11.4-trt8.4-21.10 bash
# Start the service (without setting the CUDA_VISIBLE_DEVICES environment variable, the service may schedule all GPU cards)
CUDA_VISIBLE_DEVICES=0 fastdeployserver --model-repository=/serving/models --backend-config=python,shm-default-byte-size=10485760
```
>> **Notes**:
>> To pull images for other hardware, see the [main serving deployment document](../../../../../serving/README_CN.md)
>> If fastdeployserver fails to start with "Address already in use", start the service with `--grpc-port` to specify another port, and change the request port in the client example accordingly.
>> Run fastdeployserver --help to see the other startup options
After the service starts successfully, you will see output like:
```
......
I0928 04:51:15.784517 206 grpc_server.cc:4117] Started GRPCInferenceService at 0.0.0.0:8001
I0928 04:51:15.785177 206 http_server.cc:2815] Started HTTPService at 0.0.0.0:8000
I0928 04:51:15.826578 206 http_server.cc:167] Started Metrics Service at 0.0.0.0:8002
```
## 3. Client Request
On the physical machine, run the following commands to send a gRPC request and print the result:
```
# Download a test image
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Install client dependencies
python3 -m pip install tritonclient\[all\]
# Send the request
python3 paddlecls_grpc_client.py
```
After a successful request, the classification result is returned as JSON and printed:
```
output_name: CLAS_RESULT
{'label_ids': [153], 'scores': [0.6862289905548096]}
```
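For reference, a minimal gRPC client along the lines of `paddlecls_grpc_client.py` could look like the sketch below (standard `tritonclient` API; the input tensor name `INPUT` and the model name `paddlecls` are assumptions, while the output name `CLAS_RESULT` matches the output shown above):
```python
import cv2
import numpy as np
import tritonclient.grpc as grpcclient

client = grpcclient.InferenceServerClient("localhost:8001")

im = cv2.imread("ILSVRC2012_val_00000010.jpeg")
batch = np.expand_dims(im, 0)  # NHWC uint8 batch of size 1

inputs = [grpcclient.InferInput("INPUT", list(batch.shape), "UINT8")]
inputs[0].set_data_from_numpy(batch)
outputs = [grpcclient.InferRequestedOutput("CLAS_RESULT")]

response = client.infer("paddlecls", inputs=inputs, outputs=outputs)
print(response.as_numpy("CLAS_RESULT"))  # JSON-encoded classification result
```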
## 4. Changing the Configuration
The default configuration runs the TensorRT engine on the GPU. To run on the CPU or with another inference engine, modify the configuration in `models/runtime/config.pbtxt`; see the [configuration document](../../../../../serving/docs/zh_CN/model_configuration.md) for details.
## 5. Visual Deployment with VisualDL
You can [use VisualDL for visual serving deployment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/serving/docs/zh_CN/vdl_management.md); starting the service, changing the configuration, and sending client requests can all be done through VisualDL.
Deploying PaddleClas as a service through the VisualDL UI takes only three steps:
```text
1. Load the model repository: PaddleClas/deploy/fastdeploy/serving/models
2. Download the model resources: click the runtime model, click version 1 to add a pretrained model, and select the image classification model ResNet50_vd to download.
3. Start the service: click the "start service" button and enter the startup options.
```
<p align="center">
<img src="https://user-images.githubusercontent.com/22424850/211708702-828d8ad8-4e85-457f-9c62-12f53fc81853.gif" width="100%"/>
</p>
## 6. FAQ
- [How to write client HTTP/gRPC requests](https://github.com/PaddlePaddle/FastDeploy/blob/develop/serving/docs/zh_CN/client.md)
- [How to build the serving deployment image](https://github.com/PaddlePaddle/FastDeploy/blob/develop/serving/docs/zh_CN/compile.md)
- [How serving works, and dynamic batching](https://github.com/PaddlePaddle/FastDeploy/blob/develop/serving/docs/zh_CN/demo.md)
- [Model repository](https://github.com/PaddlePaddle/FastDeploy/blob/develop/serving/docs/zh_CN/model_repository.md)
# PaddleClas SOPHGO Deployment Example
## 1. Description
PaddleClas models can be deployed on SOPHGO hardware through FastDeploy.
## 2. Supported Models
FastDeploy currently supports deploying the following models: [ResNet series (including the vd variants)](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/ResNet_and_vd.md)
## 3. Preparing and Converting the ResNet Deployment Model
Before deploying on SOPHGO TPU, the Paddle model must be converted to a bmodel:
- Convert the Paddle dynamic graph model to an ONNX model; see [Paddle2ONNX model conversion](https://github.com/PaddlePaddle/Paddle2ONNX/tree/develop/model_zoo/classification)
- For converting the ONNX model to a bmodel, see [TPU-MLIR](https://github.com/sophgo/tpu-mlir)
Using [ResNet50_vd](https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz) as an example, the following shows how to convert a Paddle model to a SOPHGO-TPU model.
### 3.1 Export the ONNX Model
#### 3.1.1 Download and extract the Paddle ResNet50_vd static graph model
```shell
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar xvf ResNet50_vd_infer.tgz
```
#### 3.1.2 Convert the static graph to an ONNX model; note that save_file must match the archive name
```shell
paddle2onnx --model_dir ResNet50_vd_infer \
--model_filename inference.pdmodel \
--params_filename inference.pdiparams \
--save_file ResNet50_vd_infer.onnx \
--enable_dev_version True
```
### 3.2 Export the bmodel
Taking the BM1684x bmodel as an example, download the [TPU-MLIR](https://github.com/sophgo/tpu-mlir) project; for installation details, see the [TPU-MLIR documentation](https://github.com/sophgo/tpu-mlir/blob/master/README.md)
#### 3.2.1 Installation
``` shell
docker pull sophgo/tpuc_dev:latest
# myname1234 is just an example; any other name works
docker run --privileged --name myname1234 -v $PWD:/workspace -it sophgo/tpuc_dev:latest
source ./envsetup.sh
./build.sh
```
#### 3.2.2 Convert the ONNX model to a bmodel
``` shell
mkdir ResNet50_vd_infer && cd ResNet50_vd_infer
# Put test images in this directory, together with the ResNet50_vd_infer.onnx converted in the previous step
cp -rf ${REGRESSION_PATH}/dataset/COCO2017 .
cp -rf ${REGRESSION_PATH}/image .
# Put the onnx model file ResNet50_vd_infer.onnx here
mkdir workspace && cd workspace
# Convert the ONNX model to an mlir model; the names for --output_names can be inspected with NETRON
model_transform.py \
--model_name ResNet50_vd_infer \
--model_def ../ResNet50_vd_infer.onnx \
--input_shapes [[1,3,224,224]] \
--mean 0.0,0.0,0.0 \
--scale 0.0039216,0.0039216,0.0039216 \
--keep_aspect_ratio \
--pixel_format rgb \
--output_names save_infer_model/scale_0.tmp_1 \
--test_input ../image/dog.jpg \
--test_result ResNet50_vd_infer_top_outputs.npz \
--mlir ResNet50_vd_infer.mlir
# Convert the mlir model to a BM1684x F32 bmodel
model_deploy.py \
--mlir ResNet50_vd_infer.mlir \
--quantize F32 \
--chip bm1684x \
--test_input ResNet50_vd_infer_in_f32.npz \
--test_reference ResNet50_vd_infer_top_outputs.npz \
--model ResNet50_vd_infer_1684x_f32.bmodel
```
This finally produces ResNet50_vd_infer_1684x_f32.bmodel, which can run on the BM1684x. To accelerate the model further, convert the ONNX model to an INT8 bmodel; see the [TPU-MLIR documentation](https://github.com/sophgo/tpu-mlir/blob/master/README.md) for the steps.
## 4. Other Links
- [Python deployment](python)
- [C++ deployment](cpp)
# PaddleClas C++ Deployment Example
This directory provides `infer.cc` to quickly deploy the ResNet50_vd model on a SOPHGO BM1684x board with inference acceleration.
## 1. Preparing the Deployment Environment
Before deploying, build the SOPHGO prediction library yourself; see [SOPHGO hardware deployment environment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#算能硬件部署环境)
## 2. Basic Directory Layout
This example consists of the following parts:
```text
.
├── CMakeLists.txt
├── build                   # build directory
├── images                  # test images
├── infer.cc
├── preprocess_config.yaml  # example preprocessing configuration file
└── model                   # model files
```
## 3. Deployment Example
### 3.1 Build the SDK
Build the SDK as described in [Building the SOPHGO deployment library](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/sophgo.md). After the build, a fastdeploy-x.x.x directory is generated under the build directory.
### 3.2 Copy the model and configuration files into the model directory
Convert the Paddle model to a SOPHGO bmodel; for the conversion steps, see [this document](../README.md).
Copy the converted SOPHGO bmodel file into the model directory.
Copy the preprocessing configuration file into the model directory as well:
```bash
cp preprocess_config.yaml ./model
```
### 3.3 Put a test image into the images directory
```bash
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
cp ILSVRC2012_val_00000010.jpeg ./images
```
### 3.4 Build the example
```bash
cd build
cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-x.x.x
make
```
### 3.5 Run the example
```bash
./infer_demo model images/ILSVRC2012_val_00000010.jpeg
```
## 4. Other Documents
- [ResNet50_vd Python deployment](../python)
- [ResNet50_vd SOPHGO model conversion](../README.md)
# PaddleClas Python Deployment Example
This directory provides `infer.py` to quickly deploy ResNet50_vd on SOPHGO TPU.
## 1. Preparing the Deployment Environment
Before deploying, build and install the SOPHGO FastDeploy Python wheel yourself; see [SOPHGO hardware deployment environment](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#算能硬件部署环境)
## 2. Running the Deployment Example
```bash
# Download the deployment example code
git clone https://github.com/PaddlePaddle/PaddleClas.git
cd PaddleClas/deploy/fastdeploy/sophgo/python
# Download a test image
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Run inference with the converted model
python3 infer.py --model_file ./bmodel/resnet50_1684x_f32.bmodel --config_file ResNet50_vd_infer/inference_cls.yaml --image ILSVRC2012_val_00000010.jpeg
# After the run completes, the output looks like this:
ClassifyResult(
label_ids: 153,
scores: 0.684570,
)
```
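A rough sketch of what `infer.py` does with the bmodel (the `use_sophgo()` option and the `ModelFormat.SOPHGO` enum are assumptions; verify them against your self-built wheel):
```python
import cv2
import fastdeploy as fd

option = fd.RuntimeOption()
option.use_sophgo()  # assumed helper for the SOPHGO backend

model = fd.vision.classification.PaddleClasModel(
    "./bmodel/resnet50_1684x_f32.bmodel",
    "",  # the bmodel has no separate params file
    "ResNet50_vd_infer/inference_cls.yaml",
    runtime_option=option,
    model_format=fd.ModelFormat.SOPHGO)
print(model.predict(cv2.imread("ILSVRC2012_val_00000010.jpeg")))
```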
## 3. Other Documents
- [ResNet50_vd C++ deployment](../cpp)
- [ResNet50_vd SOPHGO model conversion](../README.md)
# MobileNet Web Frontend Deployment Example
This section describes how to run the PaddleClas MobileNet image classification model in the browser, and the JS API of the @paddle-js-models/mobilenet npm package.
## 1. Frontend Deployment of the Image Classification Model
For the image classification web demo, see the [**reference documentation**](https://github.com/PaddlePaddle/FastDeploy/tree/develop/examples/application/js/web_demo)
## 2. MobileNet JS API
```js
import * as mobilenet from "@paddle-js-models/mobilenet";
// Load and initialize the mobilenet model
await mobilenet.load();
// Run prediction with the mobilenet model and get the predicted class
const res = await mobilenet.classify(img);
console.log(res);
```
**load() parameters**
> * **Config**(dict): Image classification model configuration. Default: {Path: 'https://paddlejs.bj.bcebos.com/models/fuse/mobilenet/mobileNetV2_fuse_activation/model.json', fill: '#fff', mean: [0.485, 0.456, 0.406], std: [0.229, 0.224, 0.225]}, where modelPath is the path of the JS model, fill is the padding value used in image preprocessing, and mean and std are the preprocessing mean and standard deviation.
**classify() parameters**
> * **img**(HTMLImageElement): Input image, of type HTMLImageElement.