Unverified commit 98145fb6, authored by jm_12138, committed by GitHub

update efficientnetb5_imagenet (#2053)

Parent ee6f74b1
@@ -132,6 +132,11 @@
* 1.1.0
  Improve the prediction performance and ease of use
+ * 1.2.0
+   Remove Fluid API
```shell
- $ hub install efficientnetb5_imagenet==1.1.0
+ $ hub install efficientnetb5_imagenet==1.2.0
```
@@ -131,6 +131,11 @@
* 1.1.0
  Improve the prediction performance and user experience
+ * 1.2.0
+   Remove Fluid API
```shell
- $ hub install efficientnetb5_imagenet==1.1.0
+ $ hub install efficientnetb5_imagenet==1.2.0
```
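For reference, a minimal usage sketch of the updated 1.2.0 API (the image path is a placeholder; the call mirrors the `classification` method defined in `module.py` below):

```python
import cv2
import paddlehub as hub

# Load the installed module by name.
module = hub.Module(name="efficientnetb5_imagenet")

# classification() accepts BGR ndarrays via `images` or file paths via `paths`
# and returns a list of {label: probability} dicts, one per input image.
results = module.classification(images=[cv2.imread('dog.jpeg')],
                                batch_size=1,
                                use_gpu=False,
                                top_k=1)
print(results)
```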
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
from collections import OrderedDict
import numpy as np
from PIL import Image
__all__ = ['reader']
DATA_DIM = 224
img_mean = np.array([0.485, 0.456, 0.406]).reshape((3, 1, 1))
img_std = np.array([0.229, 0.224, 0.225]).reshape((3, 1, 1))
def resize_short(img, target_size):
percent = float(target_size) / min(img.size[0], img.size[1])
resized_width = int(round(img.size[0] * percent))
resized_height = int(round(img.size[1] * percent))
img = img.resize((resized_width, resized_height), Image.LANCZOS)
return img
def crop_image(img, target_size, center):
width, height = img.size
size = target_size
if center:
w_start = (width - size) // 2
h_start = (height - size) // 2
else:
w_start = np.random.randint(0, width - size + 1)
h_start = np.random.randint(0, height - size + 1)
w_end = w_start + size
h_end = h_start + size
img = img.crop((w_start, h_start, w_end, h_end))
return img
def process_image(img):
img = resize_short(img, target_size=256)
img = crop_image(img, target_size=DATA_DIM, center=True)
if img.mode != 'RGB':
img = img.convert('RGB')
img = np.array(img).astype('float32').transpose((2, 0, 1)) / 255
img -= img_mean
img /= img_std
return img
def reader(images=None, paths=None):
"""
Preprocess to yield image.
Args:
images (list[numpy.ndarray]): images data, shape of each is [H, W, C].
paths (list[str]): paths to images.
Yield:
each (collections.OrderedDict): info of original image, preprocessed image.
"""
component = list()
if paths:
for im_path in paths:
each = OrderedDict()
assert os.path.isfile(im_path), "The {} isn't a valid file path.".format(im_path)
each['org_im_path'] = im_path
each['org_im'] = Image.open(im_path)
each['org_im_width'], each['org_im_height'] = each['org_im'].size
component.append(each)
if images is not None:
assert isinstance(images, list), "images should be a list."
for im in images:
each = OrderedDict()
each['org_im'] = Image.fromarray(im[:, :, ::-1])
each['org_im_path'] = 'ndarray_time={}'.format(round(time.time(), 6) * 1e6)
each['org_im_width'], each['org_im_height'] = each['org_im'].size
component.append(each)
for element in component:
element['image'] = process_image(element['org_im'])
yield element
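As a hedged illustration of how `reader` is consumed (it mirrors the batching loop in the `classification` method of `module.py` below; the image path is a placeholder):

```python
import numpy as np

# Drain the generator, then stack the preprocessed CHW float32 arrays into a batch.
samples = list(reader(paths=['dog.jpeg']))
batch = np.array([sample['image'] for sample in samples])  # shape: [N, 3, 224, 224]
print(batch.shape, samples[0]['org_im_path'])
```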
tench, Tinca tinca
goldfish, Carassius auratus
great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias
tiger shark, Galeocerdo cuvieri
hammerhead, hammerhead shark
electric ray, crampfish, numbfish, torpedo
stingray
cock
hen
ostrich, Struthio camelus
brambling, Fringilla montifringilla
goldfinch, Carduelis carduelis
house finch, linnet, Carpodacus mexicanus
junco, snowbird
indigo bunting, indigo finch, indigo bird, Passerina cyanea
robin, American robin, Turdus migratorius
bulbul
jay
magpie
chickadee
water ouzel, dipper
kite
bald eagle, American eagle, Haliaeetus leucocephalus
vulture
great grey owl, great gray owl, Strix nebulosa
European fire salamander, Salamandra salamandra
common newt, Triturus vulgaris
eft
spotted salamander, Ambystoma maculatum
axolotl, mud puppy, Ambystoma mexicanum
bullfrog, Rana catesbeiana
tree frog, tree-frog
tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui
loggerhead, loggerhead turtle, Caretta caretta
leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea
mud turtle
terrapin
box turtle, box tortoise
banded gecko
common iguana, iguana, Iguana iguana
American chameleon, anole, Anolis carolinensis
whiptail, whiptail lizard
agama
frilled lizard, Chlamydosaurus kingi
alligator lizard
Gila monster, Heloderma suspectum
green lizard, Lacerta viridis
African chameleon, Chamaeleo chamaeleon
Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis
African crocodile, Nile crocodile, Crocodylus niloticus
American alligator, Alligator mississipiensis
triceratops
thunder snake, worm snake, Carphophis amoenus
ringneck snake, ring-necked snake, ring snake
hognose snake, puff adder, sand viper
green snake, grass snake
king snake, kingsnake
garter snake, grass snake
water snake
vine snake
night snake, Hypsiglena torquata
boa constrictor, Constrictor constrictor
rock python, rock snake, Python sebae
Indian cobra, Naja naja
green mamba
sea snake
horned viper, cerastes, sand viper, horned asp, Cerastes cornutus
diamondback, diamondback rattlesnake, Crotalus adamanteus
sidewinder, horned rattlesnake, Crotalus cerastes
trilobite
harvestman, daddy longlegs, Phalangium opilio
scorpion
black and gold garden spider, Argiope aurantia
barn spider, Araneus cavaticus
garden spider, Aranea diademata
black widow, Latrodectus mactans
tarantula
wolf spider, hunting spider
tick
centipede
black grouse
ptarmigan
ruffed grouse, partridge, Bonasa umbellus
prairie chicken, prairie grouse, prairie fowl
peacock
quail
partridge
African grey, African gray, Psittacus erithacus
macaw
sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita
lorikeet
coucal
bee eater
hornbill
hummingbird
jacamar
toucan
drake
red-breasted merganser, Mergus serrator
goose
black swan, Cygnus atratus
tusker
echidna, spiny anteater, anteater
platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus
wallaby, brush kangaroo
koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus
wombat
jellyfish
sea anemone, anemone
brain coral
flatworm, platyhelminth
nematode, nematode worm, roundworm
conch
snail
slug
sea slug, nudibranch
chiton, coat-of-mail shell, sea cradle, polyplacophore
chambered nautilus, pearly nautilus, nautilus
Dungeness crab, Cancer magister
rock crab, Cancer irroratus
fiddler crab
king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica
American lobster, Northern lobster, Maine lobster, Homarus americanus
spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish
crayfish, crawfish, crawdad, crawdaddy
hermit crab
isopod
white stork, Ciconia ciconia
black stork, Ciconia nigra
spoonbill
flamingo
little blue heron, Egretta caerulea
American egret, great white heron, Egretta albus
bittern
crane
limpkin, Aramus pictus
European gallinule, Porphyrio porphyrio
American coot, marsh hen, mud hen, water hen, Fulica americana
bustard
ruddy turnstone, Arenaria interpres
red-backed sandpiper, dunlin, Erolia alpina
redshank, Tringa totanus
dowitcher
oystercatcher, oyster catcher
pelican
king penguin, Aptenodytes patagonica
albatross, mollymawk
grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus
killer whale, killer, orca, grampus, sea wolf, Orcinus orca
dugong, Dugong dugon
sea lion
Chihuahua
Japanese spaniel
Maltese dog, Maltese terrier, Maltese
Pekinese, Pekingese, Peke
Shih-Tzu
Blenheim spaniel
papillon
toy terrier
Rhodesian ridgeback
Afghan hound, Afghan
basset, basset hound
beagle
bloodhound, sleuthhound
bluetick
black-and-tan coonhound
Walker hound, Walker foxhound
English foxhound
redbone
borzoi, Russian wolfhound
Irish wolfhound
Italian greyhound
whippet
Ibizan hound, Ibizan Podenco
Norwegian elkhound, elkhound
otterhound, otter hound
Saluki, gazelle hound
Scottish deerhound, deerhound
Weimaraner
Staffordshire bullterrier, Staffordshire bull terrier
American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier
Bedlington terrier
Border terrier
Kerry blue terrier
Irish terrier
Norfolk terrier
Norwich terrier
Yorkshire terrier
wire-haired fox terrier
Lakeland terrier
Sealyham terrier, Sealyham
Airedale, Airedale terrier
cairn, cairn terrier
Australian terrier
Dandie Dinmont, Dandie Dinmont terrier
Boston bull, Boston terrier
miniature schnauzer
giant schnauzer
standard schnauzer
Scotch terrier, Scottish terrier, Scottie
Tibetan terrier, chrysanthemum dog
silky terrier, Sydney silky
soft-coated wheaten terrier
West Highland white terrier
Lhasa, Lhasa apso
flat-coated retriever
curly-coated retriever
golden retriever
Labrador retriever
Chesapeake Bay retriever
German short-haired pointer
vizsla, Hungarian pointer
English setter
Irish setter, red setter
Gordon setter
Brittany spaniel
clumber, clumber spaniel
English springer, English springer spaniel
Welsh springer spaniel
cocker spaniel, English cocker spaniel, cocker
Sussex spaniel
Irish water spaniel
kuvasz
schipperke
groenendael
malinois
briard
kelpie
komondor
Old English sheepdog, bobtail
Shetland sheepdog, Shetland sheep dog, Shetland
collie
Border collie
Bouvier des Flandres, Bouviers des Flandres
Rottweiler
German shepherd, German shepherd dog, German police dog, alsatian
Doberman, Doberman pinscher
miniature pinscher
Greater Swiss Mountain dog
Bernese mountain dog
Appenzeller
EntleBucher
boxer
bull mastiff
Tibetan mastiff
French bulldog
Great Dane
Saint Bernard, St Bernard
Eskimo dog, husky
malamute, malemute, Alaskan malamute
Siberian husky
dalmatian, coach dog, carriage dog
affenpinscher, monkey pinscher, monkey dog
basenji
pug, pug-dog
Leonberg
Newfoundland, Newfoundland dog
Great Pyrenees
Samoyed, Samoyede
Pomeranian
chow, chow chow
keeshond
Brabancon griffon
Pembroke, Pembroke Welsh corgi
Cardigan, Cardigan Welsh corgi
toy poodle
miniature poodle
standard poodle
Mexican hairless
timber wolf, grey wolf, gray wolf, Canis lupus
white wolf, Arctic wolf, Canis lupus tundrarum
red wolf, maned wolf, Canis rufus, Canis niger
coyote, prairie wolf, brush wolf, Canis latrans
dingo, warrigal, warragal, Canis dingo
dhole, Cuon alpinus
African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus
hyena, hyaena
red fox, Vulpes vulpes
kit fox, Vulpes macrotis
Arctic fox, white fox, Alopex lagopus
grey fox, gray fox, Urocyon cinereoargenteus
tabby, tabby cat
tiger cat
Persian cat
Siamese cat, Siamese
Egyptian cat
cougar, puma, catamount, mountain lion, painter, panther, Felis concolor
lynx, catamount
leopard, Panthera pardus
snow leopard, ounce, Panthera uncia
jaguar, panther, Panthera onca, Felis onca
lion, king of beasts, Panthera leo
tiger, Panthera tigris
cheetah, chetah, Acinonyx jubatus
brown bear, bruin, Ursus arctos
American black bear, black bear, Ursus americanus, Euarctos americanus
ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus
sloth bear, Melursus ursinus, Ursus ursinus
mongoose
meerkat, mierkat
tiger beetle
ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle
ground beetle, carabid beetle
long-horned beetle, longicorn, longicorn beetle
leaf beetle, chrysomelid
dung beetle
rhinoceros beetle
weevil
fly
bee
ant, emmet, pismire
grasshopper, hopper
cricket
walking stick, walkingstick, stick insect
cockroach, roach
mantis, mantid
cicada, cicala
leafhopper
lacewing, lacewing fly
dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk
damselfly
admiral
ringlet, ringlet butterfly
monarch, monarch butterfly, milkweed butterfly, Danaus plexippus
cabbage butterfly
sulphur butterfly, sulfur butterfly
lycaenid, lycaenid butterfly
starfish, sea star
sea urchin
sea cucumber, holothurian
wood rabbit, cottontail, cottontail rabbit
hare
Angora, Angora rabbit
hamster
porcupine, hedgehog
fox squirrel, eastern fox squirrel, Sciurus niger
marmot
beaver
guinea pig, Cavia cobaya
sorrel
zebra
hog, pig, grunter, squealer, Sus scrofa
wild boar, boar, Sus scrofa
warthog
hippopotamus, hippo, river horse, Hippopotamus amphibius
ox
water buffalo, water ox, Asiatic buffalo, Bubalus bubalis
bison
ram, tup
bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis
ibex, Capra ibex
hartebeest
impala, Aepyceros melampus
gazelle
Arabian camel, dromedary, Camelus dromedarius
llama
weasel
mink
polecat, fitch, foulmart, foumart, Mustela putorius
black-footed ferret, ferret, Mustela nigripes
otter
skunk, polecat, wood pussy
badger
armadillo
three-toed sloth, ai, Bradypus tridactylus
orangutan, orang, orangutang, Pongo pygmaeus
gorilla, Gorilla gorilla
chimpanzee, chimp, Pan troglodytes
gibbon, Hylobates lar
siamang, Hylobates syndactylus, Symphalangus syndactylus
guenon, guenon monkey
patas, hussar monkey, Erythrocebus patas
baboon
macaque
langur
colobus, colobus monkey
proboscis monkey, Nasalis larvatus
marmoset
capuchin, ringtail, Cebus capucinus
howler monkey, howler
titi, titi monkey
spider monkey, Ateles geoffroyi
squirrel monkey, Saimiri sciureus
Madagascar cat, ring-tailed lemur, Lemur catta
indri, indris, Indri indri, Indri brevicaudatus
Indian elephant, Elephas maximus
African elephant, Loxodonta africana
lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens
giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca
barracouta, snoek
eel
coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch
rock beauty, Holocanthus tricolor
anemone fish
sturgeon
gar, garfish, garpike, billfish, Lepisosteus osseus
lionfish
puffer, pufferfish, blowfish, globefish
abacus
abaya
academic gown, academic robe, judge's robe
accordion, piano accordion, squeeze box
acoustic guitar
aircraft carrier, carrier, flattop, attack aircraft carrier
airliner
airship, dirigible
altar
ambulance
amphibian, amphibious vehicle
analog clock
apiary, bee house
apron
ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin
assault rifle, assault gun
backpack, back pack, knapsack, packsack, rucksack, haversack
bakery, bakeshop, bakehouse
balance beam, beam
balloon
ballpoint, ballpoint pen, ballpen, Biro
Band Aid
banjo
bannister, banister, balustrade, balusters, handrail
barbell
barber chair
barbershop
barn
barometer
barrel, cask
barrow, garden cart, lawn cart, wheelbarrow
baseball
basketball
bassinet
bassoon
bathing cap, swimming cap
bath towel
bathtub, bathing tub, bath, tub
beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon
beacon, lighthouse, beacon light, pharos
beaker
bearskin, busby, shako
beer bottle
beer glass
bell cote, bell cot
bib
bicycle-built-for-two, tandem bicycle, tandem
bikini, two-piece
binder, ring-binder
binoculars, field glasses, opera glasses
birdhouse
boathouse
bobsled, bobsleigh, bob
bolo tie, bolo, bola tie, bola
bonnet, poke bonnet
bookcase
bookshop, bookstore, bookstall
bottlecap
bow
bow tie, bow-tie, bowtie
brass, memorial tablet, plaque
brassiere, bra, bandeau
breakwater, groin, groyne, mole, bulwark, seawall, jetty
breastplate, aegis, egis
broom
bucket, pail
buckle
bulletproof vest
bullet train, bullet
butcher shop, meat market
cab, hack, taxi, taxicab
caldron, cauldron
candle, taper, wax light
cannon
canoe
can opener, tin opener
cardigan
car mirror
carousel, carrousel, merry-go-round, roundabout, whirligig
carpenter's kit, tool kit
carton
car wheel
cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM
cassette
cassette player
castle
catamaran
CD player
cello, violoncello
cellular telephone, cellular phone, cellphone, cell, mobile phone
chain
chainlink fence
chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour
chain saw, chainsaw
chest
chiffonier, commode
chime, bell, gong
china cabinet, china closet
Christmas stocking
church, church building
cinema, movie theater, movie theatre, movie house, picture palace
cleaver, meat cleaver, chopper
cliff dwelling
cloak
clog, geta, patten, sabot
cocktail shaker
coffee mug
coffeepot
coil, spiral, volute, whorl, helix
combination lock
computer keyboard, keypad
confectionery, confectionary, candy store
container ship, containership, container vessel
convertible
corkscrew, bottle screw
cornet, horn, trumpet, trump
cowboy boot
cowboy hat, ten-gallon hat
cradle
crane
crash helmet
crate
crib, cot
Crock Pot
croquet ball
crutch
cuirass
dam, dike, dyke
desk
desktop computer
dial telephone, dial phone
diaper, nappy, napkin
digital clock
digital watch
dining table, board
dishrag, dishcloth
dishwasher, dish washer, dishwashing machine
disk brake, disc brake
dock, dockage, docking facility
dogsled, dog sled, dog sleigh
dome
doormat, welcome mat
drilling platform, offshore rig
drum, membranophone, tympan
drumstick
dumbbell
Dutch oven
electric fan, blower
electric guitar
electric locomotive
entertainment center
envelope
espresso maker
face powder
feather boa, boa
file, file cabinet, filing cabinet
fireboat
fire engine, fire truck
fire screen, fireguard
flagpole, flagstaff
flute, transverse flute
folding chair
football helmet
forklift
fountain
fountain pen
four-poster
freight car
French horn, horn
frying pan, frypan, skillet
fur coat
garbage truck, dustcart
gasmask, respirator, gas helmet
gas pump, gasoline pump, petrol pump, island dispenser
goblet
go-kart
golf ball
golfcart, golf cart
gondola
gong, tam-tam
gown
grand piano, grand
greenhouse, nursery, glasshouse
grille, radiator grille
grocery store, grocery, food market, market
guillotine
hair slide
hair spray
half track
hammer
hamper
hand blower, blow dryer, blow drier, hair dryer, hair drier
hand-held computer, hand-held microcomputer
handkerchief, hankie, hanky, hankey
hard disc, hard disk, fixed disk
harmonica, mouth organ, harp, mouth harp
harp
harvester, reaper
hatchet
holster
home theater, home theatre
honeycomb
hook, claw
hoopskirt, crinoline
horizontal bar, high bar
horse cart, horse-cart
hourglass
iPod
iron, smoothing iron
jack-o'-lantern
jean, blue jean, denim
jeep, landrover
jersey, T-shirt, tee shirt
jigsaw puzzle
jinrikisha, ricksha, rickshaw
joystick
kimono
knee pad
knot
lab coat, laboratory coat
ladle
lampshade, lamp shade
laptop, laptop computer
lawn mower, mower
lens cap, lens cover
letter opener, paper knife, paperknife
library
lifeboat
lighter, light, igniter, ignitor
limousine, limo
liner, ocean liner
lipstick, lip rouge
Loafer
lotion
loudspeaker, speaker, speaker unit, loudspeaker system, speaker system
loupe, jeweler's loupe
lumbermill, sawmill
magnetic compass
mailbag, postbag
mailbox, letter box
maillot
maillot, tank suit
manhole cover
maraca
marimba, xylophone
mask
matchstick
maypole
maze, labyrinth
measuring cup
medicine chest, medicine cabinet
megalith, megalithic structure
microphone, mike
microwave, microwave oven
military uniform
milk can
minibus
miniskirt, mini
minivan
missile
mitten
mixing bowl
mobile home, manufactured home
Model T
modem
monastery
monitor
moped
mortar
mortarboard
mosque
mosquito net
motor scooter, scooter
mountain bike, all-terrain bike, off-roader
mountain tent
mouse, computer mouse
mousetrap
moving van
muzzle
nail
neck brace
necklace
nipple
notebook, notebook computer
obelisk
oboe, hautboy, hautbois
ocarina, sweet potato
odometer, hodometer, mileometer, milometer
oil filter
organ, pipe organ
oscilloscope, scope, cathode-ray oscilloscope, CRO
overskirt
oxcart
oxygen mask
packet
paddle, boat paddle
paddlewheel, paddle wheel
padlock
paintbrush
pajama, pyjama, pj's, jammies
palace
panpipe, pandean pipe, syrinx
paper towel
parachute, chute
parallel bars, bars
park bench
parking meter
passenger car, coach, carriage
patio, terrace
pay-phone, pay-station
pedestal, plinth, footstall
pencil box, pencil case
pencil sharpener
perfume, essence
Petri dish
photocopier
pick, plectrum, plectron
pickelhaube
picket fence, paling
pickup, pickup truck
pier
piggy bank, penny bank
pill bottle
pillow
ping-pong ball
pinwheel
pirate, pirate ship
pitcher, ewer
plane, carpenter's plane, woodworking plane
planetarium
plastic bag
plate rack
plow, plough
plunger, plumber's helper
Polaroid camera, Polaroid Land camera
pole
police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria
poncho
pool table, billiard table, snooker table
pop bottle, soda bottle
pot, flowerpot
potter's wheel
power drill
prayer rug, prayer mat
printer
prison, prison house
projectile, missile
projector
puck, hockey puck
punching bag, punch bag, punching ball, punchball
purse
quill, quill pen
quilt, comforter, comfort, puff
racer, race car, racing car
racket, racquet
radiator
radio, wireless
radio telescope, radio reflector
rain barrel
recreational vehicle, RV, R.V.
reel
reflex camera
refrigerator, icebox
remote control, remote
restaurant, eating house, eating place, eatery
revolver, six-gun, six-shooter
rifle
rocking chair, rocker
rotisserie
rubber eraser, rubber, pencil eraser
rugby ball
rule, ruler
running shoe
safe
safety pin
saltshaker, salt shaker
sandal
sarong
sax, saxophone
scabbard
scale, weighing machine
school bus
schooner
scoreboard
screen, CRT screen
screw
screwdriver
seat belt, seatbelt
sewing machine
shield, buckler
shoe shop, shoe-shop, shoe store
shoji
shopping basket
shopping cart
shovel
shower cap
shower curtain
ski
ski mask
sleeping bag
slide rule, slipstick
sliding door
slot, one-armed bandit
snorkel
snowmobile
snowplow, snowplough
soap dispenser
soccer ball
sock
solar dish, solar collector, solar furnace
sombrero
soup bowl
space bar
space heater
space shuttle
spatula
speedboat
spider web, spider's web
spindle
sports car, sport car
spotlight, spot
stage
steam locomotive
steel arch bridge
steel drum
stethoscope
stole
stone wall
stopwatch, stop watch
stove
strainer
streetcar, tram, tramcar, trolley, trolley car
stretcher
studio couch, day bed
stupa, tope
submarine, pigboat, sub, U-boat
suit, suit of clothes
sundial
sunglass
sunglasses, dark glasses, shades
sunscreen, sunblock, sun blocker
suspension bridge
swab, swob, mop
sweatshirt
swimming trunks, bathing trunks
swing
switch, electric switch, electrical switch
syringe
table lamp
tank, army tank, armored combat vehicle, armoured combat vehicle
tape player
teapot
teddy, teddy bear
television, television system
tennis ball
thatch, thatched roof
theater curtain, theatre curtain
thimble
thresher, thrasher, threshing machine
throne
tile roof
toaster
tobacco shop, tobacconist shop, tobacconist
toilet seat
torch
totem pole
tow truck, tow car, wrecker
toyshop
tractor
trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi
tray
trench coat
tricycle, trike, velocipede
trimaran
tripod
triumphal arch
trolleybus, trolley coach, trackless trolley
trombone
tub, vat
turnstile
typewriter keyboard
umbrella
unicycle, monocycle
upright, upright piano
vacuum, vacuum cleaner
vase
vault
velvet
vending machine
vestment
viaduct
violin, fiddle
volleyball
waffle iron
wall clock
wallet, billfold, notecase, pocketbook
wardrobe, closet, press
warplane, military plane
washbasin, handbasin, washbowl, lavabo, wash-hand basin
washer, automatic washer, washing machine
water bottle
water jug
water tower
whiskey jug
whistle
wig
window screen
window shade
Windsor tie
wine bottle
wing
wok
wooden spoon
wool, woolen, woollen
worm fence, snake fence, snake-rail fence, Virginia fence
wreck
yawl
yurt
web site, website, internet site, site
comic book
crossword puzzle, crossword
street sign
traffic light, traffic signal, stoplight
book jacket, dust cover, dust jacket, dust wrapper
menu
plate
guacamole
consomme
hot pot, hotpot
trifle
ice cream, icecream
ice lolly, lolly, lollipop, popsicle
French loaf
bagel, beigel
pretzel
cheeseburger
hotdog, hot dog, red hot
mashed potato
head cabbage
broccoli
cauliflower
zucchini, courgette
spaghetti squash
acorn squash
butternut squash
cucumber, cuke
artichoke, globe artichoke
bell pepper
cardoon
mushroom
Granny Smith
strawberry
orange
lemon
fig
pineapple, ananas
banana
jackfruit, jak, jack
custard apple
pomegranate
hay
carbonara
chocolate sauce, chocolate syrup
dough
meat loaf, meatloaf
pizza, pizza pie
potpie
burrito
red wine
espresso
cup
eggnog
alp
bubble
cliff, drop, drop-off
coral reef
geyser
lakeside, lakeshore
promontory, headland, head, foreland
sandbar, sand bar
seashore, coast, seacoast, sea-coast
valley, vale
volcano
ballplayer, baseball player
groom, bridegroom
scuba diver
rapeseed
daisy
yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum
corn
acorn
hip, rose hip, rosehip
buckeye, horse chestnut, conker
coral fungus
agaric
gyromitra
stinkhorn, carrion fungus
earthstar
hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa
bolete
ear, spike, capitulum
toilet tissue, toilet paper, bathroom tissue
@@ -11,781 +11,187 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
import argparse
import ast
import os
import math
import collections
import re
import copy
import paddle
from paddle import ParamAttr
import paddle.nn as nn
import paddle.nn.functional as F
from paddle.nn import Conv2d, BatchNorm, Linear, Dropout
from paddle.nn import AdaptiveAvgPool2d, MaxPool2d, AvgPool2d
from paddlehub.module.module import moduleinfo
from paddlehub.module.cv_module import ImageClassifierModule
GlobalParams = collections.namedtuple('GlobalParams', [
'batch_norm_momentum',
'batch_norm_epsilon',
'dropout_rate',
'num_classes',
'width_coefficient',
'depth_coefficient',
'depth_divisor',
'min_depth',
'drop_connect_rate',
])
BlockArgs = collections.namedtuple(
'BlockArgs',
['kernel_size', 'num_repeat', 'input_filters', 'output_filters', 'expand_ratio', 'id_skip', 'stride', 'se_ratio'])
GlobalParams.__new__.__defaults__ = (None, ) * len(GlobalParams._fields)
BlockArgs.__new__.__defaults__ = (None, ) * len(BlockArgs._fields)
def efficientnet_params(model_name: str):
""" Map EfficientNet model name to parameter coefficients. """
params_dict = {
# Coefficients: width,depth,resolution,dropout
'efficientnet-b5': (1.6, 2.2, 456, 0.4)
}
return params_dict[model_name]
def efficientnet(width_coefficient: float = None,
depth_coefficient: float = None,
dropout_rate: float = 0.2,
drop_connect_rate: float = 0.2):
""" Get block arguments according to parameter and coefficients. """
blocks_args = [
'r1_k3_s11_e1_i32_o16_se0.25',
'r2_k3_s22_e6_i16_o24_se0.25',
'r2_k5_s22_e6_i24_o40_se0.25',
'r3_k3_s22_e6_i40_o80_se0.25',
'r3_k5_s11_e6_i80_o112_se0.25',
'r4_k5_s22_e6_i112_o192_se0.25',
'r1_k3_s11_e6_i192_o320_se0.25',
]
blocks_args = BlockDecoder.decode(blocks_args)
global_params = GlobalParams(
batch_norm_momentum=0.99,
batch_norm_epsilon=1e-3,
dropout_rate=dropout_rate,
drop_connect_rate=drop_connect_rate,
num_classes=1000,
width_coefficient=width_coefficient,
depth_coefficient=depth_coefficient,
depth_divisor=8,
min_depth=None)
return blocks_args, global_params
def get_model_params(model_name: str, override_params: dict):
""" Get the block args and global params for a given model """
if model_name.startswith('efficientnet'):
w, d, _, p = efficientnet_params(model_name)
blocks_args, global_params = efficientnet(width_coefficient=w, depth_coefficient=d, dropout_rate=p)
else:
raise NotImplementedError('model name is not pre-defined: %s' % model_name)
if override_params:
global_params = global_params._replace(**override_params)
return blocks_args, global_params
def round_filters(filters: int, global_params: dict):
""" Calculate and round number of filters based on depth multiplier. """
multiplier = global_params.width_coefficient
if not multiplier:
return filters
divisor = global_params.depth_divisor
min_depth = global_params.min_depth
filters *= multiplier
min_depth = min_depth or divisor
new_filters = max(min_depth, int(filters + divisor / 2) // divisor * divisor)
if new_filters < 0.9 * filters: # prevent rounding by more than 10%
new_filters += divisor
return int(new_filters)
import numpy as np
from paddle.inference import Config
from paddle.inference import create_predictor
def round_repeats(repeats: int, global_params: dict):
""" Round number of filters based on depth multiplier. """
multiplier = global_params.depth_coefficient
if not multiplier:
return repeats
return int(math.ceil(multiplier * repeats))
from .data_feed import reader
from .processor import base64_to_cv2
from .processor import postprocess
from paddlehub.module.module import moduleinfo
from paddlehub.module.module import runnable
from paddlehub.module.module import serving
class BlockDecoder(object):
@moduleinfo(name="efficientnetb5_imagenet",
type="CV/image_classification",
author="paddlepaddle",
author_email="paddle-dev@baidu.com",
summary="EfficientNetB5 is a image classfication model, this module is trained with imagenet datasets.",
version="1.2.0")
class EfficientNetB5ImageNet:
def __init__(self):
self.default_pretrained_model_path = os.path.join(self.directory, "efficientnetb5_imagenet_infer_model",
"model")
label_file = os.path.join(self.directory, "label_list.txt")
with open(label_file, 'r', encoding='utf-8') as file:
self.label_list = file.read().split("\n")[:-1]
self._set_config()
def get_expected_image_width(self):
return 224
def get_expected_image_height(self):
return 224
def get_pretrained_images_mean(self):
im_mean = np.array([0.485, 0.456, 0.406]).reshape(1, 3)
return im_mean
def get_pretrained_images_std(self):
im_std = np.array([0.229, 0.224, 0.225]).reshape(1, 3)
return im_std
def _set_config(self):
"""
Block Decoder, straight from the official TensorFlow repository.
predictor config setting
"""
model = self.default_pretrained_model_path + '.pdmodel'
params = self.default_pretrained_model_path + '.pdiparams'
cpu_config = Config(model, params)
cpu_config.disable_glog_info()
cpu_config.disable_gpu()
self.cpu_predictor = create_predictor(cpu_config)
try:
_places = os.environ["CUDA_VISIBLE_DEVICES"]
int(_places[0])
use_gpu = True
except:
use_gpu = False
if use_gpu:
gpu_config = Config(model, params)
gpu_config.disable_glog_info()
gpu_config.enable_use_gpu(memory_pool_init_size_mb=1000, device_id=0)
self.gpu_predictor = create_predictor(gpu_config)
def classification(self, images=None, paths=None, batch_size=1, use_gpu=False, top_k=1):
"""
API for image classification.
@staticmethod
def _decode_block_string(block_string: str):
""" Gets a block through a string notation of arguments. """
assert isinstance(block_string, str)
ops = block_string.split('_')
options = {}
for op in ops:
splits = re.split(r'(\d.*)', op)
if len(splits) >= 2:
key, value = splits[:2]
options[key] = value
# Check stride
cond_1 = ('s' in options and len(options['s']) == 1)
cond_2 = ((len(options['s']) == 2) and (options['s'][0] == options['s'][1]))
assert (cond_1 or cond_2)
return BlockArgs(
kernel_size=int(options['k']),
num_repeat=int(options['r']),
input_filters=int(options['i']),
output_filters=int(options['o']),
expand_ratio=int(options['e']),
id_skip=('noskip' not in block_string),
se_ratio=float(options['se']) if 'se' in options else None,
stride=[int(options['s'][0])])
@staticmethod
def _encode_block_string(block):
"""Encodes a block to a string."""
args = [
'r%d' % block.num_repeat,
'k%d' % block.kernel_size,
's%d%d' % (block.strides[0], block.strides[1]),
'e%s' % block.expand_ratio,
'i%d' % block.input_filters,
'o%d' % block.output_filters
]
if 0 < block.se_ratio <= 1:
args.append('se%s' % block.se_ratio)
if block.id_skip is False:
args.append('noskip')
return '_'.join(args)
Args:
images (list[numpy.ndarray]): data of images, shape of each is [H, W, C], color space must be BGR.
paths (list[str]): The paths of images.
batch_size (int): batch size.
use_gpu (bool): Whether to use gpu.
top_k (int): Return top k results.
@staticmethod
def decode(string_list: list):
Returns:
res (list[dict]): The classfication results.
"""
Decode a list of string notations to specify blocks in the network.
string_list: list of strings, each string is a notation of block
return
list of BlockArgs namedtuples of block args
if use_gpu:
try:
_places = os.environ["CUDA_VISIBLE_DEVICES"]
int(_places[0])
except:
raise RuntimeError(
"Environment Variable CUDA_VISIBLE_DEVICES is not set correctly. If you wanna use gpu, please set CUDA_VISIBLE_DEVICES as cuda_device_id."
)
all_data = list()
for yield_data in reader(images, paths):
all_data.append(yield_data)
total_num = len(all_data)
loop_num = int(np.ceil(total_num / batch_size))
res = list()
for iter_id in range(loop_num):
batch_data = list()
handle_id = iter_id * batch_size
for image_id in range(batch_size):
try:
batch_data.append(all_data[handle_id + image_id])
except:
pass
# feed batch image
batch_image = np.array([data['image'] for data in batch_data])
predictor = self.gpu_predictor if use_gpu else self.cpu_predictor
input_names = predictor.get_input_names()
input_handle = predictor.get_input_handle(input_names[0])
input_handle.copy_from_cpu(batch_image.copy())
predictor.run()
output_names = predictor.get_output_names()
output_handle = predictor.get_output_handle(output_names[0])
out = postprocess(data_out=output_handle.copy_to_cpu(), label_list=self.label_list, top_k=top_k)
res += out
return res
@serving
def serving_method(self, images, **kwargs):
"""
assert isinstance(string_list, list)
blocks_args = []
for block_string in string_list:
blocks_args.append(BlockDecoder._decode_block_string(block_string))
return blocks_args
@staticmethod
def encode(blocks_args: list):
Run as a service.
"""
Encodes a list of BlockArgs to a list of strings.
images_decode = [base64_to_cv2(image) for image in images]
results = self.classification(images=images_decode, **kwargs)
return results
:param blocks_args: a list of BlockArgs namedtuples of block args
:return: a list of strings, each string is a notation of block
@runnable
def run_cmd(self, argvs):
"""
block_strings = []
for block in blocks_args:
block_strings.append(BlockDecoder._encode_block_string(block))
return block_strings
def initial_type(name: str, use_bias: bool = False):
param_attr = ParamAttr(name=name + "_weights")
if use_bias:
bias_attr = ParamAttr(name=name + "_offset")
else:
bias_attr = False
return param_attr, bias_attr
def init_batch_norm_layer(name: str = "batch_norm"):
param_attr = ParamAttr(name=name + "_scale")
bias_attr = ParamAttr(name=name + "_offset")
return param_attr, bias_attr
def init_fc_layer(name: str = "fc"):
param_attr = ParamAttr(name=name + "_weights")
bias_attr = ParamAttr(name=name + "_offset")
return param_attr, bias_attr
def cal_padding(img_size: int, stride: int, filter_size: int, dilation: int = 1):
"""Calculate padding size."""
if img_size % stride == 0:
out_size = max(filter_size - stride, 0)
else:
out_size = max(filter_size - (img_size % stride), 0)
return out_size // 2, out_size - out_size // 2
inp_shape = {"b5": [456, 228, 228, 114, 57, 29, 29, 15]}
def _drop_connect(inputs: paddle.Tensor, prob: float, is_test: bool):
"""Drop input connection"""
if is_test:
return inputs
keep_prob = 1.0 - prob
inputs_shape = paddle.shape(inputs)
random_tensor = keep_prob + paddle.rand(shape=[inputs_shape[0], 1, 1, 1])
binary_tensor = paddle.floor(random_tensor)
output = inputs / keep_prob * binary_tensor
return output
class Conv2ds(nn.Layer):
"""Basic conv layer"""
def __init__(self,
input_channels: int,
output_channels: int,
filter_size: int,
stride: int = 1,
padding: int = 0,
groups: int = None,
name: str = "conv2d",
act: str = None,
use_bias: bool = False,
padding_type: str = None,
model_name: str = None,
cur_stage: str = None):
super(Conv2ds, self).__init__()
assert act in [None, "swish", "sigmoid"]
self.act = act
param_attr, bias_attr = initial_type(name=name, use_bias=use_bias)
def get_padding(filter_size, stride=1, dilation=1):
padding = ((stride - 1) + dilation * (filter_size - 1)) // 2
return padding
inps = 1 if model_name == None and cur_stage == None else inp_shape[model_name][cur_stage]
self.need_crop = False
if padding_type == "SAME":
top_padding, bottom_padding = cal_padding(inps, stride, filter_size)
left_padding, right_padding = cal_padding(inps, stride, filter_size)
height_padding = bottom_padding
width_padding = right_padding
if top_padding != bottom_padding or left_padding != right_padding:
height_padding = top_padding + stride
width_padding = left_padding + stride
self.need_crop = True
padding = [height_padding, width_padding]
elif padding_type == "VALID":
height_padding = 0
width_padding = 0
padding = [height_padding, width_padding]
elif padding_type == "DYNAMIC":
padding = get_padding(filter_size, stride)
else:
padding = padding_type
groups = 1 if groups is None else groups
self._conv = Conv2d(
input_channels,
output_channels,
filter_size,
groups=groups,
stride=stride,
padding=padding,
weight_attr=param_attr,
bias_attr=bias_attr)
def forward(self, inputs: paddle.Tensor):
x = self._conv(inputs)
if self.act == "swish":
x = F.swish(x)
elif self.act == "sigmoid":
x = F.sigmoid(x)
if self.need_crop:
x = x[:, :, 1:, 1:]
return x
class ConvBNLayer(nn.Layer):
"""Basic conv bn layer."""
def __init__(self,
input_channels: int,
filter_size: int,
output_channels: int,
stride: int = 1,
num_groups: int = 1,
padding_type: str = "SAME",
conv_act: str = None,
bn_act: str = "swish",
use_bn: bool = True,
use_bias: bool = False,
name: str = None,
conv_name: str = None,
bn_name: str = None,
model_name: str = None,
cur_stage: str = None):
super(ConvBNLayer, self).__init__()
self._conv = Conv2ds(
input_channels=input_channels,
output_channels=output_channels,
filter_size=filter_size,
stride=stride,
groups=num_groups,
act=conv_act,
padding_type=padding_type,
name=conv_name,
use_bias=use_bias,
model_name=model_name,
cur_stage=cur_stage)
self.use_bn = use_bn
if use_bn is True:
bn_name = name + bn_name
param_attr, bias_attr = init_batch_norm_layer(bn_name)
self._bn = BatchNorm(
num_channels=output_channels,
act=bn_act,
momentum=0.99,
epsilon=0.001,
moving_mean_name=bn_name + "_mean",
moving_variance_name=bn_name + "_variance",
param_attr=param_attr,
bias_attr=bias_attr)
def forward(self, inputs: paddle.Tensor):
if self.use_bn:
x = self._conv(inputs)
x = self._bn(x)
return x
else:
return self._conv(inputs)
class ExpandConvNorm(nn.Layer):
"""Expand conv norm layer."""
def __init__(self,
input_channels: int,
block_args: dict,
padding_type: str,
name: str = None,
model_name: str = None,
cur_stage: str = None):
super(ExpandConvNorm, self).__init__()
self.oup = block_args.input_filters * block_args.expand_ratio
self.expand_ratio = block_args.expand_ratio
if self.expand_ratio != 1:
self._conv = ConvBNLayer(
input_channels,
1,
self.oup,
bn_act=None,
padding_type=padding_type,
name=name,
conv_name=name + "_expand_conv",
bn_name="_bn0",
model_name=model_name,
cur_stage=cur_stage)
def forward(self, inputs: paddle.Tensor):
if self.expand_ratio != 1:
return self._conv(inputs)
else:
return inputs
class DepthwiseConvNorm(nn.Layer):
"""Depthwise conv norm layer."""
def __init__(self,
input_channels: int,
block_args: dict,
padding_type: str,
name: str = None,
model_name: str = None,
cur_stage: str = None):
super(DepthwiseConvNorm, self).__init__()
self.k = block_args.kernel_size
self.s = block_args.stride
if isinstance(self.s, list) or isinstance(self.s, tuple):
self.s = self.s[0]
oup = block_args.input_filters * block_args.expand_ratio
self._conv = ConvBNLayer(
input_channels,
self.k,
oup,
self.s,
num_groups=input_channels,
bn_act=None,
padding_type=padding_type,
name=name,
conv_name=name + "_depthwise_conv",
bn_name="_bn1",
model_name=model_name,
cur_stage=cur_stage)
def forward(self, inputs: paddle.Tensor):
return self._conv(inputs)
class ProjectConvNorm(nn.Layer):
"""Projection conv bn layer."""
def __init__(self,
input_channels: int,
block_args: dict,
padding_type: str,
name: str = None,
model_name: str = None,
cur_stage: str = None):
super(ProjectConvNorm, self).__init__()
final_oup = block_args.output_filters
self._conv = ConvBNLayer(
input_channels,
1,
final_oup,
bn_act=None,
padding_type=padding_type,
name=name,
conv_name=name + "_project_conv",
bn_name="_bn2",
model_name=model_name,
cur_stage=cur_stage)
def forward(self, inputs: paddle.Tensor):
return self._conv(inputs)
class SEBlock(nn.Layer):
"""Basic Squeeze-and-Excitation block for Efficientnet."""
def __init__(self,
input_channels: int,
num_squeezed_channels: int,
oup: int,
padding_type: str,
name: str = None,
model_name: str = None,
cur_stage: str = None):
super(SEBlock, self).__init__()
self._pool = AdaptiveAvgPool2d(1)
self._conv1 = Conv2ds(
input_channels,
num_squeezed_channels,
1,
use_bias=True,
padding_type=padding_type,
act="swish",
name=name + "_se_reduce")
self._conv2 = Conv2ds(
num_squeezed_channels,
oup,
1,
act="sigmoid",
use_bias=True,
padding_type=padding_type,
name=name + "_se_expand")
def forward(self, inputs: paddle.Tensor):
x = self._pool(inputs)
x = self._conv1(x)
x = self._conv2(x)
return paddle.multiply(inputs, x)
class MbConvBlock(nn.Layer):
"""Mobile inverted bottleneck convolution for Efficientnet."""
def __init__(self,
input_channels: int,
block_args: dict,
padding_type: str,
use_se: bool,
name: str = None,
drop_connect_rate: float = None,
is_test: bool = False,
model_name: str = None,
cur_stage: str = None):
super(MbConvBlock, self).__init__()
oup = block_args.input_filters * block_args.expand_ratio
self.block_args = block_args
self.has_se = use_se and (block_args.se_ratio is not None) and (0 < block_args.se_ratio <= 1)
self.id_skip = block_args.id_skip
self.expand_ratio = block_args.expand_ratio
self.drop_connect_rate = drop_connect_rate
self.is_test = is_test
if self.expand_ratio != 1:
self._ecn = ExpandConvNorm(
input_channels,
block_args,
padding_type=padding_type,
name=name,
model_name=model_name,
cur_stage=cur_stage)
self._dcn = DepthwiseConvNorm(
input_channels * block_args.expand_ratio,
block_args,
padding_type=padding_type,
name=name,
model_name=model_name,
cur_stage=cur_stage)
if self.has_se:
num_squeezed_channels = max(1, int(block_args.input_filters * block_args.se_ratio))
self._se = SEBlock(
input_channels * block_args.expand_ratio,
num_squeezed_channels,
oup,
padding_type=padding_type,
name=name,
model_name=model_name,
cur_stage=cur_stage)
self._pcn = ProjectConvNorm(
input_channels * block_args.expand_ratio,
block_args,
padding_type=padding_type,
name=name,
model_name=model_name,
cur_stage=cur_stage)
def forward(self, inputs: paddle.Tensor):
x = inputs
if self.expand_ratio != 1:
x = self._ecn(x)
x = F.swish(x)
x = self._dcn(x)
x = F.swish(x)
if self.has_se:
x = self._se(x)
x = self._pcn(x)
if self.id_skip and \
self.block_args.stride == 1 and \
self.block_args.input_filters == self.block_args.output_filters:
if self.drop_connect_rate:
x = _drop_connect(x, self.drop_connect_rate, self.is_test)
x = paddle.elementwise_add(x, inputs)
return x
class ConvStemNorm(nn.Layer):
"""Basic conv stem norm block for extracting features."""
def __init__(self,
input_channels: int,
padding_type: str,
_global_params: dict,
name: str = None,
model_name: str = None,
cur_stage: str = None):
super(ConvStemNorm, self).__init__()
output_channels = round_filters(32, _global_params)
self._conv = ConvBNLayer(
input_channels,
filter_size=3,
output_channels=output_channels,
stride=2,
bn_act=None,
padding_type=padding_type,
name="",
conv_name="_conv_stem",
bn_name="_bn0",
model_name=model_name,
cur_stage=cur_stage)
def forward(self, inputs: paddle.Tensor):
return self._conv(inputs)
class ExtractFeatures(nn.Layer):
"""Extract features."""
def __init__(self,
input_channels: int,
_block_args: dict,
_global_params: dict,
padding_type: str,
use_se: bool,
is_test: bool,
model_name: str = None):
super(ExtractFeatures, self).__init__()
self._global_params = _global_params
self._conv_stem = ConvStemNorm(
input_channels,
padding_type=padding_type,
_global_params=_global_params,
model_name=model_name,
cur_stage=0)
self.block_args_copy = copy.deepcopy(_block_args)
idx = 0
block_size = 0
for block_arg in self.block_args_copy:
block_arg = block_arg._replace(
input_filters=round_filters(block_arg.input_filters, _global_params),
output_filters=round_filters(block_arg.output_filters, _global_params),
num_repeat=round_repeats(block_arg.num_repeat, _global_params))
block_size += 1
for _ in range(block_arg.num_repeat - 1):
block_size += 1
self.conv_seq = []
cur_stage = 1
for block_args in _block_args:
block_args = block_args._replace(
input_filters=round_filters(block_args.input_filters, _global_params),
output_filters=round_filters(block_args.output_filters, _global_params),
num_repeat=round_repeats(block_args.num_repeat, _global_params))
drop_connect_rate = self._global_params.drop_connect_rate if not is_test else 0
if drop_connect_rate:
drop_connect_rate *= float(idx) / block_size
_mc_block = self.add_sublayer(
"_blocks." + str(idx) + ".",
MbConvBlock(
block_args.input_filters,
block_args=block_args,
padding_type=padding_type,
use_se=use_se,
name="_blocks." + str(idx) + ".",
drop_connect_rate=drop_connect_rate,
model_name=model_name,
cur_stage=cur_stage))
self.conv_seq.append(_mc_block)
idx += 1
if block_args.num_repeat > 1:
block_args = block_args._replace(input_filters=block_args.output_filters, stride=1)
for _ in range(block_args.num_repeat - 1):
drop_connect_rate = self._global_params.drop_connect_rate if not is_test else 0
if drop_connect_rate:
drop_connect_rate *= float(idx) / block_size
_mc_block = self.add_sublayer(
"block." + str(idx) + ".",
MbConvBlock(
block_args.input_filters,
block_args,
padding_type=padding_type,
use_se=use_se,
name="_blocks." + str(idx) + ".",
drop_connect_rate=drop_connect_rate,
model_name=model_name,
cur_stage=cur_stage))
self.conv_seq.append(_mc_block)
idx += 1
cur_stage += 1
def forward(self, inputs: paddle.Tensor):
x = self._conv_stem(inputs)
x = F.swish(x)
for _mc_block in self.conv_seq:
x = _mc_block(x)
return x
@moduleinfo(
name="efficientnetb5_imagenet",
type="cv/classification",
author="paddlepaddle",
author_email="",
summary="efficientnetb5_imagenet is a classification model, "
"this module is trained with Imagenet dataset.",
version="1.1.0",
meta=ImageClassifierModule)
class EfficientNet_B5(nn.Layer):
def __init__(self,
is_test: bool = False,
padding_type: str = "SAME",
override_params: dict = None,
use_se: bool = True,
class_dim: int = 1000,
load_checkpoint: str = None):
super(EfficientNet_B5, self).__init__()
model_name = 'efficientnet-b5'
self.name = "b5"
self._block_args, self._global_params = get_model_params(model_name, override_params)
self.padding_type = padding_type
self.use_se = use_se
self.is_test = is_test
self._ef = ExtractFeatures(
3,
self._block_args,
self._global_params,
self.padding_type,
self.use_se,
self.is_test,
model_name=self.name)
output_channels = round_filters(1280, self._global_params)
oup = 512
self._conv = ConvBNLayer(
oup,
1,
output_channels,
bn_act="swish",
padding_type=self.padding_type,
name="",
conv_name="_conv_head",
bn_name="_bn1",
model_name=self.name,
cur_stage=7)
self._pool = AdaptiveAvgPool2d(1)
if self._global_params.dropout_rate:
self._drop = Dropout(p=self._global_params.dropout_rate, mode="upscale_in_train")
param_attr, bias_attr = init_fc_layer("_fc")
self._fc = Linear(output_channels, class_dim, weight_attr=param_attr, bias_attr=bias_attr)
if load_checkpoint is not None:
model_dict = paddle.load(load_checkpoint)[0]
self.set_dict(model_dict)
print("load custom checkpoint success")
else:
checkpoint = os.path.join(self.directory, 'efficientnet_b5_imagenet.pdparams')
if not os.path.exists(checkpoint):
os.system(
'wget https://paddlehub.bj.bcebos.com/dygraph/image_classification/efficientnet_b5_imagenet.pdparams -O '
+ checkpoint)
model_dict = paddle.load(checkpoint)[0]
self.set_dict(model_dict)
print("load pretrained checkpoint success")
def forward(self, inputs: paddle.Tensor):
x = self._ef(inputs)
x = self._conv(x)
x = self._pool(x)
if self._global_params.dropout_rate:
x = self._drop(x)
x = paddle.squeeze(x, axis=[2, 3])
x = self._fc(x)
return x
Run as a command.
"""
self.parser = argparse.ArgumentParser(description="Run the {} module.".format(self.name),
prog='hub run {}'.format(self.name),
usage='%(prog)s',
add_help=True)
self.arg_input_group = self.parser.add_argument_group(title="Input options", description="Input data. Required")
self.arg_config_group = self.parser.add_argument_group(
title="Config options", description="Run configuration for controlling module behavior, not required.")
self.add_module_config_arg()
self.add_module_input_arg()
args = self.parser.parse_args(argvs)
results = self.classification(paths=[args.input_path], batch_size=args.batch_size, use_gpu=args.use_gpu)
return results
def add_module_config_arg(self):
"""
Add the command config options.
"""
self.arg_config_group.add_argument('--use_gpu',
type=ast.literal_eval,
default=False,
help="whether use GPU or not.")
self.arg_config_group.add_argument('--batch_size', type=ast.literal_eval, default=1, help="batch size.")
self.arg_config_group.add_argument('--top_k', type=ast.literal_eval, default=1, help="Return top k results.")
def add_module_input_arg(self):
"""
Add the command input options.
"""
self.arg_input_group.add_argument('--input_path', type=str, help="path to image.")
if __name__ == '__main__':
b5 = EfficientNetB5ImageNet()
b5.context()
import cv2
test_image = [cv2.imread('dog.jpeg')]
res = b5.classification(images=test_image)
print(res)
res = b5.classification(paths=['dog.jpeg'])
print(res)
res = b5.classification(images=test_image)
print(res)
res = b5.classification(images=test_image)
print(res)
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import base64
import cv2
import numpy as np
def base64_to_cv2(b64str):
data = base64.b64decode(b64str.encode('utf8'))
data = np.frombuffer(data, np.uint8)
data = cv2.imdecode(data, cv2.IMREAD_COLOR)
return data
def softmax(x):
if len(x.shape) > 1:
tmp = np.max(x, axis=1)
x -= tmp.reshape((x.shape[0], 1))
x = np.exp(x)
tmp = np.sum(x, axis=1)
x /= tmp.reshape((x.shape[0], 1))
else:
tmp = np.max(x)
x -= tmp
x = np.exp(x)
tmp = np.sum(x)
x /= tmp
return x
def postprocess(data_out, label_list, top_k):
"""
Postprocess output of network, one image at a time.
Args:
data_out (numpy.ndarray): output data of network.
label_list (list): list of label.
top_k (int): Return top k results.
"""
output = []
for result in data_out:
result_i = softmax(result)
output_i = {}
indexs = np.argsort(result_i)[::-1][0:top_k]
for index in indexs:
label = label_list[index].split(',')[0]
output_i[label] = float(result_i[index])
output.append(output_i)
return output
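As a hedged client sketch for the `@serving` entry point defined in `module.py`: the server is assumed to be started with `hub serving start -m efficientnetb5_imagenet`, and the default port 8866 and the `/predict/efficientnetb5_imagenet` route follow PaddleHub Serving's usual convention rather than anything stated in this diff.

```python
import base64
import json

import cv2
import requests


def cv2_to_base64(image):
    # Inverse of base64_to_cv2 above: encode a BGR ndarray as a base64 string.
    ok, buf = cv2.imencode('.jpg', image)
    return base64.b64encode(buf.tobytes()).decode('utf8')


payload = {"images": [cv2_to_base64(cv2.imread('dog.jpeg'))]}  # placeholder path
headers = {"Content-type": "application/json"}
r = requests.post("http://127.0.0.1:8866/predict/efficientnetb5_imagenet",
                  headers=headers,
                  data=json.dumps(payload))
print(r.json())
```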
import os
import shutil
import unittest
import cv2
import requests
import paddlehub as hub
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
class TestHubModule(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
img_url = 'https://unsplash.com/photos/brFsZ7qszSY/download?ixid=MnwxMjA3fDB8MXxzZWFyY2h8OHx8ZG9nfGVufDB8fHx8MTY2MzA1ODQ1MQ&force=true&w=640'
if not os.path.exists('tests'):
os.makedirs('tests')
response = requests.get(img_url)
assert response.status_code == 200, 'Network Error.'
with open('tests/test.jpg', 'wb') as f:
f.write(response.content)
cls.module = hub.Module(name="efficientnetb5_imagenet")
@classmethod
def tearDownClass(cls) -> None:
shutil.rmtree('tests')
shutil.rmtree('inference')
def test_classification1(self):
results = self.module.classification(paths=['tests/test.jpg'])
data = results[0]
self.assertTrue('Pembroke' in data)
self.assertTrue(data['Pembroke'] > 0.5)
def test_classification2(self):
results = self.module.classification(images=[cv2.imread('tests/test.jpg')])
data = results[0]
self.assertTrue('Pembroke' in data)
self.assertTrue(data['Pembroke'] > 0.5)
def test_classification3(self):
results = self.module.classification(images=[cv2.imread('tests/test.jpg')], use_gpu=True)
data = results[0]
self.assertTrue('Pembroke' in data)
self.assertTrue(data['Pembroke'] > 0.5)
def test_classification4(self):
self.assertRaises(AssertionError, self.module.classification, paths=['no.jpg'])
def test_classification5(self):
self.assertRaises(TypeError, self.module.classification, images=['tests/test.jpg'])
def test_save_inference_model(self):
self.module.save_inference_model('./inference/model')
self.assertTrue(os.path.exists('./inference/model.pdmodel'))
self.assertTrue(os.path.exists('./inference/model.pdiparams'))
if __name__ == "__main__":
unittest.main()