Commit 1ca24932 authored by cuicheng01

Update configs

Parent 4afc61a3
......@@ -30,10 +30,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Piecewise
name: "Piecewise"
learning_rate: 0.1
decay_epochs: [30, 60, 90]
values: [0.1, 0.01, 0.001, 0.0001]
......@@ -46,7 +46,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -55,24 +55,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -81,17 +81,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -112,7 +112,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
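Note on the recurring `scale` edit in the hunks above: the rounded literal `0.00392157` and the expression `1.0/255.0` denote the same pixel scale, so the normalization result is unchanged. A minimal check; the string parsing here is only an illustration of how such a string-valued config entry can be interpreted, not necessarily the project's own loader code:

```python
# The rounded literal and the expression are the same 1/255 pixel scale.
scale_literal = 0.00392157
scale_expr = eval("1.0/255.0")  # illustrative parsing of the string-valued entry

assert abs(scale_literal - scale_expr) < 1e-7
print(scale_expr)  # 0.00392156862745098
```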
......@@ -30,10 +30,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Piecewise
name: "Piecewise"
learning_rate: 0.1
decay_epochs: [30, 60, 90]
values: [0.1, 0.01, 0.001, 0.0001]
......@@ -46,7 +46,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -55,24 +55,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -81,17 +81,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -112,7 +112,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
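For the `Piecewise` schedule in the hunks above, `decay_epochs: [30, 60, 90]` and `values: [0.1, 0.01, 0.001, 0.0001]` are conventionally read as "use `values[i]` until the i-th boundary epoch is reached". A small sketch under that assumed semantics:

```python
import bisect

# Assumed Piecewise semantics: lr = values[k], where k is the number of
# boundaries in decay_epochs already reached by the current epoch.
decay_epochs = [30, 60, 90]
values = [0.1, 0.01, 0.001, 0.0001]

def piecewise_lr(epoch):
    return values[bisect.bisect_right(decay_epochs, epoch)]

assert piecewise_lr(0) == 0.1
assert piecewise_lr(30) == 0.01
assert piecewise_lr(120) == 0.0001
```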
......@@ -30,10 +30,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Piecewise
name: "Piecewise"
learning_rate: 0.1
decay_epochs: [30, 60, 90]
values: [0.1, 0.01, 0.001, 0.0001]
......@@ -46,7 +46,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -55,24 +55,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -81,17 +81,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -112,7 +112,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -30,10 +30,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Piecewise
name: "Piecewise"
learning_rate: 0.1
decay_epochs: [30, 60, 90]
values: [0.1, 0.01, 0.001, 0.0001]
......@@ -46,7 +46,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -55,24 +55,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -81,17 +81,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -112,7 +112,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -30,10 +30,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Piecewise
name: "Piecewise"
learning_rate: 0.1
decay_epochs: [30, 60, 90]
values: [0.1, 0.01, 0.001, 0.0001]
......@@ -46,7 +46,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -55,41 +55,43 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
- ResizeImage:
resize_short: 256
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -110,7 +112,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -30,10 +30,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Piecewise
name: "Piecewise"
learning_rate: 0.1
decay_epochs: [30, 60, 90]
values: [0.1, 0.01, 0.001, 0.0001]
......@@ -46,7 +46,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -55,24 +55,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -81,17 +81,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -112,7 +112,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -30,10 +30,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Piecewise
name: "Piecewise"
learning_rate: 0.1
decay_epochs: [30, 60, 90]
values: [0.1, 0.01, 0.001, 0.0001]
......@@ -46,7 +46,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -55,24 +55,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -81,17 +81,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -112,7 +112,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -12,7 +12,7 @@ Global:
print_batch_step: 10
use_visualdl: False
# used for static mode and model export
image_shape: [3, 224, 224]
image_shape: [3, 299, 299]
save_inference_dir: "./inference"
# model architecture
......@@ -24,19 +24,18 @@ Loss:
Train:
- CELoss:
weight: 1.0
epsilon: 0.1
Eval:
- CELoss:
weight: 1.0
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 0.045
decay_epochs: [30, 60, 90]
values: [0.1, 0.01, 0.001, 0.0001]
regularizer:
name: 'L2'
coeff: 0.0001
......@@ -46,7 +45,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -55,24 +54,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -81,17 +80,17 @@ DataLoader:
- CropImage:
size: 299
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -112,7 +111,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
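The `epsilon: 0.1` entry under `CELoss` in the file above is the usual label-smoothing factor. Assuming the standard formulation (smoothed target = (1 - ε)·one-hot + ε/num_classes), a small NumPy sketch of the resulting loss:

```python
import numpy as np

# Label-smoothed cross entropy with epsilon = 0.1, assuming the standard definition.
def smoothed_ce(logits, label, epsilon=0.1):
    num_classes = logits.shape[-1]
    log_probs = logits - np.log(np.exp(logits).sum(axis=-1, keepdims=True))
    target = np.full(num_classes, epsilon / num_classes)
    target[label] += 1.0 - epsilon
    return float(-(target * log_probs).sum())

print(smoothed_ce(np.array([2.0, 0.5, -1.0]), label=0))  # ~0.39
```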
......@@ -30,23 +30,23 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Piecewise
name: "Piecewise"
learning_rate: 0.1
decay_epochs: [30, 60, 90]
values: [0.1, 0.01, 0.001, 0.0001]
regularizer:
name: 'L2'
coeff: 0.00003
coeff: 0.0003
# data loader for train and eval
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -55,24 +55,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -81,17 +81,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -112,7 +112,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
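The `regularizer` hunk above switches the L2 `coeff` between 3e-5 and 3e-4. Under the usual L2-as-weight-decay convention the coefficient scales a penalty folded into each parameter's gradient; a hedged sketch of one SGD-with-momentum step under that assumption:

```python
import numpy as np

# One SGD + momentum step with L2 regularization folded into the gradient
# (assumed convention: grad <- grad + coeff * w).
def sgd_momentum_step(w, grad, velocity, lr=0.1, momentum=0.9, coeff=0.0003):
    grad = grad + coeff * w
    velocity = momentum * velocity + grad
    return w - lr * velocity, velocity

w, v = np.ones(3), np.zeros(3)
w, v = sgd_momentum_step(w, np.array([0.5, -0.2, 0.1]), v)
print(w)
```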
......@@ -30,23 +30,23 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Piecewise
name: "Piecewise"
learning_rate: 0.1
decay_epochs: [30, 60, 90]
values: [0.1, 0.01, 0.001, 0.0001]
regularizer:
name: 'L2'
coeff: 0.00003
coeff: 0.0003
# data loader for train and eval
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -55,24 +55,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -81,17 +81,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -112,7 +112,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -30,23 +30,23 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Piecewise
name: "Piecewise"
learning_rate: 0.1
decay_epochs: [30, 60, 90]
values: [0.1, 0.01, 0.001, 0.0001]
regularizer:
name: 'L2'
coeff: 0.00003
coeff: 0.0003
# data loader for train and eval
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -55,24 +55,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -81,17 +81,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -112,7 +112,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -30,23 +30,23 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Piecewise
name: "Piecewise"
learning_rate: 0.1
decay_epochs: [30, 60, 90]
values: [0.1, 0.01, 0.001, 0.0001]
regularizer:
name: 'L2'
coeff: 0.00003
coeff: 0.0003
# data loader for train and eval
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -55,24 +55,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -81,17 +81,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -112,7 +112,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -31,10 +31,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 1.3
regularizer:
name: 'L2'
......@@ -45,7 +45,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -54,24 +54,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 512
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -80,17 +80,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -111,7 +111,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
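`Cosine` with `learning_rate: 1.3` in the file above is conventionally a half-cosine decay from the base rate towards zero over the run. A sketch under that assumption; the epoch count is a placeholder, not a value taken from this diff:

```python
import math

# Assumed cosine decay: lr(t) = 0.5 * base_lr * (1 + cos(pi * t / total_epochs)).
base_lr, total_epochs = 1.3, 120   # total_epochs is a placeholder

def cosine_lr(epoch):
    return 0.5 * base_lr * (1 + math.cos(math.pi * epoch / total_epochs))

print(cosine_lr(0), cosine_lr(60), cosine_lr(120))  # 1.3, 0.65, 0.0
```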
......@@ -31,10 +31,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 1.3
regularizer:
name: 'L2'
......@@ -45,7 +45,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -54,24 +54,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 512
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -80,17 +80,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -111,7 +111,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -31,10 +31,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 1.3
regularizer:
name: 'L2'
......@@ -45,7 +45,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -54,24 +54,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 512
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -80,17 +80,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -111,7 +111,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -31,10 +31,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 1.3
regularizer:
name: 'L2'
......@@ -45,7 +45,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -55,24 +55,24 @@ DataLoader:
flip_code: 1
- AutoAugment:
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 512
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -81,17 +81,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -112,7 +112,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -31,21 +31,21 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 1.3
regularizer:
name: 'L2'
coeff: 0.00002
coeff: 0.00004
# data loader for train and eval
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -54,24 +54,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 512
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -80,17 +80,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -111,7 +111,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -31,21 +31,21 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 1.3
regularizer:
name: 'L2'
coeff: 0.00002
coeff: 0.00001
# data loader for train and eval
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -54,24 +54,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 512
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -80,17 +80,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -111,7 +111,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -31,21 +31,21 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 1.3
regularizer:
name: 'L2'
coeff: 0.00002
coeff: 0.00001
# data loader for train and eval
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -54,24 +54,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 512
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -80,17 +80,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -111,7 +111,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -31,10 +31,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 1.3
regularizer:
name: 'L2'
......@@ -45,7 +45,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -54,24 +54,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 512
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -80,17 +80,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -111,7 +111,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -31,10 +31,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 1.3
regularizer:
name: 'L2'
......@@ -45,7 +45,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -54,24 +54,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 512
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -80,17 +80,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -111,7 +111,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -31,10 +31,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 1.3
regularizer:
name: 'L2'
......@@ -45,7 +45,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -54,24 +54,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 512
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -80,17 +80,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -111,7 +111,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -66,7 +66,7 @@ DataLoader:
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
......@@ -91,7 +91,7 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......
......@@ -68,7 +68,7 @@ DataLoader:
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
......@@ -93,7 +93,7 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......
......@@ -66,7 +66,7 @@ DataLoader:
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
......@@ -91,7 +91,7 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......
......@@ -68,7 +68,7 @@ DataLoader:
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
......@@ -93,7 +93,7 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......
......@@ -66,7 +66,7 @@ DataLoader:
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
......@@ -91,7 +91,7 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......
......@@ -68,7 +68,7 @@ DataLoader:
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
......@@ -93,7 +93,7 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......
......@@ -68,7 +68,7 @@ DataLoader:
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
......@@ -93,7 +93,7 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......
......@@ -66,7 +66,7 @@ DataLoader:
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
......@@ -91,7 +91,7 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......
......@@ -68,7 +68,7 @@ DataLoader:
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
......@@ -93,7 +93,7 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......
......@@ -66,7 +66,7 @@ DataLoader:
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
......@@ -91,7 +91,7 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......
......@@ -68,7 +68,7 @@ DataLoader:
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
......@@ -93,7 +93,7 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......
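The loader-only hunks above all reduce `num_workers` from 6 to 4. These `sampler`/`loader` entries map roughly onto `paddle.io.DistributedBatchSampler` and `paddle.io.DataLoader` arguments; a self-contained sketch of that plausible mapping, with a dummy dataset standing in for `ImageNetDataset`:

```python
import numpy as np
from paddle.io import Dataset, DataLoader, DistributedBatchSampler

class DummyImages(Dataset):           # stand-in for ImageNetDataset
    def __len__(self):
        return 256
    def __getitem__(self, idx):
        return np.random.rand(3, 224, 224).astype("float32"), np.random.randint(0, 1000)

ds = DummyImages()
sampler = DistributedBatchSampler(ds, batch_size=64, shuffle=True, drop_last=False)
loader = DataLoader(ds, batch_sampler=sampler, num_workers=4, use_shared_memory=True)

for images, labels in loader:
    print(images.shape, labels.shape)  # [64, 3, 224, 224], [64]
    break
```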
......@@ -30,10 +30,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 0.1
regularizer:
name: 'L2'
......@@ -44,7 +44,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -53,24 +53,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
batch_size: 64
name: "DistributedBatchSampler"
batch_size: 128
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -79,17 +79,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -110,7 +110,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -30,10 +30,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 0.01
regularizer:
name: 'L2'
......@@ -44,7 +44,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -53,24 +53,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -79,17 +79,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -110,7 +110,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -30,10 +30,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 0.01
regularizer:
name: 'L2'
......@@ -44,7 +44,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -53,24 +53,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -79,17 +79,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -110,7 +110,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -30,10 +30,10 @@ Loss:
Optimizer:
name: Momentum
name: "Momentum"
momentum: 0.9
lr:
name: Cosine
name: "Cosine"
learning_rate: 0.01
regularizer:
name: 'L2'
......@@ -44,7 +44,7 @@ Optimizer:
DataLoader:
Train:
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/train_list.txt"
transform_ops:
......@@ -53,24 +53,24 @@ DataLoader:
- RandFlipImage:
flip_code: 1
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
dataset:
name: ImageNetDataset
name: "ImageNetDataset"
image_root: "./dataset/ILSVRC2012/"
cls_label_path: "./dataset/ILSVRC2012/val_list.txt"
transform_ops:
......@@ -79,17 +79,17 @@ DataLoader:
- CropImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Infer:
......@@ -110,7 +110,7 @@ Infer:
order: ''
- ToCHWImage:
PostProcess:
name: Topk
name: "Topk"
topk: 5
class_id_map_file: "ppcls/utils/imagenet1k_label_list.txt"
......
......@@ -21,7 +21,7 @@ Arch:
name: "RecModel"
Backbone:
name: "ResNet50_vd"
pretrained: False
pretrained: True
BackboneStopLayer:
name: "flatten_0"
Neck:
......@@ -76,7 +76,7 @@ DataLoader:
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
# TODO: modify to the latest trainer
......@@ -100,7 +100,7 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Metric:
Train:
......
# global configs
Global:
checkpoints: null
pretrained_model: null
# please download pretrained model via this link:
# https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/pretrain/product_ResNet50_vd_Aliproduct_v1.0_pretrained.pdparams
pretrained_model: "product_ResNet50_vd_Aliproduct_v1.0_pretrained"
output_dir: "./output/"
device: "gpu"
class_num: 3997
......@@ -19,6 +21,9 @@ Global:
# model architecture
Arch:
name: "RecModel"
infer_output_key: "features"
infer_add_softmax: False
Backbone:
name: "ResNet50_vd"
pretrained: False
......@@ -34,8 +39,6 @@ Arch:
class_num: 3997
margin: 0.15
scale: 30
infer_output_key: "features"
infer_add_softmax: False
# loss function config for traing/eval process
Loss:
......@@ -86,19 +89,18 @@ DataLoader:
sh: 0.4
r1: 0.3
mean: [0., 0., 0.]
sampler:
name: DistributedRandomIdentitySampler
name: "DistributedRandomIdentitySampler"
batch_size: 64
num_instances: 2
drop_last: False
shuffle: True
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Eval:
Query:
# TODO: modify to the latest trainer
dataset:
name: "ImageNetDataset"
image_root: "./dataset/Inshop/"
......@@ -112,16 +114,15 @@ DataLoader:
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Gallery:
# TODO: modify to the latest trainer
dataset:
name: "ImageNetDataset"
image_root: "./dataset/Inshop/"
......@@ -130,17 +131,17 @@ DataLoader:
- ResizeImage:
size: 224
- NormalizeImage:
scale: 0.00392157
scale: 1.0/255.0
mean: [0.485, 0.456, 0.406]
std: [0.229, 0.224, 0.225]
order: ''
sampler:
name: DistributedBatchSampler
name: "DistributedBatchSampler"
batch_size: 64
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Metric:
......
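The head parameters in the file above (`margin: 0.15`, `scale: 30`) are characteristic of an ArcFace-style additive angular margin head. Assuming that standard formulation, the target-class logit is s·cos(θ + m) while other classes keep s·cos(θ); a small NumPy sketch:

```python
import numpy as np

# ArcMargin-style logits, assuming the standard ArcFace form.
def arc_margin_logits(cos_theta, label, margin=0.15, scale=30.0):
    theta = np.arccos(np.clip(cos_theta, -1.0, 1.0))
    logits = scale * cos_theta.copy()
    logits[label] = scale * np.cos(theta[label] + margin)
    return logits

print(arc_margin_logits(np.array([0.9, 0.3, -0.1]), label=0))
```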
# global configs
Global:
checkpoints: null
pretrained_model: null
# please download pretrained model via this link:
# https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/pretrain/product_ResNet50_vd_Aliproduct_v1.0_pretrained.pdparams
pretrained_model: "product_ResNet50_vd_Aliproduct_v1.0_pretrained"
output_dir: "./output/"
device: "gpu"
class_num: 11319
......@@ -98,7 +100,6 @@ DataLoader:
use_shared_memory: True
Eval:
Query:
# TODO: modify to the latest trainer
dataset:
name: "ImageNetDataset"
image_root: "./dataset/Stanford_Online_Products/"
......@@ -117,11 +118,10 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Gallery:
# TODO: modify to the latest trainer
dataset:
name: "ImageNetDataset"
image_root: "./dataset/Stanford_Online_Products/"
......@@ -140,7 +140,7 @@ DataLoader:
drop_last: False
shuffle: False
loader:
num_workers: 6
num_workers: 4
use_shared_memory: True
Metric:
......