# higherhrnet_hrnet_w32_512_swahr.yml
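# Usage sketch (assumed, not part of the original config): with PaddleDetection
# installed, a config like this is normally launched via tools/train.py, e.g.
#   python tools/train.py -c configs/keypoint/higherhrnet/higherhrnet_hrnet_w32_512_swahr.yml
# The exact location of this file under configs/ may differ between releases.
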
use_gpu: true
log_iter: 10
save_dir: output
snapshot_epoch: 10
weights: output/higherhrnet_hrnet_w32_512_swahr/model_final
epoch: 300
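# COCO defines 17 keypoints; flip_perm maps each joint index to its left/right
# counterpart so predictions can be swapped consistently under horizontal flips.
# input_size is the network input resolution; hm_size (1/4 scale) and
# hm_size_2x (1/2 scale) are the two heatmap resolutions of the HigherHRNet head.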
num_joints: &num_joints 17
flip_perm: &flip_perm [0, 2, 1, 4, 3, 6, 5, 8, 7, 10, 9, 12, 11, 14, 13, 16, 15]
input_size: &input_size 512
hm_size: &hm_size 128
hm_size_2x: &hm_size_2x 256
max_people: &max_people 30
metric: COCO
IouType: keypoints
num_classes: 1


#####model
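# HigherHRNet is a bottom-up (person-agnostic) keypoint model; swahr: true in the
# head and loss below switches to the scale- and weight-adaptive heatmap
# regression (SWAHR) formulation.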
architecture: HigherHRNet
pretrain_weights: https://paddledet.bj.bcebos.com/models/pretrained/Trunc_HRNet_W32_C_pretrained.pdparams

HigherHRNet:
  backbone: HRNet
  hrhrnet_head: HrHRNetHead
  post_process: HrHRNetPostProcess
  flip_perm: *flip_perm
  eval_flip: true

HRNet:
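  # HRNet-W32 backbone; freeze_at: -1 keeps all stages trainable, freeze_norm:
  # false leaves batch-norm parameters trainable, and return_idx: [0] passes only
  # the highest-resolution branch to the head.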
  width: &width 32
  freeze_at: -1
  freeze_norm: false
  return_idx: [0]

HrHRNetHead:
  num_joints: *num_joints
  width: *width
  loss: HrHRNetLoss
  swahr: true

HrHRNetLoss:
  num_joints: *num_joints
  swahr: true


#####optimizer
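# Adam with base LR 1e-3, decayed by 10x at epochs 200 and 260, after a linear
# warmup over the first 1000 iterations starting from 0.001 * base_lr.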
LearningRate:
  base_lr: 0.001
  schedulers:
  - !PiecewiseDecay
    milestones: [200, 260]
    gamma: 0.1
  - !LinearWarmup
    start_factor: 0.001
    steps: 1000

OptimizerBuilder:
  optimizer:
    type: Adam
  regularizer:
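  # regularizer left empty: no weight-decay regularizer is attached (assumption
  # based on the usual PaddleDetection behavior when this field is null).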


#####data
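# Standard COCO 2017 keypoint splits, expected under dataset/coco/; TestDataset
# reads a plain text list of image paths for inference.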
TrainDataset:
  !KeypointBottomUpCocoDataset
    image_dir: train2017
    anno_path: annotations/person_keypoints_train2017.json
    dataset_dir: dataset/coco
    num_joints: *num_joints

EvalDataset:
  !KeypointBottomUpCocoDataset
    image_dir: val2017
    anno_path: annotations/person_keypoints_val2017.json
    dataset_dir: dataset/coco
    num_joints: *num_joints
    test_mode: true

TestDataset:
  !ImageFolder
    anno_path: dataset/coco/keypoint_imagelist.txt

worker_num: 8
global_mean: &global_mean [0.485, 0.456, 0.406]
global_std: &global_std [0.229, 0.224, 0.225]
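# global_mean/global_std are the standard ImageNet RGB statistics shared by the
# readers below. TrainReader builds the bottom-up targets: random affine and flip
# augmentation, Gaussian heatmaps (sigma 2) at both output resolutions, and
# associative-embedding tag targets for up to max_people instances.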
TrainReader:
  sample_transforms:
    - RandomAffine:
        max_degree: 30
        scale: [0.75, 1.5]
        max_shift: 0.2
        trainsize: *input_size
        hmsize: [*hm_size, *hm_size_2x]
    - KeyPointFlip:
        flip_prob: 0.5
        flip_permutation: *flip_perm
        hmsize: [*hm_size, *hm_size_2x]
    - ToHeatmaps:
        num_joints: *num_joints
        hmsize: [*hm_size, *hm_size_2x]
        sigma: 2
    - TagGenerate:
        num_joints: *num_joints
        max_people: *max_people
    - NormalizePermute:
        mean: *global_mean
        std: *global_std
  batch_size: 16
  shuffle: true
  drop_last: true
  use_shared_memory: true

EvalReader:
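  # Eval and Test readers run with batch_size 1; horizontal flip testing at eval
  # time is controlled by eval_flip in the HigherHRNet block above.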
  sample_transforms:
    - EvalAffine:
        size: *input_size
    - NormalizeImage:
        mean: *global_mean
        std: *global_std
        is_scale: true
    - Permute: {}
  batch_size: 1
  drop_empty: false

TestReader:
  sample_transforms:
    - Decode: {}
    - EvalAffine:
        size: *input_size
    - NormalizeImage:
        mean: *global_mean
        std: *global_std
        is_scale: true
    - Permute: {}
  batch_size: 1