diff --git a/ppcls/configs/ImageNet/DPN/DPN107.yaml b/ppcls/configs/ImageNet/DPN/DPN107.yaml index 92c1fb8144ec4904302f079ac0b310038bfda4b0..239da60ea75df3cb17ba57a526cf30d224cf9a79 100644 --- a/ppcls/configs/ImageNet/DPN/DPN107.yaml +++ b/ppcls/configs/ImageNet/DPN/DPN107.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/DPN/DPN131.yaml b/ppcls/configs/ImageNet/DPN/DPN131.yaml index 3cb22f60dfa82d9a4603450eb7cbe3fb47e3a735..ff81e4fe68ce6de500e2d48965ef5b53517e5419 100644 --- a/ppcls/configs/ImageNet/DPN/DPN131.yaml +++ b/ppcls/configs/ImageNet/DPN/DPN131.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/DPN/DPN68.yaml b/ppcls/configs/ImageNet/DPN/DPN68.yaml index ecd2d8540f02d2780f15898da33084f8293f8f36..fd7dc147ba916d1c2f680b113dc33e95f1c46014 100644 --- a/ppcls/configs/ImageNet/DPN/DPN68.yaml +++ b/ppcls/configs/ImageNet/DPN/DPN68.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/DPN/DPN92.yaml b/ppcls/configs/ImageNet/DPN/DPN92.yaml index c431efcf4e5eab76efe4ec4b64886d540d495da7..3559e8f1943320c89268f7922dd5c72ded105823 100644 --- a/ppcls/configs/ImageNet/DPN/DPN92.yaml +++ b/ppcls/configs/ImageNet/DPN/DPN92.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/DPN/DPN98.yaml b/ppcls/configs/ImageNet/DPN/DPN98.yaml index 9fb1ec9f6e9a35badf44a1e3cb46b2c58b122291..11af4926294efd1780ad882f58b7ac09984c14bd 100644 --- a/ppcls/configs/ImageNet/DPN/DPN98.yaml +++ b/ppcls/configs/ImageNet/DPN/DPN98.yaml @@ -22,7 +22,7 @@ Arch: # loss function config 
for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/DarkNet/DarkNet53.yaml b/ppcls/configs/ImageNet/DarkNet/DarkNet53.yaml index b69ccfcfdbb8045985977bc374b02b422e3e5c23..1a55e75d4661b1759dc46ac2203ad5f1e2ceb2fb 100644 --- a/ppcls/configs/ImageNet/DarkNet/DarkNet53.yaml +++ b/ppcls/configs/ImageNet/DarkNet/DarkNet53.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/DataAugment/ResNet50_Cutmix.yaml b/ppcls/configs/ImageNet/DataAugment/ResNet50_Cutmix.yaml index 918a7629440f879f012d0fb7240fb3dd2a379b2f..6ab79d35c43e49e66d17372597875ed140ef4189 100644 --- a/ppcls/configs/ImageNet/DataAugment/ResNet50_Cutmix.yaml +++ b/ppcls/configs/ImageNet/DataAugment/ResNet50_Cutmix.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 Eval: - CELoss: diff --git a/ppcls/configs/ImageNet/DataAugment/ResNet50_Mixup.yaml b/ppcls/configs/ImageNet/DataAugment/ResNet50_Mixup.yaml index b12567150d8f788fd8abeaf4d6adb75e0b10d12f..448440ecfff0a5249bfe1a61ce8b5d06cd881e12 100644 --- a/ppcls/configs/ImageNet/DataAugment/ResNet50_Mixup.yaml +++ b/ppcls/configs/ImageNet/DataAugment/ResNet50_Mixup.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 Eval: - CELoss: diff --git a/ppcls/configs/ImageNet/DeiT/DeiT_base_distilled_patch16_224.yaml b/ppcls/configs/ImageNet/DeiT/DeiT_base_distilled_patch16_224.yaml index 02a2f42d7fa57e0f8ed9d6d43a93f28225c3df7c..f03543f77f4c831127bcad9c2939fb89ace902ea 100644 --- a/ppcls/configs/ImageNet/DeiT/DeiT_base_distilled_patch16_224.yaml +++ b/ppcls/configs/ImageNet/DeiT/DeiT_base_distilled_patch16_224.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - 
CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/DeiT/DeiT_base_distilled_patch16_384.yaml b/ppcls/configs/ImageNet/DeiT/DeiT_base_distilled_patch16_384.yaml index 3565c11242e5cdf86c7973c4cbdf75317e60e606..fcf2981beb217a1fd3e55741a70df8294cc01176 100644 --- a/ppcls/configs/ImageNet/DeiT/DeiT_base_distilled_patch16_384.yaml +++ b/ppcls/configs/ImageNet/DeiT/DeiT_base_distilled_patch16_384.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/DeiT/DeiT_base_patch16_224.yaml b/ppcls/configs/ImageNet/DeiT/DeiT_base_patch16_224.yaml index d8bbf338ea83fa39e10aebcfbbe77d61169c322a..7b328905e05d3d9db713710185eb2e1ee5ebca46 100644 --- a/ppcls/configs/ImageNet/DeiT/DeiT_base_patch16_224.yaml +++ b/ppcls/configs/ImageNet/DeiT/DeiT_base_patch16_224.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/DeiT/DeiT_base_patch16_384.yaml b/ppcls/configs/ImageNet/DeiT/DeiT_base_patch16_384.yaml index b8f3ced45a3d18653b71758fdd653b9bbabd86a4..a2990ecdbd6e9e2493fdf177d9d6042d9c682227 100644 --- a/ppcls/configs/ImageNet/DeiT/DeiT_base_patch16_384.yaml +++ b/ppcls/configs/ImageNet/DeiT/DeiT_base_patch16_384.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/DeiT/DeiT_small_distilled_patch16_224.yaml b/ppcls/configs/ImageNet/DeiT/DeiT_small_distilled_patch16_224.yaml index 7a68e292b0bba89985c19f6107eeffc6c9e70035..b565d03ab3d468a090d2ead743b00a73cffc239e 100644 --- a/ppcls/configs/ImageNet/DeiT/DeiT_small_distilled_patch16_224.yaml +++ b/ppcls/configs/ImageNet/DeiT/DeiT_small_distilled_patch16_224.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: 
Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/DeiT/DeiT_small_patch16_224.yaml b/ppcls/configs/ImageNet/DeiT/DeiT_small_patch16_224.yaml index 0ef9344e07a02fa8796c34a4392b3b209cdda297..9e9c5de120b9577fba179c7893f5b3640a909323 100644 --- a/ppcls/configs/ImageNet/DeiT/DeiT_small_patch16_224.yaml +++ b/ppcls/configs/ImageNet/DeiT/DeiT_small_patch16_224.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/DeiT/DeiT_tiny_distilled_patch16_224.yaml b/ppcls/configs/ImageNet/DeiT/DeiT_tiny_distilled_patch16_224.yaml index 8ee54657f65a0460845b2a022e67a43fd3aa18ff..53f54b1b2356b6d67270cd778e0a2b84e7906cb8 100644 --- a/ppcls/configs/ImageNet/DeiT/DeiT_tiny_distilled_patch16_224.yaml +++ b/ppcls/configs/ImageNet/DeiT/DeiT_tiny_distilled_patch16_224.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/DeiT/DeiT_tiny_patch16_224.yaml b/ppcls/configs/ImageNet/DeiT/DeiT_tiny_patch16_224.yaml index 3d2ab38b77481bb96f47215c181c21dcec3c070b..8fa66856367a6694501ddcfb2782751fc43e556a 100644 --- a/ppcls/configs/ImageNet/DeiT/DeiT_tiny_patch16_224.yaml +++ b/ppcls/configs/ImageNet/DeiT/DeiT_tiny_patch16_224.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Inception/InceptionV3.yaml b/ppcls/configs/ImageNet/Inception/InceptionV3.yaml index fa8b64a5aaaeb0914e8ecbed80176a4e40014883..a8c30ea1a497c9897299ca9526e17c22fe555977 100644 --- a/ppcls/configs/ImageNet/Inception/InceptionV3.yaml +++ b/ppcls/configs/ImageNet/Inception/InceptionV3.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: 
weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Inception/InceptionV4.yaml b/ppcls/configs/ImageNet/Inception/InceptionV4.yaml index 6a6dbb62d79a658bf564f7c55c86a2b9d963f645..17415b3cefb1a7b7e56db5545e754ffb7fd3d0ff 100644 --- a/ppcls/configs/ImageNet/Inception/InceptionV4.yaml +++ b/ppcls/configs/ImageNet/Inception/InceptionV4.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Res2Net/Res2Net101_vd_26w_4s.yaml b/ppcls/configs/ImageNet/Res2Net/Res2Net101_vd_26w_4s.yaml index 7e5cbfd3cae04673e28dfadef404b2c1650064b4..bf27b303d72654e3ff835280d192370c6e9e6b8d 100644 --- a/ppcls/configs/ImageNet/Res2Net/Res2Net101_vd_26w_4s.yaml +++ b/ppcls/configs/ImageNet/Res2Net/Res2Net101_vd_26w_4s.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Res2Net/Res2Net200_vd_26w_4s.yaml b/ppcls/configs/ImageNet/Res2Net/Res2Net200_vd_26w_4s.yaml index edceda10f7c69a4b74f7b2a5b5dd697e095d708f..90b7b879c8f92eeb3c80537a5f583b8323fd5082 100644 --- a/ppcls/configs/ImageNet/Res2Net/Res2Net200_vd_26w_4s.yaml +++ b/ppcls/configs/ImageNet/Res2Net/Res2Net200_vd_26w_4s.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Res2Net/Res2Net50_14w_8s.yaml b/ppcls/configs/ImageNet/Res2Net/Res2Net50_14w_8s.yaml index 1f3ecde91ad0853c9a25a4c858dcf37203d29a15..af1c4c73e6ff6449de94a61e88a2286a6f04db55 100644 --- a/ppcls/configs/ImageNet/Res2Net/Res2Net50_14w_8s.yaml +++ b/ppcls/configs/ImageNet/Res2Net/Res2Net50_14w_8s.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git 
a/ppcls/configs/ImageNet/Res2Net/Res2Net50_26w_4s.yaml b/ppcls/configs/ImageNet/Res2Net/Res2Net50_26w_4s.yaml index 31ad95e65443a824099ee1fcaff6ff0e2a44cc78..e792e9d03b8be13223ad684dac50f101ce99603a 100644 --- a/ppcls/configs/ImageNet/Res2Net/Res2Net50_26w_4s.yaml +++ b/ppcls/configs/ImageNet/Res2Net/Res2Net50_26w_4s.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Res2Net/Res2Net50_vd_26w_4s.yaml b/ppcls/configs/ImageNet/Res2Net/Res2Net50_vd_26w_4s.yaml index 1157ac0c877fa070292ee95afcf066acb4839361..58d4968b43f7571fc633f6e29748136fec94c9ad 100644 --- a/ppcls/configs/ImageNet/Res2Net/Res2Net50_vd_26w_4s.yaml +++ b/ppcls/configs/ImageNet/Res2Net/Res2Net50_vd_26w_4s.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNeSt/ResNeSt101.yaml b/ppcls/configs/ImageNet/ResNeSt/ResNeSt101.yaml index 9daaac25129256f3cb06d8b0ac275dcd4dc4e4d2..7b3bc2bdb6fb4c6d32a1d95d972407f9cf58f4c7 100644 --- a/ppcls/configs/ImageNet/ResNeSt/ResNeSt101.yaml +++ b/ppcls/configs/ImageNet/ResNeSt/ResNeSt101.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNeSt/ResNeSt50.yaml b/ppcls/configs/ImageNet/ResNeSt/ResNeSt50.yaml index 24c82b5bf912df0b918c89a5c7e510baa49f94e2..acf55ec789fb37c9e0d0e34bc295191959d704bd 100644 --- a/ppcls/configs/ImageNet/ResNeSt/ResNeSt50.yaml +++ b/ppcls/configs/ImageNet/ResNeSt/ResNeSt50.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNeSt/ResNeSt50_fast_1s1x64d.yaml b/ppcls/configs/ImageNet/ResNeSt/ResNeSt50_fast_1s1x64d.yaml index 
e761cc2d89423a2e28affb4f45d5592c90c3a0c1..9488195d833c7189b0551051190b05ed395f8241 100644 --- a/ppcls/configs/ImageNet/ResNeSt/ResNeSt50_fast_1s1x64d.yaml +++ b/ppcls/configs/ImageNet/ResNeSt/ResNeSt50_fast_1s1x64d.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNeXt/ResNeXt101_vd_32x4d.yaml b/ppcls/configs/ImageNet/ResNeXt/ResNeXt101_vd_32x4d.yaml index 4ac6ab70b34cae4368fca339f4f4f518e55007f7..c400b9e288af6bb5bf2c9d2eff23895b6efb62b6 100644 --- a/ppcls/configs/ImageNet/ResNeXt/ResNeXt101_vd_32x4d.yaml +++ b/ppcls/configs/ImageNet/ResNeXt/ResNeXt101_vd_32x4d.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNeXt/ResNeXt101_vd_64x4d.yaml b/ppcls/configs/ImageNet/ResNeXt/ResNeXt101_vd_64x4d.yaml index 1754e63a43f380e0764fc64875043310630c66f5..4f5f3c79353aca99ac800e79f2112839cb92d11a 100644 --- a/ppcls/configs/ImageNet/ResNeXt/ResNeXt101_vd_64x4d.yaml +++ b/ppcls/configs/ImageNet/ResNeXt/ResNeXt101_vd_64x4d.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNeXt/ResNeXt152_vd_32x4d.yaml b/ppcls/configs/ImageNet/ResNeXt/ResNeXt152_vd_32x4d.yaml index 5cfb972f8314af192f4daf6c875818772881cd29..d3054143d0ab858b83d377d8b30bd022e6848c0b 100644 --- a/ppcls/configs/ImageNet/ResNeXt/ResNeXt152_vd_32x4d.yaml +++ b/ppcls/configs/ImageNet/ResNeXt/ResNeXt152_vd_32x4d.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNeXt/ResNeXt152_vd_64x4d.yaml b/ppcls/configs/ImageNet/ResNeXt/ResNeXt152_vd_64x4d.yaml index 
a95907312c5fe89ab76661aad7882702b1622d38..c8b76d0f6846e0d425cee2c92ed2aa529c87598d 100644 --- a/ppcls/configs/ImageNet/ResNeXt/ResNeXt152_vd_64x4d.yaml +++ b/ppcls/configs/ImageNet/ResNeXt/ResNeXt152_vd_64x4d.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNeXt/ResNeXt50_vd_32x4d.yaml b/ppcls/configs/ImageNet/ResNeXt/ResNeXt50_vd_32x4d.yaml index 466dfb361a4e6010add90dcb9a8fa1f9a7b8e12f..3a03646f564ea00b53ce5e21ad166ac37eb3f2f4 100644 --- a/ppcls/configs/ImageNet/ResNeXt/ResNeXt50_vd_32x4d.yaml +++ b/ppcls/configs/ImageNet/ResNeXt/ResNeXt50_vd_32x4d.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNeXt/ResNeXt50_vd_64x4d.yaml b/ppcls/configs/ImageNet/ResNeXt/ResNeXt50_vd_64x4d.yaml index d2a2f86ee25d889f4def86927922c8855a71a8cb..c9b9a1015eb701d3846466cb7be1c7b134dfbad0 100644 --- a/ppcls/configs/ImageNet/ResNeXt/ResNeXt50_vd_64x4d.yaml +++ b/ppcls/configs/ImageNet/ResNeXt/ResNeXt50_vd_64x4d.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNet/ResNet101_vd.yaml b/ppcls/configs/ImageNet/ResNet/ResNet101_vd.yaml index 83d1fc028fee679327f2b835e3401cb94371225c..f30ca07774bba5e457f77f6213be990045ce6b86 100644 --- a/ppcls/configs/ImageNet/ResNet/ResNet101_vd.yaml +++ b/ppcls/configs/ImageNet/ResNet/ResNet101_vd.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNet/ResNet152_vd.yaml b/ppcls/configs/ImageNet/ResNet/ResNet152_vd.yaml index e09bb60c940e732232c5cc6c048ff8bc8722fb22..f3168c432a0154fbf25c7548499984433bc2abc5 100644 --- 
a/ppcls/configs/ImageNet/ResNet/ResNet152_vd.yaml +++ b/ppcls/configs/ImageNet/ResNet/ResNet152_vd.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNet/ResNet18_vd.yaml b/ppcls/configs/ImageNet/ResNet/ResNet18_vd.yaml index e0ba71a6e80de43926f8bf5f152af06332bb7395..2dc6bba0c772e0fe8d73d2aad25aa61cc6c3c0da 100644 --- a/ppcls/configs/ImageNet/ResNet/ResNet18_vd.yaml +++ b/ppcls/configs/ImageNet/ResNet/ResNet18_vd.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNet/ResNet200_vd.yaml b/ppcls/configs/ImageNet/ResNet/ResNet200_vd.yaml index 98de87e3274ae2bc1cfe419f6cf01ff232ac4936..a52c83748cb4dddbb0c2e93899c067fe3a3e40b3 100644 --- a/ppcls/configs/ImageNet/ResNet/ResNet200_vd.yaml +++ b/ppcls/configs/ImageNet/ResNet/ResNet200_vd.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNet/ResNet34_vd.yaml b/ppcls/configs/ImageNet/ResNet/ResNet34_vd.yaml index 9ff0717113a3274e110f3ce4d2a0f773eb8562a7..daae960b596a7f76b599f96c327dbd698180b766 100644 --- a/ppcls/configs/ImageNet/ResNet/ResNet34_vd.yaml +++ b/ppcls/configs/ImageNet/ResNet/ResNet34_vd.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/ResNet/ResNet50_vd.yaml b/ppcls/configs/ImageNet/ResNet/ResNet50_vd.yaml index ba38350bfddc54f938515d402d7cd2ad94834e7a..0a2c4aa4e740809c69a6af44b6b6656f66ebc01b 100644 --- a/ppcls/configs/ImageNet/ResNet/ResNet50_vd.yaml +++ b/ppcls/configs/ImageNet/ResNet/ResNet50_vd.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: 
Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/SENet/SENet154_vd.yaml b/ppcls/configs/ImageNet/SENet/SENet154_vd.yaml index f8255a977d448d0644023beabca94f5d7e489e9a..f7f1ba0f99060c443379d386f395656009fb81a8 100644 --- a/ppcls/configs/ImageNet/SENet/SENet154_vd.yaml +++ b/ppcls/configs/ImageNet/SENet/SENet154_vd.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/SENet/SE_ResNeXt101_32x4d.yaml b/ppcls/configs/ImageNet/SENet/SE_ResNeXt101_32x4d.yaml index bf27461845b37ef1c0a934f8e8da08a955bb6705..3b09c3fd365537e78abe1e6a2e1db7c7d5bf3daa 100644 --- a/ppcls/configs/ImageNet/SENet/SE_ResNeXt101_32x4d.yaml +++ b/ppcls/configs/ImageNet/SENet/SE_ResNeXt101_32x4d.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/SENet/SE_ResNeXt50_32x4d.yaml b/ppcls/configs/ImageNet/SENet/SE_ResNeXt50_32x4d.yaml index 2c1286927ab503aba6994794f5fb3e68f36f7a78..d04f298a3aa42adbaeabde2ff0afde653fd9d9bb 100644 --- a/ppcls/configs/ImageNet/SENet/SE_ResNeXt50_32x4d.yaml +++ b/ppcls/configs/ImageNet/SENet/SE_ResNeXt50_32x4d.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/SENet/SE_ResNeXt50_vd_32x4d.yaml b/ppcls/configs/ImageNet/SENet/SE_ResNeXt50_vd_32x4d.yaml index 48e6e4206c2f626ba38ff0ff4bde5507453d9379..cabff29b423eed9a4861b41f35e0b75ed9c239b3 100644 --- a/ppcls/configs/ImageNet/SENet/SE_ResNeXt50_vd_32x4d.yaml +++ b/ppcls/configs/ImageNet/SENet/SE_ResNeXt50_vd_32x4d.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git 
a/ppcls/configs/ImageNet/SENet/SE_ResNet18_vd.yaml b/ppcls/configs/ImageNet/SENet/SE_ResNet18_vd.yaml index 20b3a0c40805e7706aa600f907f169e073b9496f..fcaada9342a364a7c35efdf915fdc18a799ddd95 100644 --- a/ppcls/configs/ImageNet/SENet/SE_ResNet18_vd.yaml +++ b/ppcls/configs/ImageNet/SENet/SE_ResNet18_vd.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/SENet/SE_ResNet34_vd.yaml b/ppcls/configs/ImageNet/SENet/SE_ResNet34_vd.yaml index 7280e32441f781e8ed1d08cd6e136b97a3122dc1..69d15ccaf79d804f200241208aaf39d551f2c14a 100644 --- a/ppcls/configs/ImageNet/SENet/SE_ResNet34_vd.yaml +++ b/ppcls/configs/ImageNet/SENet/SE_ResNet34_vd.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/SENet/SE_ResNet50_vd.yaml b/ppcls/configs/ImageNet/SENet/SE_ResNet50_vd.yaml index 030dff93b963322aba5f47b1ce2bddd1cd35b8e2..f670c159e4f41ebbb8e2eb70486785662de06840 100644 --- a/ppcls/configs/ImageNet/SENet/SE_ResNet50_vd.yaml +++ b/ppcls/configs/ImageNet/SENet/SE_ResNet50_vd.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_base_patch4_window12_384.yaml b/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_base_patch4_window12_384.yaml index 5d976c0b83c266b6f9ccb91f5ac640a096bbd301..903afef9661deea944a6c84e1d7bdd068c81607e 100644 --- a/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_base_patch4_window12_384.yaml +++ b/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_base_patch4_window12_384.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git 
a/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_base_patch4_window7_224.yaml b/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_base_patch4_window7_224.yaml index efbd427ad56802ba7a7a3478a1dd4e6c22ce3c1e..227b73ba0db2ab22b38fea43721441fbda3a7229 100644 --- a/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_base_patch4_window7_224.yaml +++ b/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_base_patch4_window7_224.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_large_patch4_window12_384.yaml b/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_large_patch4_window12_384.yaml index 6c3abe6fff9932f86accd0a52650f37442a5fd47..d50e83365683fdacc4f496138f2e5ebe2a3ddcea 100644 --- a/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_large_patch4_window12_384.yaml +++ b/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_large_patch4_window12_384.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_large_patch4_window7_224.yaml b/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_large_patch4_window7_224.yaml index dd2b2acd71f2427bc667d59663d2400800d610f9..ba0c59b59d3b80d3b6ca1076e6cf618e30c0417f 100644 --- a/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_large_patch4_window7_224.yaml +++ b/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_large_patch4_window7_224.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_small_patch4_window7_224.yaml b/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_small_patch4_window7_224.yaml index 
34a80d8341d2b07f6bd6806fde3e1f58dbc307e5..e7997aaf05ad60192127c6177b8d21b012536a07 100644 --- a/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_small_patch4_window7_224.yaml +++ b/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_small_patch4_window7_224.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_tiny_patch4_window7_224.yaml b/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_tiny_patch4_window7_224.yaml index d921593853d1bb658cc3b3d8aec35e0decd0f833..707d934735c08c4e7738f30788be04ad1b1339c5 100644 --- a/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_tiny_patch4_window7_224.yaml +++ b/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_tiny_patch4_window7_224.yaml @@ -24,7 +24,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Twins/alt_gvt_base.yaml b/ppcls/configs/ImageNet/Twins/alt_gvt_base.yaml index 17fd657d5a7f40fb596b1417ab32ca52c9aa4348..ccf4cfe9c65a967e20a5a3239a23ba3564b163f5 100644 --- a/ppcls/configs/ImageNet/Twins/alt_gvt_base.yaml +++ b/ppcls/configs/ImageNet/Twins/alt_gvt_base.yaml @@ -26,7 +26,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Twins/alt_gvt_large.yaml b/ppcls/configs/ImageNet/Twins/alt_gvt_large.yaml index 393a638781c7c1b4a372d6b9ca4f93dfe860c39f..2169956079feb8bab4f526091de9d2117263054f 100644 --- a/ppcls/configs/ImageNet/Twins/alt_gvt_large.yaml +++ b/ppcls/configs/ImageNet/Twins/alt_gvt_large.yaml @@ -26,7 +26,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Twins/alt_gvt_small.yaml 
b/ppcls/configs/ImageNet/Twins/alt_gvt_small.yaml index b40f5183b0e641aee4b1de102bf0a1924007c2f2..4006de9a121593537445fe50fe4e0d5b5f6125aa 100644 --- a/ppcls/configs/ImageNet/Twins/alt_gvt_small.yaml +++ b/ppcls/configs/ImageNet/Twins/alt_gvt_small.yaml @@ -26,7 +26,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Twins/pcpvt_base.yaml b/ppcls/configs/ImageNet/Twins/pcpvt_base.yaml index 4c7c0991ceb5910bd6016ebd399e794bb3fee874..7922efaec7b774175fda96551c667ecbddaa12fc 100644 --- a/ppcls/configs/ImageNet/Twins/pcpvt_base.yaml +++ b/ppcls/configs/ImageNet/Twins/pcpvt_base.yaml @@ -26,7 +26,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Twins/pcpvt_large.yaml b/ppcls/configs/ImageNet/Twins/pcpvt_large.yaml index e0e5c6f53a7b87d33c705def62fa8dd382be86a3..360b1d42b57c1f987e62471425badd6d2bf1d904 100644 --- a/ppcls/configs/ImageNet/Twins/pcpvt_large.yaml +++ b/ppcls/configs/ImageNet/Twins/pcpvt_large.yaml @@ -26,7 +26,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Twins/pcpvt_small.yaml b/ppcls/configs/ImageNet/Twins/pcpvt_small.yaml index 547d2583f8d43c6379ec677a04be79ba8b235703..10dcebb526c3555d99001bba1a7cb247a30dd723 100644 --- a/ppcls/configs/ImageNet/Twins/pcpvt_small.yaml +++ b/ppcls/configs/ImageNet/Twins/pcpvt_small.yaml @@ -26,7 +26,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Xception/Xception65.yaml b/ppcls/configs/ImageNet/Xception/Xception65.yaml index c94b28506a3e4969dc037f70f9dc940172618b6e..f9217cf77395f4033955dab09814489f55b3ecd8 100644 --- a/ppcls/configs/ImageNet/Xception/Xception65.yaml 
+++ b/ppcls/configs/ImageNet/Xception/Xception65.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/ImageNet/Xception/Xception71.yaml b/ppcls/configs/ImageNet/Xception/Xception71.yaml index bda7ecfe9a2b6f3bedc0e8296b9eca4819235b84..7475a5f9588c2b48b2949d12771f844fbdcb181e 100644 --- a/ppcls/configs/ImageNet/Xception/Xception71.yaml +++ b/ppcls/configs/ImageNet/Xception/Xception71.yaml @@ -22,7 +22,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/slim/ResNet50_vd_prune.yaml b/ppcls/configs/slim/ResNet50_vd_prune.yaml index fd7d26b37ffa077b86bb84792b8195ac2be40589..f3e9e666609d54e82a4c775bfea39774b1d98dd9 100644 --- a/ppcls/configs/slim/ResNet50_vd_prune.yaml +++ b/ppcls/configs/slim/ResNet50_vd_prune.yaml @@ -30,7 +30,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/configs/slim/ResNet50_vd_quantization.yaml b/ppcls/configs/slim/ResNet50_vd_quantization.yaml index aeccaeaae497e7f538427aad671cb54af7f64847..23325ba073f0340232790081a2ac77890216e855 100644 --- a/ppcls/configs/slim/ResNet50_vd_quantization.yaml +++ b/ppcls/configs/slim/ResNet50_vd_quantization.yaml @@ -29,7 +29,7 @@ Arch: # loss function config for traing/eval process Loss: Train: - - MixCELoss: + - CELoss: weight: 1.0 epsilon: 0.1 Eval: diff --git a/ppcls/data/__init__.py b/ppcls/data/__init__.py index fd41ea3ca0d763e41798050562db9e1244d12085..cffac81226d11784b792a56ec774d15f6a5eee54 100644 --- a/ppcls/data/__init__.py +++ b/ppcls/data/__init__.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import inspect import copy import paddle import numpy as np @@ -36,7 +37,7 @@ from ppcls.data import preprocess from ppcls.data.preprocess import transform -def create_operators(params): +def create_operators(params, class_num=None): """ create operators based on the config @@ -50,7 +51,10 @@ def create_operators(params): dict) and len(operator) == 1, "yaml format error" op_name = list(operator)[0] param = {} if operator[op_name] is None else operator[op_name] - op = getattr(preprocess, op_name)(**param) + op_func = getattr(preprocess, op_name) + if "class_num" in inspect.getfullargspec(op_func).args: + param.update({"class_num": class_num}) + op = op_func(**param) ops.append(op) return ops @@ -65,6 +69,7 @@ def build_dataloader(config, mode, device, use_dali=False, seed=None): from ppcls.data.dataloader.dali import dali_dataloader return dali_dataloader(config, mode, paddle.device.get_device(), seed) + class_num = config.get("class_num", None) config_dataset = config[mode]['dataset'] config_dataset = copy.deepcopy(config_dataset) dataset_name = config_dataset.pop('name') @@ -104,7 +109,7 @@ def build_dataloader(config, mode, device, use_dali=False, seed=None): return [np.stack(slot, axis=0) for slot in slots] if isinstance(batch_transform, list): - batch_ops = create_operators(batch_transform) + batch_ops = create_operators(batch_transform, class_num) batch_collate_fn = mix_collate_fn else: batch_collate_fn = None diff --git a/ppcls/data/preprocess/batch_ops/batch_operators.py b/ppcls/data/preprocess/batch_ops/batch_operators.py index 1f3bd3253a63c65995c87a6a0baee3216b9c418c..888f44c49ae23ed86af3942230914f13272d4d66 100644 --- a/ppcls/data/preprocess/batch_ops/batch_operators.py +++ b/ppcls/data/preprocess/batch_ops/batch_operators.py @@ -44,6 +44,14 @@ class BatchOperator(object): labels.append(item[1]) return np.array(imgs), np.array(labels), bs + def _one_hot(self, targets): + return np.eye(self.class_num, dtype="float32")[targets] + + def _mix_target(self, 
targets0, targets1, lam): + one_hots0 = self._one_hot(targets0) + one_hots1 = self._one_hot(targets1) + return one_hots0 * lam + one_hots1 * (1 - lam) + def __call__(self, batch): return batch @@ -51,7 +59,7 @@ class BatchOperator(object): class MixupOperator(BatchOperator): """ Mixup operator """ - def __init__(self, alpha: float=1.): + def __init__(self, class_num, alpha: float=1.): """Build Mixup operator Args: @@ -64,21 +72,27 @@ class MixupOperator(BatchOperator): raise Exception( f"Parameter \"alpha\" of Mixup should be greater than 0. \"alpha\": {alpha}." ) + if not class_num: + msg = "Please set \"Arch.class_num\" in config if use \"MixupOperator\"." + logger.error(Exception(msg)) + raise Exception(msg) + self._alpha = alpha + self.class_num = class_num def __call__(self, batch): imgs, labels, bs = self._unpack(batch) idx = np.random.permutation(bs) lam = np.random.beta(self._alpha, self._alpha) - lams = np.array([lam] * bs, dtype=np.float32) imgs = lam * imgs + (1 - lam) * imgs[idx] - return list(zip(imgs, labels, labels[idx], lams)) + targets = self._mix_target(labels, labels[idx], lam) + return list(zip(imgs, targets)) class CutmixOperator(BatchOperator): """ Cutmix operator """ - def __init__(self, alpha=0.2): + def __init__(self, class_num, alpha=0.2): """Build Cutmix operator Args: @@ -91,7 +105,13 @@ class CutmixOperator(BatchOperator): raise Exception( f"Parameter \"alpha\" of Cutmix should be greater than 0. \"alpha\": {alpha}." ) + if not class_num: + msg = "Please set \"Arch.class_num\" in config if use \"CutmixOperator\"." 
+ logger.error(Exception(msg)) + raise Exception(msg) + self._alpha = alpha + self.class_num = class_num def _rand_bbox(self, size, lam): """ _rand_bbox """ @@ -121,18 +141,29 @@ class CutmixOperator(BatchOperator): imgs[:, :, bbx1:bbx2, bby1:bby2] = imgs[idx, :, bbx1:bbx2, bby1:bby2] lam = 1 - (float(bbx2 - bbx1) * (bby2 - bby1) / (imgs.shape[-2] * imgs.shape[-1])) - lams = np.array([lam] * bs, dtype=np.float32) - return list(zip(imgs, labels, labels[idx], lams)) + targets = self._mix_target(labels, labels[idx], lam) + return list(zip(imgs, targets)) class FmixOperator(BatchOperator): """ Fmix operator """ - def __init__(self, alpha=1, decay_power=3, max_soft=0., reformulate=False): + def __init__(self, + class_num, + alpha=1, + decay_power=3, + max_soft=0., + reformulate=False): + if not class_num: + msg = "Please set \"Arch.class_num\" in config if use \"FmixOperator\"." + logger.error(Exception(msg)) + raise Exception(msg) + self._alpha = alpha self._decay_power = decay_power self._max_soft = max_soft self._reformulate = reformulate + self.class_num = class_num def __call__(self, batch): imgs, labels, bs = self._unpack(batch) @@ -141,20 +172,27 @@ class FmixOperator(BatchOperator): lam, mask = sample_mask(self._alpha, self._decay_power, \ size, self._max_soft, self._reformulate) imgs = mask * imgs + (1 - mask) * imgs[idx] - return list(zip(imgs, labels, labels[idx], [lam] * bs)) + targets = self._mix_target(labels, labels[idx], lam) + return list(zip(imgs, targets)) class OpSampler(object): """ Sample a operator from """ - def __init__(self, **op_dict): + def __init__(self, class_num, **op_dict): """Build OpSampler Raises: Exception: The parameter \"prob\" of operator(s) are be set error. """ + if not class_num: + msg = "Please set \"Arch.class_num\" in config if use \"OpSampler\"." + logger.error(Exception(msg)) + raise Exception(msg) + if len(op_dict) < 1: msg = f"ConfigWarning: No operator in \"OpSampler\". \"OpSampler\" has been skipped." 
+ logger.warning(msg) self.ops = {} total_prob = 0 @@ -165,12 +203,13 @@ class OpSampler(object): logger.warning(msg) prob = param.pop("prob", 0) total_prob += prob + param.update({"class_num": class_num}) op = eval(op_name)(**param) self.ops.update({op: prob}) if total_prob > 1: msg = f"ConfigError: The total prob of operators in \"OpSampler\" should be less 1." - logger.error(msg) + logger.error(Exception(msg)) raise Exception(msg) # add "None Op" when total_prob < 1, "None Op" do nothing diff --git a/ppcls/engine/engine.py b/ppcls/engine/engine.py index 54f6955c580a6a1b2df1bba7303fce32e5432cda..38f5b67b8e0a7b3cc3298f568ac8e2bf415d6b98 100644 --- a/ppcls/engine/engine.py +++ b/ppcls/engine/engine.py @@ -112,6 +112,8 @@ class Engine(object): } paddle.fluid.set_flags(AMP_RELATED_FLAGS_SETTING) + class_num = config["Arch"].get("class_num", None) + self.config["DataLoader"].update({"class_num": class_num}) # build dataloader if self.mode == 'train': self.train_dataloader = build_dataloader( diff --git a/ppcls/engine/train/train.py b/ppcls/engine/train/train.py index 4de8d59d6cdbbd13cfcbf223019d44af2314d696..cbf868e4e6d1d118b417568625c493afea6cd23a 100644 --- a/ppcls/engine/train/train.py +++ b/ppcls/engine/train/train.py @@ -36,25 +36,19 @@ def train_epoch(engine, epoch_id, print_batch_step): ] batch_size = batch[0].shape[0] if not engine.config["Global"].get("use_multilabel", False): - batch[1] = batch[1].reshape([-1, 1]).astype("int64") + batch[1] = batch[1].reshape([batch_size, -1]) engine.global_step += 1 - if engine.config["DataLoader"]["Train"]["dataset"].get( - "batch_transform_ops", None): - gt_input = batch[1:] - else: - gt_input = batch[1] - # image input if engine.amp: with paddle.amp.auto_cast(custom_black_list={ "flatten_contiguous_range", "greater_than" }): out = forward(engine, batch) - loss_dict = engine.train_loss_func(out, gt_input) else: out = forward(engine, batch) - loss_dict = engine.train_loss_func(out, gt_input) + + loss_dict = 
engine.train_loss_func(out, batch[1]) # step opt and lr if engine.amp: diff --git a/ppcls/loss/celoss.py b/ppcls/loss/celoss.py index ff19bf9d3699c01c4e07208f7f255aecd75dc917..a78926170c6b8edf7d85f62204f34437eeb118b2 100644 --- a/ppcls/loss/celoss.py +++ b/ppcls/loss/celoss.py @@ -12,10 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +import warnings + import paddle import paddle.nn as nn import paddle.nn.functional as F +from ppcls.utils import logger + class CELoss(nn.Layer): """ @@ -56,19 +60,8 @@ class CELoss(nn.Layer): return {"CELoss": loss} -class MixCELoss(CELoss): - """ - Cross entropy loss with mix(mixup, cutmix, fixmix) - """ - - def __init__(self, epsilon=None): - super().__init__() - self.epsilon = epsilon - - def __call__(self, input, batch): - target0, target1, lam = batch - loss0 = super().forward(input, target0)["CELoss"] - loss1 = super().forward(input, target1)["CELoss"] - loss = lam * loss0 + (1.0 - lam) * loss1 - loss = paddle.mean(loss) - return {"MixCELoss": loss} +class MixCELoss(object): + def __init__(self, *args, **kwargs): + msg = "\"MixCELoss\" is deprecated, please use \"CELoss\" instead." + logger.error(DeprecationWarning(msg)) + raise DeprecationWarning(msg)