diff --git a/configs/quick_start/VGG16_finetune_kunlun.yaml b/configs/quick_start/VGG16_finetune_kunlun.yaml new file mode 100644 index 0000000000000000000000000000000000000000..bb358cf7f9e811e9f1c8136f1161b9ebc360c2e6 --- /dev/null +++ b/configs/quick_start/VGG16_finetune_kunlun.yaml @@ -0,0 +1,72 @@ +mode: 'train' +ARCHITECTURE: + name: 'VGG16' + params: + stop_grad_layers: 5 +pretrained_model: "./pretrained/VGG16_pretrained" +model_save_dir: "./output/" +classes_num: 102 +total_images: 1020 +save_interval: 1 +validate: True +valid_interval: 1 +epochs: 20 +topk: 5 +image_shape: [3, 224, 224] + +LEARNING_RATE: + function: 'Cosine' + params: + lr: 0.0005 + +OPTIMIZER: + function: 'Momentum' + params: + momentum: 0.9 + regularizer: + function: 'L2' + factor: 0.00001 + +TRAIN: + batch_size: 20 + num_workers: 0 + file_list: "./dataset/flowers102/train_list.txt" + data_dir: "./dataset/flowers102/" + shuffle_seed: 0 + transforms: + - DecodeImage: + to_rgb: True + to_np: False + channel_first: False + - RandCropImage: + size: 224 + - RandFlipImage: + flip_code: 1 + - NormalizeImage: + scale: 1.0/255.0
+ mean: [0.485, 0.456, 0.406] + std: [0.229, 0.224, 0.225] + order: '' + - ToCHWImage: + +VALID: + batch_size: 20 + num_workers: 0 + file_list: "./dataset/flowers102/val_list.txt" + data_dir: "./dataset/flowers102/" + shuffle_seed: 0 + transforms: + - DecodeImage: + to_rgb: True + to_np: False + channel_first: False + - ResizeImage: + resize_short: 256 + - CropImage: + size: 224 + - NormalizeImage: + scale: 1.0/255.0 + mean: [0.485, 0.456, 0.406] + std: [0.229, 0.224, 0.225] + order: '' + - ToCHWImage: diff --git a/configs/quick_start/VGG19_finetune_kunlun.yaml b/configs/quick_start/VGG19_finetune_kunlun.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7b32e4626eefd78bc3e6fa0ec10b7b6370596b6e --- /dev/null +++ b/configs/quick_start/VGG19_finetune_kunlun.yaml @@ -0,0 +1,72 @@ +mode: 'train' +ARCHITECTURE: + name: 'VGG19' + params: + stop_grad_layers: 5 +pretrained_model: "./pretrained/VGG19_pretrained" +model_save_dir: "./output/" +classes_num: 102 +total_images: 1020 +save_interval: 1 +validate: True +valid_interval: 1 +epochs: 20 +topk: 5 +image_shape: [3, 224, 224] + +LEARNING_RATE: + function: 'Cosine' + params: + lr: 0.0005 + +OPTIMIZER: + function: 'Momentum' + params: + momentum: 0.9 + regularizer: + function: 'L2' + factor: 0.00001 + +TRAIN: + batch_size: 20 + num_workers: 0 + file_list: "./dataset/flowers102/train_list.txt" + data_dir: "./dataset/flowers102/" + shuffle_seed: 0 + transforms: + - DecodeImage: + to_rgb: True + to_np: False + channel_first: False + - RandCropImage: + size: 224 + - RandFlipImage: + flip_code: 1 + - NormalizeImage: + scale: 1.0/255.0
+ mean: [0.485, 0.456, 0.406] + std: [0.229, 0.224, 0.225] + order: '' + - ToCHWImage: + +VALID: + batch_size: 20 + num_workers: 0 + file_list: "./dataset/flowers102/val_list.txt" + data_dir: "./dataset/flowers102/" + shuffle_seed: 0 + transforms: + - DecodeImage: + to_rgb: True + to_np: False + channel_first: False + - ResizeImage: + resize_short: 256 + - CropImage: + size: 224 + - NormalizeImage: + scale: 1.0/255.0 + mean: [0.485, 0.456, 0.406] + std: [0.229, 0.224, 0.225] + order: '' + - ToCHWImage: diff --git a/docs/zh_CN/extension/train_on_xpu.md b/docs/zh_CN/extension/train_on_xpu.md index 7f7d50f1e819adcb1d519b2a340f1b2c7881f61a..8f984683871a05c31e5f6b32e80669481655b5ad 100644 --- a/docs/zh_CN/extension/train_on_xpu.md +++ b/docs/zh_CN/extension/train_on_xpu.md @@ -23,3 +23,10 @@ * 命令: ```python3.7 tools/static/train.py -c configs/quick_start/HRNet_W18_C_finetune.yaml -o is_distributed=False -o use_cpu=False -o use_xpu=True -o use_gpu=False``` + + +### VGG16/19 +* 命令: + +```python3.7 tools/static/train.py -c configs/quick_start/VGG16_finetune_kunlun.yaml -o use_gpu=False -o use_cpu=False -o use_xpu=True -o is_distributed=False``` +```python3.7 tools/static/train.py -c configs/quick_start/VGG19_finetune_kunlun.yaml -o use_gpu=False -o use_cpu=False -o use_xpu=True -o is_distributed=False```