# CycleGAN training configuration (horse2zebra).
epochs: 200
output_dir: output_dir
find_unused_parameters: True

model:
  name: CycleGANModel
  generator:
    name: ResnetGenerator
    output_nc: 3
    n_blocks: 9
    ngf: 64
    use_dropout: False
    norm_type: instance
    input_nc: 3
  discriminator:
    name: NLayerDiscriminator
    ndf: 64
    n_layers: 3
    norm_type: instance
    input_nc: 3
  # Cycle-consistency loss between an input image and its reconstruction.
  cycle_criterion:
    name: L1Loss
  # Identity loss, weighted at 0.5 relative to the cycle loss.
  idt_criterion:
    name: L1Loss
    loss_weight: 0.5
  # Least-squares GAN objective for both discriminators.
  gan_criterion:
    name: GANLoss
    gan_mode: lsgan

dataset:
  train:
    name: UnpairedDataset
    dataroot_a: data/horse2zebra/trainA
    dataroot_b: data/horse2zebra/trainB
    num_workers: 0
    batch_size: 1
    is_train: True
    max_size: inf
    preprocess:
      - name: LoadImageFromFile
        key: A
      - name: LoadImageFromFile
        key: B
      - name: Transforms
        input_keys: [A, B]
        pipeline:
          - name: Resize
            size: [286, 286]
            interpolation: 'bicubic'  # cv2.INTER_CUBIC
            keys: ['image', 'image']
          - name: RandomCrop
            size: [256, 256]
            keys: ['image', 'image']
          - name: RandomHorizontalFlip
            prob: 0.5
            keys: ['image', 'image']
          - name: Transpose
            keys: ['image', 'image']
          - name: Normalize
            mean: [127.5, 127.5, 127.5]
            std: [127.5, 127.5, 127.5]
            keys: ['image', 'image']
  test:
    name: UnpairedDataset
    dataroot_a: data/horse2zebra/testA
    dataroot_b: data/horse2zebra/testB
    num_workers: 0
    batch_size: 1
    max_size: inf
    is_train: False
    preprocess:
      - name: LoadImageFromFile
        key: A
      - name: LoadImageFromFile
        key: B
      - name: Transforms
        input_keys: [A, B]
        pipeline:
          - name: Resize
            size: [256, 256]
            interpolation: 'bicubic'  # cv2.INTER_CUBIC
            keys: ['image', 'image']
          - name: Transpose
            keys: ['image', 'image']
          - name: Normalize
            mean: [127.5, 127.5, 127.5]
            std: [127.5, 127.5, 127.5]
            keys: ['image', 'image']

lr_scheduler:
  name: LinearDecay
  learning_rate: 0.0002
  start_epoch: 100
  decay_epochs: 100
  # Placeholder; replaced at runtime with the real dataset's iterations per epoch.
  iters_per_epoch: 1

optimizer:
  optimG:
    name: Adam
    net_names:
      - netG_A
      - netG_B
    beta1: 0.5
  optimD:
    name: Adam
    net_names:
      - netD_A
      - netD_B
    beta1: 0.5

log_config:
  interval: 100
  visiual_interval: 500

snapshot_config:
  interval: 5
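
# Usage sketch: this config follows the PaddleGAN layout, so it would typically be
# passed to that repository's training entry point. The file path below and the
# tools/main.py / --config-file invocation are assumptions for illustration, not
# something this config itself specifies:
#
#   python -u tools/main.py --config-file configs/cyclegan_horse2zebra.yaml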