LookAhead optimizer raises an error when used in dygraph (dynamic graph) mode
Created by: GitLD
Paddle: 1.6  GPU: V100  AI Studio online environment
Training setup: single machine, single card
Steps to reproduce: in the house-price prediction example, replace the optimizer with
    sgd = fluid.optimizer.AdamOptimizer(learning_rate=0.01)
    optimizer = fluid.optimizer.LookaheadOptimizer(sgd, alpha=0.5, k=5)
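For context, a minimal sketch of the dygraph training loop in which the failure shows up. SomeModel and train_loader are placeholders standing in for the example's network and data reader; only the optimizer lines are taken from the report above.

    import paddle.fluid as fluid

    with fluid.dygraph.guard(fluid.CUDAPlace(0)):
        model = SomeModel()  # placeholder for the example's network
        sgd = fluid.optimizer.AdamOptimizer(learning_rate=0.01)
        optimizer = fluid.optimizer.LookaheadOptimizer(sgd, alpha=0.5, k=5)

        for batch_id, (x, y) in enumerate(train_loader()):  # placeholder reader
            img = fluid.dygraph.to_variable(x)
            label = fluid.dygraph.to_variable(y)
            loss = fluid.layers.cross_entropy(model(img), label)
            avg_loss = fluid.layers.mean(loss)
            # backward pass, then let the optimizer update the parameters
            avg_loss.backward()
            optimizer.minimize(avg_loss)  # the TypeError below is raised here
            model.clear_gradients()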
Error message:

epoch: 0, batch: 0, loss is: [2.3028119], acc is [0.03125]
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input> in <module>
      5 # optimizer = fluid.optimizer.AdamOptimizer(learning_rate=0.01)
      6 pretrained_path = None
----> 7 best_valid_acc = train_model(EPOCH_NUM, optimizer, pretrained_path)

<ipython-input> in train_model(EPOCH_NUM, optimizer, pretrained_path, use_gpu)
     33     # backward pass, then update the parameters
     34     avg_loss.backward()
---> 35     optimizer.minimize(avg_loss)
     36     model.clear_gradients()
     37

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/optimizer.py in minimize(self, loss, startup_program)
   3716                         slow_var, layers.elementwise_sub(one_var, alpha)))
   3717                 layers.assign(input=tmp_var, output=slow_var)
-> 3718                 layers.assign(input=tmp_var, output=fast_var)
   3719             with switch.default():
   3720                 pass

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/layers/control_flow.py in __exit__(self, exc_type, exc_val, exc_tb)
   1588
   1589     def __exit__(self, exc_type, exc_val, exc_tb):
-> 1590         self.block.complete()
   1591         return super(ConditionalBlockGuard, self).__exit__(exc_type, exc_val,
   1592                                                            exc_tb)

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/layers/control_flow.py in complete(self)
   1673             attrs={
   1674                 'sub_block': inside_block,
-> 1675                 'is_scalar_condition': self.is_scalar_condition
   1676             })
   1677

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/framework.py in append_op(self, *args, **kwargs)
   2415                 kwargs.get("outputs", {}), attrs
   2416                 if attrs else {},
-> 2417                 kwargs.get("stop_gradient", False))
   2418             else:
   2419                 op_desc = self.desc.append_op()

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/tracer.py in trace_op(self, type, inputs, outputs, attrs, stop_gradient)
     45         self.trace(type, inputs, outputs, attrs,
     46                    framework._current_expected_place(), self._train_mode and
---> 47                    not stop_gradient)
     48
     49     def train_mode(self):

TypeError: trace(): incompatible function arguments. The following argument types are supported:
    1. (self: paddle.fluid.core_avx.Tracer, arg0: str, arg1: Dict[str, handle], arg2: Dict[str, handle], arg3: Dict[str, Variant], arg4: paddle::platform::CUDAPlace, arg5: bool) -> None
    2. (self: paddle.fluid.core_avx.Tracer, arg0: str, arg1: Dict[str, handle], arg2: Dict[str, handle], arg3: Dict[str, Variant], arg4: paddle::platform::CPUPlace, arg5: bool) -> None

Invoked with: <paddle.fluid.dygraph.tracer.Tracer object at 0x7efb5dc94b90>, 'conditional_block', {'Cond': [name tmp_46, dtype: VarType.INT32, shape: [1], lod: {}, dim: 1, layout: NCHW, dtype: bool, data: [0]], 'Input': []}, {'Out': [], 'Scope': [name _generated_var_2, shape: [0], not inited]}, {'sub_block': idx: 1 parent_idx: 0, 'is_scalar_condition': True}, <paddle.fluid.core_avx.CUDAPlace object at 0x7efb5de13430>, True