diff --git a/python/paddle/dataset/image.py b/python/paddle/dataset/image.py
index c36213282c59cee8959e3a3502926f1d311291eb..a094529edf57564413b169c4e9f9062fe2710f9f 100644
--- a/python/paddle/dataset/image.py
+++ b/python/paddle/dataset/image.py
@@ -54,7 +54,10 @@ if six.PY3:
     if retcode != 0:
         cv2 = None
     else:
-        import cv2
+        try:
+            import cv2
+        except ImportError:
+            cv2 = None
 else:
     try:
         import cv2
diff --git a/python/paddle/incubate/optimizer/distributed_fused_lamb.py b/python/paddle/incubate/optimizer/distributed_fused_lamb.py
index 12a88106a44cda42a29f2942755c81f670432b2e..74b5398230dee6063ab2044c03b8b04510462e98 100644
--- a/python/paddle/incubate/optimizer/distributed_fused_lamb.py
+++ b/python/paddle/incubate/optimizer/distributed_fused_lamb.py
@@ -17,7 +17,7 @@ from paddle.fluid.framework import Variable
 from paddle.fluid.clip import ClipGradByGlobalNorm
 from paddle.fluid.initializer import Constant
 from paddle.fluid.layer_helper import LayerHelper
-from paddle.optimizer import Optimizer
+from paddle.fluid.optimizer import Optimizer
 from paddle.distributed import get_rank, get_world_size
 from paddle.fluid.executor import global_scope
 from paddle.fluid.framework import name_scope
@@ -42,11 +42,7 @@ class DistributedFusedLamb(Optimizer):
         assert not framework._non_static_mode(
         ), "DistributedFusedLamb does not support dygraph mode"
         super(DistributedFusedLamb, self).__init__(
-            learning_rate=learning_rate,
-            parameters=parameters,
-            weight_decay=None,
-            grad_clip=None,
-            name=name)
+            learning_rate=learning_rate, grad_clip=None, name=name)
         self._beta1 = beta1
         self._beta2 = beta2
         self._epsilon = epsilon