diff --git a/mindspore/nn/optim/optimizer.py b/mindspore/nn/optim/optimizer.py
index 05560d9739a897bf3d5b66ed1294df2179288e53..226b84d8db31fa2f74ba011d5eb0d14931d4c75e 100755
--- a/mindspore/nn/optim/optimizer.py
+++ b/mindspore/nn/optim/optimizer.py
@@ -112,7 +112,7 @@ class Optimizer(Cell):
         learning_rate = self._get_single_lr(learning_rate)
         if isinstance(parameters[0], dict):
             self.is_group = True
-            self.params = []
+            self.group_params = []
             self.group_lr = []
             self.group_weight_decay = []
             self._init_group_params(parameters, learning_rate, weight_decay)
@@ -123,7 +123,7 @@ class Optimizer(Cell):
             self.learning_rate = Parameter(learning_rate, name="learning_rate")
 
         if self.is_group:
-            self.parameters = ParameterTuple(self.params)
+            self.parameters = ParameterTuple(self.group_params)
             self.weight_decay = tuple(self.group_weight_decay)
             decay_filter = lambda x: x > 0
             self.decay_flags = tuple(decay_filter(x) for x in self.weight_decay)
@@ -230,7 +230,10 @@ class Optimizer(Cell):
 
         params_store = []
         for group_param in parameters:
-            self.params += group_param['params']
+            if not group_param['params']:
+                raise ValueError("Optimizer got an empty parameter list.")
+
+            self.group_params += group_param['params']
             if 'lr' in group_param.keys():
                 params_dynamic_lr = isinstance(group_param['lr'], (Iterable, Tensor))
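
A minimal usage sketch of the grouped-parameter API these hunks touch, assuming
a MindSpore-style nn.Cell; the toy network, layer names, and hyperparameter
values below are illustrative, not part of this patch:

    import mindspore.nn as nn

    class ToyNet(nn.Cell):
        """Toy two-layer net; attribute names let us split params by substring."""
        def __init__(self):
            super(ToyNet, self).__init__()
            self.conv = nn.Conv2d(3, 8, 3)
            self.fc = nn.Dense(8, 2)

        def construct(self, x):
            # Forward pass is irrelevant here; only trainable_params() matters.
            return x

    net = ToyNet()
    conv_params = [p for p in net.trainable_params() if 'conv' in p.name]
    other_params = [p for p in net.trainable_params() if 'conv' not in p.name]

    # Each dict is one group; a per-group 'lr' or 'weight_decay' overrides the
    # optimizer-level default. These group dicts are what the renamed
    # self.group_params list accumulates in _init_group_params.
    group_params = [{'params': conv_params, 'weight_decay': 0.01},
                    {'params': other_params, 'lr': 0.01}]
    opt = nn.Momentum(group_params, learning_rate=0.1, momentum=0.9)

    # With the added check, an empty group now fails fast at construction:
    #   nn.Momentum([{'params': []}], learning_rate=0.1, momentum=0.9)
    #   -> ValueError: Optimizer got an empty parameter list.

Renaming self.params to self.group_params also avoids shadowing the plain
parameter list used in the non-group code path, which is why both the
initializer and _init_group_params are updated together.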