From a75f83594f3586fb4c3abd84d46462c719395393 Mon Sep 17 00:00:00 2001
From: wuzewu
Date: Mon, 22 Apr 2019 16:46:16 +0800
Subject: [PATCH] add L2SP strategy

---
 paddlehub/finetune/strategy.py | 62 ++++++++++++++++------------------
 paddlehub/version.py           |  2 +-
 2 files changed, 31 insertions(+), 33 deletions(-)

diff --git a/paddlehub/finetune/strategy.py b/paddlehub/finetune/strategy.py
index 01370b53..b06d4906 100644
--- a/paddlehub/finetune/strategy.py
+++ b/paddlehub/finetune/strategy.py
@@ -22,6 +22,7 @@ import multiprocessing
 import paddle.fluid as fluid
 
 from paddlehub.finetune.optimization import adam_weight_decay_optimization
+from paddlehub.finetune.regularizer import L2SPDecayRegularizer
 
 
 def get_pretrained_parameter(main_program, start_program):
@@ -42,8 +43,6 @@ class DefaultStrategy(object):
     def __init__(self, learning_rate=1e-4, optimizer_name="adam"):
         self.learning_rate = learning_rate
         self._optimizer_name = optimizer_name
-
-    def execute(self, loss):
         if self._optimizer_name.lower() == "sgd":
             self.optimizer = fluid.optimizer.SGD(
                 learning_rate=self.learning_rate)
@@ -75,6 +74,7 @@ class DefaultStrategy(object):
             self.optimizer = fluid.optimizer.Adam(
                 learning_rate=self.learning_rate)
 
+    def execute(self, loss):
         if self.optimizer is not None:
             self.optimizer.minimize(loss)
         else:
@@ -153,37 +153,35 @@ class DefaultFinetuneStrategy(DefaultStrategy):
         self.regularization_coeff = regularization_coeff
 
     def execute(self, loss):
-        if self._optimizer_name.lower() == "sgd":
-            self.optimizer = fluid.optimizer.SGD(
-                learning_rate=self.learning_rate)
-        elif self._optimizer_name.lower() == "adagrad":
-            self.optimizer = fluid.optimizer.Adagrad(
-                learning_rate=self.learning_rate)
-        elif self._optimizer_name.lower() == "adamax":
-            self.optimizer = fluid.optimizer.Adamax(
-                learning_rate=self.learning_rate)
-        elif self._optimizer_name.lower() == "decayedadagrad":
-            self.optimizer = fluid.optimizer.DecayedAdagrad(
-                learning_rate=self.learning_rate)
-        elif self._optimizer_name.lower() == "ftrl":
-            self.optimizer = fluid.optimizer.Ftrl(
-                learning_rate=self.learning_rate)
-        elif self._optimizer_name.lower() == "larsmomentum":
-            self.optimizer = fluid.optimizer.LarsMomentum(
-                learning_rate=self.learning_rate)
-        elif self._optimizer_name.lower() == "momentum":
-            self.optimizer = fluid.optimizer.Momentum(
-                learning_rate=self.learning_rate)
-        elif self._optimizer_name.lower() == "decayedadagrad":
-            self.optimizer = fluid.optimizer.DecayedAdagrad(
-                learning_rate=self.learning_rate)
-        elif self._optimizer_name.lower() == "rmsprop":
-            self.optimizer = fluid.optimizer.RMSPropOptimizer(
-                learning_rate=self.learning_rate)
+        # get pretrained parameters
+        program = loss.block.program
+        global_block = program.global_block()
+        pretrained_params = get_pretrained_parameter(
+            program, fluid.default_startup_program())
+
+        # set parameter attrs
+        for index, param in enumerate(pretrained_params):
+            param.regularizer = fluid.regularizer.L2Decay(
+                regularization_coeff=self.regularization_coeff)
+
+        if self.optimizer is not None:
+            self.optimizer.minimize(loss)
         else:
-            self.optimizer = fluid.optimizer.Adam(
-                learning_rate=self.learning_rate)
+            raise ValueError("DefaultFinetuneStrategy's optimizer is None")
+
+
+class L2SPFinetuneStrategy(DefaultStrategy):
+    def __init__(self,
+                 learning_rate=1e-4,
+                 optimizer_name="adam",
+                 regularization_coeff=1e-3):
+        super(L2SPFinetuneStrategy, self).__init__(
+            learning_rate=learning_rate, optimizer_name=optimizer_name)
+        self.learning_rate = learning_rate
+        self._optimizer_name = optimizer_name
+        self.regularization_coeff = regularization_coeff
 
+    def execute(self, loss):
         # get pretrained parameters
         program = loss.block.program
         global_block = program.global_block()
@@ -192,7 +190,7 @@ class DefaultFinetuneStrategy(DefaultStrategy):
 
         # set parameter attrs
         for index, param in enumerate(pretrained_params):
-            param.regularizer = fluid.regularizer.L2Decay(
+            param.regularizer = L2SPDecayRegularizer(
                 regularization_coeff=self.regularization_coeff)
 
         if self.optimizer is not None:
diff --git a/paddlehub/version.py b/paddlehub/version.py
index 29257fa4..d62ca38f 100644
--- a/paddlehub/version.py
+++ b/paddlehub/version.py
@@ -12,5 +12,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """ PaddleHub version string """
-hub_version = "0.4.6.beta"
+hub_version = "0.4.8.beta"
 module_proto_version = "1.0.0"
-- 
GitLab
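
For context, a minimal, hypothetical usage sketch (not part of the patch). It relies only on the class name, constructor arguments, and execute() signature introduced above; the `loss` variable is assumed to be a fine-tuning loss built on a pretrained PaddleHub module elsewhere in the program.

    import paddle.fluid as fluid
    from paddlehub.finetune.strategy import L2SPFinetuneStrategy

    # Assumed setup: `loss` is the fine-tuning loss variable of the main program.
    # execute() attaches the L2SP regularizer to every pretrained parameter and
    # then minimizes the loss with the optimizer configured in __init__.
    strategy = L2SPFinetuneStrategy(
        learning_rate=1e-4,
        optimizer_name="adam",
        regularization_coeff=1e-3)
    strategy.execute(loss)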