From 801dfd34cb8935ed5c0869fbf9d8f727f20e0332 Mon Sep 17 00:00:00 2001
From: tangwei
Date: Tue, 19 May 2020 11:44:05 +0800
Subject: [PATCH] rename get_cost_op to avg_cost

---
 core/model.py                                       | 2 +-
 core/trainers/cluster_trainer.py                    | 4 ++--
 core/trainers/ctr_coding_trainer.py                 | 2 +-
 core/trainers/ctr_modul_trainer.py                  | 4 ++--
 core/trainers/online_learning_trainer.py            | 2 +-
 core/trainers/single_trainer.py                     | 4 ++--
 doc/design.md                                       | 2 +-
 models/contentunderstanding/classification/model.py | 2 +-
 models/contentunderstanding/tagspace/model.py       | 2 +-
 9 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/core/model.py b/core/model.py
index c51ba241..b4150155 100755
--- a/core/model.py
+++ b/core/model.py
@@ -47,7 +47,7 @@ class Model(object):
     def get_infer_results(self):
         return self._infer_results
 
-    def get_cost_op(self):
+    def get_avg_cost(self):
         """R
         """
         return self._cost
diff --git a/core/trainers/cluster_trainer.py b/core/trainers/cluster_trainer.py
index 327dffe0..faa96035 100755
--- a/core/trainers/cluster_trainer.py
+++ b/core/trainers/cluster_trainer.py
@@ -82,7 +82,7 @@ class ClusterTrainer(TranspileTrainer):
         strategy = self.build_strategy()
         optimizer = fleet.distributed_optimizer(optimizer, strategy)
-        optimizer.minimize(self.model.get_cost_op())
+        optimizer.minimize(self.model.get_avg_cost())
 
         if fleet.is_server():
             context['status'] = 'server_pass'
@@ -114,7 +114,7 @@ class ClusterTrainer(TranspileTrainer):
 
         program = fluid.compiler.CompiledProgram(
             fleet.main_program).with_data_parallel(
-            loss_name=self.model.get_cost_op().name,
+            loss_name=self.model.get_avg_cost().name,
             build_strategy=self.strategy.get_build_strategy(),
             exec_strategy=self.strategy.get_execute_strategy())
diff --git a/core/trainers/ctr_coding_trainer.py b/core/trainers/ctr_coding_trainer.py
index e6979a92..b6b987dd 100755
--- a/core/trainers/ctr_coding_trainer.py
+++ b/core/trainers/ctr_coding_trainer.py
@@ -88,7 +88,7 @@ class CtrPaddleTrainer(Trainer):
         optimizer = self.model.optimizer()
         optimizer = fleet.distributed_optimizer(optimizer, strategy={"use_cvm": False})
-        optimizer.minimize(self.model.get_cost_op())
+        optimizer.minimize(self.model.get_avg_cost())
 
         if fleet.is_server():
             context['status'] = 'server_pass'
diff --git a/core/trainers/ctr_modul_trainer.py b/core/trainers/ctr_modul_trainer.py
index d486767e..cb16b3c4 100755
--- a/core/trainers/ctr_modul_trainer.py
+++ b/core/trainers/ctr_modul_trainer.py
@@ -129,7 +129,7 @@ class CtrPaddleTrainer(Trainer):
             model = self._exector_context[executor['name']]['model']
             self._metrics.update(model.get_metrics())
             runnnable_scope.append(scope)
-            runnnable_cost_op.append(model.get_cost_op())
+            runnnable_cost_op.append(model.get_avg_cost())
             for var in model._data_var:
                 if var.name in data_var_name_dict:
                     continue
@@ -146,7 +146,7 @@ class CtrPaddleTrainer(Trainer):
             model = self._exector_context[executor['name']]['model']
             program = model._build_param['model']['train_program']
             if not executor['is_update_sparse']:
-                program._fleet_opt["program_configs"][str(id(model.get_cost_op().block.program))]["push_sparse"] = []
+                program._fleet_opt["program_configs"][str(id(model.get_avg_cost().block.program))]["push_sparse"] = []
             if 'train_thread_num' not in executor:
                 executor['train_thread_num'] = self.global_config['train_thread_num']
             with fluid.scope_guard(scope):
diff --git a/core/trainers/online_learning_trainer.py b/core/trainers/online_learning_trainer.py
index 4a64d4c2..1e6c183f 100755
--- a/core/trainers/online_learning_trainer.py
+++ b/core/trainers/online_learning_trainer.py
@@ -78,7 +78,7 @@ class ClusterTrainer(TranspileTrainer):
         optimizer = self.model.optimizer()
         strategy = self.build_strategy()
         optimizer = fleet.distributed_optimizer(optimizer, strategy)
-        optimizer.minimize(self.model.get_cost_op())
+        optimizer.minimize(self.model.get_avg_cost())
 
         if fleet.is_server():
             context['status'] = 'server_pass'
diff --git a/core/trainers/single_trainer.py b/core/trainers/single_trainer.py
index e2f02f4b..7690a1e7 100755
--- a/core/trainers/single_trainer.py
+++ b/core/trainers/single_trainer.py
@@ -47,7 +47,7 @@ class SingleTrainer(TranspileTrainer):
     def init(self, context):
         self.model.train_net()
         optimizer = self.model.optimizer()
-        optimizer.minimize((self.model.get_cost_op()))
+        optimizer.minimize((self.model.get_avg_cost()))
 
         self.fetch_vars = []
         self.fetch_alias = []
@@ -74,7 +74,7 @@ class SingleTrainer(TranspileTrainer):
 
         program = fluid.compiler.CompiledProgram(
             fluid.default_main_program()).with_data_parallel(
-            loss_name=self.model.get_cost_op().name)
+            loss_name=self.model.get_avg_cost().name)
 
         metrics_varnames = []
         metrics_format = []
diff --git a/doc/design.md b/doc/design.md
index 90a819fe..2975d77f 100644
--- a/doc/design.md
+++ b/doc/design.md
@@ -153,7 +153,7 @@ class Model(object):
     def infer_net(self):
         pass
 
-    def get_cost_op(self):
+    def get_avg_cost(self):
         return self._cost
 ```
diff --git a/models/contentunderstanding/classification/model.py b/models/contentunderstanding/classification/model.py
index 18db63a5..f8bb3fea 100644
--- a/models/contentunderstanding/classification/model.py
+++ b/models/contentunderstanding/classification/model.py
@@ -59,7 +59,7 @@ class Model(ModelBase):
         self.cost = avg_cost
         self._metrics["acc"] = acc
 
-    def get_cost_op(self):
+    def get_avg_cost(self):
         return self.cost
 
     def get_metrics(self):
diff --git a/models/contentunderstanding/tagspace/model.py b/models/contentunderstanding/tagspace/model.py
index eb01efba..090d6c48 100644
--- a/models/contentunderstanding/tagspace/model.py
+++ b/models/contentunderstanding/tagspace/model.py
@@ -89,7 +89,7 @@ class Model(ModelBase):
         self.metrics["correct"] = correct
         self.metrics["cos_pos"] = cos_pos
 
-    def get_cost_op(self):
+    def get_avg_cost(self):
         return self.cost
 
    def get_metrics(self):
--
GitLab
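
For reference, the minimal sketch below shows how the renamed accessor fits together with the rest of the `Model` interface touched by this patch: `train_net()` builds the network and stores the averaged loss in `self._cost`, and a trainer retrieves it through `get_avg_cost()` when calling `optimizer.minimize(...)`. The `ExampleModel` class, its layers, and the learning rate are hypothetical illustrations rather than code from this repository; only the method names mirror the interface shown in the diff.

```python
import paddle.fluid as fluid


class ExampleModel(object):
    """Hypothetical model exposing the renamed get_avg_cost() accessor."""

    def __init__(self):
        self._cost = None

    def train_net(self):
        # Build a toy regression network and keep the averaged loss.
        x = fluid.data(name="x", shape=[None, 13], dtype="float32")
        y = fluid.data(name="y", shape=[None, 1], dtype="float32")
        pred = fluid.layers.fc(input=x, size=1)
        cost = fluid.layers.square_error_cost(input=pred, label=y)
        self._cost = fluid.layers.mean(cost)

    def optimizer(self):
        return fluid.optimizer.SGD(learning_rate=0.01)

    def get_avg_cost(self):
        # Renamed from get_cost_op(); still returns the stored average cost.
        return self._cost


model = ExampleModel()
model.train_net()
# Trainers now call get_avg_cost() instead of get_cost_op().
model.optimizer().minimize(model.get_avg_cost())
```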