From 4673fea5519c4ef49266a4af00e3e714ac1ac2b9 Mon Sep 17 00:00:00 2001
From: Qiao Longfei <qiaolongfei@baidu.com>
Date: Fri, 26 Oct 2018 16:50:34 +0800
Subject: [PATCH] trainer startup should not init the table optimizer because
 it may be large

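The parameter of a distributed lookup table is created and initialized on
the parameter server side, and it may be too large to materialize on every
trainer. The trainer startup program therefore should not carry init ops
for it; this patch deletes every startup op whose outputs include the
table parameter.

A minimal usage sketch of the affected call flow (a sketch against the
assumed Fluid API of this era, not part of the patch; the endpoint and
trainer count below are placeholders):

    import paddle.fluid as fluid

    t = fluid.DistributeTranspiler()
    # assumes the default main program contains a distributed lookup
    # table, e.g. an embedding built with is_distributed=True
    t.transpile(trainer_id=0, pservers="127.0.0.1:6170", trainers=2)
    # with this patch, the trainer-side startup program no longer
    # contains init ops for the (possibly huge) table parameter
    trainer_prog = t.get_trainer_program()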
---
 .../fluid/transpiler/distribute_transpiler.py      | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/python/paddle/fluid/transpiler/distribute_transpiler.py b/python/paddle/fluid/transpiler/distribute_transpiler.py
index 2192139f8d5..fb2dd942fc3 100644
--- a/python/paddle/fluid/transpiler/distribute_transpiler.py
+++ b/python/paddle/fluid/transpiler/distribute_transpiler.py
@@ -474,6 +474,15 @@ class DistributeTranspiler(object):
         delete_ops(self.origin_program.global_block(), self.optimize_ops)
         delete_ops(self.origin_program.global_block(), lr_ops)
 
+        # delete table init ops: the table may be too large to init on trainers
+        if self.has_distributed_lookup_table:
+            trainer_table_param_init_ops = [
+                op for op in self.startup_program.global_block().ops
+                if self.table_name in op.output_arg_names
+            ]
+            delete_ops(self.startup_program.global_block(),
+                       trainer_table_param_init_ops)
+
         self.origin_program.__str__()
 
         if wait_port:
@@ -1194,9 +1203,8 @@ to transpile() call.")
         # create table param and grad var in pserver program
         # create table optimize block in pserver program
         table_opt_op = [
-            op for op in self.optimize_ops
-            if 'Param' in op.input_names and op.input("Param")[0] ==
-            self.table_name
+            op for op in self.optimize_ops if 'Param' in op.input_names and
+            op.input("Param")[0] == self.table_name
         ][0]
 
         origin_param_var = self.origin_program.global_block().vars[
-- 
GitLab