Unverified Commit 167e6488, authored by wangxiaoning, committed by GitHub

[Fluid clean] clean fluid.transpiler.details (#50564)

* move fluid.transpiler.details

* fix setup

* fix

* fix setup

* add setup
Parent 2364a6bc
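In short: the transpiler helper package moves from `paddle.fluid.transpiler.details` to `paddle.distributed.transpiler.details`, and every import site is updated. A minimal sketch of the change for a downstream caller, using only names that appear in the diff below:

```python
# Old location (removed by this commit):
# from paddle.fluid.transpiler.details import delete_ops, UnionFind

# New location:
from paddle.distributed.transpiler.details import (
    UnionFind,
    VarStruct,
    VarsDistributed,
    delete_ops,
)
```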
@@ -12,6 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from .program_utils import *
-from .ufind import *
-from .vars_distributed import *
+from .program_utils import (
+    delete_ops,
+    find_op_by_input_arg,
+    find_op_by_output_arg,
+)
+from .ufind import UnionFind
+from .vars_distributed import VarStruct, VarDistributed, VarsDistributed
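Swapping the wildcard imports for an explicit list pins down the package's public surface: only the helpers named here are re-exported, and everything else in the submodules stays private. A quick illustration of what a caller can now rely on:

```python
from paddle.distributed.transpiler import details

# Exactly the names re-exported above are part of the package surface:
assert hasattr(details, "delete_ops")
assert hasattr(details, "UnionFind")
assert hasattr(details, "VarsDistributed")
```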
@@ -12,9 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from paddle.fluid import core
-import paddle
 def delete_ops(block, ops):
     for op in ops:
......
@@ -40,7 +40,7 @@ class UnionFind:
     def find(self, x):
         # Find the root index of the given element x;
         # apply path compression while finding the root index
-        if not x in self._index:
+        if x not in self._index:
             return -1
         idx = self._index[x]
         while idx != self._parents[idx]:
......
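For context, `find` keeps lookups near-constant via path compression: every node visited on the way up is re-pointed closer to the root. A self-contained sketch of the same idea (illustrative, not Paddle's exact class):

```python
class UnionFind:
    """Minimal union-find with path compression (illustrative sketch)."""

    def __init__(self, elements):
        # Each element gets a slot; every slot starts as its own root.
        self._index = {e: i for i, e in enumerate(elements)}
        self._parents = list(range(len(elements)))

    def find(self, x):
        if x not in self._index:
            return -1
        idx = self._index[x]
        while idx != self._parents[idx]:
            # Path compression (halving variant): hop to the grandparent.
            self._parents[idx] = self._parents[self._parents[idx]]
            idx = self._parents[idx]
        return idx

    def union(self, a, b):
        ra, rb = self.find(a), self.find(b)
        if ra != rb and -1 not in (ra, rb):
            self._parents[ra] = rb

    def is_connected(self, a, b):
        return self.find(a) == self.find(b)
```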
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from paddle.fluid.framework import Variable
+from paddle.static import Variable
 class VarStruct:
@@ -117,8 +117,6 @@ class VarDistributed:
     def __str__(self):
         origin_var_str = (
             "{name} : fluid.{type}.shape{shape}.astype({dtype})".format(
-                i="{",
-                e="}",
                 name=self.origin.name,
                 type=self.origin.type,
                 shape=self.origin.shape,
@@ -129,8 +127,6 @@
         slice_var_str = (
             "{name} : fluid.{type}.shape{shape}.astype({dtype})"
             ".slice({is_slice}).block({block_id}).offset({offset})".format(
-                i="{",
-                e="}",
                 name=self.slice.name,
                 type=self.slice.type,
                 shape=self.slice.shape,
......
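The deleted `i="{"` / `e="}"` arguments were dead parameters: `str.format` silently ignores keyword arguments that the format string never references, so dropping them cannot change the rendered string. A quick check of that Python behavior:

```python
template = "{name} : fluid.{type}.shape{shape}.astype({dtype})"
fields = dict(name="w_0", type="LOD_TENSOR", shape=(8, 16), dtype="float32")

# Unreferenced keyword arguments (i, e) are ignored by str.format,
# so both calls render the identical string.
assert template.format(**fields) == template.format(i="{", e="}", **fields)
```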
@@ -27,7 +27,6 @@ from paddle.fluid.incubate.fleet.parameter_server.ir.ps_dispatcher import (
     RoundRobin,
     PSDispatcher,
 )
-from paddle.fluid.transpiler.details.program_utils import delete_ops
 OP_NAME_SCOPE = "op_namescope"
 CLIP_OP_NAME_SCOPE = "gradient_clip"
......
@@ -23,7 +23,7 @@ import paddle
 from paddle.framework import core
 import paddle.framework as framework
-from paddle.fluid.transpiler.details.program_utils import delete_ops
+from paddle.distributed.transpiler.details.program_utils import delete_ops
 from paddle.fluid.incubate.fleet.parameter_server.ir.public import (
     _get_optimize_ops,
 )
......
@@ -48,8 +48,6 @@ from ..framework import (
     Parameter,
     grad_var_name,
 )
-from .details import UnionFind, VarStruct, VarsDistributed
-from .details import delete_ops, find_op_by_output_arg
 LOOKUP_TABLE_TYPE = ["lookup_table", "lookup_table_v2"]
 LOOKUP_TABLE_GRAD_TYPE = ["lookup_table_grad", "lookup_table_v2_grad"]
@@ -619,6 +617,10 @@ class DistributeTranspiler:
         from paddle.distributed.distribute_lookup_table import (
             find_distributed_lookup_table,
         )
+        from paddle.distributed.transpiler.details import (
+            VarsDistributed,
+            find_op_by_output_arg,
+        )
         err_msg = """
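Note the pattern used throughout this file: the old module-level `from .details import ...` lines are gone, and each method now imports what it needs locally. Deferring an import into the function body is a standard way to break an import cycle while `paddle.fluid` and `paddle.distributed` still depend on each other mid-migration; schematically, with hypothetical modules `a`/`b`:

```python
# a.py -- a module-level "from b import helper" would fail if b also imports a.
def build():
    # Runs only when build() is called, after both modules finished loading,
    # so the a <-> b cycle never trips at import time.
    from b import helper
    return helper()
```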
@@ -1044,6 +1046,8 @@ WIKI: https://github.com/PaddlePaddle/Fleet/blob/develop/markdown_doc/transpiler
     def _fake_init_sparsetable(self, sparse_table_names):
         # delete table init op
+        from paddle.distributed.transpiler.details import delete_ops
         for table_name in sparse_table_names:
             table_var = self.startup_program.global_block().vars[table_name]
             table_param_init_op = []
@@ -1065,6 +1069,8 @@ WIKI: https://github.com/PaddlePaddle/Fleet/blob/develop/markdown_doc/transpiler
         delete_ops(self.startup_program.global_block(), table_param_init_op)
     def _delete_trainer_optimizer(self, is_startup):
+        from paddle.distributed.transpiler.details import delete_ops
         optimize_vars = []
         optimize_op_role_vars = []
         optimize_need_delete_vars = []
@@ -1132,6 +1138,7 @@ WIKI: https://github.com/PaddlePaddle/Fleet/blob/develop/markdown_doc/transpiler
         from paddle.distributed.fleet.base.private_helper_function import (
             wait_server_ready,
         )
+        from paddle.distributed.transpiler.details import delete_ops
         self._delete_trainer_optimizer(is_startup=True)
         sparse_table_names = self._get_sparse_table_names()
@@ -1720,6 +1727,8 @@ WIKI: https://github.com/PaddlePaddle/Fleet/blob/develop/markdown_doc/transpiler
     def _get_distributed_optimizer_vars(self):
         def _get_distributed_optimizer_var(endpoint):
+            from paddle.distributed.transpiler.details import VarStruct
             opt_op_on_pserver = []
             for _, op in enumerate(self.optimize_ops):
                 if self._is_optimizer_op(op) and self._is_opt_op_on_pserver(
@@ -1927,6 +1936,8 @@ WIKI: https://github.com/PaddlePaddle/Fleet/blob/develop/markdown_doc/transpiler
     def _replace_lookup_table_op_with_prefetch(
         self, program, pserver_endpoints
     ):
+        from paddle.distributed.transpiler.details import delete_ops
         # 1. replace lookup_table_op with split_ids_op -> prefetch_op -> sum_op
         self.all_in_ids_vars = []
         self.all_prefetch_input_vars = []
@@ -2760,6 +2771,8 @@ WIKI: https://github.com/PaddlePaddle/Fleet/blob/develop/markdown_doc/transpiler
     def _create_ufind(self, optimize_ops):
         # Create a union-find data structure over the optimize ops
+        from paddle.distributed.transpiler.details import UnionFind
         ufind = UnionFind(optimize_ops)
         for i in range(len(optimize_ops)):
             for j in range(i, len(optimize_ops)):
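The truncated double loop unions every pair of optimize ops that belong together, so connected components of related ops can later be handled as one group. Schematically, assuming a hypothetical `belongs_together` predicate in place of the elided pairing test:

```python
# Illustrative sketch of the grouping idea; the real pairing test is elided above.
ufind = UnionFind(optimize_ops)
for i in range(len(optimize_ops)):
    for j in range(i, len(optimize_ops)):
        op1, op2 = optimize_ops[i], optimize_ops[j]
        if belongs_together(op1, op2):  # hypothetical predicate
            ufind.union(op1, op2)

# Afterwards, ufind.is_connected(op1, op2) reports whether two ops were grouped.
```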
@@ -2884,6 +2897,8 @@ WIKI: https://github.com/PaddlePaddle/Fleet/blob/develop/markdown_doc/transpiler
         return lr_ops
     def _get_lr_ops_deprecated(self):
+        from paddle.distributed.transpiler.details import UnionFind
         lr_ops = []
         # find learning rate variables by optimize op
         lr_vars = set()
......
@@ -37,8 +37,11 @@ from ..framework import (
     Block,
     Parameter,
 )
-from .details import wait_server_ready, VarsDistributed
-from .details import delete_ops
+from paddle.distributed.transpiler.details import (
+    wait_server_ready,
+    VarsDistributed,
+)
+from paddle.distributed.transpiler.details import delete_ops
 from .distribute_transpiler import (
     DistributeTranspiler,
     DistributeTranspilerConfig,
......
@@ -20,13 +20,13 @@ from functools import reduce
 import paddle
 import paddle.framework as framework
+from paddle.distributed.transpiler.details.program_utils import delete_ops
 from paddle.fluid.incubate.fleet.parameter_server.ir.public import (
     _get_lr_ops,
     _get_optimize_ops,
     get_sparse_tablenames,
 )
 from paddle.fluid.incubate.fleet.parameter_server.mode import DistributedMode
-from paddle.fluid.transpiler.details.program_utils import delete_ops
 from paddle.framework import core
 OP_NAME_SCOPE = "op_namescope"
......
@@ -383,6 +383,8 @@ packages=['paddle',
           'paddle.distributed.passes',
           'paddle.distributed.models',
           'paddle.distributed.models.moe',
+          'paddle.distributed.transpiler',
+          'paddle.distributed.transpiler.details',
           'paddle.framework',
           'paddle.jit',
           'paddle.jit.dy2static',
@@ -399,7 +401,6 @@ packages=['paddle',
           'paddle.fluid.contrib.extend_optimizer',
           'paddle.fluid.contrib.layers',
           'paddle.fluid.transpiler',
-          'paddle.fluid.transpiler.details',
           'paddle.fluid.incubate',
           'paddle.fluid.incubate.fleet',
           'paddle.fluid.incubate.checkpoint',
......
@@ -1270,6 +1270,8 @@ def get_setup_parameters():
         'paddle.distributed.passes',
         'paddle.distributed.models',
         'paddle.distributed.models.moe',
+        'paddle.distributed.transpiler',
+        'paddle.distributed.transpiler.details',
         'paddle.framework',
         'paddle.jit',
         'paddle.jit.dy2static',
@@ -1286,7 +1288,6 @@ def get_setup_parameters():
         'paddle.fluid.contrib.extend_optimizer',
         'paddle.fluid.contrib.layers',
         'paddle.fluid.transpiler',
-        'paddle.fluid.transpiler.details',
         'paddle.fluid.incubate',
         'paddle.fluid.incubate.fleet',
         'paddle.fluid.incubate.checkpoint',
......
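With `paddle.distributed.transpiler` and `paddle.distributed.transpiler.details` registered in both package lists and the old `paddle.fluid.transpiler.details` entry dropped, an installed build should resolve the moved module. A quick smoke test against the new location:

```python
# Smoke test after installing the wheel: the moved module must import cleanly
# and expose the helpers re-exported by its __init__.py (see the first hunk).
from paddle.distributed.transpiler.details import (
    UnionFind,
    VarStruct,
    VarsDistributed,
    delete_ops,
    find_op_by_input_arg,
    find_op_by_output_arg,
)

print(UnionFind.__name__, delete_ops.__name__)  # both resolve from the new path
```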