#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .. import core
from ..layer_helper import LayerHelper
from .control_flow import BlockGuard

__all__ = ['data', 'BlockGuardServ', 'ListenAndServ', 'Send']


def data(name,
         shape,
         append_batch_size=True,
         dtype='float32',
         lod_level=0,
         type=core.VarDesc.VarType.LOD_TENSOR,
         stop_gradient=True):
    """
    **Data Layer**

    This function declares an input variable for the network. Based on
    whether the data is to be fed in as a minibatch, it creates the global
    variable using the layer helper functions. The global variable can be
    accessed by all the operators that follow it in the graph.

    All the input arguments of this function are passed in as local variables
    to the LayerHelper constructor.

    Args:
       name(str): The name/alias of the variable.
       shape(list|tuple): The shape of the data.
       append_batch_size(bool): Whether to prepend a batch-size dimension
           (as -1) to the declared shape.
       dtype(str): The data type: float32, float16, int64, etc.
       type(VarType): The output type. By default it is LOD_TENSOR.
       lod_level(int): The LoD level. 0 means the input data is not a sequence.
       stop_gradient(bool): Whether gradients stop flowing at this variable.

    Returns:
        Variable: The global variable that gives access to the data.

    Examples:
        .. code-block:: python

          data = fluid.layers.data(name='x', shape=[784], dtype='float32')
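
          # A None dimension is treated as unknown (-1) and turns off the
          # automatic batch-size dimension (see the shape handling below):
          seq = fluid.layers.data(name='seq', shape=[None, 128], dtype='float32')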
    """
    helper = LayerHelper('data', **locals())
    shape = list(shape)
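    # A None dimension means the size is unknown: mark it as -1 and do not
    # append the batch-size dimension. An explicitly negative dimension also
    # disables batch-size appending.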
    for i in range(len(shape)):
        if shape[i] is None:
            shape[i] = -1
            append_batch_size = False
        elif shape[i] < 0:
            append_batch_size = False

    if append_batch_size:
        shape = [-1] + shape  # append batch size as -1

    return helper.create_global_variable(
        name=name,
        shape=shape,
        dtype=dtype,
        type=type,
        stop_gradient=stop_gradient,
        lod_level=lod_level)


class BlockGuardServ(BlockGuard):
    """
    BlockGuardServ class.

    BlockGuardServ is used to create an op that holds a block in a program.
    When the guarded block exits, it completes the enclosing server op.
    """

    def __init__(self, server):
        if not isinstance(server, ListenAndServ):
            raise TypeError("BlockGuardServ takes a ListenAndServ")
        super(BlockGuardServ, self).__init__(server.helper.main_program)
        self.server = server

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is not None:
            return False

        self.server.complete_op()
        return super(BlockGuardServ, self).__exit__(exc_type, exc_val, exc_tb)


class ListenAndServ(object):
    """
    ListenAndServ class.

    ListenAndServ class is used to wrap listen_and_serv op to create a server
    which can receive variables from clients and run a block.
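
    Examples:
        A minimal usage sketch; the endpoint and the ops run inside the
        served block are illustrative assumptions, not fixed by this module:

        .. code-block:: python

            import paddle.fluid as fluid

            serv = fluid.layers.ListenAndServ(
                "127.0.0.1:6170", optimizer_mode=False)
            with serv.do():
                x = fluid.layers.data(
                    shape=[32, 32], dtype='float32', name='X',
                    append_batch_size=False)
                fluid.layers.scale(x=x, scale=10.0)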
    """

    def __init__(self, endpoint, fan_in=1, optimizer_mode=True):
        self.helper = LayerHelper("recv")
        self.inputs = []
        self.outputs = []
        self.endpoint = endpoint
        self.fan_in = fan_in
        # FIXME(typhoonzero): adding optimizer_mode is a hack; this should be
        # made more general.
        self.optimizer_mode = optimizer_mode

    def do(self):
        return BlockGuardServ(self)

    def get_params_and_grads(self):
        main_program = self.helper.main_program
        current_block = main_program.current_block()
        parent_block = self.parent_block()
        # params and grads in the same order.
        params = list()
        grads = list()
        for op in current_block.ops:
            # FIXME(typhoonzero): op.inputs is None if it's cloned.
            if self.optimizer_mode:
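                # Optimizer ops expose "Param" and "Grad" inputs; collect the
                # variables (complete_op reads their names later).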
                if "Grad" in op.inputs and "Param" in op.inputs:
                    params.append(op.inputs["Param"].name)
                    grads.append(op.inputs["Grad"].name)
            else:
                # Simple recv mode: use each operator's input variables as
                # both the params and the grads.
                for iname in op.input_names:
                    for in_var_name in op.input(iname):
                        params.append(parent_block.var(in_var_name))
                        grads.append(parent_block.var(in_var_name))

        return params, grads

    def parent_block(self):
        prog = self.helper.main_program
        parent_idx = prog.current_block().parent_idx
        assert parent_idx >= 0
        parent_block = prog.block(parent_idx)
        return parent_block

    def complete_op(self):
        main_program = self.helper.main_program
        current_block = main_program.current_block()
        parent_block = self.parent_block()

        params, grads = self.get_params_and_grads()
        param_names = [p.name for p in params]
        grad_names = [g.name for g in grads]
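        # Append the recv op to the parent block; the current block is
        # attached as the OptimizeBlock to run when variables arrive.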
        parent_block.append_op(
            type='recv',
            inputs={},
            outputs={},
            attrs={
                'endpoint': self.endpoint,
                'Fanin': self.fan_in,
                'ParamList': param_names,
                'GradList': grad_names,
                'OptimizeBlock': current_block
            })


def Send(endpoints, send_vars, get_vars):
    """
    Send layer

    Send variables to the server side, and get variables back from the
    server side once the server has finished running its program.

    Args:
        endpoints (str): comma-separated IP:PORT pairs, in the order of
            send_vars, to send the variables to.
        send_vars (list): the variables to send.
        get_vars (list): the variables to get from the server after the send
            completes.
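
    Examples:
        A minimal sketch; ``var_to_send`` and ``var_to_get`` are placeholder
        variables, not part of this module:

        .. code-block:: python

            fluid.layers.Send("127.0.0.1:6170",
                              send_vars=[var_to_send],
                              get_vars=[var_to_get])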
    """
    assert isinstance(send_vars, list)
    assert isinstance(get_vars, list)

    epmap = endpoints.split(",")
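    # epmap holds one endpoint per variable in send_vars; deduplicate it to
    # get the distinct server endpoints.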
    endpoints = list(set(epmap))

    helper = LayerHelper("Send", **locals())
    helper.append_op(
        type="send",
        inputs={"X": send_vars},
        outputs={"Out": get_vars},
        attrs={"endpoints": endpoints,
               "epmap": epmap})