downpour.py
from .node import DownpourServer
from .node import DownpourWorker
from ..backward import append_backward
import ps_pb2 as pslib
from paddle.fluid.distribute_lookup_table import find_distributed_lookup_table
from google.protobuf import text_format

class DownpourSGD(object):
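    """Distributed SGD that trains against a Downpour-style parameter server.

    Sparse (prefetched embedding) parameters are registered in a sparse
    table and dense parameters in a dense table, on both the server and
    the worker side of the parameter server.
    """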
    def __init__(self, learning_rate=0.001, window=1):
        # TODO(guru4elephant): if optimizer is not None, warn here
        self.learning_rate_ = learning_rate
        self.window_ = window

    def minimize(self, loss, startup_program=None,
                 parameter_list=None, no_grad_set=None,
                 prefetch_slots=None, prefetch_slots_emb=None):
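        """Append the backward pass for ``loss`` and build the parameter
        server configuration for it.

        Returns a list ``[ps_param_str, worker_skipped_ops]``:
        ``ps_param_str`` is the ``PSParameter`` proto serialized in text
        format, and ``worker_skipped_ops`` are the ops the trainer is
        expected to skip because they are served by the sparse table.
        """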
        params_grads = sorted(append_backward(loss), key=lambda x: x[0].name)
        params = [p for p, g in params_grads]
        grads = [g for p, g in params_grads]
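        # name of the distributed lookup table (embedding) in the program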
        table_name = find_distributed_lookup_table(loss.block.program)
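        # describe the server-side and worker-side views of the tables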
        server = DownpourServer()
        worker = DownpourWorker(self.window_)
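        # table 0: sparse table holding the prefetched embedding slots;
        # table 1: dense table holding the dense parameters and gradients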
        server.add_sparse_table(0, self.learning_rate_,
                                prefetch_slots, prefetch_slots_emb)
        server.add_dense_table(1, self.learning_rate_, params, grads)
        worker.add_sparse_table(0, self.learning_rate_,
                                prefetch_slots, prefetch_slots_emb)
        worker.add_dense_table(1, self.learning_rate_, params, grads)
        ps_param = pslib.PSParameter()
        ps_param.server_param.CopyFrom(server.get_desc())
        # ps_param.worker_param.CopyFrom(worker.get_desc())
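        # the embedding lookup is served by the sparse table, so the
        # trainer skips the lookup_table ops locally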
        worker_skipped_ops = ["lookup_table", "lookup_table_grad"]
        ps_param_str = text_format.MessageToString(ps_param)
        return [ps_param_str, worker_skipped_ops]