model.py 7.7 KB
Newer Older
T
tangwei 已提交
1 2 3 4 5 6 7 8 9 10 11 12 13 14
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

T
tangwei 已提交
15
import abc
T
tangwei 已提交
16 17 18

import paddle.fluid as fluid

19
from paddlerec.core.utils import envs
T
tangwei 已提交
20

T
tangwei 已提交
21

T
tangwei 已提交
22
class Model(object):
    """Base Model

    Common state and helpers shared by concrete models: input-variable
    lists, cost/metric accessors, slot-based input construction and
    optimizer building.  Subclasses override net() (a no-op here) and may
    override _init_hyper_parameters() / input_data().
    """
    __metaclass__ = abc.ABCMeta  # Py2-style ABC marker (no effect on Python 3)

    def __init__(self, config):
        """Set up the common model state.

        Args:
            config: parsed runtime configuration; stored as ``self._env``.
        """
        # Training-side state filled in later by net()/_init_slots().
        self._cost = None
        self._metrics = {}
        self._data_var = []
        self._data_loader = None
        # Inference-side counterparts.
        self._infer_data_var = []
        self._infer_results = {}
        self._infer_data_loader = None
        # Misc defaults.
        self._fetch_interval = 20
        self._namespace = "train.model"
        self._slot_inited = False  # guards _init_slots() against re-entry
        self._platform = envs.get_platform()
        # Hook runs before self._env is bound, exactly as before.
        self._init_hyper_parameters()
        self._env = config
F
frankwhzhang 已提交
43 44 45

    def _init_hyper_parameters(self):
        pass
X
xujiaqi01 已提交
46

X
fix  
xjqbest 已提交
47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67
    def _init_slots(self, **kargs):
        """Create input variables from the slot config of the dataset bound
        to the executor phase named ``kargs["name"]``.

        Dense slots ("name:[d1,d2]") become float32 inputs with the declared
        shape; sparse slots become lod-level-1 int64 inputs.  All created
        variables are appended to ``self._data_var`` and mirrored into
        ``self._dense_data_var`` / ``self._sparse_data_var``.

        Idempotent: only the first call has any effect (``_slot_inited``).
        """
        if self._slot_inited:
            return
        self._slot_inited = True

        # Find the executor phase entry matching the given name, then the
        # dataset entry that phase references.
        model_dict = {}
        for phase in self._env["executor"]:
            if phase["name"] == kargs["name"]:
                model_dict = phase
                break
        dataset = {}
        for ds in self._env["dataset"]:
            if ds["name"] == model_dict["dataset_name"]:
                dataset = ds
                break

        prefix = "dataset." + dataset["name"] + "."
        sparse_slots = envs.get_global_env(prefix + "sparse_slots")
        dense_slots = envs.get_global_env(prefix + "dense_slots")
        if sparse_slots is None and dense_slots is None:
            return

        # BUGFIX: the original code only checked that *at least one* of the
        # two entries was configured, then called .strip() on both — an
        # AttributeError when exactly one was None.  Treat a missing entry
        # as an empty slot list instead.
        sparse_slots = (sparse_slots.strip().split(" ")
                        if sparse_slots is not None else [])
        dense_slots = (dense_slots.strip().split(" ")
                       if dense_slots is not None else [])

        # "slot:[d1,d2]" -> name "slot", shape [d1, d2]
        dense_slots_shape = [[
            int(dim) for dim in slot.split(":")[1].strip("[]").split(",")
        ] for slot in dense_slots]
        dense_slots = [slot.split(":")[0] for slot in dense_slots]

        self._dense_data_var = []
        for i in range(len(dense_slots)):
            var = fluid.layers.data(
                name=dense_slots[i],
                shape=dense_slots_shape[i],
                dtype="float32")
            self._data_var.append(var)
            self._dense_data_var.append(var)

        self._sparse_data_var = []
        for slot_name in sparse_slots:
            var = fluid.layers.data(
                name=slot_name, shape=[1], lod_level=1, dtype="int64")
            self._data_var.append(var)
            self._sparse_data_var.append(var)
X
xujiaqi01 已提交
94 95

    def _init_dataloader(self):
        """Build the (non-iterable, single-buffered) training DataLoader
        that feeds ``self._data_var``."""
        loader = fluid.io.DataLoader.from_generator(
            feed_list=self._data_var,
            capacity=64,
            iterable=False,
            use_double_buffer=False)
        self._data_loader = loader
T
tangwei 已提交
102 103 104 105

    def get_inputs(self):
        return self._data_var

M
malin10 已提交
106 107 108 109 110 111
    def get_infer_inputs(self):
        return self._infer_data_var

    def get_infer_results(self):
        return self._infer_results

T
tangwei 已提交
112
    def get_avg_cost(self):
T
tangwei 已提交
113 114 115 116 117 118 119 120 121 122 123 124
        """R
        """
        return self._cost

    def get_metrics(self):
        """R
        """
        return self._metrics

    def get_fetch_period(self):
        return self._fetch_interval

X
test  
xjqbest 已提交
125
    def _build_optimizer(self, name, lr, strategy=None):
        """Create a fluid optimizer by name.

        Args:
            name: optimizer name, case-insensitive; one of SGD/Adam/Adagrad.
            lr: learning rate (float or variable).
            strategy: reserved parameter; currently unused.

        Returns:
            A ``fluid.optimizer`` instance.

        Raises:
            ValueError: if ``name`` is not a supported optimizer.
        """
        name = name.upper()
        # FIX: the original duplicated this membership test, leaving a second
        # identical `raise` in an unreachable else-branch; one guard suffices.
        if name not in ("SGD", "ADAM", "ADAGRAD"):
            raise ValueError(
                "configured optimizer can only supported SGD/Adam/Adagrad")

        if name == "SGD":
            reg = envs.get_global_env("hyper_parameters.reg", 0.0001,
                                      self._namespace)
            optimizer_i = fluid.optimizer.SGD(
                lr, regularization=fluid.regularizer.L2DecayRegularizer(reg))
        elif name == "ADAM":
            optimizer_i = fluid.optimizer.Adam(lr, lazy_mode=True)
        else:  # ADAGRAD — the guard above excludes everything else
            optimizer_i = fluid.optimizer.Adagrad(lr)

        return optimizer_i

    def optimizer(self):
        """Build the optimizer configured under hyper_parameters."""
        lr = envs.get_global_env("hyper_parameters.learning_rate",
                                 None, self._namespace)
        opt_name = envs.get_global_env("hyper_parameters.optimizer", None,
                                       self._namespace)
        return self._build_optimizer(opt_name, lr)

X
fix  
xjqbest 已提交
154 155 156 157
    def input_data(self, is_infer=False, **kwargs):
        """Declare input variables from the slot config of the named dataset.

        Args:
            is_infer: unused here; kept so subclass overrides can distinguish
                train and infer inputs.
            **kwargs: must contain "dataset_name".

        Returns:
            A list of input variables (dense first, then sparse), or None
            when the dataset declares no slots at all.  Also populates
            ``self._dense_data_var`` / ``self._sparse_data_var``.
        """
        prefix = "dataset." + kwargs.get("dataset_name") + "."
        sparse_slots = envs.get_global_env(prefix + "sparse_slots")
        dense_slots = envs.get_global_env(prefix + "dense_slots")
        if sparse_slots is None and dense_slots is None:
            return None

        # BUGFIX: the original crashed with AttributeError when exactly one
        # of sparse_slots / dense_slots was configured; treat a missing entry
        # as an empty slot list instead.
        sparse_slots = (sparse_slots.strip().split(" ")
                        if sparse_slots is not None else [])
        dense_slots = (dense_slots.strip().split(" ")
                       if dense_slots is not None else [])

        # "slot:[d1,d2]" -> name "slot", shape [d1, d2]
        dense_slots_shape = [[
            int(dim) for dim in slot.split(":")[1].strip("[]").split(",")
        ] for slot in dense_slots]
        dense_slots = [slot.split(":")[0] for slot in dense_slots]

        data_var_ = []
        self._dense_data_var = []
        for i in range(len(dense_slots)):
            var = fluid.layers.data(
                name=dense_slots[i],
                shape=dense_slots_shape[i],
                dtype="float32")
            data_var_.append(var)
            self._dense_data_var.append(var)

        self._sparse_data_var = []
        for slot_name in sparse_slots:
            var = fluid.layers.data(
                name=slot_name, shape=[1], lod_level=1, dtype="int64")
            data_var_.append(var)
            self._sparse_data_var.append(var)
        return data_var_
F
frankwhzhang 已提交
184 185 186 187

    def net(self, is_infer=False):
        return None

F
frankwhzhang 已提交
188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204
    def _construct_reader(self, is_infer=False):
        """Create the DataLoader for the infer graph, or — when the reader is
        configured as "DataLoader" — for the train graph."""
        if is_infer:
            self._infer_data_loader = fluid.io.DataLoader.from_generator(
                feed_list=self._infer_data_var,
                capacity=64,
                use_double_buffer=False,
                iterable=False)
            return
        dataset_class = envs.get_global_env("dataset_class", None,
                                            "train.reader")
        if dataset_class == "DataLoader":
            self._data_loader = fluid.io.DataLoader.from_generator(
                feed_list=self._data_var,
                capacity=64,
                use_double_buffer=False,
                iterable=False)

T
tangwei 已提交
205
    def train_net(self):
        """Build the training program: inputs, then reader, then network."""
        inputs = self.input_data(is_infer=False)
        self._data_var = inputs
        self._construct_reader(is_infer=False)
        self.net(inputs, is_infer=False)
T
tangwei 已提交
210 211

    def infer_net(self):
        """Build the inference program: inputs, then reader, then network."""
        inputs = self.input_data(is_infer=True)
        self._infer_data_var = inputs
        self._construct_reader(is_infer=True)
        self.net(inputs, is_infer=True)