import abc

import paddle.fluid as fluid

from fleetrec.core.utils import envs


class Model(object):
    """R
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, config):
        """R
        """
        self._cost = None      # loss variable, built by train_net()
        self._metrics = {}     # metric variables to fetch during training
        self._data_var = []    # input variables of the network
        self._data_loader = None
        self._fetch_interval = 20  # fetch metrics every 20 batches by default
        self._namespace = "train.model"  # config namespace for this model
        self._platform = envs.get_platform()

    def get_inputs(self):
        return self._data_var

    def get_cost_op(self):
        """R
        """
        return self._cost

    def get_metrics(self):
        """R
        """
        return self._metrics

    def get_fetch_period(self):
        return self._fetch_interval

    def _build_optimizer(self, name, lr):
        name = name.upper()
        optimizers = ["SGD", "ADAM", "ADAGRAD"]
        if name not in optimizers:
            raise ValueError("configured optimizer can only supported SGD/Adam/Adagrad")

        if name == "SGD":
            reg = envs.get_global_env("hyper_parameters.reg", 0.0001, self._namespace)
            optimizer_i = fluid.optimizer.SGD(lr, regularization=fluid.regularizer.L2DecayRegularizer(reg))
        elif name == "ADAM":
            optimizer_i = fluid.optimizer.Adam(lr, lazy_mode=True)
        elif name == "ADAGRAD":
            optimizer_i = fluid.optimizer.Adagrad(lr)
        else:
            raise ValueError("configured optimizer can only supported SGD/Adam/Adagrad")

        return optimizer_i

    def optimizer(self):
        learning_rate = envs.get_global_env("hyper_parameters.learning_rate", None, self._namespace)
        optimizer = envs.get_global_env("hyper_parameters.optimizer", None, self._namespace)
        print(">>>>>>>>>>>.learnig rate: %s" %learning_rate)
        return self._build_optimizer(optimizer, learning_rate)
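
    # For reference, a hypothetical config fragment that optimizer() would
    # read via envs.get_global_env under the "train.model" namespace; the
    # exact YAML layout is an assumption, not taken from the original file:
    #
    #   train:
    #     model:
    #       hyper_parameters:
    #         learning_rate: 0.001
    #         optimizer: sgd
    #         reg: 0.0001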

    @abc.abstractmethod
    def train_net(self):
        """R
        """
        pass

    @abc.abstractmethod
    def infer_net(self):
        """Build the inference network.
        """
        pass
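

# A minimal sketch of a concrete subclass, assuming the fluid 1.x layers API;
# the network and field names are illustrative, not part of the original file.
class ExampleLinearModel(Model):
    def train_net(self):
        # Declare inputs and register them so get_inputs() exposes them.
        x = fluid.layers.data(name="x", shape=[13], dtype="float32")
        y = fluid.layers.data(name="y", shape=[1], dtype="float32")
        self._data_var = [x, y]
        # A single linear layer trained with mean squared error.
        pred = fluid.layers.fc(input=x, size=1)
        self._cost = fluid.layers.reduce_mean(
            fluid.layers.square_error_cost(input=pred, label=y))
        self._metrics = {"cost": self._cost}

    def infer_net(self):
        pass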