Unverified commit 566baa2f, authored by W wuzhihua, committed by GitHub

Merge branch 'master' into doc_v4

@@ -177,6 +177,7 @@ python -m paddlerec.run -m ./models/rank/dnn/config.yaml -b backend.yaml
 | Multi-task | [ESMM](models/multitask/esmm/model.py) | ✓ | ✓ | ✓ |
 | Multi-task | [MMOE](models/multitask/mmoe/model.py) | ✓ | ✓ | ✓ |
 | Multi-task | [ShareBottom](models/multitask/share-bottom/model.py) | ✓ | ✓ | ✓ |
+| Re-rank | [Listwise](models/rerank/listwise/model.py) | ✓ | x | ✓ |
......
@@ -37,6 +37,10 @@ class Model(object):
         self._fetch_interval = 20
         self._namespace = "train.model"
         self._platform = envs.get_platform()
+        self._init_hyper_parameters()
+
+    def _init_hyper_parameters(self):
+        pass
 
     def _init_slots(self):
         sparse_slots = envs.get_global_env("sparse_slots", None,
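The `_init_hyper_parameters` hook added here runs at the end of `Model.__init__`, giving subclasses one place to resolve hyper-parameters before any network is built. A minimal sketch of an override, assuming a hypothetical model class and config key (neither is part of this commit):

    # Hypothetical subclass for illustration only; the key
    # "hyper_parameters.learning_rate" and its default are assumptions.
    class MyRankModel(Model):
        def _init_hyper_parameters(self):
            # envs.get_global_env(name, default, namespace), as used
            # elsewhere in this file; self._namespace is "train.model".
            self.learning_rate = envs.get_global_env(
                "hyper_parameters.learning_rate", 0.001, self._namespace)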
@@ -129,12 +133,65 @@ class Model(object):
         print(">>>>>>>>>>>.learnig rate: %s" % learning_rate)
         return self._build_optimizer(optimizer, learning_rate)
 
-    @abc.abstractmethod
+    def input_data(self, is_infer=False):
+        sparse_slots = envs.get_global_env("sparse_slots", None,
+                                           "train.reader")
+        dense_slots = envs.get_global_env("dense_slots", None, "train.reader")
+        if sparse_slots is not None or dense_slots is not None:
+            sparse_slots = sparse_slots.strip().split(" ")
+            dense_slots = dense_slots.strip().split(" ")
+            dense_slots_shape = [[
+                int(j) for j in i.split(":")[1].strip("[]").split(",")
+            ] for i in dense_slots]
+            dense_slots = [i.split(":")[0] for i in dense_slots]
+            self._dense_data_var = []
+            data_var_ = []
+            for i in range(len(dense_slots)):
+                l = fluid.layers.data(
+                    name=dense_slots[i],
+                    shape=dense_slots_shape[i],
+                    dtype="float32")
+                data_var_.append(l)
+                self._dense_data_var.append(l)
+            self._sparse_data_var = []
+            for name in sparse_slots:
+                l = fluid.layers.data(
+                    name=name, shape=[1], lod_level=1, dtype="int64")
+                data_var_.append(l)
+                self._sparse_data_var.append(l)
+            return data_var_
+        else:
+            return None
+
+    def net(self, is_infer=False):
+        return None
+
+    def _construct_reader(self, is_infer=False):
+        if is_infer:
+            self._infer_data_loader = fluid.io.DataLoader.from_generator(
+                feed_list=self._infer_data_var,
+                capacity=64,
+                use_double_buffer=False,
+                iterable=False)
+        else:
+            dataset_class = envs.get_global_env("dataset_class", None,
+                                                "train.reader")
+            if dataset_class == "DataLoader":
+                self._data_loader = fluid.io.DataLoader.from_generator(
+                    feed_list=self._data_var,
+                    capacity=64,
+                    use_double_buffer=False,
+                    iterable=False)
+
     def train_net(self):
-        """R
-        """
-        pass
+        input_data = self.input_data(is_infer=False)
+        self._data_var = input_data
+        self._construct_reader(is_infer=False)
+        self.net(input_data, is_infer=False)
 
-    @abc.abstractmethod
     def infer_net(self):
-        pass
+        input_data = self.input_data(is_infer=True)
+        self._infer_data_var = input_data
+        self._construct_reader(is_infer=True)
+        self.net(input_data, is_infer=True)
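The slot parsing inside `input_data` is easiest to follow on a concrete value. This standalone sketch repeats the same string handling; the `I1:1 I2:1` entries come from the config below, while `I3:[2,3]` is made up here to show that bracketed multi-dimensional shapes also parse (pure Python, no Paddle needed):

    # Same parsing as input_data, applied to a "name:shape" slot string.
    dense_slots = "I1:1 I2:1 I3:[2,3]".strip().split(" ")
    dense_slots_shape = [
        [int(j) for j in i.split(":")[1].strip("[]").split(",")]
        for i in dense_slots
    ]
    dense_names = [i.split(":")[0] for i in dense_slots]
    print(dense_names)        # ['I1', 'I2', 'I3']
    print(dense_slots_shape)  # [[1], [1], [2, 3]]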
@@ -22,7 +22,7 @@ train:
   reader:
     batch_size: 2
-    train_data_path: "{workspace}/data/slot_train"
+    train_data_path: "{workspace}/data/sample_data/train"
     feat_dict_name: "{workspace}/data/vocab"
     sparse_slots: "label C1 C2 C3 C4 C5 C6 C7 C8 C9 C10 C11 C12 C13 C14 C15 C16 C17 C18 C19 C20 C21 C22 C23 C24 C25 C26"
     dense_slots: "I1:1 I2:1 I3:1 I4:1 I5:1 I6:1 I7:1 I8:1 I9:1 I10:1 I11:1 I12:1 I13:1"
@@ -35,7 +35,7 @@ train:
       l2_reg_cross: 0.00005
       dnn_use_bn: False
       clip_by_norm: 100.0
-      cat_feat_num: "{workspace}/data/cat_feature_num.txt"
+      cat_feat_num: "{workspace}/data/sample_data/cat_feature_num.txt"
       is_sparse: False
       is_test: False
       num_field: 39
......
C1 139
C2 422
C3 1548
C4 1965
C5 54
C6 10
C7 3213
C8 81
C9 3
C10 2402
C11 2246
C12 1583
C13 1911
C14 24
C15 2011
C16 1731
C17 9
C18 1197
C19 584
C20 3
C21 1652
C22 8
C23 14
C24 1770
C25 40
C26 1349
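Each row of this file pairs a categorical field with its vocabulary size (C1 has 139 distinct values, C3 has 1548, and so on); the config above points to it via `cat_feat_num`. A minimal loader sketch, assuming only the two-column whitespace-separated format shown here (the function name is hypothetical, not part of this commit):

    # Illustrative loader for the "name count" rows above.
    def load_cat_feature_num(path):
        cat_feat_dims = {}
        with open(path) as f:
            for line in f:
                if not line.strip():
                    continue
                name, num = line.split()
                cat_feat_dims[name] = int(num)
        return cat_feat_dims

    # load_cat_feature_num("cat_feature_num.txt")["C1"] -> 139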
label:0 I1:0.69314718056 I2:1.60943791243 I3:1.79175946923 I4:0.0 I5:7.23201033166 I6:1.60943791243 I7:2.77258872224 I8:1.09861228867 I9:5.20400668708 I10:0.69314718056 I11:1.09861228867 I12:0 I13:1.09861228867 C1:95 C2:398 C3:0 C4:0 C5:53 C6:1 C7:73 C8:71 C9:3 C10:1974 C11:832 C12:0 C13:875 C14:8 C15:1764 C16:0 C17:5 C18:390 C19:226 C20:1 C21:0 C22:0 C23:8 C24:1759 C25:1 C26:862
label:0 I1:1.09861228867 I2:1.38629436112 I3:3.80666248977 I4:0.69314718056 I5:4.63472898823 I6:2.19722457734 I7:1.09861228867 I8:1.09861228867 I9:1.60943791243 I10:0.69314718056 I11:0.69314718056 I12:0 I13:1.60943791243 C1:95 C2:200 C3:1184 C4:1929 C5:53 C6:4 C7:1477 C8:2 C9:3 C10:1283 C11:1567 C12:1048 C13:271 C14:6 C15:1551 C16:899 C17:1 C18:162 C19:226 C20:2 C21:575 C22:0 C23:8 C24:1615 C25:1 C26:659
label:0 I1:1.09861228867 I2:1.38629436112 I3:0.69314718056 I4:2.7080502011 I5:6.64378973315 I6:4.49980967033 I7:1.60943791243 I8:1.09861228867 I9:5.50533153593 I10:0.69314718056 I11:1.38629436112 I12:1.38629436112 I13:3.82864139649 C1:123 C2:378 C3:991 C4:197 C5:53 C6:1 C7:689 C8:2 C9:3 C10:245 C11:623 C12:1482 C13:887 C14:21 C15:106 C16:720 C17:3 C18:768 C19:0 C20:0 C21:1010 C22:1 C23:8 C24:720 C25:0 C26:0
label:0 I1:0 I2:6.79905586206 I3:0 I4:0 I5:8.38776764398 I6:0 I7:0.0 I8:0.0 I9:0.0 I10:0 I11:0.0 I12:0 I13:0 C1:95 C2:227 C3:0 C4:219 C5:53 C6:4 C7:3174 C8:2 C9:3 C10:569 C11:1963 C12:0 C13:1150 C14:21 C15:1656 C16:0 C17:6 C18:584 C19:0 C20:0 C21:0 C22:0 C23:8 C24:954 C25:0 C26:0
label:0 I1:1.38629436112 I2:1.09861228867 I3:0 I4:0.0 I5:1.09861228867 I6:0.0 I7:1.38629436112 I8:0.0 I9:0.0 I10:0.69314718056 I11:0.69314718056 I12:0 I13:0.0 C1:121 C2:147 C3:0 C4:1356 C5:53 C6:7 C7:2120 C8:2 C9:3 C10:703 C11:1678 C12:1210 C13:1455 C14:8 C15:538 C16:1276 C17:6 C18:346 C19:0 C20:0 C21:944 C22:0 C23:10 C24:355 C25:0 C26:0
label:0 I1:0 I2:1.09861228867 I3:0 I4:0 I5:9.45915167004 I6:0 I7:0.0 I8:0.0 I9:1.94591014906 I10:0 I11:0.0 I12:0 I13:0 C1:14 C2:75 C3:993 C4:480 C5:50 C6:6 C7:1188 C8:2 C9:3 C10:245 C11:1037 C12:1365 C13:1421 C14:21 C15:786 C16:5 C17:2 C18:555 C19:0 C20:0 C21:1408 C22:6 C23:7 C24:753 C25:0 C26:0
label:0 I1:0 I2:1.60943791243 I3:1.09861228867 I4:0 I5:8.06117135969 I6:0 I7:0.0 I8:0.69314718056 I9:1.09861228867 I10:0 I11:0.0 I12:0 I13:0 C1:139 C2:343 C3:553 C4:828 C5:50 C6:4 C7:0 C8:2 C9:3 C10:245 C11:2081 C12:260 C13:455 C14:21 C15:122 C16:1159 C17:2 C18:612 C19:0 C20:0 C21:1137 C22:0 C23:1 C24:1583 C25:0 C26:0
label:1 I1:0.69314718056 I2:2.07944154168 I3:1.09861228867 I4:0.0 I5:0.0 I6:0.0 I7:0.69314718056 I8:0.0 I9:0.0 I10:0.69314718056 I11:0.69314718056 I12:0 I13:0.0 C1:95 C2:227 C3:0 C4:1567 C5:21 C6:7 C7:2496 C8:71 C9:3 C10:1913 C11:2212 C12:0 C13:673 C14:21 C15:1656 C16:0 C17:5 C18:584 C19:0 C20:0 C21:0 C22:0 C23:10 C24:954 C25:0 C26:0
label:0 I1:0 I2:3.87120101091 I3:1.60943791243 I4:2.19722457734 I5:9.85277303799 I6:5.52146091786 I7:3.36729582999 I8:3.4657359028 I9:4.9558270576 I10:0 I11:0.69314718056 I12:0 I13:2.19722457734 C1:14 C2:14 C3:454 C4:197 C5:53 C6:1 C7:1386 C8:2 C9:3 C10:0 C11:1979 C12:205 C13:214 C14:6 C15:1837 C16:638 C17:5 C18:6 C19:0 C20:0 C21:70 C22:0 C23:10 C24:720 C25:0 C26:0
label:0 I1:0 I2:3.66356164613 I3:0 I4:0.69314718056 I5:10.4263800775 I6:3.09104245336 I7:0.69314718056 I8:1.09861228867 I9:1.38629436112 I10:0 I11:0.69314718056 I12:0 I13:0.69314718056 C1:14 C2:179 C3:120 C4:746 C5:53 C6:0 C7:1312 C8:2 C9:3 C10:1337 C11:1963 C12:905 C13:1150 C14:21 C15:1820 C16:328 C17:9 C18:77 C19:0 C20:0 C21:311 C22:0 C23:10 C24:89 C25:0 C26:0
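These sample rows are in slot format: space-separated `slot:value` pairs covering a binary `label`, 13 log-transformed dense features `I1..I13`, and 26 integer-encoded categorical features `C1..C26`, matching the `sparse_slots`/`dense_slots` strings in the config above. A minimal parsing sketch (illustrative only, not the project's reader class):

    # Parse one slot-format sample line into (label, dense, sparse).
    def parse_slot_line(line):
        feats = dict(kv.split(":", 1) for kv in line.strip().split(" "))
        label = int(feats["label"])
        dense = [float(feats["I%d" % i]) for i in range(1, 14)]
        sparse = [int(feats["C%d" % i]) for i in range(1, 27)]
        return label, dense, sparse

The hexadecimal strings that follow appear to be raw categorical vocabulary files from which integer IDs like these are derived.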
f434fac1
e6051457
7e5c2ff4
abca0bad
3b509222
340c148e
48f8c5b9
3c9d8785
585b6ccc
561bf9d4
b474c2c2
c1730738
92fb1d87
05db9164
c35dc981
ae82ea21
824be517
16a99cfb
e8ef605b
88abfaf6
7ceef477
17f69355
1464facd
f0a33555
80e4d755
3ec5d916
f5c9f18c
87552397
5ebc3192
426610d2
eb6dcae0
651f6a2d
7f9f4eb6
bd4b6d14
3560b08b
8068dc7e
9660b97b
9eb7531c
2d4ea12b
87773c45
5a9ed9b0
f473b8dc
b19f768d
70d60005
89889f05
c71ae391
c6dce90e
64e77ae7
0e78bd46
75ac2fe6
42a16b9a
19c5f803
cbffbdad
bfb430af
127f4a6b
6ca3af46
2b3bff44
8a033483
45cb84c9
554adfdb
46300ee3
a14cf13a
d0d66375
da4eff0f
4265881a
9684fd4d
7382c353
50d4de26
60c68845
e3493c7c
09ca0b81
3b65d647
98237733
fc9c62bb
41edac3d
dbfc8345
39af2607
581e410c
55845e1c
28e55712
6bcf7a5b
66651cdf
2b92c0d2
24eda356
dbe63c2b
9a89b36c
489d0f96
dac91c28
dc5ebbd9
1a5f926e
885aeecb
f1548e14
6062d843
c2a5852e
68fd1e64
be589b51
b455c6d7
cd3695ae
291b7ba2
2998a458
5e53cc38
dbe15b41
ff5f3ab9
49f631b8
3b1bc654
36a5b3ff
fbc55dae
467085ca
06584483
3f6e3c8b
3cc2325b
ff004ae3
eb6ac63c
0a16e1d4
34f74dfd
decf6fa6
18988050
c512b859
a86f8721
5bfa8ab5
8cf07265
dd14f377
287e684f
49c4b7c4
2ebc17d3
8c6ba407
fb174e6b
4615a3b6
394fc830
9e9d28f5
241546e0
4a4e85c4
26428e51
940683b1
65aada8c
ba454362
d4b08d58
49807078
439a44a4
(4 file diffs collapsed.)
26ac7cf4
cf1fc48d
ad1cc976
e8dce07a
5aebfb83
b28479f6
64c94865
1adce6ef
32813e21
dcd762ee
051219e6
243a4e68
687dfaf4
f862f261
ab7390e9
f7c1b33f
d2dfe871
91233270
ec19f520
8ceecbc8
07d13a8f
0601d3b5
cfef1c29
0bc7c8c2
(2 file diffs collapsed.)
07c540c4
776ce399
8efede7f
3486227d
e5ba7672
1e88c74f
2005abd1
27c07bd6
d4bb7bd8
(4 file diffs collapsed.)
ad3062eb
49e825c5
ccfd4002
c9d4222a
78e2e389
8ec974f4
8651fddb
c0061c6d
93bad2c0
c3dc6cef
423fab69
25e3c76b
b264a060
85d5a995
be7c41b4
3a171ecb
c7dc6720
32c7478e
72592995
dbb486d7
bcdee96c
55dd3565
(6 file diffs collapsed.)
7e0ccccf
f1f2de2d
c05778d5
fe6b92e5
c76aecf6
6f6d9be8
13718bbd
e3520422
3bf701e7
fbad5c96
(22 file diffs collapsed.)