Created by: Aurelius84
- Adds the pyramid hash embedding op (`fluid.contrib.search_pyramid_hash`)
Example usage:
# Example: build and run the pyramid hash embedding op on CPU.
import numpy as np
import paddle.fluid as fluid

vocab_size = 128
emb_size = 64
data_shape = [16, 10]
data_lod = [[3, 5, 2, 6]]

# Level-1 LoD input placeholder holding int32 token ids.
tokens = fluid.data(name='x', shape=data_shape, dtype='int32', lod_level=1)

# Build the pyramid hash embedding layer.
embedding = fluid.contrib.search_pyramid_hash(
    input=tokens,
    num_emb=emb_size,
    space_len=vocab_size * emb_size,
    pyramid_layer=4,
    rand_len=16,
    drop_out_percent=0.5,
    is_training=True,
    use_filter=False,
    white_list_len=6400,
    black_list_len=2800,
    seed=3,
    lr=0.002,
    param_attr=fluid.ParamAttr(
        name="PyramidHash_emb_0",
        learning_rate=0, ),
    param_attr_wl=fluid.ParamAttr(
        name="Filter",
        learning_rate=0, ),
    param_attr_bl=None,
    name=None, )

# Feed random token ids wrapped as a LoD tensor and fetch the embedding.
place = fluid.CPUPlace()
feed_tensor = fluid.create_lod_tensor(
    np.random.randint(0, vocab_size, data_shape).astype('int32'),
    data_lod, place)

executor = fluid.Executor(place)
executor.run(fluid.default_startup_program())
result = executor.run(feed={'x': feed_tensor},
                      fetch_list=[embedding],
                      return_numpy=False)