Commit 27d1b8db
Authored on Nov 25, 2019 by wangxiao

python3 ok

Parent: c21afb28
Showing 6 changed files with 14 additions and 13 deletions.
paddlepalm/backbone/utils/transformer.py   +1 -0
paddlepalm/mtl_controller.py               +1 -1
paddlepalm/task_instance.py                +5 -5
paddlepalm/tokenizer/bert_tokenizer.py     +2 -2
paddlepalm/tokenizer/ernie_tokenizer.py    +2 -2
paddlepalm/utils/reader_helper.py          +3 -3
paddlepalm/backbone/utils/transformer.py

@@ -24,6 +24,7 @@ import paddle.fluid as fluid
 import paddle.fluid.layers as layers
 from paddle.fluid.layer_helper import LayerHelper as LayerHelper
+from functools import reduce  # py3

 def layer_norm(x, begin_norm_axis=1, epsilon=1e-6, param_attr=None, bias_attr=None):
     helper = LayerHelper('layer_norm', **locals())
     mean = layers.reduce_mean(x, dim=begin_norm_axis, keep_dim=True)
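The only change in transformer.py is the added import: reduce was a builtin in Python 2 but lives in functools in Python 3, so a module that calls it must import it explicitly. A minimal, illustrative sketch (the shape/product example below is not taken from this file):

# Python 3: reduce must be imported from functools; it is no longer a builtin.
from functools import reduce

# Illustrative only -- computing the number of elements of a hypothetical shape.
shape = [2, 3, 4]
num_elements = reduce(lambda a, b: a * b, shape, 1)
assert num_elements == 24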
paddlepalm/mtl_controller.py

@@ -95,7 +95,7 @@ def _try_float(s):
 def _check_conf(conf, checklist=None):
     assert isinstance(conf, dict), "{} is not a dict.".format(conf)
     ret = {}
-    for k, v in conf.items():
+    for k, v in list(conf.items()):
         if isinstance(v, str):
             v = _try_float(v)
         ret[k] = v
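The change above wraps conf.items() in list(). In Python 3, dict.items() returns a view rather than a list, so materializing it with list() restores the Python 2 snapshot semantics and guards against a RuntimeError if keys are ever added or removed during the loop. A small sketch with a hypothetical config dict (not the real _check_conf / _try_float code):

# Hypothetical config values; _try_float is replaced with float() here.
conf = {"lr": "0.001", "epochs": "3"}

ret = {}
# list(...) snapshots the items() view before iterating (Python 2-like behavior).
for k, v in list(conf.items()):
    if isinstance(v, str):
        v = float(v)
    ret[k] = v

print(ret)  # {'lr': 0.001, 'epochs': 3.0}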
paddlepalm/task_instance.py

@@ -92,7 +92,7 @@ class TaskInstance(object):
         output_vars = self._task_layer[phase].build(net_inputs, scope_name=scope)
         if phase == 'pred':
             if output_vars is not None:
-                self._pred_fetch_name_list, self._pred_fetch_var_list = zip(*output_vars.items())
+                self._pred_fetch_name_list, self._pred_fetch_var_list = list(zip(*list(output_vars.items())))  # py3
             else:
                 self._pred_fetch_name_list = []
                 self._pred_fetch_var_list = []

@@ -113,7 +113,7 @@ class TaskInstance(object):
         fluid.io.save_inference_model(dirpath, self._pred_input_varname_list, self._pred_fetch_var_list, self._exe, prog)

         conf = {}
-        for k, strv in self._save_protocol.items():
+        for k, strv in list(self._save_protocol.items()):  # py3
             exec('v={}'.format(strv))
             conf[k] = v
         with open(os.path.join(dirpath, '__conf__'), 'w') as writer:

@@ -123,7 +123,7 @@ class TaskInstance(object):
     def load(self, infer_model_path=None):
         if infer_model_path is None:
             infer_model_path = self._save_infermodel_path
-        for k, v in json.load(open(os.path.join(infer_model_path, '__conf__'))).items():
+        for k, v in list(json.load(open(os.path.join(infer_model_path, '__conf__'))).items()):  # py3
             strv = self._save_protocol[k]
             exec('{}=v'.format(strv))
         pred_prog, self._pred_input_varname_list, self._pred_fetch_var_list = \

@@ -167,13 +167,13 @@ class TaskInstance(object):
     @property
     def pred_input(self):
-        return zip(*[self._pred_input_name_list, self._pred_input_varname_list])
+        return list(zip(*[self._pred_input_name_list, self._pred_input_varname_list]))  # py3

     @pred_input.setter
     def pred_input(self, val):
         assert isinstance(val, dict)
         self._pred_input_name_list, self._pred_input_varname_list = \
-            zip(*[[k, v.name] for k, v in val.items()])
+            list(zip(*[[k, v.name] for k, v in list(val.items())]))  # py3

     @property
     def pred_fetch_list(self):
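Most edits in task_instance.py follow one pattern: in Python 3, zip() returns a lazy, single-use iterator and dict.items() returns a view, so values that are stored on the instance or reused later are wrapped in list(). A minimal sketch of why the wrapper matters, using illustrative names rather than the real TaskInstance attributes:

# Illustrative stand-ins for the prediction input name/var lists.
names = ["logits", "loss"]
variables = ["task/logits_var", "task/loss_var"]

pairs = zip(names, variables)
print(list(pairs))  # [('logits', 'task/logits_var'), ('loss', 'task/loss_var')]
print(list(pairs))  # [] -- a Python 3 zip object is exhausted after one pass

# Wrapping in list() gives a reusable, indexable result, as in Python 2.
pairs = list(zip(names, variables))
print(pairs[0])     # ('logits', 'task/logits_var')
print(len(pairs))   # 2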
paddlepalm/tokenizer/bert_tokenizer.py

@@ -111,7 +111,7 @@ class FullTokenizer(object):
     def __init__(self, vocab_file, do_lower_case=True):
         self.vocab = load_vocab(vocab_file)
-        self.inv_vocab = {v: k for k, v in self.vocab.items()}
+        self.inv_vocab = {v: k for k, v in list(self.vocab.items())}
         self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case)
         self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)

@@ -135,7 +135,7 @@ class CharTokenizer(object):
     def __init__(self, vocab_file, do_lower_case=True):
         self.vocab = load_vocab(vocab_file)
-        self.inv_vocab = {v: k for k, v in self.vocab.items()}
+        self.inv_vocab = {v: k for k, v in list(self.vocab.items())}
         self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)

     def tokenize(self, text):
paddlepalm/tokenizer/ernie_tokenizer.py

@@ -115,7 +115,7 @@ class FullTokenizer(object):
     def __init__(self, vocab_file, do_lower_case=True):
         self.vocab = load_vocab(vocab_file)
-        self.inv_vocab = {v: k for k, v in self.vocab.items()}
+        self.inv_vocab = {v: k for k, v in list(self.vocab.items())}
         self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case)
         self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)

@@ -139,7 +139,7 @@ class CharTokenizer(object):
     def __init__(self, vocab_file, do_lower_case=True):
         self.vocab = load_vocab(vocab_file)
-        self.inv_vocab = {v: k for k, v in self.vocab.items()}
+        self.inv_vocab = {v: k for k, v in list(self.vocab.items())}
         self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)

     def tokenize(self, text):
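The changes in bert_tokenizer.py and ernie_tokenizer.py are identical: the inverse-vocabulary comprehension now iterates over list(self.vocab.items()). In Python 3 the comprehension would also work directly on the items() view; the list() call is a defensive snapshot and yields the same id-to-token mapping. A small sketch with a toy vocabulary (not a real vocab_file):

# Toy vocabulary, not loaded from an actual vocab file.
vocab = {"[PAD]": 0, "[UNK]": 1, "hello": 2}

# Inverse mapping: id -> token, built from a snapshot of the items() view.
inv_vocab = {v: k for k, v in list(vocab.items())}

assert inv_vocab == {0: "[PAD]", 1: "[UNK]", 2: "hello"}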
paddlepalm/utils/reader_helper.py

@@ -87,7 +87,7 @@ def create_iterator_fn(iterator, iterator_prefix, shape_and_dtypes, outname_to_p
         outputs = next(iterator)  # dict type
         prefix = iterator_prefixe
-        for outname, val in outputs.items():
+        for outname, val in list(outputs.items()):
             task_outname = prefix + '/' + outname

             if outname in outname_to_pos:

@@ -121,7 +121,7 @@ def create_joint_iterator_fn(iterators, iterator_prefixes, joint_shape_and_dtype
             outputs = next(iterators[id])  # dict type
             outbuf[id] = outputs
             prefix = iterator_prefixes[id]
-            for outname, val in outputs.items():
+            for outname, val in list(outputs.items()):
                 task_outname = prefix + '/' + outname

                 if outname in outname_to_pos:

@@ -176,7 +176,7 @@ def create_joint_iterator_fn(iterators, iterator_prefixes, joint_shape_and_dtype
                     has_show_warn = True
             prefix = iterator_prefixes[id]
-            for outname, val in outputs.items():
+            for outname, val in list(outputs.items()):
                 if v > 0:
                     print('reader generate: ' + outname)
                 task_outname = prefix + '/' + outname
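All three hunks in reader_helper.py apply the same list(outputs.items()) wrapper inside the reader loops, so each loop iterates over a snapshot of the outputs dict under Python 3. A minimal sketch of the surrounding pattern, using a made-up output dict, prefix, and position map rather than a real reader:

# Hypothetical reader output, task prefix, and position map -- not from a real iterator.
outputs = {"input_ids": [1, 2, 3], "label": [0]}
prefix = "task_a"
outname_to_pos = {"input_ids": 0, "label": 1}

# Snapshot the items() view before looping, mirroring the diff above.
for outname, val in list(outputs.items()):
    task_outname = prefix + '/' + outname  # e.g. 'task_a/input_ids'
    if outname in outname_to_pos:
        print(task_outname, '-> position', outname_to_pos[outname])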