Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
PaddlePaddle
PaddleX
提交
3fef7f9a
P
PaddleX
项目概览
PaddlePaddle
/
PaddleX
通知
138
Star
4
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
43
列表
看板
标记
里程碑
合并请求
5
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
P
PaddleX
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
43
Issue
43
列表
看板
标记
里程碑
合并请求
5
合并请求
5
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
3fef7f9a
编写于
6月 03, 2020
作者:
S
seven
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
Fix NormLIME when the number of test samples is not enough; add logging before the tqdm bar.
上级
a7aa87a6
变更
2
隐藏空白更改
内联
并排
Showing
2 changed files
with
80 additions
and
13 deletions
+80
-13
paddlex/interpret/core/lime_base.py
paddlex/interpret/core/lime_base.py
+3
-0
paddlex/interpret/core/normlime_base.py
paddlex/interpret/core/normlime_base.py
+77
-13
未找到文件。
paddlex/interpret/core/lime_base.py
浏览文件 @
3fef7f9a
...
...
@@ -544,6 +544,9 @@ class LimeImageInterpreter(object):
labels
=
[]
data
[
0
,
:]
=
1
imgs
=
[]
logging
.
info
(
"Computing LIME."
,
use_color
=
True
)
for
row
in
tqdm
.
tqdm
(
data
):
temp
=
copy
.
deepcopy
(
image
)
zeros
=
np
.
where
(
row
==
0
)[
0
]
...
...
paddlex/interpret/core/normlime_base.py
浏览文件 @
3fef7f9a
...
...
@@ -287,8 +287,14 @@ def precompute_global_classifier(dataset,
x_data
=
[]
y_labels
=
[]
num_features
=
len
(
kmeans_model
.
cluster_centers_
)
logging
.
info
(
"Initialization for NormLIME: Computing each sample in the test list."
,
use_color
=
True
)
for
each_data_
in
tqdm
.
tqdm
(
image_list
):
x_data_i
=
np
.
zeros
((
len
(
kmeans_model
.
cluster_centers_
)
))
x_data_i
=
np
.
zeros
((
num_features
))
image_show
=
read_image
(
each_data_
)
result
=
predict_fn
(
image_show
)
result
=
result
[
0
]
# only one image here.
...
...
@@ -324,28 +330,86 @@ def precompute_global_classifier(dataset,
y_labels
.
append
(
pred_y_i
)
x_data
.
append
(
x_data_i
)
if
len
(
np
.
unique
(
y_labels
))
<
2
:
logging
.
info
(
"Warning: The test samples in the dataset is limited.
\n
\
NormLIME may have no effect on the results.
\n
\
Try to add more test samples, or see the results of LIME."
)
num_classes
=
np
.
max
(
np
.
unique
(
y_labels
))
+
1
normlime_weights_all_labels
=
{}
for
class_index
in
range
(
num_classes
):
w
=
np
.
ones
((
num_features
))
/
num_features
normlime_weights_all_labels
[
class_index
]
=
{
i
:
wi
for
i
,
wi
in
enumerate
(
w
)
}
logging
.
info
(
"Saving the computed normlime_weights in {}"
.
format
(
save_path
))
np
.
save
(
save_path
,
normlime_weights_all_labels
)
return
save_path
clf
=
LogisticRegression
(
multi_class
=
'multinomial'
,
max_iter
=
1000
)
clf
.
fit
(
x_data
,
y_labels
)
num_classes
=
len
(
np
.
unique
(
y_labels
))
num_classes
=
np
.
max
(
np
.
unique
(
y_labels
))
+
1
normlime_weights_all_labels
=
{}
for
class_index
in
range
(
num_classes
):
w
=
clf
.
coef_
[
class_index
]
if
len
(
y_labels
)
/
len
(
np
.
unique
(
y_labels
))
<
3
:
logging
.
info
(
"Warning: The test samples in the dataset is limited.
\n
\
NormLIME may have no effect on the results.
\n
\
Try to add more test samples, or see the results of LIME."
)
if
len
(
np
.
unique
(
y_labels
))
==
2
:
# binary: clf.coef_ has shape of [1, num_features]
for
class_index
in
range
(
num_classes
):
if
class_index
not
in
clf
.
classes_
:
w
=
np
.
ones
((
num_features
))
/
num_features
normlime_weights_all_labels
[
class_index
]
=
{
i
:
wi
for
i
,
wi
in
enumerate
(
w
)
}
continue
if
clf
.
classes_
[
0
]
==
class_index
:
w
=
-
clf
.
coef_
[
0
]
else
:
w
=
clf
.
coef_
[
0
]
# softmax
w
=
w
-
np
.
max
(
w
)
exp_w
=
np
.
exp
(
w
*
10
)
w
=
exp_w
/
np
.
sum
(
exp_w
)
normlime_weights_all_labels
[
class_index
]
=
{
i
:
wi
for
i
,
wi
in
enumerate
(
w
)
}
else
:
# clf.coef_ has shape of [len(np.unique(y_labels)), num_features]
for
class_index
in
range
(
num_classes
):
if
class_index
not
in
clf
.
classes_
:
w
=
np
.
ones
((
num_features
))
/
num_features
normlime_weights_all_labels
[
class_index
]
=
{
i
:
wi
for
i
,
wi
in
enumerate
(
w
)
}
continue
coef_class_index
=
np
.
where
(
clf
.
classes_
==
class_index
)[
0
][
0
]
w
=
clf
.
coef_
[
coef_class_index
]
# softmax
w
=
w
-
np
.
max
(
w
)
exp_w
=
np
.
exp
(
w
*
10
)
w
=
exp_w
/
np
.
sum
(
exp_w
)
# softmax
w
=
w
-
np
.
max
(
w
)
exp_w
=
np
.
exp
(
w
*
10
)
w
=
exp_w
/
np
.
sum
(
exp_w
)
normlime_weights_all_labels
[
class_index
]
=
{
i
:
wi
for
i
,
wi
in
enumerate
(
w
)
}
normlime_weights_all_labels
[
class_index
]
=
{
i
:
wi
for
i
,
wi
in
enumerate
(
w
)
}
logging
.
info
(
"Saving the computed normlime_weights in {}"
.
format
(
save_path
))
np
.
save
(
save_path
,
normlime_weights_all_labels
)
return
save_path
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录