PaddlePaddle / PaddleSeg · Commit d22e3e5e
Commit d22e3e5e
Authored Sep 09, 2020 by chenguowei01
Parent: bfec9c85

use softmax_with_cross_entropy
Showing 4 changed files with 55 additions and 40 deletions (+55 -40):
  dygraph/cvlibs/param_init.py                 +4  -4
  dygraph/models/architectures/hrnet.py        +2  -1
  dygraph/models/fcn.py                        +2  -31
  dygraph/models/losses/cross_entroy_loss.py   +47 -4
dygraph/cvlibs/param_init.py @ d22e3e5e
@@ -15,11 +15,11 @@
 import paddle.fluid as fluid
 
 
-def constant_init(param, value=0.0):
-    initializer = fluid.initializer.Constant(value)
+def constant_init(param, **kwargs):
+    initializer = fluid.initializer.Constant(**kwargs)
     initializer(param, param.block)
 
 
-def normal_init(param, loc=0.0, scale=1.0, seed=0):
-    initializer = fluid.initializer.Normal(loc=loc, scale=scale, seed=seed)
+def normal_init(param, **kwargs):
+    initializer = fluid.initializer.Normal(**kwargs)
     initializer(param, param.block)
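For context, a hedged usage sketch (not part of the commit): after this change, call sites forward whatever keyword arguments the underlying fluid.initializer accepts instead of a fixed parameter list. The Conv2D layer and values below are illustrative and assume the paddle.fluid dygraph API this branch targets.

import paddle.fluid as fluid

from dygraph.cvlibs import param_init

with fluid.dygraph.guard():
    conv = fluid.dygraph.Conv2D(num_channels=3, num_filters=8, filter_size=3)
    # Previously constant_init(conv.bias, 0.0); now any Constant(...) kwarg passes through.
    param_init.constant_init(conv.bias, value=0.0)
    # Previously normal_init(conv.weight, loc, scale, seed); now Normal(...) kwargs pass through.
    param_init.normal_init(conv.weight, loc=0.0, scale=0.01)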
dygraph/models/architectures/hrnet.py @ d22e3e5e
@@ -146,6 +146,7 @@ class HRNet(fluid.dygraph.Layer):
             has_se=self.has_se,
             name="st4")
 
-        self.init_weight(backbone_pretrained)
+        if self.training:
+            self.init_weight(backbone_pretrained)
 
     def forward(self, x, label=None, mode='train'):
dygraph/models/fcn.py @ d22e3e5e
@@ -86,6 +86,7 @@ class FCN(fluid.dygraph.Layer):
             filter_size=1,
             stride=1,
             padding=0)
-        self.init_weight(model_pretrained)
+        if self.training:
+            self.init_weight(model_pretrained)
 
     def forward(self, x):
@@ -132,36 +133,6 @@ class FCN(fluid.dygraph.Layer):
             raise Exception('Pretrained model is not found: {}'.format(
                 pretrained_model))
 
-    # def _get_loss(self, logit, label):
-    #     """
-    #     compute forward loss of the model
-    #     Args:
-    #         logit (tensor): the logit of model output
-    #         label (tensor): ground truth
-    #     Returns:
-    #         avg_loss (tensor): forward loss
-    #     """
-    #     logit = fluid.layers.transpose(logit, [0, 2, 3, 1])
-    #     label = fluid.layers.transpose(label, [0, 2, 3, 1])
-    #     mask = label != self.ignore_index
-    #     mask = fluid.layers.cast(mask, 'float32')
-    #     loss, probs = fluid.layers.softmax_with_cross_entropy(
-    #         logit,
-    #         label,
-    #         ignore_index=self.ignore_index,
-    #         return_softmax=True,
-    #         axis=-1)
-    #     loss = loss * mask
-    #     avg_loss = fluid.layers.mean(loss) / (
-    #         fluid.layers.mean(mask) + self.EPS)
-    #     label.stop_gradient = True
-    #     mask.stop_gradient = True
-    #     return avg_loss
 
 class ConvBNLayer(fluid.dygraph.Layer):
     def __init__(self,
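The hrnet.py and fcn.py hunks above share one pattern: init_weight(), which loads pretrained weights, now runs only while the layer reports training mode, and FCN's commented-out _get_loss() (whose masked softmax-with-cross-entropy logic now lives in the loss module below) is dropped. A hedged toy sketch of the guard pattern, assuming fluid.dygraph.Layer exposes the boolean `training` attribute the guarded code relies on; ToyHead and its weight loading are hypothetical stand-ins, not repo code.

import paddle.fluid as fluid


class ToyHead(fluid.dygraph.Layer):
    def __init__(self, pretrained=None):
        super(ToyHead, self).__init__()
        self.fc = fluid.dygraph.Linear(16, 2)
        if self.training:  # only runs while the layer is in training mode
            self.init_weight(pretrained)

    def init_weight(self, pretrained):
        # stand-in for the real pretrained-weight loading in HRNet/FCN
        print('would load weights from', pretrained)

    def forward(self, x):
        return self.fc(x)


with fluid.dygraph.guard():
    head = ToyHead(pretrained='dummy.pdparams')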
dygraph/models/losses/cross_entroy_loss.py @ d22e3e5e
@@ -17,8 +17,7 @@ from paddle import nn
 import paddle.nn.functional as F
 
 from dygraph.cvlibs import manager
 
-
+'''
 @manager.LOSSES.add_component
 class CrossEntropyLoss(nn.CrossEntropyLoss):
     """
@@ -40,8 +39,9 @@ class CrossEntropyLoss(nn.CrossEntropyLoss):
     """
 
     def __init__(self, weight=None, ignore_index=255, reduction='mean'):
-        super(CrossEntropyLoss, self).__init__(
-            weight=weight, ignore_index=ignore_index, reduction=reduction)
+        self.weight = weight
+        self.ignore_index = ignore_index
+        self.reduction = reduction
         self.EPS = 1e-5
         if self.reduction not in ['sum', 'mean', 'none']:
             raise ValueError(
@@ -71,6 +71,49 @@ class CrossEntropyLoss(nn.CrossEntropyLoss):
         mask = paddle.cast(mask, 'float32')
         avg_loss = loss / (paddle.mean(mask) + self.EPS)
 
         label.stop_gradient = True
         mask.stop_gradient = True
         return avg_loss
+'''
+
+
+@manager.LOSSES.add_component
+class CrossEntropyLoss(nn.Layer):
+    """
+    Implements the cross entropy loss function.
+
+    Args:
+        ignore_index (int64): Specifies a target value that is ignored
+            and does not contribute to the input gradient. Default ``255``.
+    """
+
+    def __init__(self, ignore_index=255):
+        super(CrossEntropyLoss, self).__init__()
+        self.ignore_index = ignore_index
+        self.EPS = 1e-5
+
+    def forward(self, logit, label):
+        """
+        Forward computation.
+
+        Args:
+            logit (Tensor): logit tensor, the data type is float32, float64. Shape is
+                (N, C), where C is number of classes, and if shape is more than 2D, this
+                is (N, C, D1, D2,..., Dk), k >= 1.
+            label (Variable): label tensor, the data type is int64. Shape is (N), where each
+                value is 0 <= label[i] <= C-1, and if shape is more than 2D, this is
+                (N, D1, D2,..., Dk), k >= 1.
+        """
+        if len(label.shape) != len(logit.shape):
+            label = paddle.unsqueeze(label, 1)
+
+        loss = F.softmax_with_cross_entropy(
+            logit, label, ignore_index=self.ignore_index, axis=1)
+        loss = paddle.reduce_mean(loss)
+        mask = label != self.ignore_index
+        mask = paddle.cast(mask, 'float32')
+        avg_loss = loss / (paddle.mean(mask) + self.EPS)
+        label.stop_gradient = True
+        mask.stop_gradient = True
+        return avg_loss
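One step in the new forward() that is easy to misread is the division by paddle.mean(mask): softmax_with_cross_entropy returns zero loss at ignore_index positions, so averaging over all positions and then dividing by the fraction of valid positions recovers the mean over valid pixels only, while EPS just guards against division by zero when every pixel is ignored. A minimal NumPy sketch of that arithmetic, with made-up numbers:

import numpy as np

EPS = 1e-5
per_pixel_loss = np.array([0.7, 0.0, 0.3, 0.0])  # zeros sit at ignore_index positions
mask = np.array([1.0, 0.0, 1.0, 0.0])            # 1.0 marks valid (non-ignored) pixels

avg_loss = per_pixel_loss.mean() / (mask.mean() + EPS)   # 0.25 / 0.50001 ~= 0.5
valid_mean = per_pixel_loss[mask == 1.0].mean()          # exactly 0.5

print(avg_loss, valid_mean)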