Commit 6c20e08b
Authored Jan 12, 2017 by Yu Yang

Try using status to handle Paddle Error

Parent: 02480316
Showing 6 changed files with 169 additions and 35 deletions:
paddle/gserver/activations/ActivationFunction.cpp  +98 -28
paddle/gserver/activations/ActivationFunction.h    +3  -2
paddle/gserver/layers/Layer.cpp                    +5  -2
paddle/utils/Status.h                              +33 -3
paddle/utils/tests/CMakeLists.txt                  +1  -0
paddle/utils/tests/test_Status.cpp                 +29 -0
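The pattern repeated across these files: each activation's forward and backward drop their void signatures, where a bad input could only be handled by a fatal CHECK inside the callee, and instead return a Status, with a default-constructed Status meaning success. A minimal sketch distilled from the hunks below (Argument, Status, and CHECK_EQ are Paddle's own types and macros):

// Before this commit: the callee validates its input with a fatal CHECK,
// so a malformed Argument aborts the whole process.
void forward(Argument& act) {
  CHECK_EQ(act.value->getWidth(), 1UL);
  // ... compute on act.value ...
}

// After this commit: the callee reports failure to its caller instead.
Status forward(Argument& act) {
  if (act.value->getWidth() != 1UL) {
    return Status("Input width for each timestep of sequence softmax should be 1");
  }
  // ... compute on act.value ...
  return Status();  // a default-constructed Status means OK
}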
paddle/gserver/activations/ActivationFunction.cpp

@@ -69,8 +69,14 @@ static ClassRegistrar<ActivationFunction> gActivationRegistrar;
 class IdentityActivation : public ActivationFunction {
 public:
   static const std::string name;
-  void forward(Argument& act) { (void)act; }
-  void backward(Argument& act) { (void)act; }
+  Status forward(Argument& act) {
+    (void)act;
+    return Status();
+  }
+  Status backward(Argument& act) {
+    (void)act;
+    return Status();
+  }
   const std::string& getName() const { return name; }
 };
 const std::string IdentityActivation::name = "";

@@ -86,8 +92,14 @@ static InitFunction __reg_activation__identity([] {
  * \f]
  */
 BEGIN_DEFINE_ACTIVATION(sigmoid)
-void forward(Argument& act) { act.value->sigmoid(*act.value); }
-void backward(Argument& act) { act.grad->sigmoidDerivative(*act.value); }
+Status forward(Argument& act) {
+  act.value->sigmoid(*act.value);
+  return Status();
+}
+Status backward(Argument& act) {
+  act.grad->sigmoidDerivative(*act.value);
+  return Status();
+}
 END_DEFINE_ACTIVATION(sigmoid)

 /**

@@ -103,9 +115,12 @@ MatrixPtr sftMaxDot_;
 MatrixPtr one_;

 public:
-void forward(Argument& act) { act.value->softmax(*act.value); }
+Status forward(Argument& act) {
+  act.value->softmax(*act.value);
+  return Status();
+}

-void backward(Argument& act) {
+Status backward(Argument& act) {
   MatrixPtr outputV = act.value;
   MatrixPtr outputG = act.grad;

@@ -137,6 +152,7 @@ void backward(Argument& act) {
     act.grad->softmaxDerivative(*act.value, *sftMaxSum_);
   }
+  return Status();
 }
 END_DEFINE_ACTIVATION(softmax)

@@ -151,8 +167,11 @@ ACTIVATION_CLASS_NAME(softmax) softmax_;
 Argument argument_;

 public:
-void forward(Argument& act) {
-  CHECK_EQ(act.value->getWidth(), 1UL);
+Status forward(Argument& act) {
+  if (act.value->getWidth() != 1UL) {
+    return Status("Input width for each timestep of sequence softmax should be 1");
+  }
   if (!argument_.value) {
     argument_.value = Matrix::create(nullptr,

@@ -169,10 +188,14 @@ void forward(Argument& act) {
   auto starts = act.sequenceStartPositions->getVector(useGpu(act.deviceId));
   act.value->sequenceSoftmax(*act.value, *starts);
+  return Status();
 }

-void backward(Argument& act) {
-  CHECK_EQ(act.grad->getWidth(), 1UL);
+Status backward(Argument& act) {
+  if (act.value->getWidth() != 1UL) {
+    return Status("Input width for each timestep of sequence softmax should be 1");
+  }
   size_t numSequences = act.getNumSequences();
   const int* starts = act.sequenceStartPositions->getData(false);

@@ -186,6 +209,7 @@ void backward(Argument& act) {
     softmax_.backward(argument_);
   }
+  return Status();
 }
 END_DEFINE_ACTIVATION(sequence_softmax)

@@ -200,9 +224,15 @@ END_DEFINE_ACTIVATION(sequence_softmax)
  * 0 otherwise.
  */
 BEGIN_DEFINE_ACTIVATION(relu)
-void forward(Argument& act) { act.value->relu(*act.value); }
+Status forward(Argument& act) {
+  act.value->relu(*act.value);
+  return Status();
+}

-void backward(Argument& act) { act.grad->reluDerivative(*act.value); }
+Status backward(Argument& act) {
+  act.grad->reluDerivative(*act.value);
+  return Status();
+}
 END_DEFINE_ACTIVATION(relu)

 /**

@@ -219,9 +249,15 @@ END_DEFINE_ACTIVATION(relu)
  * TODO(yuyang18): Remove magic number 24 or make it configurable.
  */
 BEGIN_DEFINE_ACTIVATION(brelu)
-void forward(Argument& act) { act.value->brelu(*act.value); }
+Status forward(Argument& act) {
+  act.value->brelu(*act.value);
+  return Status();
+}

-void backward(Argument& act) { act.grad->breluDerivative(*act.value); }
+Status backward(Argument& act) {
+  act.grad->breluDerivative(*act.value);
+  return Status();
+}
 END_DEFINE_ACTIVATION(brelu)

 /**

@@ -231,9 +267,15 @@ END_DEFINE_ACTIVATION(brelu)
  * \f]
  */
 BEGIN_DEFINE_ACTIVATION(tanh)
-void forward(Argument& act) { act.value->tanh(*act.value); }
+Status forward(Argument& act) {
+  act.value->tanh(*act.value);
+  return Status();
+}

-void backward(Argument& act) { act.grad->tanhDerivative(*act.value); }
+Status backward(Argument& act) {
+  act.grad->tanhDerivative(*act.value);
+  return Status();
+}
 END_DEFINE_ACTIVATION(tanh)

 /**

@@ -248,10 +290,14 @@ real a, b;

 public:
 ACTIVATION_CLASS_NAME(stanh)() : a(1.7159), b(2. / 3.) {}
-void forward(Argument& act) { act.value->scaledTanh(*act.value, a, b); }
+Status forward(Argument& act) {
+  act.value->scaledTanh(*act.value, a, b);
+  return Status();
+}

-void backward(Argument& act) {
+Status backward(Argument& act) {
   act.grad->scaledTanhDerivative(*act.value, a, b);
+  return Status();
 }
 END_DEFINE_ACTIVATION(stanh)

@@ -262,9 +308,15 @@ END_DEFINE_ACTIVATION(stanh)
  * \f]
  */
 BEGIN_DEFINE_ACTIVATION(softrelu)
-void forward(Argument& act) { act.value->softrelu(*act.value); }
+Status forward(Argument& act) {
+  act.value->softrelu(*act.value);
+  return Status();
+}

-void backward(Argument& act) { act.grad->softreluDerivative(*act.value); }
+Status backward(Argument& act) {
+  act.grad->softreluDerivative(*act.value);
+  return Status();
+}
 END_DEFINE_ACTIVATION(softrelu)

 /**

@@ -280,7 +332,7 @@ END_DEFINE_ACTIVATION(softrelu)
  * 0 if z=0
  */
 BEGIN_DEFINE_ACTIVATION(abs)
-void forward(Argument& act) {
+Status forward(Argument& act) {
   SetDevice device(act.deviceId);
   Matrix::resizeOrCreate(act.in,
                          act.value->getHeight(),

@@ -290,9 +342,13 @@ void forward(Argument& act) {
   act.in->copyFrom(*act.value);
   act.value->abs2(*act.value);
+  return Status();
 }

-void backward(Argument& act) { act.grad->absDerivative(*act.in); }
+Status backward(Argument& act) {
+  act.grad->absDerivative(*act.in);
+  return Status();
+}
 END_DEFINE_ACTIVATION(abs)

 /**

@@ -302,7 +358,7 @@ END_DEFINE_ACTIVATION(abs)
  * \f]
  */
 BEGIN_DEFINE_ACTIVATION(square)
-void forward(Argument& act) {
+Status forward(Argument& act) {
   SetDevice device(act.deviceId);
   Matrix::resizeOrCreate(act.in,
                          act.value->getHeight(),

@@ -312,9 +368,13 @@ void forward(Argument& act) {
   act.in->copyFrom(*act.value);
   act.value->square2(*act.value);
+  return Status();
 }

-void backward(Argument& act) { act.grad->squareDerivative(*act.in); }
+Status backward(Argument& act) {
+  act.grad->squareDerivative(*act.in);
+  return Status();
+}
 END_DEFINE_ACTIVATION(square)

 /**

@@ -324,9 +384,15 @@ END_DEFINE_ACTIVATION(square)
  * \f]
  */
 BEGIN_DEFINE_ACTIVATION(exponential)
-void forward(Argument& act) { act.value->exp2(*act.value); }
+Status forward(Argument& act) {
+  act.value->exp2(*act.value);
+  return Status();
+}

-void backward(Argument& act) { act.grad->expDerivative(*act.value); }
+Status backward(Argument& act) {
+  act.grad->expDerivative(*act.value);
+  return Status();
+}
 END_DEFINE_ACTIVATION(exponential)

 /**

@@ -336,7 +402,7 @@ END_DEFINE_ACTIVATION(exponential)
  * \f]
  */
 BEGIN_DEFINE_ACTIVATION(log)
-void forward(Argument& act) {
+Status forward(Argument& act) {
   SetDevice device(act.deviceId);
   Matrix::resizeOrCreate(act.in,
                          act.value->getHeight(),

@@ -346,9 +412,13 @@ void forward(Argument& act) {
   act.in->copyFrom(*act.value);
   act.value->log2(*act.value);
+  return Status();
 }

-void backward(Argument& act) { act.grad->dotDiv(*act.grad, *act.in); }
+Status backward(Argument& act) {
+  act.grad->dotDiv(*act.grad, *act.in);
+  return Status();
+}
 END_DEFINE_ACTIVATION(log)

 ActivationFunction* ActivationFunction::create(const std::string& type) {
paddle/gserver/activations/ActivationFunction.h

@@ -15,6 +15,7 @@ limitations under the License. */
 #pragma once
 #include <string>
 #include <vector>
+#include "paddle/utils/Status.h"

 namespace paddle {

@@ -48,7 +49,7 @@ public:
    *
    * Usually, act is Layer::output_
    */
-  virtual void forward(Argument& act) = 0;
+  virtual Status forward(Argument& act) = 0;

   /**
    * @brief Backward propagation

@@ -57,7 +58,7 @@ public:
    * - Before calling backward(), act.grad = dE / dy, where E is the error/cost
    * - After backward() returns, act.grad = dE / dx = (dE/dy) * (dy/dx)
    */
-  virtual void backward(Argument& act) = 0;
+  virtual Status backward(Argument& act) = 0;

   virtual const std::string& getName() const = 0;
 };
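For anyone implementing the updated interface directly rather than through the BEGIN/END_DEFINE_ACTIVATION macros, a minimal conforming subclass looks like the sketch below, modeled on IdentityActivation from the .cpp diff above. The class and registry name here are illustrative, not part of this commit:

class PassThroughActivation : public ActivationFunction {
public:
  static const std::string name;
  // Identity mapping: leave act.value untouched and report success.
  Status forward(Argument& act) override {
    (void)act;
    return Status();
  }
  // The derivative of the identity is 1, so act.grad is also left untouched.
  Status backward(Argument& act) override {
    (void)act;
    return Status();
  }
  const std::string& getName() const override { return name; }
};
const std::string PassThroughActivation::name = "pass_through";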
paddle/gserver/layers/Layer.cpp

@@ -16,6 +16,7 @@ limitations under the License. */
 #include "paddle/math/SparseMatrix.h"
 #include "paddle/utils/Logging.h"
+#include "paddle/utils/Status.h"

 #include "AddtoLayer.h"
 #include "CRFLayer.h"

@@ -334,7 +335,8 @@ void Layer::showOutputStats() {
 void Layer::forwardActivation() {
   /* activation */
-  activation_->forward(output_);
+  auto status = activation_->forward(output_);
+  CHECK(status.isOK()) << status.what();

   /* dropout */
   if (config_.drop_rate() > 0) {

@@ -372,7 +374,8 @@ void Layer::backwardActivation() {
     oGrad->dotMul(*oGrad, *dropOutMask_);
   }

-  activation_->backward(output_);
+  auto status = activation_->backward(output_);
+  CHECK(status.isOK()) << status.what();
 }

 void Layer::forwardDropOut() {
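Note that both call sites still funnel the returned Status into CHECK, so a failing activation aborts exactly as before; what changes is that the callee reports the error and the caller decides the policy. A hedged sketch of the alternative policy a caller could adopt once the surrounding signatures are also converted (this forwardActivation signature change is hypothetical and not part of this commit):

// Hypothetical: propagate the error upward instead of aborting.
Status Layer::forwardActivation() {
  auto status = activation_->forward(output_);
  if (!status.isOK()) {
    return status;  // hand the error message to our own caller
  }
  // ... dropout and the rest of the original body ...
  return Status();
}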
paddle/utils/Status.h

@@ -11,18 +11,44 @@ distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License. */

 #pragma once
 #include <memory>
 #include <string>

 namespace paddle {

+/**
+ * Status is the Paddle error code. It only contains a std::string as the
+ * error message. Although Status inherits from std::exception, do not throw
+ * it unless you know what you are doing.
+ */
 class Status final : public std::exception {
 public:
   /**
    * Default Status. OK
    */
   Status() noexcept {}

-  Status(const std::string& msg) : errMsg_(new std::string(msg)) {}
+  /**
+   * @brief Create Status with an error message
+   * @param msg
+   */
+  explicit Status(const std::string& msg) : errMsg_(new std::string(msg)) {}
+
+  /**
+   * @brief set an error message for the status.
+   * @param msg
+   */
+  inline void set(const std::string& msg) noexcept {
+    errMsg_.reset(new std::string(msg));
+  }

-  virtual const char* what() const noexcept override {
+  /**
+   * @brief what returns the error message. If the status is OK, it returns
+   * nullptr.
+   */
+  const char* what() const noexcept override {
     if (errMsg_) {
       return errMsg_->data();
     } else {

@@ -30,10 +56,14 @@ public:
     }
   }

+  /**
+   * @brief isOK
+   * @return true if OK.
+   */
   inline bool isOK() const noexcept { return errMsg_ == nullptr; }

 private:
-  std::unique_ptr<std::string> errMsg_;
+  std::shared_ptr<std::string> errMsg_;
 };

 }  // namespace paddle
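One detail of the Status.h hunks worth calling out: switching errMsg_ from std::unique_ptr to std::shared_ptr makes Status copyable, since a std::unique_ptr member deletes the implicit copy constructor and would leave Status move-only, which is awkward for a value type returned from every forward and backward. A standalone sketch of the distinction (plain C++, not Paddle code):

#include <memory>
#include <string>

struct MoveOnlyStatus {
  std::unique_ptr<std::string> errMsg_;  // implicit copy constructor is deleted
};

struct CopyableStatus {
  std::shared_ptr<std::string> errMsg_;  // copies share one message cheaply
};

int main() {
  CopyableStatus a{std::make_shared<std::string>("error")};
  CopyableStatus b = a;  // compiles: both now point at the same string
  // MoveOnlyStatus x{std::unique_ptr<std::string>(new std::string("error"))};
  // MoveOnlyStatus y = x;  // would not compile: copy constructor is deleted
  return (a.errMsg_ == b.errMsg_) ? 0 : 1;
}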
paddle/utils/tests/CMakeLists.txt

@@ -4,6 +4,7 @@ add_simple_unittest(test_CustomStackTrace)
 add_simple_unittest(test_ThreadBarrier)
 add_simple_unittest(test_SpinLock)
 add_simple_unittest(test_SIMDFlags)
+add_simple_unittest(test_Status)

 add_executable(
     test_CustomStackTracePrint
paddle/utils/tests/test_Status.cpp (new file, mode 100644)

/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/utils/Status.h"
#include <gtest/gtest.h>

TEST(Status, testAll) {
  paddle::Status status;
  ASSERT_TRUE(status.isOK());
  status.set("I'm the error");
  ASSERT_FALSE(status.isOK());
  ASSERT_STREQ("I'm the error", status.what());

  paddle::Status status2("error2");
  ASSERT_FALSE(status2.isOK());
  ASSERT_STREQ("error2", status2.what());
}