#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle.v2.fluid.core as core
from paddle.v2.fluid.op import Operator
import unittest


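# Helper: builds a minimal fully-connected layer as its own sub-network,
# chaining a "mul" op (X * W) into a "sigmoid" activation written to Y.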
def fc(X, W, Y):
    ret_v = core.Net.create()

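    # Note: the mul op below references the hard-coded variable names "X" and
    # "W"; the X and W arguments of fc() are not forwarded into it.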
    ret_v.append_op(Operator("mul", X="X", Y="W", Out="pre_activation"))
    ret_v.append_op(Operator("sigmoid", X="pre_activation", Out=Y))
    ret_v.complete_add_op(True)
    return ret_v


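# Verifies that ops and nested sub-nets can be appended to a Net and that the
# printed structure (str(net)) matches the expected layout.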
class TestNet(unittest.TestCase):
    def test_net_all(self):
        net = core.Net.create()
        op1 = Operator("sum", X=["X", "Y"], Out="Out")
        net.append_op(op1)

        net2 = core.Net.create()
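        # Nest the fc() sub-net inside a second net to exercise nested
        # plain_net printing.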
        net2.append_op(fc(X="X", W="w", Y="fc.out"))
        net2.complete_add_op(True)
        net.append_op(net2)
        net.complete_add_op(True)
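        # str(net) lists every op with its inputs/outputs and indents nested
        # plain_net blocks, which the expected string below mirrors.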

        expected = '''
Op(plain_net), inputs:{all[W, X, Y]}, outputs:{all[Out, fc.out, pre_activation]}.
    Op(sum), inputs:{X[X, Y]}, outputs:{Out[Out]}.
    Op(plain_net), inputs:{all[W, X]}, outputs:{all[fc.out, pre_activation]}.
        Op(plain_net), inputs:{all[W, X]}, outputs:{all[fc.out, pre_activation]}.
            Op(mul), inputs:{X[X], Y[W]}, outputs:{Out[pre_activation]}.
            Op(sigmoid), inputs:{X[pre_activation]}, outputs:{Out[fc.out]}.
'''
        self.assertEqual(expected, "\n" + str(net))


if __name__ == "__main__":
    unittest.main()