PaddlePaddle / PaddleFL

Commit b2a724a8
Authored Apr 30, 2020 by jingqinghe
Commit message: update
Parent: 62d028c8

Showing 15 changed files with 1357 additions and 52 deletions (+1357 / -52)
python/paddle_fl/mpc/tests/unittests/run_test_example.sh            +21  -10
python/paddle_fl/mpc/tests/unittests/test_datautils_aby3.py         +140 -0
python/paddle_fl/mpc/tests/unittests/test_datautils_align.py        +63  -0
python/paddle_fl/mpc/tests/unittests/test_model_encryption.py       +26  -42
python/paddle_fl/mpc/tests/unittests/test_op_add.py                 +151 -0
python/paddle_fl/mpc/tests/unittests/test_op_base.py                +89  -0
python/paddle_fl/mpc/tests/unittests/test_op_compare.py             +236 -0
python/paddle_fl/mpc/tests/unittests/test_op_fc.py                  +70  -0
python/paddle_fl/mpc/tests/unittests/test_op_mean.py                +70  -0
python/paddle_fl/mpc/tests/unittests/test_op_mul.py                 +116 -0
python/paddle_fl/mpc/tests/unittests/test_op_relu.py                +67  -0
python/paddle_fl/mpc/tests/unittests/test_op_square.py              +70  -0
python/paddle_fl/mpc/tests/unittests/test_op_square_error_cost.py   +72  -0
python/paddle_fl/mpc/tests/unittests/test_op_sub.py                 +99  -0
python/paddle_fl/mpc/tests/unittests/test_op_sum.py                 +67  -0
python/paddle_fl/mpc/tests/unittests/run_test_example.sh
 #!/bin/bash
 # This script is used for run unit tests.
 # modify the following vars according to your environment
 LD_LIB_PATH="path_to_needed_libs"
 MPC_DATA_UTILS_MODULE_PATH="path_to_mpc_data_utils_so_file"
 export LD_LIBRARY_PATH=$LD_LIB_PATH:$LD_LIBRARY_PATH
 export PYTHONPATH=$MPC_DATA_UTILS_MODULE_PATH:$PYTHON_PATH

 # set redis server ip and port for test
 export TEST_REDIS_IP="test_redis_server_ip"
 export TEST_REDIS_PORT="test_redis_port"

 # unittest command
 PYTHON_TEST="python -m unittest"

-# add your test modules here
-TEST_MODULES=("test_datautils_load_filter")
+# add the modules to test
+TEST_MODULES=("test_datautils_aby3"
+              "test_model_encryption"
+              "test_datautils_align"
+              "test_op_add"
+              "test_op_sub"
+              "test_op_mul"
+              "test_op_square"
+              "test_op_sum"
+              "test_op_mean"
+              "test_op_square_error_cost"
+              "test_op_fc"
+              "test_op_relu"
+              "test_op_compare"
+)

+# run unittest
 for MODULE in ${TEST_MODULES[@]}
 do
     $PYTHON_TEST $MODULE
 done

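For orientation, the same module list can also be driven from Python rather than the shell loop above. The following is a minimal sketch, not part of this commit, and it assumes the environment variables exported by the script (TEST_REDIS_IP, TEST_REDIS_PORT, PYTHONPATH, LD_LIBRARY_PATH) are already set:

# Hypothetical helper (not in the repo): runs a subset of the unittest modules
# listed in run_test_example.sh through the standard library loader.
import unittest

TEST_MODULES = ["test_datautils_aby3", "test_op_add", "test_op_sub"]  # subset for illustration

suite = unittest.TestSuite()
for module in TEST_MODULES:
    # equivalent to `python -m unittest <module>` for each entry
    suite.addTests(unittest.defaultTestLoader.loadTestsFromName(module))
unittest.TextTestRunner(verbosity=2).run(suite)
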
python/paddle_fl/mpc/tests/unittests/test_datautils_aby3.py
0 → 100644
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module test aby3 in data_utils.
"""
import os
import unittest

import numpy as np

import paddle_fl.mpc.data_utils.aby3 as aby3


class TestDataUtilsAby3(unittest.TestCase):

    def test_encrypt_decrypt(self):
        number = 123.4
        number_shares = aby3.encrypt(number)
        self.assertEqual(len(number_shares), 3)
        revealed_number = aby3.decrypt(number_shares)
        self.assertAlmostEqual(number, revealed_number, delta=1e-4)

    def test_make_shares(self):
        num_arr = np.arange(0, 4).reshape((2, 2))
        shares = aby3.make_shares(num_arr)
        self.assertEqual((3, 2, 2), shares.shape)

    def test_reconstruct(self):
        num_arr = np.arange(0, 4).reshape((2, 2)).astype(np.float32)
        shares = aby3.make_shares(num_arr)
        all_3shares = np.array([aby3.get_aby3_shares(shares, i) for i in range(3)])
        recon = aby3.reconstruct(all_3shares)
        self.assertTrue(np.allclose(num_arr, recon))

    def test_make_shares_3dim(self):
        num_arr = np.arange(0, 8).reshape((2, 2, 2))
        shares = aby3.make_shares(num_arr)
        self.assertEqual((3, 2, 2, 2), shares.shape)

    def test_get_aby3_shares(self):
        raw_shares = np.arange(1, 13).reshape((3, 2, 2))
        share_list = []
        for idx in range(3):
            share = aby3.get_aby3_shares(raw_shares, idx)
            self.assertEqual(share.shape, (2, 2, 2))
            share_list.append(share)
        expect_shares = [np.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]]),
                         np.array([[[5, 6], [7, 8]], [[9, 10], [11, 12]]]),
                         np.array([[[9, 10], [11, 12]], [[1, 2], [3, 4]]])]
        for value, expect in zip(share_list, expect_shares):
            self.assertTrue(np.allclose(value, expect))

    def dummy_share_reader(self):
        """
        Dummy share_reader for share_reader in aby3.save_aby3_shares()
        :return:
        """
        test_data = np.arange(1, 10).reshape((3, 3)).astype(np.float32)
        yield aby3.make_shares(test_data)

    def remove_temp_file(self, filename):
        """
        Remove temp file.
        :param filename:
        :return:
        """
        os.remove(filename)

    def test_save_aby3_shares(self):
        part_name = './tmp_test_save_aby3_shares'
        aby3.save_aby3_shares(share_reader=self.dummy_share_reader,
                              part_name=part_name)
        files = os.listdir('./')
        true_filename = 'tmp_test_save_aby3_shares'
        for idx in range(3):
            tmp_file = true_filename + '.part' + str(idx)
            self.assertTrue(tmp_file in files)
            self.remove_temp_file(tmp_file)

    def test_load_aby3_shares(self):
        share = np.arange(1, 19).reshape((2, 3, 3)).astype(np.int64)
        tmp_part_name = './tmp_test_load_aby3_shares.part0'
        with open(tmp_part_name, 'wb') as f:
            f.write(share.tostring())

        part_name = './tmp_test_load_aby3_shares'
        default_loader = aby3.load_aby3_shares(part_name=part_name,
                                               id=0,
                                               shape=(3, 3))
        default_loading_data = next(default_loader())
        self.assertTrue(np.allclose(default_loading_data, share))

        loader = aby3.load_aby3_shares(part_name=part_name,
                                       id=0,
                                       shape=(2, 3, 3),
                                       append_share_dim=False)
        loading_data = next(loader())
        self.assertTrue(np.allclose(loading_data, share))

        self.remove_temp_file(tmp_part_name)

    def dummy_reader(self):
        """
        Dummy reader for the reader in aby3.batch()
        :return:
        """
        data = [np.arange(1, 9).reshape((2, 2, 2)).astype(np.int64)] * 4
        for item in data:
            yield item

    def test_batch(self):
        default_batch_reader = aby3.batch(reader=self.dummy_reader, batch_size=3)
        default_batch_sample_shapes = [(2, 3, 2, 2), (2, 1, 2, 2)]
        for item, shape in zip(default_batch_reader(), default_batch_sample_shapes):
            self.assertEqual(item.shape, shape)

        batch_reader = aby3.batch(reader=self.dummy_reader, batch_size=3, drop_last=True)
        for item in batch_reader():
            self.assertEqual(item.shape, (2, 3, 2, 2))


if __name__ == '__main__':
    unittest.main()

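The tests above exercise the basic share/reveal cycle in paddle_fl.mpc.data_utils.aby3: make_shares produces the three raw shares (the leading dimension of 3), get_aby3_shares(shares, i) selects the pair of shares party i holds, and reconstruct reveals the plaintext. A minimal round trip using only the calls shown in this file might look like the sketch below (shapes follow test_make_shares and test_reconstruct):

# Minimal sketch of the aby3 data_utils round trip used by the tests above.
import numpy as np
import paddle_fl.mpc.data_utils.aby3 as aby3

plain = np.arange(0, 4).reshape((2, 2)).astype(np.float32)

shares = aby3.make_shares(plain)                                # shape (3, 2, 2): three raw shares
party_shares = np.array(
    [aby3.get_aby3_shares(shares, i) for i in range(3)])        # each party holds two of them

revealed = aby3.reconstruct(party_shares)                       # back to the plaintext array
assert np.allclose(plain, revealed)
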
python/paddle_fl/mpc/tests/unittests/test_datautils_align.py
0 → 100644
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module test align in aby3 module.
"""
import unittest
from multiprocessing import Process

import paddle_fl.mpc.data_utils.alignment as alignment


class TestDataUtilsAlign(unittest.TestCase):

    def run_align(self, input_set, party_id, endpoints, is_receiver):
        """
        Call align function in data_utils.
        :param input_set:
        :param party_id:
        :param endpoints:
        :param is_receiver:
        :return:
        """
        result = alignment.align(input_set=input_set,
                                 party_id=party_id,
                                 endpoints=endpoints,
                                 is_receiver=is_receiver)
        self.assertEqual(result, {'0'})

    def test_align(self):
        """
        Test normal case for align function.
        :return:
        """
        endpoints = '0:127.0.0.1:11111,1:127.0.0.1:22222,2:127.0.0.1:33333'
        set_0 = {'0', '10', '20', '30'}
        set_1 = {'0', '10', '11', '111'}
        set_2 = {'0', '30', '33', '333'}

        party_0 = Process(target=self.run_align, args=(set_0, 0, endpoints, True))
        party_1 = Process(target=self.run_align, args=(set_1, 1, endpoints, False))
        party_2 = Process(target=self.run_align, args=(set_2, 2, endpoints, False))

        party_0.start()
        party_1.start()
        party_2.start()

        party_2.join()


if __name__ == '__main__':
    unittest.main()

python/paddle_fl/mpc/tests/unittests/test_model_encryption.py
@@ -12,18 +12,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This module test load_data and data_filter_by_id functions in data_utils module.
+This module test model encryption/decryption in aby3 module.
 """
-import sys
-sys.path.append('../../../')
-import numpy as np
 import os
 import shutil
 import unittest

 import paddle_fl.mpc as pfl_mpc
-import paddle_fl.mpc.data_utils.aby3 as aby3
+import numpy as np
 import paddle.fluid as fluid
+import paddle_fl.mpc.data_utils.aby3 as aby3

 from paddle.fluid.param_attr import ParamAttr
@@ -43,10 +41,7 @@ class TestDataUtilsEnDecryptModel(unittest.TestCase):
         y = fluid.data(name='y', shape=[None, 1], dtype='float32')
         param_attr = ParamAttr(name="fc_0.w_0")
         bias_attr = ParamAttr(name="fc_0.b_0")
         y_predict = fluid.layers.fc(input=x, size=1, param_attr=param_attr, bias_attr=bias_attr)
         main_prog = fluid.default_main_program()
         startup_program = fluid.default_startup_program()
@@ -56,17 +51,15 @@ class TestDataUtilsEnDecryptModel(unittest.TestCase):
         if not os.path.exists(self.raw_model_dir):
             os.makedirs(self.raw_model_dir)
         fluid.io.save_inference_model(self.raw_model_dir, ['x'], [y_predict], exe)

         vars = ['fc_0.w_0', 'fc_0.b_0']
         vars_tensor = [[[-1.0788183212280273], [2.1307122707366943], [-2.646815538406372],
                         [1.6547845602035522], [-2.13144588470459], [3.6621456146240234],
                         [-1.553664207458496], [0.18727444112300873], [-2.3649044036865234],
                         [-3.407580852508545], [-4.058014392852783], [1.4958711862564087],
                         [-3.9899468421936035]],
                        [22.361257553100586]]
         global_block = main_prog.global_block()
         g_scope = fluid.global_scope()
@@ -74,11 +67,10 @@ class TestDataUtilsEnDecryptModel(unittest.TestCase):
             param = g_scope.find_var(var)
             param.get_tensor().set(tensor, place)
             variable = global_block.var(var)
             fluid.io.save_vars(executor=exe, dirname=self.raw_model_dir, vars=[variable], filename=var)

     def infer_with_decrypted_model(self, model_path):
         """
@@ -91,11 +83,9 @@ class TestDataUtilsEnDecryptModel(unittest.TestCase):
         [inference_program, feed_target_names, fetch_targets] = \
             fluid.io.load_inference_model(model_path, exe)

         feat = [0.42616306, -0.11363636, 0.25525005, -0.06916996, 0.28457807, -0.14440207,
                 0.17327599, -0.19893267, 0.62828665, 0.49191383, 0.18558153, -0.0686218, 0.40637243]
         infer_feat = np.array(feat).reshape((1, 13)).astype("float32")
         assert feed_target_names[0] == 'x'
@@ -110,8 +100,8 @@ class TestDataUtilsEnDecryptModel(unittest.TestCase):
         :return:
         """
         self.create_test_model()
         aby3.encrypt_model(plain_model=self.raw_model_dir, mpc_model_dir=self.enc_model_dir)

     def tearDown(self):
         """
@@ -124,26 +114,20 @@ class TestDataUtilsEnDecryptModel(unittest.TestCase):
         """
         Test normal case for model encryption.
         """
         share_dirs = [os.path.join(self.enc_model_dir, sub_dir)
                       for sub_dir in os.listdir(self.enc_model_dir)
                       if not sub_dir.startswith(".")]
         self.assertEqual(3, len(share_dirs))

     def test_model_decrypt(self):
         """
         Test normal case for model decryption.
         """
         aby3.decrypt_model(mpc_model_dir=self.enc_model_dir, plain_model_path=self.dec_model_dir)
         infer_result = self.infer_with_decrypted_model(model_path=self.dec_model_dir)
         # accurate result is 13.79
         self.assertAlmostEqual(infer_result[0], 13.79, delta=1e-1)
         shutil.rmtree(self.dec_model_dir)


 if __name__ == '__main__':
     unittest.main()
     # run case according to their name

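Apart from the docstring and import changes, the diff above is mostly reformatting, but it shows the model-encryption API that this file tests: aby3.encrypt_model splits a saved inference model into three per-party shares (test_model_encrypt asserts three share directories), and aby3.decrypt_model recombines them into a plain model that can be loaded for inference. A minimal sketch of the round trip, using the same keyword arguments as above with placeholder directory names:

# Sketch of the encrypt/decrypt round trip exercised by this test; paths are placeholders.
import paddle_fl.mpc.data_utils.aby3 as aby3

plain_model_dir = './raw_model'       # produced beforehand by fluid.io.save_inference_model
mpc_model_dir = './encrypted_model'   # receives one sub-directory of shares per party
decrypted_dir = './decrypted_model'

aby3.encrypt_model(plain_model=plain_model_dir, mpc_model_dir=mpc_model_dir)
aby3.decrypt_model(mpc_model_dir=mpc_model_dir, plain_model_path=decrypted_dir)
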
python/paddle_fl/mpc/tests/unittests/test_op_add.py
0 → 100644
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module test add op.
"""
import unittest
from multiprocessing import Manager

import numpy as np
import paddle.fluid as fluid
import paddle_fl.mpc as pfl_mpc
import paddle_fl.mpc.data_utils.aby3 as aby3

import test_op_base


class TestOpAdd(test_op_base.TestOpBase):

    def elementwise_add(self, **kwargs):
        """
        Add two variables with one dimension.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[4], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[4], dtype='int64')
        op_add = pfl_mpc.layers.elementwise_add(x=x, y=y)
        math_add = x + y
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[op_add, math_add])

        self.assertTrue(np.allclose(results[0], results[1]))
        self.assertEqual(results[0].shape, (2, 4))
        self.assertTrue(np.allclose(results[0], expected_out))

    def multi_dim_add(self, **kwargs):
        """
        Add two variables with multi dimensions.
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[2, 2], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[2, 2], dtype='int64')
        add = x + y
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[add])

        self.assertTrue(np.allclose(results[0], expected_out))

    def diff_dim_add(self, **kwargs):
        """
        Add with different dimensions.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[3, 4], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[4], dtype='int64')
        math_add = x + y
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[math_add])

        self.assertEqual(results[0].shape, (2, 3, 4))
        return_results.append(results[0])

    def test_elementwise_add(self):
        data_1 = [np.array([[0, 1, 2, 3], [0, 1, 2, 3]]).astype('int64')] * self.party_num
        data_2 = [np.array([[4, 3, 2, 1], [4, 3, 2, 1]]).astype('int64')] * self.party_num
        expect_results = [np.array([[4, 4, 4, 4], [4, 4, 4, 4]])] * self.party_num
        ret = self.multi_party_run(target=self.elementwise_add,
                                   data_1=data_1,
                                   data_2=data_2,
                                   expect_results=expect_results)
        self.assertEqual(ret[0], True)

    def test_multi_dim_add(self):
        data_1 = [np.array([[[1, 1], [-1, -1]], [[1, 1], [-1, -1]]]).astype('int64')] * self.party_num
        data_2 = [np.array([[[-1, -1], [1, 1]], [[-1, -1], [1, 1]]]).astype('int64')] * self.party_num
        expect_results = [np.array([[[0, 0], [0, 0]], [[0, 0], [0, 0]]])] * self.party_num
        ret = self.multi_party_run(target=self.multi_dim_add,
                                   data_1=data_1,
                                   data_2=data_2,
                                   expect_results=expect_results)
        self.assertEqual(ret[0], True)

    def test_diff_dim_add(self):
        data_1 = np.full((3, 4), fill_value=2)
        data_2 = np.ones((4,))
        data_1_shares = aby3.make_shares(data_1)
        data_2_shares = aby3.make_shares(data_2)
        data_1_all3shares = np.array([aby3.get_aby3_shares(data_1_shares, i) for i in range(3)])
        data_2_all3shares = np.array([aby3.get_aby3_shares(data_2_shares, i) for i in range(3)])

        return_results = Manager().list()
        ret = self.multi_party_run(target=self.diff_dim_add,
                                   data_1=data_1_all3shares,
                                   data_2=data_2_all3shares,
                                   return_results=return_results)
        self.assertEqual(ret[0], True)

        revealed = aby3.reconstruct(np.array(return_results))
        expected_out = np.array([[3, 3, 3, 3], [3, 3, 3, 3], [3, 3, 3, 3]])
        self.assertTrue(np.allclose(revealed, expected_out, atol=1e-4))

    def test_elementwise_add_dim_error(self):
        data_1 = [np.array([0, 1, 2, 3]).astype('int64')] * self.party_num
        data_2 = [np.array([4, 3, 2, 1]).astype('int64')] * self.party_num
        expect_results = [np.array([[4, 4, 4, 4], [4, 4, 4, 4]])] * self.party_num
        ret = self.multi_party_run(target=self.elementwise_add,
                                   data_1=data_1,
                                   data_2=data_2,
                                   expect_results=expect_results)
        self.assertTrue(isinstance(ret[0], ValueError))


if __name__ == '__main__':
    unittest.main()

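One detail worth noting in these op tests: every fetched MPC tensor carries a leading dimension of 2 (for example (2, 4) for a [4]-shaped pfl_mpc.data variable), which matches the two replicated shares each aby3 party holds locally, as seen in get_aby3_shares above. The sketch below is illustrative only, reusing the calls from test_diff_dim_add to show how per-party inputs with that extra share dimension are produced before being fed to an executor:

# Illustrative only: building per-party inputs with the extra share dimension.
import numpy as np
import paddle_fl.mpc.data_utils.aby3 as aby3

plain = np.full((3, 4), fill_value=2)
shares = aby3.make_shares(plain)                                  # (3, 3, 4): three raw shares
party_inputs = [aby3.get_aby3_shares(shares, i) for i in range(3)]
print(party_inputs[0].shape)                                      # (2, 3, 4): two local shares per party
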
python/paddle_fl/mpc/tests/unittests/test_op_base.py
0 → 100644
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Set base config for op unit tests.
"""
from multiprocessing import Pipe, Process
import os
import traceback
import unittest

import redis


class Aby3Process(Process):
    """
    Extends from Process, evaluate the computation party in aby3.
    """

    def __init__(self, *args, **kwargs):
        Process.__init__(self, *args, **kwargs)
        self._pconn, self._cconn = Pipe()
        self._exception = None

    def run(self):
        """
        Override. Send any exceptions raised in
        subprocess to main process.
        """
        try:
            Process.run(self)
            self._cconn.send(None)
        except Exception as e:
            tb = traceback.format_exc()
            self._cconn.send((e, tb))

    @property
    def exception(self):
        """
        Get exception.
        """
        if self._pconn.poll():
            self._exception = self._pconn.recv()
        return self._exception


class TestOpBase(unittest.TestCase):

    def __init__(self, methodName='runTest'):
        super(TestOpBase, self).__init__(methodName)
        # set redis server and port
        self.server = os.environ['TEST_REDIS_IP']
        self.port = os.environ['TEST_REDIS_PORT']
        self.party_num = 3

    def setUp(self):
        """
        Connect redis and delete all keys in all databases on the current host.
        :return:
        """
        r = redis.Redis(host=self.server, port=int(self.port))
        r.flushall()

    def multi_party_run(self, **kwargs):
        """
        Run 3 parties with target function or other additional arguments.
        :param kwargs:
        :return:
        """
        target = kwargs['target']

        for role in range(self.party_num):
            kwargs.update({'role': role})
            party = Aby3Process(target=target, kwargs=kwargs)
            party.start()
            if role == self.party_num - 1:
                party.join()
                if party.exception:
                    return party.exception
                else:
                    return (True,)

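TestOpBase is the harness every test_op_* module in this commit builds on: the constructor reads TEST_REDIS_IP/TEST_REDIS_PORT (exported by run_test_example.sh), setUp flushes Redis, and multi_party_run spawns the three aby3 parties and propagates any exception raised in the last one. A new op test would follow the same pattern as the modules above; the following condensed sketch is not part of the commit, and pfl_mpc.layers.my_op is a hypothetical placeholder standing in for a real layer such as elementwise_add or relu:

# Condensed sketch of a TestOpBase subclass; `pfl_mpc.layers.my_op` is a placeholder.
import numpy as np
import paddle.fluid as fluid
import paddle_fl.mpc as pfl_mpc

import test_op_base


class TestOpMyOp(test_op_base.TestOpBase):

    def my_op(self, **kwargs):
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[4], dtype='int64')
        out = pfl_mpc.layers.my_op(x)                     # placeholder op
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1}, fetch_list=[out])
        self.assertEqual(results[0].shape, (2, 4))        # leading 2 = local share pair

    def test_my_op(self):
        # each party feeds a (2, 4) share array for the [4]-shaped variable
        data = [np.zeros((2, 4)).astype('int64')] * self.party_num
        ret = self.multi_party_run(target=self.my_op, data_1=data)
        self.assertEqual(ret[0], True)
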
python/paddle_fl/mpc/tests/unittests/test_op_compare.py
0 → 100644
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module test compare op.
"""
import unittest

import numpy as np
import paddle.fluid as fluid
import paddle_fl.mpc as pfl_mpc

import test_op_base


class TestOpCompare(test_op_base.TestOpBase):

    def gt(self, **kwargs):
        """
        Greater than.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[3], dtype='int64')
        y = fluid.data(name='y', shape=[1, 3], dtype='float32')
        # todo: reshape y to [3]
        op_gt = pfl_mpc.layers.greater_than(x=x, y=y)
        math_gt = x > y
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[op_gt, math_gt])

        self.assertTrue(np.allclose(results[0], results[1]))
        self.assertEqual(results[0].shape, (1, 3))
        self.assertTrue(np.allclose(results[0], expected_out))

    def ge(self, **kwargs):
        """
        Greater equal.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[3], dtype='int64')
        y = fluid.data(name='y', shape=[1, 3], dtype='float32')
        op_ge = pfl_mpc.layers.greater_equal(x=x, y=y)
        math_ge = x >= y
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[op_ge, math_ge])

        self.assertTrue(np.allclose(results[0], results[1]))
        self.assertEqual(results[0].shape, (1, 3))
        self.assertTrue(np.allclose(results[0], expected_out))

    def lt(self, **kwargs):
        """
        Less than.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[3], dtype='int64')
        y = fluid.data(name='y', shape=[1, 3], dtype='float32')
        op_lt = pfl_mpc.layers.less_than(x=x, y=y)
        math_lt = x < y
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[op_lt, math_lt])

        self.assertTrue(np.allclose(results[0], results[1]))
        self.assertEqual(results[0].shape, (1, 3))
        self.assertTrue(np.allclose(results[0], expected_out))

    def le(self, **kwargs):
        """
        Less equal.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[3], dtype='int64')
        y = fluid.data(name='y', shape=[1, 3], dtype='float32')
        op_le = pfl_mpc.layers.less_equal(x=x, y=y)
        math_le = x <= y
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[op_le, math_le])

        self.assertTrue(np.allclose(results[0], results[1]))
        self.assertEqual(results[0].shape, (1, 3))
        self.assertTrue(np.allclose(results[0], expected_out))

    def equal(self, **kwargs):
        """
        Equal.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[3], dtype='int64')
        y = fluid.data(name='y', shape=[1, 3], dtype='float32')
        op_eq = pfl_mpc.layers.equal(x=x, y=y)
        math_eq = x == y
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[op_eq, math_eq])

        self.assertTrue(np.allclose(results[0], results[1]))
        self.assertEqual(results[0].shape, (1, 3))
        self.assertTrue(np.allclose(results[0], expected_out))

    def not_equal(self, **kwargs):
        """
        Not equal.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[3], dtype='int64')
        y = fluid.data(name='y', shape=[1, 3], dtype='float32')
        op_ne = pfl_mpc.layers.not_equal(x=x, y=y)
        math_ne = x != y
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[op_ne, math_ne])

        self.assertTrue(np.allclose(results[0], results[1]))
        self.assertEqual(results[0].shape, (1, 3))
        self.assertTrue(np.allclose(results[0], expected_out))

    def test_gt(self):
        data_1 = [np.array([[65536, 65536, 65536], [65536, 65536, 65536]]).astype('int64')] * self.party_num
        data_2 = [np.array([[5, 3, 2]]).astype('float32')] * self.party_num
        expect_results = [np.array([[0, 0, 1]])] * self.party_num
        ret = self.multi_party_run(target=self.gt,
                                   data_1=data_1,
                                   data_2=data_2,
                                   expect_results=expect_results)
        self.assertEqual(ret[0], True)

    def test_ge(self):
        data_1 = [np.array([[65536, 65536, 65536], [65536, 65536, 65536]]).astype('int64')] * self.party_num
        data_2 = [np.array([[5, 3, 2]]).astype('float32')] * self.party_num
        expect_results = [np.array([[0, 1, 1]])] * self.party_num
        ret = self.multi_party_run(target=self.ge,
                                   data_1=data_1,
                                   data_2=data_2,
                                   expect_results=expect_results)
        self.assertEqual(ret[0], True)

    def test_lt(self):
        data_1 = [np.array([[65536, 65536, 65536], [65536, 65536, 65536]]).astype('int64')] * self.party_num
        data_2 = [np.array([[5, 3, 2]]).astype('float32')] * self.party_num
        expect_results = [np.array([[1, 0, 0]])] * self.party_num
        ret = self.multi_party_run(target=self.lt,
                                   data_1=data_1,
                                   data_2=data_2,
                                   expect_results=expect_results)
        self.assertEqual(ret[0], True)

    def test_le(self):
        data_1 = [np.array([[65536, 65536, 65536], [65536, 65536, 65536]]).astype('int64')] * self.party_num
        data_2 = [np.array([[5, 3, 2]]).astype('float32')] * self.party_num
        expect_results = [np.array([[1, 1, 0]])] * self.party_num
        ret = self.multi_party_run(target=self.le,
                                   data_1=data_1,
                                   data_2=data_2,
                                   expect_results=expect_results)
        self.assertEqual(ret[0], True)

    def test_equal(self):
        data_1 = [np.array([[65536, 65536, 65536], [65536, 65536, 65536]]).astype('int64')] * self.party_num
        data_2 = [np.array([[5, 3, 2]]).astype('float32')] * self.party_num
        expect_results = [np.array([[0, 1, 0]])] * self.party_num
        ret = self.multi_party_run(target=self.equal,
                                   data_1=data_1,
                                   data_2=data_2,
                                   expect_results=expect_results)
        self.assertEqual(ret[0], True)

    def test_not_equal(self):
        data_1 = [np.array([[65536, 65536, 65536], [65536, 65536, 65536]]).astype('int64')] * self.party_num
        data_2 = [np.array([[5, 3, 2]]).astype('float32')] * self.party_num
        expect_results = [np.array([[1, 0, 1]])] * self.party_num
        ret = self.multi_party_run(target=self.not_equal,
                                   data_1=data_1,
                                   data_2=data_2,
                                   expect_results=expect_results)
        self.assertEqual(ret[0], True)


if __name__ == '__main__':
    unittest.main()

python/paddle_fl/mpc/tests/unittests/test_op_fc.py
0 → 100644
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module test fc op.
"""
import unittest
from multiprocessing import Manager

import numpy as np
import paddle.fluid as fluid
import paddle_fl.mpc as pfl_mpc
import paddle_fl.mpc.data_utils.aby3 as aby3

import test_op_base


class TestOpFC(test_op_base.TestOpBase):

    def fc(self, **kwargs):
        """
        Normal case.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        data_1 = pfl_mpc.data(name='data_1', shape=[3, 2], dtype='int64')
        fc_out = pfl_mpc.layers.fc(input=data_1,
                                   size=1,
                                   param_attr=fluid.ParamAttr(
                                       initializer=fluid.initializer.ConstantInitializer(0)))
        exe = fluid.Executor(place=fluid.CPUPlace())
        exe.run(fluid.default_startup_program())
        results = exe.run(feed={'data_1': d_1}, fetch_list=[fc_out])

        self.assertEqual(results[0].shape, (2, 3, 1))
        return_results.append(results[0])

    def test_fc(self):
        data_1 = np.arange(0, 6).reshape((3, 2))
        data_1_shares = aby3.make_shares(data_1)
        data_1_all3shares = np.array([aby3.get_aby3_shares(data_1_shares, i) for i in range(3)])

        return_results = Manager().list()
        ret = self.multi_party_run(target=self.fc,
                                   data_1=data_1_all3shares,
                                   return_results=return_results)
        self.assertEqual(ret[0], True)

        revealed = aby3.reconstruct(np.array(return_results))
        expected_out = np.array([[0], [0], [0]])
        self.assertTrue(np.allclose(revealed, expected_out, atol=1e-4))


if __name__ == '__main__':
    unittest.main()

python/paddle_fl/mpc/tests/unittests/test_op_mean.py
0 → 100644
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module test mean op.
"""
import unittest
from multiprocessing import Manager

import numpy as np
import paddle.fluid as fluid
import paddle_fl.mpc as pfl_mpc
import paddle_fl.mpc.data_utils.aby3 as aby3

import test_op_base


class TestOpMean(test_op_base.TestOpBase):

    def mean(self, **kwargs):
        """
        Mean.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        data_1 = pfl_mpc.data(name='data_1', shape=[2, 4], dtype='int64')
        op_mean = pfl_mpc.layers.mean(data_1)
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'data_1': d_1}, fetch_list=[op_mean])

        self.assertEqual(results[0].shape, (2, 1))
        return_results.append(results[0])

    def test_mean(self):
        """
        Test normal case.
        :return:
        """
        data_1 = np.array([[1, 2, 3, 4], [5, 6, 7, 8]])
        data_1_shares = aby3.make_shares(data_1)
        data_1_all3shares = np.array([aby3.get_aby3_shares(data_1_shares, i) for i in range(3)])

        return_results = Manager().list()
        ret = self.multi_party_run(target=self.mean,
                                   data_1=data_1_all3shares,
                                   return_results=return_results)
        self.assertEqual(ret[0], True)

        revealed = aby3.reconstruct(np.array(return_results))
        expected_out = np.array([4.5])
        self.assertTrue(np.allclose(revealed, expected_out, atol=1e-4))


if __name__ == '__main__':
    unittest.main()

python/paddle_fl/mpc/tests/unittests/test_op_mul.py
0 → 100644
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module test mul op.
"""
import unittest
from multiprocessing import Manager

import numpy as np
import paddle.fluid as fluid
import paddle_fl.mpc as pfl_mpc
import paddle_fl.mpc.data_utils.aby3 as aby3

import test_op_base


class TestOpMul(test_op_base.TestOpBase):

    def mul(self, **kwargs):
        """
        Mul.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[2, 2], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[2, 2], dtype='int64')
        op_mul = pfl_mpc.layers.mul(x=x, y=y)
        # math_mul = data_1 * data_2
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[op_mul])

        self.assertEqual(results[0].shape, (2, 2, 2))
        return_results.append(results[0])

    def diff_dim_mul(self, **kwargs):
        """
        Mul with different dimensions.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[3, 4], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[4, 5], dtype='int64')
        op_mul = pfl_mpc.layers.mul(x=x, y=y)
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[op_mul])

        self.assertEqual(results[0].shape, (2, 3, 5))
        return_results.append(results[0])

    def test_mul(self):
        """
        Test normal case.
        :return:
        """
        data_1 = np.arange(0, 4).reshape((2, 2))
        data_2 = np.full(shape=(2, 2), fill_value=2)
        data_1_shares = aby3.make_shares(data_1)
        data_2_shares = aby3.make_shares(data_2)
        data_1_all3shares = np.array([aby3.get_aby3_shares(data_1_shares, i) for i in range(3)])
        data_2_all3shares = np.array([aby3.get_aby3_shares(data_2_shares, i) for i in range(3)])

        return_results = Manager().list()
        ret = self.multi_party_run(target=self.mul,
                                   data_1=data_1_all3shares,
                                   data_2=data_2_all3shares,
                                   return_results=return_results)
        self.assertEqual(ret[0], True)

        revealed = aby3.reconstruct(np.array(return_results))
        expected_out = np.array([[2, 2], [10, 10]])
        self.assertTrue(np.allclose(revealed, expected_out, atol=1e-4))

    def test_diff_dim_mul(self):
        data_1 = np.arange(0, 12).reshape((3, 4))
        data_2 = np.full(shape=(4, 5), fill_value=2)
        data_1_shares = aby3.make_shares(data_1)
        data_2_shares = aby3.make_shares(data_2)
        data_1_all3shares = np.array([aby3.get_aby3_shares(data_1_shares, i) for i in range(3)])
        data_2_all3shares = np.array([aby3.get_aby3_shares(data_2_shares, i) for i in range(3)])

        return_results = Manager().list()
        ret = self.multi_party_run(target=self.diff_dim_mul,
                                   data_1=data_1_all3shares,
                                   data_2=data_2_all3shares,
                                   return_results=return_results)
        self.assertEqual(ret[0], True)

        revealed = aby3.reconstruct(np.array(return_results))
        expected_out = data_1.dot(data_2)
        self.assertTrue(np.allclose(revealed, expected_out, atol=1e-4))


if __name__ == '__main__':
    unittest.main()

python/paddle_fl/mpc/tests/unittests/test_datautils_load_filter.py → python/paddle_fl/mpc/tests/unittests/test_op_relu.py
@@ -12,55 +12,55 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This module test load_data and data_filter_by_id functions in data_utils module.
+This module test relu op.
 """
-import sys
-sys.path.append('../../../')
 import unittest
+from multiprocessing import Manager
+
+import numpy as np
+import paddle.fluid as fluid
 import paddle_fl.mpc as pfl_mpc
+import paddle_fl.mpc.data_utils.aby3 as aby3
+
+import test_op_base


-class TestDataUtilsLoadFilter(unittest.TestCase):
-
-    def __init__(self, methodName='runTest'):
-        super(TestDataUtilsLoadFilter, self).__init__(methodName)
-        self.test_tmp_file = './load_data_test.tmp'
-
-    def create_tmp_file(self):
-        with open(self.test_tmp_file, 'w') as f:
-            f.write('111\n')
-            f.write('222\n')
-            f.write('333')
-
-    def delete_tmp_file(self):
-        import os
-        os.remove(self.test_tmp_file)
-
-    def setUp(self):
-        self.create_tmp_file()
-
-    def tearDown(self):
-        self.delete_tmp_file()
-
-    def test_load_data(self):
-        expected_values = ['111', '222', '333']
-        du = pfl_mpc.data_utils.DataUtils()
-        for data, value in zip(du.load_data(self.test_tmp_file), expected_values):
-            self.assertEqual(data, value)
-
-    def test_filter(self):
-        to_filter = ["0, 0.1, 0.1, 0.1, 1", "1, 0.2, 0.2, 0.2, 0", "2, 0.3, 0.3, 0.3, 1"]
-        id_list = [0, 2]
-        expected_results = ["0, 0.1, 0.1, 0.1, 1", "2, 0.3, 0.3, 0.3, 1"]
-        du = pfl_mpc.data_utils.DataUtils()
-        filter_results = du.data_filter_by_id(input_list=to_filter, id_list=id_list)
-        for result, expect in zip(filter_results, expected_results):
-            self.assertEqual(result, expect)
+class TestOpRelu(test_op_base.TestOpBase):
+
+    def relu(self, **kwargs):
+        """
+        Normal case.
+        :param kwargs:
+        :return:
+        """
+        role = kwargs['role']
+        d_1 = kwargs['data_1'][role]
+        return_results = kwargs['return_results']
+
+        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
+        data_1 = pfl_mpc.data(name='data_1', shape=[3, 2], dtype='int64')
+        relu_out = pfl_mpc.layers.relu(input=data_1)
+        exe = fluid.Executor(place=fluid.CPUPlace())
+        results = exe.run(feed={'data_1': d_1}, fetch_list=[relu_out])
+
+        self.assertEqual(results[0].shape, (2, 3, 2))
+        return_results.append(results[0])
+
+    def test_relu(self):
+        data_1 = np.arange(-3, 3).reshape((3, 2))
+        data_1_shares = aby3.make_shares(data_1)
+        data_1_all3shares = np.array([aby3.get_aby3_shares(data_1_shares, i) for i in range(3)])
+
+        return_results = Manager().list()
+        ret = self.multi_party_run(target=self.relu,
+                                   data_1=data_1_all3shares,
+                                   return_results=return_results)
+        self.assertEqual(ret[0], True)
+
+        revealed = aby3.reconstruct(np.array(return_results))
+        expected_out = np.array([[0, 0], [0, 0], [1, 2]])
+        self.assertTrue(np.allclose(revealed, expected_out, atol=1e-4))


 if __name__ == '__main__':

python/paddle_fl/mpc/tests/unittests/test_op_square.py
0 → 100644
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module test square op.
"""
import unittest
from multiprocessing import Manager

import numpy as np
import paddle.fluid as fluid
import paddle_fl.mpc as pfl_mpc
import paddle_fl.mpc.data_utils.aby3 as aby3

import test_op_base


class TestOpSquare(test_op_base.TestOpBase):

    def square(self, **kwargs):
        """
        Square.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        data_1 = pfl_mpc.data(name='x', shape=[2, 2], dtype='int64')
        op_square = pfl_mpc.layers.square(data_1)
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1}, fetch_list=[op_square])

        self.assertEqual(results[0].shape, (2, 2, 2))
        return_results.append(results[0])

    def test_square(self):
        """
        Test normal case.
        :return:
        """
        data_1 = np.full(shape=(2, 2), fill_value=3)
        data_1_shares = aby3.make_shares(data_1)
        data_1_all3shares = np.array([aby3.get_aby3_shares(data_1_shares, i) for i in range(3)])

        return_results = Manager().list()
        ret = self.multi_party_run(target=self.square,
                                   data_1=data_1_all3shares,
                                   return_results=return_results)
        self.assertEqual(ret[0], True)

        revealed = aby3.reconstruct(np.array(return_results))
        expected_out = np.array([[9, 9], [9, 9]])
        self.assertTrue(np.allclose(revealed, expected_out, atol=1e-4))


if __name__ == '__main__':
    unittest.main()

python/paddle_fl/mpc/tests/unittests/test_op_square_error_cost.py
0 → 100644
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module test square_error_cost op.
"""
import unittest
from multiprocessing import Manager

import numpy as np
import paddle.fluid as fluid
import paddle_fl.mpc as pfl_mpc
import paddle_fl.mpc.data_utils.aby3 as aby3

import test_op_base


class TestOpSquareErrorCost(test_op_base.TestOpBase):

    def square_error_cost(self, **kwargs):
        """
        Normal case.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        data_1 = pfl_mpc.data(name='data_1', shape=[2, 2], dtype='int64')
        data_2 = pfl_mpc.data(name='data_2', shape=[2, 2], dtype='int64')
        cost = pfl_mpc.layers.square_error_cost(input=data_1, label=data_2)
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'data_1': d_1, 'data_2': d_2}, fetch_list=[cost])

        self.assertEqual(results[0].shape, (2, 2, 2))
        return_results.append(results[0])

    def test_square_error_cost(self):
        data_1 = np.arange(0, 4).reshape((2, 2))
        data_2 = np.full(shape=(2, 2), fill_value=2)
        data_1_shares = aby3.make_shares(data_1)
        data_2_shares = aby3.make_shares(data_2)
        data_1_all3shares = np.array([aby3.get_aby3_shares(data_1_shares, i) for i in range(3)])
        data_2_all3shares = np.array([aby3.get_aby3_shares(data_2_shares, i) for i in range(3)])

        return_results = Manager().list()
        ret = self.multi_party_run(target=self.square_error_cost,
                                   data_1=data_1_all3shares,
                                   data_2=data_2_all3shares,
                                   return_results=return_results)
        self.assertEqual(ret[0], True)

        revealed = aby3.reconstruct(np.array(return_results))
        expected_out = np.array([[4, 1], [0, 1]])
        self.assertTrue(np.allclose(revealed, expected_out, atol=1e-4))


if __name__ == '__main__':
    unittest.main()

python/paddle_fl/mpc/tests/unittests/test_op_sub.py
0 → 100644
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module test sub op.
"""
import unittest

import numpy as np
import paddle.fluid as fluid
import paddle_fl.mpc as pfl_mpc

import test_op_base


class TestOpSub(test_op_base.TestOpBase):

    def elementwise_sub(self, **kwargs):
        """
        Normal case.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[5], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[5], dtype='int64')
        op_sub = pfl_mpc.layers.elementwise_sub(x=x, y=y)
        math_sub = x - y
        exe = fluid.Executor(place=fluid.CPUPlace())
        sub_results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[op_sub, math_sub])

        self.assertTrue(np.allclose(sub_results[0], sub_results[1]))
        self.assertEqual(sub_results[0].shape, (2, 5))
        self.assertTrue(np.allclose(sub_results[0], expected_out))

    def mul_dim_sub(self, **kwargs):
        """
        Add two variables with multi dimensions.
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[2, 2], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[2, 2], dtype='int64')
        sub = x - y
        exe = fluid.Executor(place=fluid.CPUPlace())
        sub_results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[sub])

        self.assertTrue(np.allclose(sub_results[0], expected_out))

    def test_elementwise_sub(self):
        data_1 = [np.array([[1, 2, 3, 4, 5], [1, 2, 3, 4, 5]]).astype('int64')] * self.party_num
        data_2 = [np.array([[1, 1, 1, 1, 1], [1, 1, 1, 1, 1]]).astype('int64')] * self.party_num
        expect_results = [np.array([[0, 1, 2, 3, 4], [0, 1, 2, 3, 4]])] * self.party_num
        ret = self.multi_party_run(target=self.elementwise_sub,
                                   data_1=data_1,
                                   data_2=data_2,
                                   expect_results=expect_results)
        self.assertEqual(ret[0], True)

    def test_multi_dim_sub(self):
        data_1 = [np.array([[[0, 0], [1, 1]], [[0, 0], [1, 1]]]).astype('int64')] * self.party_num
        data_2 = [np.array([[[0, 0], [-1, -1]], [[0, 0], [-1, -1]]]).astype('int64')] * self.party_num
        expect_results = [np.array([[[0, 0], [2, 2]], [[0, 0], [2, 2]]])] * self.party_num
        ret = self.multi_party_run(target=self.mul_dim_sub,
                                   data_1=data_1,
                                   data_2=data_2,
                                   expect_results=expect_results)
        self.assertEqual(ret[0], True)


if __name__ == '__main__':
    unittest.main()

python/paddle_fl/mpc/tests/unittests/test_op_sum.py
0 → 100644
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module test sum op.
"""
import unittest

import numpy as np
import paddle.fluid as fluid
import paddle_fl.mpc as pfl_mpc

import test_op_base


class TestOpSum(test_op_base.TestOpBase):

    def sum(self, **kwargs):
        """
        Test normal case.
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        d_3 = kwargs['data_3'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        data_1 = pfl_mpc.data(name='data_1', shape=[4], dtype='int64')
        data_2 = pfl_mpc.data(name='data_2', shape=[4], dtype='int64')
        data_3 = pfl_mpc.data(name='data_3', shape=[4], dtype='int64')
        op_sum = pfl_mpc.layers.sum([data_1, data_2, data_3])
        math_sum = data_1 + data_2 + data_3
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'data_1': d_1, 'data_2': d_2, 'data_3': d_3},
                          fetch_list=[op_sum, math_sum])

        self.assertTrue(np.allclose(results[0], results[1]))
        self.assertEqual(results[0].shape, (2, 4))
        self.assertTrue(np.allclose(results[0], expected_out))

    def test_sum(self):
        data_1 = [np.array([[1, 1, 1, 1], [1, 1, 1, 1]]).astype('int64')] * self.party_num
        data_2 = [np.array([[2, 2, 2, 2], [2, 2, 2, 2]]).astype('int64')] * self.party_num
        data_3 = [np.array([[3, 3, 3, 3], [3, 3, 3, 3]]).astype('int64')] * self.party_num
        expect_results = [np.array([[6, 6, 6, 6], [6, 6, 6, 6]])] * self.party_num
        ret = self.multi_party_run(target=self.sum,
                                   data_1=data_1,
                                   data_2=data_2,
                                   data_3=data_3,
                                   expect_results=expect_results)
        self.assertEqual(ret[0], True)


if __name__ == '__main__':
    unittest.main()