Unverified commit 63d03267 authored by Xuefeng Xu, committed by GitHub

support federated linear regression (#583)

* move paillier & ckks into crypto folder

* add regression dataset for VFL

* support federated linear regression
Parent 240b8564
@@ -103,6 +103,12 @@ datasets:
source: "data/FL/multiclass/hfl/test/client1.csv"
# FL regression
- description: "regression_vfl_train_host"
model: "csv"
source: "data/FL/regression/vfl/train/host.csv"
- description: "regression_vfl_test_host"
model: "csv"
source: "data/FL/regression/vfl/test/host.csv"
- description: "regression_hfl_train_client1"
model: "csv"
source: "data/FL/regression/hfl/train/client1.csv"
@@ -82,6 +82,12 @@ datasets:
source: "data/FL/multiclass/hfl/test/client2.csv"
# FL regression
- description: "regression_vfl_train_guest"
model: "csv"
source: "data/FL/regression/vfl/train/guest.csv"
- description: "regression_vfl_test_guest"
model: "csv"
source: "data/FL/regression/vfl/test/guest.csv"
- description: "regression_hfl_train_client2"
model: "csv"
source: "data/FL/regression/hfl/train/client2.csv"
@@ -83,6 +83,12 @@ datasets:
source: "data/FL/multiclass/hfl/test/client1.csv"
# FL regression
- description: "regression_vfl_train_host"
model: "csv"
source: "data/FL/regression/vfl/train/host.csv"
- description: "regression_vfl_test_host"
model: "csv"
source: "data/FL/regression/vfl/test/host.csv"
- description: "regression_hfl_train_client1"
model: "csv"
source: "data/FL/regression/hfl/train/client1.csv"
@@ -63,6 +63,12 @@ datasets:
source: "data/FL/multiclass/hfl/test/client2.csv"
# FL regression
- description: "regression_vfl_train_guest"
model: "csv"
source: "data/FL/regression/vfl/train/guest.csv"
- description: "regression_vfl_test_guest"
model: "csv"
source: "data/FL/regression/vfl/test/guest.csv"
- description: "regression_hfl_train_client2"
model: "csv"
source: "data/FL/regression/hfl/train/client2.csv"
id,CRIM,ZN,INDUS,CHAS,NOX,RM
79,0.08387,0.0,12.83,0,0.437,5.874
96,0.11504,0.0,2.89,0,0.445,6.163
19,0.7258,0.0,8.14,0,0.538,5.727
371,9.2323,0.0,18.1,0,0.631,6.216
173,0.09178,0.0,4.05,0,0.51,6.416
315,0.25356,0.0,9.9,0,0.544,5.705
16,1.05393,0.0,8.14,0,0.538,5.935
456,4.66883,0.0,18.1,0,0.713,5.976
238,0.08244,30.0,4.93,0,0.428,6.481
219,0.11425,0.0,13.89,1,0.55,6.373
231,0.46296,0.0,6.2,0,0.504,7.412
65,0.03584,80.0,3.37,0,0.398,6.29
88,0.0566,0.0,3.41,0,0.489,7.007
370,6.53876,0.0,18.1,1,0.631,7.016
495,0.17899,0.0,9.69,0,0.585,5.67
145,2.37934,0.0,19.58,0,0.871,6.13
22,1.23247,0.0,8.14,0,0.538,6.142
141,1.62864,0.0,21.89,0,0.624,5.019
286,0.01965,80.0,1.76,0,0.385,6.23
210,0.17446,0.0,10.59,1,0.489,5.96
135,0.55778,0.0,21.89,0,0.624,6.335
42,0.1415,0.0,6.91,0,0.448,6.169
452,5.09017,0.0,18.1,0,0.713,6.297
367,13.5222,0.0,18.1,0,0.631,3.863
199,0.0315,95.0,1.47,0,0.403,6.975
188,0.12579,45.0,3.44,0,0.437,6.556
385,16.8118,0.0,18.1,0,0.7,5.277
243,0.12757,30.0,4.93,0,0.428,6.393
349,0.02899,40.0,1.25,0,0.429,6.939
296,0.05372,0.0,13.92,0,0.437,6.549
406,20.7162,0.0,18.1,0,0.659,4.138
314,0.3692,0.0,9.9,0,0.544,6.567
398,38.3518,0.0,18.1,0,0.693,5.453
221,0.40771,0.0,6.2,1,0.507,6.164
174,0.08447,0.0,4.05,0,0.51,5.859
232,0.57529,0.0,6.2,0,0.507,8.337
92,0.04203,28.0,15.04,0,0.464,6.442
97,0.12083,0.0,2.89,0,0.445,8.069
450,6.71772,0.0,18.1,0,0.713,6.749
108,0.12802,0.0,8.56,0,0.52,6.474
183,0.10008,0.0,2.46,0,0.488,6.563
165,2.924,0.0,19.58,0,0.605,6.101
332,0.03466,35.0,6.06,0,0.4379,6.031
74,0.07896,0.0,12.83,0,0.437,6.273
347,0.0187,85.0,4.15,0,0.429,6.516
204,0.02009,95.0,2.68,0,0.4161,8.034
192,0.08664,45.0,3.44,0,0.437,7.178
228,0.29819,0.0,6.2,0,0.504,7.686
26,0.67191,0.0,8.14,0,0.538,5.813
310,2.63548,0.0,9.9,0,0.544,4.973
103,0.21161,0.0,8.56,0,0.52,6.137
496,0.2896,0.0,9.69,0,0.585,5.39
215,0.19802,0.0,10.59,0,0.489,6.182
474,8.05579,0.0,18.1,0,0.584,5.427
407,11.9511,0.0,18.1,0,0.659,5.608
80,0.04113,25.0,4.86,0,0.426,6.727
267,0.57834,20.0,3.97,0,0.575,8.297
304,0.05515,33.0,2.18,0,0.472,7.236
431,10.0623,0.0,18.1,0,0.584,6.833
147,2.36862,0.0,19.58,0,0.871,4.926
71,0.15876,0.0,10.81,0,0.413,5.961
212,0.21719,0.0,10.59,1,0.489,5.807
487,4.83567,0.0,18.1,0,0.583,5.905
363,4.22239,0.0,18.1,1,0.77,5.803
223,0.6147,0.0,6.2,0,0.507,6.618
464,7.83932,0.0,18.1,0,0.655,6.209
216,0.0456,0.0,13.89,1,0.55,5.888
291,0.07886,80.0,4.95,0,0.411,7.148
15,0.62739,0.0,8.14,0,0.538,5.834
164,2.24236,0.0,19.58,0,0.605,5.854
244,0.20608,22.0,5.86,0,0.431,5.593
2,0.02729,0.0,7.07,0,0.469,7.185
206,0.22969,0.0,10.59,0,0.489,6.326
112,0.12329,0.0,10.01,0,0.547,5.913
328,0.06617,0.0,3.24,0,0.46,5.868
218,0.11069,0.0,13.89,1,0.55,5.951
397,7.67202,0.0,18.1,0,0.693,5.747
227,0.41238,0.0,6.2,0,0.504,7.163
485,3.67367,0.0,18.1,0,0.583,6.312
46,0.18836,0.0,6.91,0,0.448,5.786
233,0.33147,0.0,6.2,0,0.507,8.247
111,0.10084,0.0,10.01,0,0.547,6.715
68,0.13554,12.5,6.07,0,0.409,5.594
70,0.08826,0.0,10.81,0,0.413,6.417
104,0.1396,0.0,8.56,0,0.52,6.167
479,14.3337,0.0,18.1,0,0.614,6.229
85,0.05735,0.0,4.49,0,0.449,6.63
239,0.09252,30.0,4.93,0,0.428,6.606
144,2.77974,0.0,19.58,0,0.871,4.903
7,0.14455,12.5,7.87,0,0.524,6.172
159,1.42502,0.0,19.58,0,0.871,6.51
253,0.36894,22.0,5.86,0,0.431,8.259
255,0.03548,80.0,3.64,0,0.392,5.876
200,0.01778,95.0,1.47,0,0.403,7.135
116,0.13158,0.0,10.01,0,0.547,6.176
242,0.1029,30.0,4.93,0,0.428,6.358
313,0.26938,0.0,9.9,0,0.544,6.266
120,0.06899,0.0,25.65,0,0.581,5.87
379,17.8667,0.0,18.1,0,0.671,6.223
378,23.6482,0.0,18.1,0,0.671,6.38
462,6.65492,0.0,18.1,0,0.713,6.317
44,0.12269,0.0,6.91,0,0.448,6.069
id,AGE,DIS,RAD,TAX,PTRATIO,B,LSTAT,y
79,36.6,4.5026,5,398.0,18.7,396.06,9.1,20.3
96,69.6,3.4952,2,276.0,18.0,391.83,11.34,21.4
19,69.5,3.7965,4,307.0,21.0,390.95,11.28,18.2
371,100.0,1.1691,24,666.0,20.2,366.15,9.53,50.0
173,84.1,2.6463,5,296.0,16.6,395.5,9.04,23.6
315,77.7,3.945,4,304.0,18.4,396.42,11.5,16.2
16,29.3,4.4986,4,307.0,21.0,386.85,6.58,23.1
456,87.9,2.5806,24,666.0,20.2,10.48,19.01,12.7
238,18.5,6.1899,6,300.0,16.6,379.41,6.36,23.7
219,92.4,3.3633,5,276.0,16.4,393.74,10.5,23.0
231,76.9,3.6715,8,307.0,17.4,376.14,5.25,31.7
65,17.8,6.6115,4,337.0,16.1,396.9,4.67,23.5
88,86.3,3.4217,2,270.0,17.8,396.9,5.5,23.6
370,97.5,1.2024,24,666.0,20.2,392.05,2.96,50.0
495,28.8,2.7986,6,391.0,19.2,393.29,17.6,23.1
145,100.0,1.4191,5,403.0,14.7,172.91,27.8,13.8
22,91.7,3.9769,4,307.0,21.0,396.9,18.72,15.2
141,100.0,1.4394,4,437.0,21.2,396.9,34.41,14.4
286,31.5,9.0892,1,241.0,18.2,341.6,12.93,20.1
210,92.1,3.8771,4,277.0,18.6,393.25,17.27,21.7
135,98.2,2.1107,4,437.0,21.2,394.67,16.96,18.1
42,6.6,5.7209,3,233.0,17.9,383.37,5.81,25.3
452,91.8,2.3682,24,666.0,20.2,385.09,17.27,16.1
367,100.0,1.5106,24,666.0,20.2,131.42,13.33,23.1
199,15.3,7.6534,3,402.0,17.0,396.9,4.56,34.9
188,29.1,4.5667,5,398.0,15.2,382.84,4.56,29.8
385,98.1,1.4261,24,666.0,20.2,396.9,30.81,7.2
243,7.8,7.0355,6,300.0,16.6,374.71,5.19,23.7
349,34.5,8.7921,1,335.0,19.7,389.85,5.89,26.6
296,51.0,5.9604,4,289.0,16.0,392.85,7.39,27.1
406,100.0,1.1781,24,666.0,20.2,370.22,23.34,11.9
314,87.3,3.6023,4,304.0,18.4,395.69,9.28,23.8
398,100.0,1.4896,24,666.0,20.2,396.9,30.59,5.0
221,91.3,3.048,8,307.0,17.4,395.24,21.46,21.7
174,68.7,2.7019,5,296.0,16.6,393.23,9.64,22.6
232,73.3,3.8384,8,307.0,17.4,385.91,2.47,41.7
92,53.6,3.6659,4,270.0,18.2,395.01,8.16,22.9
97,76.0,3.4952,2,276.0,18.0,396.9,4.21,38.7
450,92.6,2.3236,24,666.0,20.2,0.32,17.44,13.4
108,97.1,2.4329,5,384.0,20.9,395.24,12.27,19.8
183,95.6,2.847,3,193.0,17.8,396.9,5.68,32.5
165,93.0,2.2834,5,403.0,14.7,240.16,9.81,25.0
332,23.3,6.6407,1,304.0,16.9,362.25,7.83,19.4
74,6.0,4.2515,5,398.0,18.7,394.92,6.78,24.1
347,27.7,8.5353,4,351.0,17.9,392.43,6.36,23.1
204,31.9,5.118,4,224.0,14.7,390.55,2.88,50.0
192,26.3,6.4798,5,398.0,15.2,390.49,2.87,36.4
228,17.0,3.3751,8,307.0,17.4,377.51,3.92,46.7
26,90.3,4.682,4,307.0,21.0,376.88,14.81,16.6
310,37.8,2.5194,4,304.0,18.4,350.45,12.64,16.1
103,87.4,2.7147,5,384.0,20.9,394.47,13.44,19.3
496,72.9,2.7986,6,391.0,19.2,396.9,21.14,19.7
215,42.4,3.9454,4,277.0,18.6,393.63,9.47,25.0
474,95.4,2.4298,24,666.0,20.2,352.58,18.14,13.8
407,100.0,1.2852,24,666.0,20.2,332.09,12.13,27.9
80,33.5,5.4007,4,281.0,19.0,396.9,5.29,28.0
267,67.0,2.4216,5,264.0,13.0,384.54,7.44,50.0
304,41.1,4.022,7,222.0,18.4,393.68,6.93,36.1
431,94.3,2.0882,24,666.0,20.2,81.33,19.69,14.1
147,95.7,1.4608,5,403.0,14.7,391.71,29.53,14.6
71,17.5,5.2873,4,305.0,19.2,376.94,9.88,21.7
212,53.8,3.6526,4,277.0,18.6,390.94,16.03,22.4
487,53.2,3.1523,24,666.0,20.2,388.22,11.45,20.6
363,89.0,1.9047,24,666.0,20.2,353.04,14.64,16.8
223,80.8,3.2721,8,307.0,17.4,396.9,7.6,30.1
464,65.4,2.9634,24,666.0,20.2,396.9,13.22,21.4
216,56.0,3.1121,5,276.0,16.4,392.8,13.51,23.3
291,27.7,5.1167,4,245.0,19.2,396.9,3.56,37.3
15,56.5,4.4986,4,307.0,21.0,395.62,8.47,19.9
164,91.8,2.422,5,403.0,14.7,395.11,11.64,22.7
244,76.5,7.9549,7,330.0,19.1,372.49,12.5,17.6
2,61.1,4.9671,2,242.0,17.8,392.83,4.03,34.7
206,52.5,4.3549,4,277.0,18.6,394.87,10.97,24.4
112,92.9,2.3534,6,432.0,17.8,394.95,16.21,18.8
328,25.8,5.2146,4,430.0,16.9,382.44,9.97,19.3
218,93.8,2.8893,5,276.0,16.4,396.9,17.92,21.5
397,98.9,1.6334,24,666.0,20.2,393.1,19.92,8.5
227,79.9,3.2157,8,307.0,17.4,372.08,6.36,31.6
485,51.9,3.9917,24,666.0,20.2,388.62,10.58,21.2
46,33.3,5.1004,3,233.0,17.9,396.9,14.15,20.0
233,70.4,3.6519,8,307.0,17.4,378.95,3.95,48.3
111,81.6,2.6775,6,432.0,17.8,395.59,10.16,22.8
68,36.8,6.498,4,345.0,18.9,396.9,13.09,17.4
70,6.6,5.2873,4,305.0,19.2,383.73,6.72,24.2
104,90.0,2.421,5,384.0,20.9,392.69,12.33,20.1
479,88.0,1.9512,24,666.0,20.2,383.32,13.11,21.4
85,56.1,4.4377,3,247.0,18.5,392.3,6.53,26.6
239,42.2,6.1899,6,300.0,16.6,383.78,7.37,23.3
144,97.8,1.3459,5,403.0,14.7,396.9,29.29,11.8
7,96.1,5.9505,5,311.0,15.2,396.9,19.15,27.1
159,100.0,1.7659,5,403.0,14.7,364.31,7.39,23.3
253,8.4,8.9067,7,330.0,19.1,396.9,3.54,42.8
255,19.1,9.2203,1,315.0,16.4,395.18,9.25,20.9
200,13.9,7.6534,3,402.0,17.0,384.3,4.45,32.9
116,72.5,2.7301,6,432.0,17.8,393.3,12.04,21.2
242,52.9,7.0355,6,300.0,16.6,372.75,11.22,22.2
313,82.8,3.2628,4,304.0,18.4,393.39,7.9,21.6
120,69.7,2.2577,2,188.0,19.1,389.15,14.37,22.0
379,100.0,1.3861,24,666.0,20.2,393.74,21.78,10.2
378,96.2,1.3861,24,666.0,20.2,396.9,23.69,13.1
462,83.0,2.7344,24,666.0,20.2,396.9,13.99,19.5
44,40.0,5.7209,3,233.0,17.9,389.39,9.55,21.2
id,CRIM,ZN,INDUS,CHAS,NOX,RM
91,0.03932,0.0,3.41,0,0.489,6.405
273,0.22188,20.0,6.96,1,0.464,7.691
365,4.55587,0.0,18.1,0,0.718,3.561
270,0.29916,20.0,6.96,0,0.464,5.856
469,13.0751,0.0,18.1,0,0.58,5.713
260,0.54011,20.0,3.97,0,0.647,7.203
248,0.16439,22.0,5.86,0,0.431,6.433
471,4.03841,0.0,18.1,0,0.532,6.229
352,0.07244,60.0,1.69,0,0.411,5.884
211,0.37578,0.0,10.59,1,0.489,5.404
41,0.12744,0.0,6.91,0,0.448,6.77
389,8.15174,0.0,18.1,0,0.7,5.39
466,3.77498,0.0,18.1,0,0.655,5.952
266,0.7857,20.0,3.97,0,0.647,7.014
179,0.0578,0.0,2.46,0,0.488,6.98
36,0.09744,0.0,5.96,0,0.499,5.841
8,0.21124,12.5,7.87,0,0.524,5.631
375,19.6091,0.0,18.1,0,0.671,7.313
257,0.61154,20.0,3.97,0,0.647,8.704
196,0.04011,80.0,1.52,0,0.404,7.287
433,5.58107,0.0,18.1,0,0.713,6.436
10,0.22489,12.5,7.87,0,0.524,6.377
264,0.55007,20.0,3.97,0,0.647,7.206
128,0.32543,0.0,21.89,0,0.624,6.431
102,0.22876,0.0,8.56,0,0.52,6.405
430,8.49213,0.0,18.1,0,0.584,6.348
437,15.1772,0.0,18.1,0,0.74,6.152
424,8.79212,0.0,18.1,0,0.584,5.565
393,8.64476,0.0,18.1,0,0.693,6.193
467,4.42228,0.0,18.1,0,0.584,6.003
345,0.03113,0.0,4.39,0,0.442,6.014
124,0.09849,0.0,25.65,0,0.581,5.879
420,11.0874,0.0,18.1,0,0.718,6.411
34,1.61282,0.0,8.14,0,0.538,6.096
208,0.13587,0.0,10.59,1,0.489,6.064
384,20.0849,0.0,18.1,0,0.7,4.368
374,18.4982,0.0,18.1,0,0.668,4.138
237,0.51183,0.0,6.2,0,0.507,7.358
290,0.03502,80.0,4.95,0,0.411,6.861
25,0.84054,0.0,8.14,0,0.538,5.599
28,0.77299,0.0,8.14,0,0.538,6.495
293,0.08265,0.0,13.92,0,0.437,6.127
395,8.71675,0.0,18.1,0,0.693,6.471
51,0.04337,21.0,5.64,0,0.439,6.115
230,0.537,0.0,6.2,0,0.504,5.981
254,0.04819,80.0,3.64,0,0.392,6.108
32,1.38799,0.0,8.14,0,0.538,5.95
93,0.02875,28.0,15.04,0,0.464,6.211
391,5.29305,0.0,18.1,0,0.7,6.051
130,0.34006,0.0,21.89,0,0.624,6.458
150,1.6566,0.0,19.58,0,0.871,6.122
444,12.8023,0.0,18.1,0,0.74,5.854
38,0.17505,0.0,5.96,0,0.499,5.966
425,15.8603,0.0,18.1,0,0.679,5.896
324,0.34109,0.0,7.38,0,0.493,6.415
241,0.10612,30.0,4.93,0,0.428,6.095
388,14.3337,0.0,18.1,0,0.7,4.88
396,5.87205,0.0,18.1,0,0.693,6.405
240,0.11329,30.0,4.93,0,0.428,6.897
190,0.09068,45.0,3.44,0,0.437,6.951
329,0.06724,0.0,3.24,0,0.46,6.333
342,0.02498,0.0,1.89,0,0.518,6.54
256,0.01538,90.0,3.75,0,0.394,7.454
27,0.95577,0.0,8.14,0,0.538,6.047
340,0.06151,0.0,5.19,0,0.515,5.968
473,4.64689,0.0,18.1,0,0.614,6.98
468,15.5757,0.0,18.1,0,0.58,5.926
57,0.01432,100.0,1.32,0,0.411,6.816
139,0.54452,0.0,21.89,0,0.624,6.151
185,0.06047,0.0,2.46,0,0.488,6.153
300,0.04417,70.0,2.24,0,0.4,6.871
335,0.03961,0.0,5.19,0,0.515,6.037
449,7.52601,0.0,18.1,0,0.713,6.417
52,0.0536,21.0,5.64,0,0.439,6.511
489,0.18337,0.0,27.74,0,0.609,5.414
399,9.91655,0.0,18.1,0,0.693,5.852
364,3.47428,0.0,18.1,1,0.718,8.78
0,0.00632,18.0,2.31,0,0.538,6.575
177,0.05425,0.0,4.05,0,0.51,6.315
72,0.09164,0.0,10.81,0,0.413,6.065
125,0.16902,0.0,25.65,0,0.581,5.986
279,0.21038,20.0,3.33,0,0.4429,6.812
402,9.59571,0.0,18.1,0,0.693,6.404
301,0.03537,34.0,6.09,0,0.433,6.59
213,0.14052,0.0,10.59,0,0.489,6.375
146,2.15505,0.0,19.58,0,0.871,5.628
421,7.02259,0.0,18.1,0,0.718,6.006
372,8.26725,0.0,18.1,1,0.668,5.875
504,0.10959,0.0,11.93,0,0.573,6.794
202,0.02177,82.5,2.03,0,0.415,7.61
438,13.6781,0.0,18.1,0,0.74,5.935
47,0.22927,0.0,6.91,0,0.448,6.03
380,88.9762,0.0,18.1,0,0.671,6.968
343,0.02543,55.0,3.78,0,0.484,6.696
359,4.26131,0.0,18.1,0,0.77,6.112
418,73.5341,0.0,18.1,0,0.679,5.957
40,0.03359,75.0,2.95,0,0.428,7.024
428,7.36711,0.0,18.1,0,0.679,6.193
39,0.02763,75.0,2.95,0,0.428,6.595
224,0.31533,0.0,6.2,0,0.504,8.266
394,13.3598,0.0,18.1,0,0.693,5.887
426,12.2472,0.0,18.1,0,0.584,5.837
502,0.04527,0.0,11.93,0,0.573,6.12
439,9.39063,0.0,18.1,0,0.74,5.627
415,18.0846,0.0,18.1,0,0.679,6.434
37,0.08014,0.0,5.96,0,0.499,5.85
99,0.0686,0.0,2.89,0,0.445,7.416
186,0.05602,0.0,2.46,0,0.488,7.831
234,0.44791,0.0,6.2,1,0.507,6.726
351,0.0795,60.0,1.69,0,0.411,6.579
276,0.10469,40.0,6.41,1,0.447,7.267
476,4.87141,0.0,18.1,0,0.614,6.484
105,0.13262,0.0,8.56,0,0.52,5.851
151,1.49632,0.0,19.58,0,0.871,5.404
262,0.52014,20.0,3.97,0,0.647,8.398
287,0.03871,52.5,5.32,0,0.405,6.209
325,0.19186,0.0,7.38,0,0.493,6.431
414,45.7461,0.0,18.1,0,0.693,4.519
472,3.56868,0.0,18.1,0,0.58,6.437
149,2.73397,0.0,19.58,0,0.871,5.597
412,18.811,0.0,18.1,0,0.597,4.628
169,2.44953,0.0,19.58,0,0.605,6.402
123,0.15038,0.0,25.65,0,0.581,5.856
483,2.81838,0.0,18.1,0,0.532,5.762
63,0.1265,25.0,5.13,0,0.453,6.762
417,25.9406,0.0,18.1,0,0.679,5.304
157,1.22358,0.0,19.58,0,0.605,6.943
499,0.17783,0.0,9.69,0,0.585,5.569
225,0.52693,0.0,6.2,0,0.504,8.725
323,0.28392,0.0,7.38,0,0.493,5.708
201,0.03445,82.5,2.03,0,0.415,6.162
446,6.28807,0.0,18.1,0,0.74,6.341
401,14.2362,0.0,18.1,0,0.693,6.343
480,5.82401,0.0,18.1,0,0.532,6.242
259,0.65665,20.0,3.97,0,0.647,6.842
175,0.06664,0.0,4.05,0,0.51,6.546
503,0.06076,0.0,11.93,0,0.573,6.976
245,0.19133,22.0,5.86,0,0.431,5.605
376,15.288,0.0,18.1,0,0.671,6.649
55,0.01311,90.0,1.22,0,0.403,7.249
443,9.96654,0.0,18.1,0,0.74,6.485
447,9.92485,0.0,18.1,0,0.74,6.251
119,0.14476,0.0,10.01,0,0.547,5.731
440,22.0511,0.0,18.1,0,0.74,5.818
460,4.81213,0.0,18.1,0,0.713,6.701
333,0.05083,0.0,5.19,0,0.515,6.316
48,0.25387,0.0,6.91,0,0.448,5.399
382,9.18702,0.0,18.1,0,0.7,5.536
9,0.17004,12.5,7.87,0,0.524,6.004
252,0.08221,22.0,5.86,0,0.431,6.957
155,3.53501,0.0,19.58,1,0.871,6.152
366,3.69695,0.0,18.1,0,0.718,4.963
338,0.03306,0.0,5.19,0,0.515,6.059
178,0.06642,0.0,4.05,0,0.51,6.86
493,0.17331,0.0,9.69,0,0.585,5.707
432,6.44405,0.0,18.1,0,0.584,6.425
75,0.09512,0.0,12.83,0,0.437,6.286
229,0.44178,0.0,6.2,0,0.504,6.552
346,0.06162,0.0,4.39,0,0.442,5.898
284,0.00906,90.0,2.97,0,0.4,7.088
194,0.01439,60.0,2.93,0,0.401,6.604
161,1.46336,0.0,19.58,0,0.605,7.489
168,2.3004,0.0,19.58,0,0.605,6.319
269,0.09065,20.0,6.96,1,0.464,5.92
83,0.03551,25.0,4.86,0,0.426,6.167
195,0.01381,80.0,0.46,0,0.422,7.875
129,0.88125,0.0,21.89,0,0.624,5.637
184,0.08308,0.0,2.46,0,0.488,5.604
416,10.8342,0.0,18.1,0,0.679,6.782
498,0.23912,0.0,9.69,0,0.585,6.019
354,0.04301,80.0,1.91,0,0.413,5.663
475,6.39312,0.0,18.1,0,0.584,6.162
403,24.8017,0.0,18.1,0,0.693,5.349
115,0.17134,0.0,10.01,0,0.547,5.928
319,0.47547,0.0,9.9,0,0.544,6.113
302,0.09266,34.0,6.09,0,0.433,6.495
156,2.44668,0.0,19.58,0,0.871,5.272
305,0.05479,33.0,2.18,0,0.472,6.616
486,5.69175,0.0,18.1,0,0.583,6.114
261,0.53412,20.0,3.97,0,0.647,7.52
13,0.62976,0.0,8.14,0,0.538,5.949
427,37.6619,0.0,18.1,0,0.679,6.202
307,0.04932,33.0,2.18,0,0.472,6.849
94,0.04294,28.0,15.04,0,0.464,6.249
69,0.12816,12.5,6.07,0,0.409,5.885
336,0.03427,0.0,5.19,0,0.515,5.869
45,0.17142,0.0,6.91,0,0.448,5.682
334,0.03738,0.0,5.19,0,0.515,6.31
299,0.05561,70.0,2.24,0,0.4,7.041
308,0.49298,0.0,9.9,0,0.544,6.635
131,1.19294,0.0,21.89,0,0.624,6.326
60,0.14932,25.0,5.13,0,0.453,5.741
306,0.07503,33.0,2.18,0,0.472,7.42
457,8.20058,0.0,18.1,0,0.713,5.936
84,0.05059,0.0,4.49,0,0.449,6.389
140,0.2909,0.0,21.89,0,0.624,6.174
58,0.15445,25.0,5.13,0,0.453,6.145
29,1.00245,0.0,8.14,0,0.538,6.674
268,0.5405,20.0,3.97,0,0.575,7.47
220,0.35809,0.0,6.2,1,0.507,6.951
285,0.01096,55.0,2.25,0,0.389,6.453
392,11.5779,0.0,18.1,0,0.7,5.036
500,0.22438,0.0,9.69,0,0.585,6.027
362,3.67822,0.0,18.1,0,0.77,5.362
289,0.04297,52.5,5.32,0,0.405,6.565
377,9.82349,0.0,18.1,0,0.671,6.794
198,0.03768,80.0,1.52,0,0.404,7.274
454,9.51363,0.0,18.1,0,0.713,6.728
89,0.05302,0.0,3.41,0,0.489,7.079
353,0.01709,90.0,2.02,0,0.41,6.728
14,0.63796,0.0,8.14,0,0.538,6.096
95,0.12204,0.0,2.89,0,0.445,6.625
166,2.01019,0.0,19.58,0,0.605,7.929
176,0.07022,0.0,4.05,0,0.51,6.02
337,0.03041,0.0,5.19,0,0.515,5.895
309,0.3494,0.0,9.9,0,0.544,5.972
182,0.09103,0.0,2.46,0,0.488,7.155
294,0.08199,0.0,13.92,0,0.437,6.009
316,0.31827,0.0,9.9,0,0.544,5.914
110,0.10793,0.0,8.56,0,0.52,6.195
247,0.19657,22.0,5.86,0,0.431,6.226
126,0.38735,0.0,25.65,0,0.581,5.613
251,0.21409,22.0,5.86,0,0.431,6.438
497,0.26838,0.0,9.69,0,0.585,5.794
327,0.24103,0.0,7.38,0,0.493,6.083
320,0.1676,0.0,7.38,0,0.493,6.426
163,1.51902,0.0,19.58,1,0.605,8.375
278,0.07978,40.0,6.41,0,0.447,6.482
303,0.1,34.0,6.09,0,0.433,6.982
137,0.35233,0.0,21.89,0,0.624,6.454
31,1.35472,0.0,8.14,0,0.538,6.072
505,0.04741,0.0,11.93,0,0.573,6.03
387,22.5971,0.0,18.1,0,0.7,5.0
127,0.25915,0.0,21.89,0,0.624,5.693
136,0.32264,0.0,21.89,0,0.624,5.942
434,13.9134,0.0,18.1,0,0.713,6.208
33,1.15172,0.0,8.14,0,0.538,5.701
203,0.0351,95.0,2.68,0,0.4161,7.853
53,0.04981,21.0,5.64,0,0.439,5.998
459,6.80117,0.0,18.1,0,0.713,6.081
30,1.13081,0.0,8.14,0,0.538,5.713
62,0.11027,25.0,5.13,0,0.453,6.456
274,0.05644,40.0,6.41,1,0.447,6.758
470,4.34879,0.0,18.1,0,0.58,6.167
298,0.06466,70.0,2.24,0,0.4,6.345
109,0.26363,0.0,8.56,0,0.52,6.229
348,0.01501,80.0,2.01,0,0.435,6.635
50,0.08873,21.0,5.64,0,0.439,5.963
410,51.1358,0.0,18.1,0,0.597,5.757
217,0.07013,0.0,13.89,0,0.55,6.642
64,0.01951,17.5,1.38,0,0.4161,7.104
501,0.06263,0.0,11.93,0,0.573,6.593
5,0.02985,0.0,2.18,0,0.458,6.43
356,8.98296,0.0,18.1,1,0.77,6.212
121,0.07165,0.0,25.65,0,0.581,6.004
288,0.0459,52.5,5.32,0,0.405,6.315
250,0.1403,22.0,5.86,0,0.431,6.487
189,0.0837,45.0,3.44,0,0.437,7.185
482,5.73116,0.0,18.1,0,0.532,7.061
18,0.80271,0.0,8.14,0,0.538,5.456
180,0.06588,0.0,2.46,0,0.488,7.765
205,0.13642,0.0,10.59,0,0.489,5.891
82,0.03659,25.0,4.86,0,0.426,6.302
404,41.5292,0.0,18.1,0,0.693,5.531
214,0.28955,0.0,10.59,0,0.489,5.412
339,0.05497,0.0,5.19,0,0.515,5.985
81,0.04462,25.0,4.86,0,0.426,6.619
461,3.69311,0.0,18.1,0,0.713,6.376
350,0.06211,40.0,1.25,0,0.429,6.49
458,7.75223,0.0,18.1,0,0.713,6.301
405,67.9208,0.0,18.1,0,0.693,5.683
295,0.12932,0.0,13.92,0,0.437,6.678
341,0.01301,35.0,1.52,0,0.442,7.241
114,0.14231,0.0,10.01,0,0.547,6.254
24,0.75026,0.0,8.14,0,0.538,5.924
481,5.70818,0.0,18.1,0,0.532,6.75
23,0.98843,0.0,8.14,0,0.538,5.813
283,0.01501,90.0,1.21,1,0.401,7.923
77,0.08707,0.0,12.83,0,0.437,6.14
4,0.06905,0.0,2.18,0,0.458,7.147
170,1.20742,0.0,19.58,0,0.605,5.875
318,0.40202,0.0,9.9,0,0.544,6.382
101,0.11432,0.0,8.56,0,0.52,6.781
422,12.0482,0.0,18.1,0,0.614,5.648
258,0.66351,20.0,3.97,0,0.647,7.333
118,0.13058,0.0,10.01,0,0.547,5.872
106,0.1712,0.0,8.56,0,0.52,5.836
107,0.13117,0.0,8.56,0,0.52,6.127
49,0.21977,0.0,6.91,0,0.448,5.602
330,0.04544,0.0,3.24,0,0.46,6.144
281,0.03705,20.0,3.33,0,0.4429,6.968
98,0.08187,0.0,2.89,0,0.445,7.82
436,14.4208,0.0,18.1,0,0.74,6.461
87,0.07151,0.0,4.49,0,0.449,6.121
368,4.89822,0.0,18.1,0,0.631,4.97
76,0.10153,0.0,12.83,0,0.437,6.279
331,0.05023,35.0,6.06,0,0.4379,5.706
488,0.15086,0.0,27.74,0,0.609,5.454
153,2.14918,0.0,19.58,0,0.871,5.709
20,1.25179,0.0,8.14,0,0.538,5.57
43,0.15936,0.0,6.91,0,0.448,6.211
154,1.41385,0.0,19.58,1,0.871,6.129
409,14.4383,0.0,18.1,0,0.597,6.852
162,1.83377,0.0,19.58,1,0.605,7.802
265,0.76162,20.0,3.97,0,0.647,5.56
361,3.83684,0.0,18.1,0,0.77,6.251
282,0.06129,20.0,3.33,1,0.4429,7.645
275,0.09604,40.0,6.41,0,0.447,6.854
59,0.10328,25.0,5.13,0,0.453,5.927
292,0.03615,80.0,4.95,0,0.411,6.63
61,0.17171,25.0,5.13,0,0.453,5.966
56,0.02055,85.0,0.74,0,0.41,6.383
3,0.03237,0.0,2.18,0,0.458,6.998
226,0.38214,0.0,6.2,0,0.504,8.04
197,0.04666,80.0,1.52,0,0.404,7.107
142,3.32105,0.0,19.58,1,0.871,5.403
455,4.75237,0.0,18.1,0,0.713,6.525
172,0.13914,0.0,4.05,0,0.51,5.572
181,0.06888,0.0,2.46,0,0.488,6.144
134,0.97617,0.0,21.89,0,0.624,5.757
477,15.0234,0.0,18.1,0,0.614,5.304
236,0.52058,0.0,6.2,1,0.507,6.631
494,0.27957,0.0,9.69,0,0.585,5.926
280,0.03578,20.0,3.33,0,0.4429,7.82
152,1.12658,0.0,19.58,1,0.871,5.012
187,0.07875,45.0,3.44,0,0.437,6.782
191,0.06911,45.0,3.44,0,0.437,6.739
209,0.43571,0.0,10.59,1,0.489,5.344
167,1.80028,0.0,19.58,0,0.605,5.877
100,0.14866,0.0,8.56,0,0.52,6.727
445,10.6718,0.0,18.1,0,0.74,6.459
148,2.33099,0.0,19.58,0,0.871,5.186
317,0.24522,0.0,9.9,0,0.544,5.782
390,6.96215,0.0,18.1,0,0.7,5.713
117,0.15098,0.0,10.01,0,0.547,6.021
78,0.05646,0.0,12.83,0,0.437,6.232
369,5.66998,0.0,18.1,1,0.631,6.683
17,0.7842,0.0,8.14,0,0.538,5.99
326,0.30347,0.0,7.38,0,0.493,6.312
441,9.72418,0.0,18.1,0,0.74,6.406
413,28.6558,0.0,18.1,0,0.597,5.155
143,4.0974,0.0,19.58,0,0.871,5.468
272,0.1146,20.0,6.96,0,0.464,6.538
492,0.11132,0.0,27.74,0,0.609,5.983
321,0.18159,0.0,7.38,0,0.493,6.376
171,2.3139,0.0,19.58,0,0.605,5.88
11,0.11747,12.5,7.87,0,0.524,6.009
357,3.8497,0.0,18.1,1,0.77,6.395
322,0.35114,0.0,7.38,0,0.493,6.041
86,0.05188,0.0,4.49,0,0.449,6.015
411,14.0507,0.0,18.1,0,0.597,6.657
160,1.27346,0.0,19.58,1,0.605,6.25
400,25.0461,0.0,18.1,0,0.693,5.987
355,0.10659,80.0,1.91,0,0.413,5.936
297,0.14103,0.0,13.92,0,0.437,5.79
235,0.33045,0.0,6.2,0,0.507,6.086
249,0.19073,22.0,5.86,0,0.431,6.718
429,9.33889,0.0,18.1,0,0.679,6.38
113,0.22212,0.0,10.01,0,0.547,6.092
1,0.02731,0.0,7.07,0,0.469,6.421
358,5.20177,0.0,18.1,1,0.77,6.127
491,0.10574,0.0,27.74,0,0.609,5.983
344,0.03049,55.0,3.78,0,0.484,6.874
193,0.02187,60.0,2.93,0,0.401,6.8
207,0.25199,0.0,10.59,0,0.489,5.783
67,0.05789,12.5,6.07,0,0.409,5.878
246,0.33983,22.0,5.86,0,0.431,6.108
419,11.8123,0.0,18.1,0,0.718,6.824
408,7.40389,0.0,18.1,0,0.597,5.617
132,0.59005,0.0,21.89,0,0.624,6.372
386,24.3938,0.0,18.1,0,0.7,4.652
373,11.1081,0.0,18.1,0,0.668,4.906
484,2.37857,0.0,18.1,0,0.583,5.871
90,0.04684,0.0,3.41,0,0.489,6.417
381,15.8744,0.0,18.1,0,0.671,6.545
122,0.09299,0.0,25.65,0,0.581,5.961
448,9.32909,0.0,18.1,0,0.713,6.185
73,0.19539,0.0,10.81,0,0.413,6.245
6,0.08829,12.5,7.87,0,0.524,6.012
158,1.34284,0.0,19.58,0,0.605,6.066
442,5.66637,0.0,18.1,0,0.74,6.219
383,7.99248,0.0,18.1,0,0.7,5.52
463,5.82115,0.0,18.1,0,0.713,6.513
435,11.1604,0.0,18.1,0,0.74,6.629
465,3.1636,0.0,18.1,0,0.655,5.759
12,0.09378,12.5,7.87,0,0.524,5.889
35,0.06417,0.0,5.96,0,0.499,5.933
478,10.233,0.0,18.1,0,0.614,6.185
360,4.54192,0.0,18.1,0,0.77,6.398
277,0.06127,40.0,6.41,1,0.447,6.826
312,0.26169,0.0,9.9,0,0.544,6.023
451,5.44114,0.0,18.1,0,0.713,6.655
133,0.32982,0.0,21.89,0,0.624,5.822
263,0.82526,20.0,3.97,0,0.647,7.327
490,0.20746,0.0,27.74,0,0.609,5.093
66,0.04379,80.0,3.37,0,0.398,5.787
138,0.2498,0.0,21.89,0,0.624,5.857
54,0.0136,75.0,4.0,0,0.41,5.888
271,0.16211,20.0,6.96,0,0.464,6.24
423,7.05042,0.0,18.1,0,0.614,6.103
311,0.79041,0.0,9.9,0,0.544,6.122
21,0.85204,0.0,8.14,0,0.538,5.965
453,8.24809,0.0,18.1,0,0.713,7.393
222,0.62356,0.0,6.2,1,0.507,6.879
id,AGE,DIS,RAD,TAX,PTRATIO,B,LSTAT,y
91,73.9,3.0921,2,270.0,17.8,393.55,8.2,22.0
273,51.8,4.3665,3,223.0,18.6,390.77,6.58,35.2
365,87.9,1.6132,24,666.0,20.2,354.7,7.12,27.5
270,42.1,4.429,3,223.0,18.6,388.65,13.0,21.1
469,56.7,2.8237,24,666.0,20.2,396.9,14.76,20.1
260,81.8,2.1121,5,264.0,13.0,392.8,9.59,33.8
248,49.1,7.8265,7,330.0,19.1,374.71,9.52,24.5
471,90.7,3.0993,24,666.0,20.2,395.33,12.87,19.6
352,18.5,10.7103,4,411.0,18.3,392.33,7.79,18.6
211,88.6,3.665,4,277.0,18.6,395.24,23.98,19.3
41,2.9,5.7209,3,233.0,17.9,385.41,4.84,26.6
389,98.9,1.7281,24,666.0,20.2,396.9,20.85,11.5
466,84.7,2.8715,24,666.0,20.2,22.01,17.15,19.0
266,84.6,2.1329,5,264.0,13.0,384.07,14.79,30.7
179,58.4,2.829,3,193.0,17.8,396.9,5.04,37.2
36,61.4,3.3779,5,279.0,19.2,377.56,11.41,20.0
8,100.0,6.0821,5,311.0,15.2,386.63,29.93,16.5
375,97.9,1.3163,24,666.0,20.2,396.9,13.44,15.0
257,86.9,1.801,5,264.0,13.0,389.7,5.12,50.0
196,34.1,7.309,2,329.0,12.6,396.9,4.08,33.3
433,87.9,2.3158,24,666.0,20.2,100.19,16.22,14.3
10,94.3,6.3467,5,311.0,15.2,392.52,20.45,15.0
264,91.6,1.9301,5,264.0,13.0,387.89,8.1,36.5
128,98.8,1.8125,4,437.0,21.2,396.9,15.39,18.0
102,85.4,2.7147,5,384.0,20.9,70.8,10.63,18.6
430,86.1,2.0527,24,666.0,20.2,83.45,17.64,14.5
437,100.0,1.9142,24,666.0,20.2,9.32,26.45,8.7
424,70.6,2.0635,24,666.0,20.2,3.65,17.16,11.7
393,92.6,1.7912,24,666.0,20.2,396.9,15.17,13.8
467,94.5,2.5403,24,666.0,20.2,331.29,21.32,19.1
345,48.5,8.0136,3,352.0,18.8,385.64,10.53,17.5
124,95.8,2.0063,2,188.0,19.1,379.38,17.58,18.8
420,100.0,1.8589,24,666.0,20.2,318.75,15.02,16.7
34,96.9,3.7598,4,307.0,21.0,248.31,20.34,13.5
208,59.1,4.2392,4,277.0,18.6,381.32,14.66,24.4
384,91.2,1.4395,24,666.0,20.2,285.83,30.63,8.8
374,100.0,1.137,24,666.0,20.2,396.9,37.97,13.8
237,71.6,4.148,8,307.0,17.4,390.07,4.73,31.5
290,27.9,5.1167,4,245.0,19.2,396.9,3.33,28.5
25,85.7,4.4546,4,307.0,21.0,303.42,16.51,13.9
28,94.4,4.4547,4,307.0,21.0,387.94,12.8,18.4
293,18.4,5.5027,4,289.0,16.0,396.9,8.58,23.9
395,98.8,1.7257,24,666.0,20.2,391.98,17.12,13.1
51,63.0,6.8147,4,243.0,16.8,393.97,9.43,20.5
230,68.1,3.6715,8,307.0,17.4,378.35,11.65,24.3
254,32.0,9.2203,1,315.0,16.4,392.89,6.57,21.9
32,82.0,3.99,4,307.0,21.0,232.6,27.71,13.2
93,28.9,3.6659,4,270.0,18.2,396.33,6.21,25.0
391,82.5,2.1678,24,666.0,20.2,378.38,18.76,23.2
130,98.9,2.1185,4,437.0,21.2,395.04,12.6,19.2
150,97.3,1.618,5,403.0,14.7,372.8,14.1,21.5
444,96.6,1.8956,24,666.0,20.2,240.52,23.79,10.8
38,30.2,3.8473,5,279.0,19.2,393.43,10.13,24.7
425,95.4,1.9096,24,666.0,20.2,7.68,24.39,8.3
324,40.1,4.7211,5,287.0,19.6,396.9,6.12,25.0
241,65.1,6.3361,6,300.0,16.6,394.62,12.4,20.1
388,100.0,1.5895,24,666.0,20.2,372.92,30.62,10.2
396,96.0,1.6768,24,666.0,20.2,396.9,19.37,12.5
240,54.3,6.3361,6,300.0,16.6,391.25,11.38,22.0
190,21.5,6.4798,5,398.0,15.2,377.68,5.1,37.0
329,17.2,5.2146,4,430.0,16.9,375.21,7.34,22.6
342,59.7,6.2669,1,422.0,15.9,389.96,8.65,16.5
256,34.2,6.3361,3,244.0,15.9,386.34,3.11,44.0
27,88.8,4.4534,4,307.0,21.0,306.38,17.28,14.8
340,58.5,4.8122,5,224.0,20.2,396.9,9.29,18.7
473,67.6,2.5329,24,666.0,20.2,374.68,11.66,29.8
468,71.0,2.9084,24,666.0,20.2,368.74,18.13,19.1
57,40.5,8.3248,5,256.0,15.1,392.9,3.95,31.6
139,97.9,1.6687,4,437.0,21.2,396.9,18.46,17.8
185,68.8,3.2797,3,193.0,17.8,387.11,13.15,29.6
300,47.4,7.8278,5,358.0,14.8,390.86,6.07,24.8
335,34.5,5.9853,5,224.0,20.2,396.9,8.01,21.1
449,98.3,2.185,24,666.0,20.2,304.21,19.31,13.0
52,21.1,6.8147,4,243.0,16.8,396.9,5.28,25.0
489,98.3,1.7554,4,711.0,20.1,344.05,23.97,7.0
399,77.8,1.5004,24,666.0,20.2,338.16,29.97,6.3
364,82.9,1.9047,24,666.0,20.2,354.55,5.29,21.9
0,65.2,4.09,1,296.0,15.3,396.9,4.98,24.0
177,73.4,3.3175,5,296.0,16.6,395.6,6.29,24.6
72,7.8,5.2873,4,305.0,19.2,390.91,5.52,22.8
125,88.4,1.9929,2,188.0,19.1,385.02,14.81,21.4
279,32.2,4.1007,5,216.0,14.9,396.9,4.85,35.1
402,100.0,1.639,24,666.0,20.2,376.11,20.31,12.1
301,40.4,5.4917,7,329.0,16.1,395.75,9.5,22.0
213,32.3,3.9454,4,277.0,18.6,385.81,9.38,28.1
146,100.0,1.5166,5,403.0,14.7,169.27,16.65,15.6
421,95.3,1.8746,24,666.0,20.2,319.98,15.7,14.2
372,89.6,1.1296,24,666.0,20.2,347.88,8.88,50.0
504,89.3,2.3889,1,273.0,21.0,393.45,6.48,22.0
202,15.7,6.27,2,348.0,14.7,395.38,3.11,42.3
438,87.9,1.8206,24,666.0,20.2,68.95,34.02,8.4
47,85.5,5.6894,3,233.0,17.9,392.74,18.8,16.6
380,91.9,1.4165,24,666.0,20.2,396.9,17.21,10.4
343,56.4,5.7321,5,370.0,17.6,396.9,7.18,23.9
359,81.3,2.5091,24,666.0,20.2,390.74,12.67,22.6
418,100.0,1.8026,24,666.0,20.2,16.45,20.62,8.8
40,15.8,5.4011,3,252.0,18.3,395.62,1.98,34.9
428,78.1,1.9356,24,666.0,20.2,96.73,21.52,11.0
39,21.8,5.4011,3,252.0,18.3,395.63,4.32,30.8
224,78.3,2.8944,8,307.0,17.4,385.05,4.14,44.8
394,94.7,1.7821,24,666.0,20.2,396.9,16.35,12.7
426,59.7,1.9976,24,666.0,20.2,24.65,15.69,10.2
502,76.7,2.2875,1,273.0,21.0,396.9,9.08,20.6
439,93.9,1.8172,24,666.0,20.2,396.9,22.88,12.8
415,100.0,1.8347,24,666.0,20.2,27.25,29.05,7.2
37,41.5,3.9342,5,279.0,19.2,396.9,8.77,21.0
99,62.5,3.4952,2,276.0,18.0,396.9,6.19,33.2
186,53.6,3.1992,3,193.0,17.8,392.63,4.45,50.0
234,66.5,3.6519,8,307.0,17.4,360.2,8.05,29.0
351,35.9,10.7103,4,411.0,18.3,370.78,5.49,24.1
276,49.0,4.7872,4,254.0,17.6,389.25,6.05,33.2
476,93.6,2.3053,24,666.0,20.2,396.21,18.68,16.7
105,96.7,2.1069,5,384.0,20.9,394.05,16.47,19.5
151,100.0,1.5916,5,403.0,14.7,341.6,13.28,19.6
262,91.5,2.2885,5,264.0,13.0,386.86,5.91,48.8
287,31.3,7.3172,6,293.0,16.6,396.9,7.14,23.2
325,14.7,5.4159,5,287.0,19.6,393.68,5.08,24.6
414,100.0,1.6582,24,666.0,20.2,88.27,36.98,7.0
472,75.0,2.8965,24,666.0,20.2,393.37,14.36,23.2
149,94.9,1.5257,5,403.0,14.7,351.85,21.45,15.4
412,100.0,1.5539,24,666.0,20.2,28.79,34.37,17.9
169,95.2,2.2625,5,403.0,14.7,330.04,11.32,22.3
123,97.0,1.9444,2,188.0,19.1,370.31,25.41,17.3
483,40.3,4.0983,24,666.0,20.2,392.92,10.42,21.8
63,43.4,7.9809,8,284.0,19.7,395.58,9.5,25.0
417,89.1,1.6475,24,666.0,20.2,127.36,26.64,10.4
157,97.4,1.8773,5,403.0,14.7,363.43,4.59,41.3
499,73.5,2.3999,6,391.0,19.2,395.77,15.1,17.5
225,83.0,2.8944,8,307.0,17.4,382.0,4.63,50.0
323,74.3,4.7211,5,287.0,19.6,391.13,11.74,18.5
201,38.4,6.27,2,348.0,14.7,393.77,7.43,24.1
446,96.4,2.072,24,666.0,20.2,318.01,17.79,14.9
401,100.0,1.5741,24,666.0,20.2,396.9,20.32,7.2
480,64.7,3.4242,24,666.0,20.2,396.9,10.74,23.0
259,100.0,2.0107,5,264.0,13.0,391.93,6.9,30.1
175,33.1,3.1323,5,296.0,16.6,390.96,5.33,29.4
503,91.0,2.1675,1,273.0,21.0,396.9,5.64,23.9
245,70.2,7.9549,7,330.0,19.1,389.13,18.46,18.5
376,93.3,1.3449,24,666.0,20.2,363.02,23.24,13.9
55,21.9,8.6966,5,226.0,17.9,395.93,4.81,35.4
443,100.0,1.9784,24,666.0,20.2,386.73,18.85,15.4
447,96.6,2.198,24,666.0,20.2,388.52,16.44,12.6
119,65.2,2.7592,6,432.0,17.8,391.5,13.61,19.3
440,92.4,1.8662,24,666.0,20.2,391.45,22.11,10.5
460,90.0,2.5975,24,666.0,20.2,255.23,16.42,16.4
333,38.1,6.4584,5,224.0,20.2,389.71,5.68,22.2
48,95.3,5.87,3,233.0,17.9,396.9,30.81,14.4
382,100.0,1.5804,24,666.0,20.2,396.9,23.6,11.3
9,85.9,6.5921,5,311.0,15.2,386.71,17.1,18.9
252,6.8,8.9067,7,330.0,19.1,386.09,3.53,29.6
155,82.6,1.7455,5,403.0,14.7,88.01,15.02,15.6
366,91.4,1.7523,24,666.0,20.2,316.03,14.0,21.9
338,37.3,4.8122,5,224.0,20.2,396.14,8.51,20.6
178,74.4,2.9153,5,296.0,16.6,391.27,6.92,29.9
493,54.0,2.3817,6,391.0,19.2,396.9,12.01,21.8
432,74.8,2.2004,24,666.0,20.2,97.95,12.03,16.1
75,45.0,4.5026,5,398.0,18.7,383.23,8.94,21.4
229,21.4,3.3751,8,307.0,17.4,380.34,3.76,31.5
346,52.3,8.0136,3,352.0,18.8,364.61,12.67,17.2
284,20.8,7.3073,1,285.0,15.3,394.72,7.85,32.2
194,18.8,6.2196,1,265.0,15.6,376.7,4.38,29.1
161,90.8,1.9709,5,403.0,14.7,374.43,1.73,50.0
168,96.1,2.1,5,403.0,14.7,297.09,11.1,23.8
269,61.5,3.9175,3,223.0,18.6,391.34,13.65,20.7
83,46.7,5.4007,4,281.0,19.0,390.64,7.51,22.9
195,32.0,5.6484,4,255.0,14.4,394.23,2.97,50.0
129,94.7,1.9799,4,437.0,21.2,396.9,18.34,14.3
184,89.8,2.9879,3,193.0,17.8,391.0,13.98,26.4
416,90.8,1.8195,24,666.0,20.2,21.57,25.79,7.5
498,65.3,2.4091,6,391.0,19.2,396.9,12.92,21.2
354,21.9,10.5857,4,334.0,22.0,382.8,8.05,18.2
475,97.4,2.206,24,666.0,20.2,302.76,24.1,13.3
403,96.0,1.7028,24,666.0,20.2,396.9,19.77,8.3
115,88.2,2.4631,6,432.0,17.8,344.91,15.76,18.3
319,58.8,4.0019,4,304.0,18.4,396.23,12.73,21.0
302,18.4,5.4917,7,329.0,16.1,383.61,8.67,26.4
156,94.0,1.7364,5,403.0,14.7,88.63,16.14,13.1
305,58.1,3.37,7,222.0,18.4,393.36,8.93,28.4
486,79.8,3.5459,24,666.0,20.2,392.68,14.98,19.1
261,89.4,2.1398,5,264.0,13.0,388.37,7.26,43.1
13,61.8,4.7075,4,307.0,21.0,396.9,8.26,20.4
427,78.7,1.8629,24,666.0,20.2,18.82,14.52,10.9
307,70.3,3.1827,7,222.0,18.4,396.9,7.53,28.2
94,77.3,3.615,4,270.0,18.2,396.9,10.59,20.6
69,33.0,6.498,4,345.0,18.9,396.9,8.79,20.9
336,46.3,5.2311,5,224.0,20.2,396.9,9.8,19.5
45,33.8,5.1004,3,233.0,17.9,396.9,10.21,19.3
334,38.5,6.4584,5,224.0,20.2,389.4,6.75,20.7
299,10.0,7.8278,5,358.0,14.8,371.58,4.74,29.0
308,82.5,3.3175,4,304.0,18.4,396.9,4.54,22.8
131,97.7,2.271,4,437.0,21.2,396.9,12.26,19.6
60,66.2,7.2254,8,284.0,19.7,395.11,13.15,18.7
306,71.9,3.0992,7,222.0,18.4,396.9,6.47,33.4
457,80.3,2.7792,24,666.0,20.2,3.5,16.94,13.5
84,48.0,4.7794,3,247.0,18.5,396.9,9.62,23.9
140,93.6,1.6119,4,437.0,21.2,388.08,24.16,14.0
58,29.2,7.8148,8,284.0,19.7,390.68,6.86,23.3
29,87.3,4.239,4,307.0,21.0,380.23,11.98,21.0
268,52.6,2.872,5,264.0,13.0,390.3,3.16,43.5
220,88.5,2.8617,8,307.0,17.4,391.7,9.71,26.7
285,31.9,7.3073,1,300.0,15.3,394.72,8.23,22.0
392,97.0,1.77,24,666.0,20.2,396.9,25.68,9.7
500,79.7,2.4982,6,391.0,19.2,396.9,14.33,16.8
362,96.2,2.1036,24,666.0,20.2,380.79,10.19,20.8
289,22.9,7.3172,6,293.0,16.6,371.72,9.51,24.8
377,98.8,1.358,24,666.0,20.2,396.9,21.24,13.3
198,38.3,7.309,2,329.0,12.6,392.2,6.62,34.6
454,94.1,2.4961,24,666.0,20.2,6.68,18.71,14.9
89,63.1,3.4145,2,270.0,17.8,396.06,5.7,28.7
353,36.1,12.1265,5,187.0,17.0,384.46,4.5,30.1
14,84.5,4.4619,4,307.0,21.0,380.02,10.26,18.2
95,57.8,3.4952,2,276.0,18.0,357.98,6.65,28.4
166,96.2,2.0459,5,403.0,14.7,369.3,3.7,50.0
176,47.2,3.5549,5,296.0,16.6,393.23,10.11,23.2
337,59.6,5.615,5,224.0,20.2,394.81,10.56,18.5
309,76.7,3.1025,4,304.0,18.4,396.24,9.97,20.3
182,92.2,2.7006,3,193.0,17.8,394.12,4.82,37.9
294,42.3,5.5027,4,289.0,16.0,396.9,10.4,21.7
316,83.2,3.9986,4,304.0,18.4,390.7,18.33,17.8
110,54.4,2.7778,5,384.0,20.9,393.49,13.0,21.7
247,79.2,8.0555,7,330.0,19.1,376.14,10.15,20.5
126,95.6,1.7572,2,188.0,19.1,359.29,27.26,15.7
251,8.9,7.3967,7,330.0,19.1,377.07,3.59,24.8
497,70.6,2.8927,6,391.0,19.2,396.9,14.1,18.3
327,43.7,5.4159,5,287.0,19.6,396.9,12.79,22.2
320,52.3,4.5404,5,287.0,19.6,396.9,7.2,23.8
163,93.9,2.162,5,403.0,14.7,388.45,3.32,50.0
278,32.1,4.1403,4,254.0,17.6,396.9,7.19,29.1
303,17.7,5.4917,7,329.0,16.1,390.43,4.86,33.1
137,98.4,1.8498,4,437.0,21.2,394.08,14.59,17.1
31,100.0,4.175,4,307.0,21.0,376.73,13.04,14.5
505,80.8,2.505,1,273.0,21.0,396.9,7.88,11.9
387,89.5,1.5184,24,666.0,20.2,396.9,31.99,7.4
127,96.0,1.7883,4,437.0,21.2,392.11,17.19,16.2
136,93.5,1.9669,4,437.0,21.2,378.25,16.9,17.4
434,95.0,2.2222,24,666.0,20.2,100.63,15.17,11.7
33,95.0,3.7872,4,307.0,21.0,358.77,18.35,13.1
203,33.2,5.118,4,224.0,14.7,392.78,3.81,48.5
53,21.4,6.8147,4,243.0,16.8,396.9,8.43,23.4
459,84.4,2.7175,24,666.0,20.2,396.9,14.7,20.0
30,94.1,4.233,4,307.0,21.0,360.17,22.6,12.7
62,67.8,7.2255,8,284.0,19.7,396.9,6.73,22.2
274,32.9,4.0776,4,254.0,17.6,396.9,3.53,32.4
470,84.0,3.0334,24,666.0,20.2,396.9,16.29,19.9
298,20.1,7.8278,5,358.0,14.8,368.24,4.97,22.5
109,91.2,2.5451,5,384.0,20.9,391.23,15.55,19.4
348,29.7,8.344,4,280.0,17.0,390.94,5.99,24.5
50,45.7,6.8147,4,243.0,16.8,395.56,13.45,19.7
410,100.0,1.413,24,666.0,20.2,2.6,10.11,15.0
217,85.1,3.4211,5,276.0,16.4,392.78,9.69,28.7
64,59.5,9.2229,3,216.0,18.6,393.24,8.05,33.0
501,69.1,2.4786,1,273.0,21.0,391.99,9.67,22.4
5,58.7,6.0622,3,222.0,18.7,394.12,5.21,28.7
356,97.4,2.1222,24,666.0,20.2,377.73,17.6,17.8
121,84.1,2.1974,2,188.0,19.1,377.67,14.27,20.3
288,45.6,7.3172,6,293.0,16.6,396.9,7.6,22.3
250,13.0,7.3967,7,330.0,19.1,396.28,5.9,24.4
189,38.9,4.5667,5,398.0,15.2,396.9,5.39,34.9
482,77.0,3.4106,24,666.0,20.2,395.28,7.01,25.0
18,36.6,3.7965,4,307.0,21.0,288.99,11.69,20.2
180,83.3,2.741,3,193.0,17.8,395.56,7.56,39.8
205,22.3,3.9454,4,277.0,18.6,396.9,10.87,22.6
82,32.2,5.4007,4,281.0,19.0,396.9,6.72,24.8
404,85.4,1.6074,24,666.0,20.2,329.46,27.38,8.5
214,9.8,3.5875,4,277.0,18.6,348.93,29.55,23.7
339,45.4,4.8122,5,224.0,20.2,396.9,9.74,19.0
81,70.4,5.4007,4,281.0,19.0,395.63,7.22,23.9
461,88.4,2.5671,24,666.0,20.2,391.43,14.65,17.7
350,44.4,8.7921,1,335.0,19.7,396.9,5.98,22.9
458,83.7,2.7831,24,666.0,20.2,272.21,16.23,14.9
405,100.0,1.4254,24,666.0,20.2,384.97,22.98,5.0
295,31.1,5.9604,4,289.0,16.0,396.9,6.27,28.6
341,49.3,7.0379,1,284.0,15.5,394.74,5.49,32.7
114,84.2,2.2565,6,432.0,17.8,388.74,10.45,18.5
24,94.1,4.3996,4,307.0,21.0,394.33,16.3,15.6
481,74.9,3.3317,24,666.0,20.2,393.07,7.74,23.7
23,100.0,4.0952,4,307.0,21.0,394.54,19.88,14.5
283,24.8,5.885,1,198.0,13.6,395.52,3.16,50.0
77,45.8,4.0905,5,398.0,18.7,386.96,10.27,20.8
4,54.2,6.0622,3,222.0,18.7,396.9,5.33,36.2
170,94.6,2.4259,5,403.0,14.7,292.29,14.43,17.4
318,67.2,3.5325,4,304.0,18.4,395.21,10.36,23.1
101,71.3,2.8561,5,384.0,20.9,395.58,7.67,26.5
422,87.6,1.9512,24,666.0,20.2,291.55,14.1,20.8
258,100.0,1.8946,5,264.0,13.0,383.29,7.79,36.0
118,73.1,2.4775,6,432.0,17.8,338.63,15.37,20.4
106,91.9,2.211,5,384.0,20.9,395.67,18.66,19.5
107,85.2,2.1224,5,384.0,20.9,387.69,14.09,20.4
49,62.0,6.0877,3,233.0,17.9,396.9,16.2,19.4
330,32.2,5.8736,4,430.0,16.9,368.57,9.09,19.8
281,37.2,5.2447,5,216.0,14.9,392.23,4.59,35.4
98,36.9,3.4952,2,276.0,18.0,393.53,3.57,43.8
436,93.3,2.0026,24,666.0,20.2,27.49,18.05,9.6
87,56.8,3.7476,3,247.0,18.5,395.15,8.44,22.2
368,100.0,1.3325,24,666.0,20.2,375.52,3.26,50.0
76,74.5,4.0522,5,398.0,18.7,373.66,11.97,20.0
331,28.4,6.6407,1,304.0,16.9,394.02,12.43,17.1
488,92.7,1.8209,4,711.0,20.1,395.09,18.06,15.2
153,98.5,1.6232,5,403.0,14.7,261.95,15.79,19.4
20,98.1,3.7979,4,307.0,21.0,376.57,21.02,13.6
43,6.5,5.7209,3,233.0,17.9,394.46,7.44,24.7
154,96.0,1.7494,5,403.0,14.7,321.02,15.12,17.0
409,100.0,1.4655,24,666.0,20.2,179.36,19.78,27.5
162,98.2,2.0407,5,403.0,14.7,389.61,1.92,50.0
265,62.8,1.9865,5,264.0,13.0,392.4,10.45,22.8
361,91.1,2.2955,24,666.0,20.2,350.65,14.19,19.9
282,49.7,5.2119,5,216.0,14.9,377.07,3.01,46.0
275,42.8,4.2673,4,254.0,17.6,396.9,2.98,32.0
59,47.2,6.932,8,284.0,19.7,396.9,9.22,19.6
292,23.4,5.1167,4,245.0,19.2,396.9,4.7,27.9
61,93.4,6.8185,8,284.0,19.7,378.08,14.44,16.0
56,35.7,9.1876,2,313.0,17.3,396.9,5.77,24.7
3,45.8,6.0622,3,222.0,18.7,394.63,2.94,33.4
226,86.5,3.2157,8,307.0,17.4,387.38,3.13,37.6
197,36.6,7.309,2,329.0,12.6,354.31,8.61,30.3
142,100.0,1.3216,5,403.0,14.7,396.9,26.82,13.4
455,86.5,2.4358,24,666.0,20.2,50.92,18.13,14.1
172,88.5,2.5961,5,296.0,16.6,396.9,14.69,23.1
181,62.2,2.5979,3,193.0,17.8,396.9,9.45,36.2
134,98.4,2.346,4,437.0,21.2,262.76,17.31,15.6
477,97.3,2.1007,24,666.0,20.2,349.48,24.91,12.0
236,76.5,4.148,8,307.0,17.4,388.45,9.54,25.1
494,42.6,2.3817,6,391.0,19.2,396.9,13.59,24.5
280,64.5,4.6947,5,216.0,14.9,387.31,3.76,45.4
152,88.0,1.6102,5,403.0,14.7,343.28,12.12,15.3
187,41.1,3.7886,5,398.0,15.2,393.87,6.68,32.0
191,30.8,6.4798,5,398.0,15.2,389.71,4.69,30.5
209,100.0,3.875,4,277.0,18.6,396.9,23.09,20.0
167,79.2,2.4259,5,403.0,14.7,227.61,12.14,23.8
100,79.9,2.7778,5,384.0,20.9,394.76,9.42,27.5
445,94.8,1.9879,24,666.0,20.2,43.06,23.98,11.8
148,93.8,1.5296,5,403.0,14.7,356.99,28.32,17.8
317,71.7,4.0317,4,304.0,18.4,396.9,15.94,19.8
390,97.0,1.9265,24,666.0,20.2,394.43,17.11,15.1
117,82.6,2.7474,6,432.0,17.8,394.51,10.3,19.2
78,53.7,5.0141,5,398.0,18.7,386.4,12.34,21.2
369,96.8,1.3567,24,666.0,20.2,375.33,3.73,50.0
17,81.7,4.2579,4,307.0,21.0,386.75,14.67,17.5
326,28.9,5.4159,5,287.0,19.6,396.9,6.15,23.0
441,97.2,2.0651,24,666.0,20.2,385.96,19.52,17.1
413,100.0,1.5894,24,666.0,20.2,210.97,20.08,16.3
143,100.0,1.4118,5,403.0,14.7,396.9,26.42,15.6
272,58.7,3.9175,3,223.0,18.6,394.96,7.73,24.4
492,83.5,2.1099,4,711.0,20.1,396.9,13.35,20.1
321,54.3,4.5404,5,287.0,19.6,396.9,6.87,23.1
171,97.3,2.3887,5,403.0,14.7,348.13,12.03,19.1
11,82.9,6.2267,5,311.0,15.2,396.9,13.27,18.9
357,91.0,2.5052,24,666.0,20.2,391.34,13.27,21.7
322,49.9,4.7211,5,287.0,19.6,396.9,7.7,20.4
86,45.1,4.4272,3,247.0,18.5,395.99,12.86,22.5
411,100.0,1.5275,24,666.0,20.2,35.05,21.22,17.2
160,92.6,1.7984,5,403.0,14.7,338.92,5.5,27.0
400,100.0,1.5888,24,666.0,20.2,396.9,26.77,5.6
355,19.5,10.5857,4,334.0,22.0,376.04,5.57,20.6
297,58.0,6.32,4,289.0,16.0,396.9,15.84,20.3
235,61.5,3.6519,8,307.0,17.4,376.75,10.88,24.0
249,17.5,7.8265,7,330.0,19.1,393.74,6.56,26.2
429,95.6,1.9682,24,666.0,20.2,60.72,24.08,9.5
113,95.4,2.548,6,432.0,17.8,396.9,17.09,18.7
1,78.9,4.9671,2,242.0,17.8,396.9,9.14,21.6
358,83.4,2.7227,24,666.0,20.2,395.43,11.48,22.7
491,98.8,1.8681,4,711.0,20.1,390.11,18.07,13.6
344,28.1,6.4654,5,370.0,17.6,387.97,4.61,31.2
193,9.9,6.2196,1,265.0,15.6,393.37,5.03,31.1
207,72.7,4.3549,4,277.0,18.6,389.43,18.06,22.5
67,21.4,6.498,4,345.0,18.9,396.21,8.1,22.0
246,34.9,8.0555,7,330.0,19.1,390.18,9.16,24.3
419,76.5,1.794,24,666.0,20.2,48.45,22.74,8.4
408,97.9,1.4547,24,666.0,20.2,314.64,26.4,17.2
132,97.9,2.3274,4,437.0,21.2,385.76,11.12,23.0
386,100.0,1.4672,24,666.0,20.2,396.9,28.28,10.5
373,100.0,1.1742,24,666.0,20.2,396.9,34.77,13.8
484,41.9,3.724,24,666.0,20.2,370.73,13.34,20.6
90,66.1,3.0923,2,270.0,17.8,392.18,8.81,22.6
381,99.1,1.5192,24,666.0,20.2,396.9,21.08,10.9
122,92.9,2.0869,2,188.0,19.1,378.09,17.93,20.5
448,98.7,2.2616,24,666.0,20.2,396.9,18.13,14.1
73,6.2,5.2873,4,305.0,19.2,377.17,7.54,23.4
6,66.6,5.5605,5,311.0,15.2,395.6,12.43,22.9
158,100.0,1.7573,5,403.0,14.7,353.89,6.43,24.3
442,100.0,2.0048,24,666.0,20.2,395.69,16.59,18.4
383,100.0,1.5331,24,666.0,20.2,396.9,24.56,12.3
463,89.9,2.8016,24,666.0,20.2,393.82,10.29,20.2
435,94.6,2.1247,24,666.0,20.2,109.85,23.27,13.4
465,48.2,3.0665,24,666.0,20.2,334.4,14.13,19.9
12,39.0,5.4509,5,311.0,15.2,390.5,15.71,21.7
35,68.2,3.3603,5,279.0,19.2,396.9,9.68,18.9
478,96.7,2.1705,24,666.0,20.2,379.7,18.03,14.6
360,88.0,2.5182,24,666.0,20.2,374.56,7.79,25.0
277,27.6,4.8628,4,254.0,17.6,393.45,4.16,33.1
312,90.4,2.834,4,304.0,18.4,396.3,11.72,19.4
451,98.2,2.3552,24,666.0,20.2,355.29,17.73,15.2
133,95.4,2.4699,4,437.0,21.2,388.69,15.03,18.4
263,94.5,2.0788,5,264.0,13.0,393.42,11.25,31.0
490,98.0,1.8226,4,711.0,20.1,318.43,29.68,8.1
66,31.1,6.6115,4,337.0,16.1,396.9,10.24,19.4
138,98.2,1.6686,4,437.0,21.2,392.04,21.32,13.3
54,47.6,7.3197,3,469.0,21.1,396.9,14.8,18.9
271,16.3,4.429,3,223.0,18.6,396.9,6.59,25.2
423,85.1,2.0218,24,666.0,20.2,2.52,23.29,13.4
311,52.8,2.6403,4,304.0,18.4,396.9,5.98,22.1
21,89.2,4.0123,4,307.0,21.0,392.53,13.83,19.6
453,99.3,2.4527,24,666.0,20.2,375.87,16.74,17.8
222,77.7,3.2721,8,307.0,17.4,390.39,9.93,27.5
{
"party_info": {
"task_manager": "127.0.0.1:50050"
},
"component_params": {
"roles": {
"server": "Alice",
"client": [
"Bob",
"Charlie"
]
},
"common_params": {
"model": "HFL_linear_regression",
"method": "DPSGD",
"process": "train",
"task_name": "HFL_linear_regression_dpsgd_train",
"delta": 1e-3,
"noise_multiplier": 2.0,
"l2_norm_clip": 1.0,
"secure_mode": true,
"learning_rate": 1e-1,
"alpha": 1e-4,
"batch_size": 100,
"global_epoch": 10,
"local_epoch": 1,
"selected_column": null,
"id": "id",
"label": "y",
"print_metrics": true,
"metric_path": "data/result/metrics.json"
},
"role_params": {
"Bob": {
"data_set": "regression_hfl_train_client1",
"model_path": "data/result/Bob_model.pkl"
},
"Charlie": {
"data_set": "regression_hfl_train_client2",
"model_path": "data/result/Charlie_model.pkl"
},
"Alice": {
"data_set": "fl_fake_data"
}
}
}
}
\ No newline at end of file
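The DPSGD config above fixes the privacy knobs (noise_multiplier, l2_norm_clip, delta) but not the resulting privacy budget. A minimal sketch of how those hyperparameters translate into an (epsilon, delta) guarantee, mirroring the accounting the HFL client performs further down; the per-client training-set size is an assumed value for illustration:

```python
import dp_accounting

num_examples = 400        # assumption: per-client training-set size
batch_size = 100          # "batch_size" above
noise_multiplier = 2.0    # "noise_multiplier" above
delta = 1e-3              # "delta" above
steps = 10 * 1 * num_examples // batch_size  # global_epoch * local_epoch * batches per epoch

# RDP accounting over Poisson-subsampled Gaussian mechanisms
orders = [1 + x / 10. for x in range(1, 100)] + list(range(12, 64))
accountant = dp_accounting.rdp.RdpAccountant(orders)
event = dp_accounting.SelfComposedDpEvent(
    dp_accounting.PoissonSampledDpEvent(
        batch_size / num_examples,
        dp_accounting.GaussianDpEvent(noise_multiplier)),
    steps)
accountant.compose(event)
print(accountant.get_epsilon(target_delta=delta))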
{
"party_info": {
"task_manager": "127.0.0.1:50050"
},
"component_params": {
"roles": {
"server": "Alice",
"client": [
"Bob",
"Charlie"
]
},
"common_params": {
"model": "HFL_linear_regression",
"method": "Paillier",
"process": "train",
"task_name": "HFL_linear_regression_paillier_train",
"n_length": 2048,
"learning_rate": 1e-1,
"alpha": 1e-5,
"batch_size": 100,
"global_epoch": 10,
"local_epoch": 1,
"selected_column": null,
"id": "id",
"label": "y",
"print_metrics": true,
"metric_path": "data/result/metrics.json"
},
"role_params": {
"Bob": {
"data_set": "regression_hfl_train_client1",
"model_path": "data/result/Bob_model.pkl"
},
"Charlie": {
"data_set": "regression_hfl_train_client2",
"model_path": "data/result/Charlie_model.pkl"
},
"Alice": {
"data_set": "fl_fake_data"
}
}
}
}
\ No newline at end of file
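The Paillier method relies on additive homomorphism: ciphertexts can be added together and scaled by plaintext constants, so encrypted client models can be averaged directly in the encrypted domain. A small sketch of that aggregation step using the `phe` library (the parameter values are illustrative):

```python
from phe import paillier

public_key, private_key = paillier.generate_paillier_keypair(n_length=2048)

theta_bob = [0.5, -1.2]       # hypothetical client parameters
theta_charlie = [0.3, -1.0]
enc_bob = [public_key.encrypt(v) for v in theta_bob]
enc_charlie = [public_key.encrypt(v) for v in theta_charlie]

# ciphertext + ciphertext and ciphertext * scalar work homomorphically,
# so the encrypted average can be formed without decrypting the inputs
enc_avg = [(a + b) * 0.5 for a, b in zip(enc_bob, enc_charlie)]
print([private_key.decrypt(c) for c in enc_avg])  # approximately [0.4, -1.1]
```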
{
"party_info": {
"task_manager": "127.0.0.1:50050"
},
"component_params": {
"roles": {
"server": "Alice",
"client": [
"Bob",
"Charlie"
]
},
"common_params": {
"model": "HFL_linear_regression",
"method": "Plaintext",
"process": "train",
"task_name": "HFL_linear_regression_plaintext_train",
"learning_rate": 1e-1,
"alpha": 1e-5,
"batch_size": 100,
"global_epoch": 10,
"local_epoch": 1,
"selected_column": null,
"id": "id",
"label": "y",
"print_metrics": true,
"metric_path": "data/result/metrics.json"
},
"role_params": {
"Bob": {
"data_set": "regression_hfl_train_client1",
"model_path": "data/result/Bob_model.pkl"
},
"Charlie": {
"data_set": "regression_hfl_train_client2",
"model_path": "data/result/Charlie_model.pkl"
},
"Alice": {
"data_set": "fl_fake_data"
}
}
}
}
\ No newline at end of file
{
"party_info": {
"task_manager": "127.0.0.1:50050"
},
"component_params": {
"roles": {
"client": "Bob"
},
"common_params": {
"model": "HFL_linear_regression",
"process": "predict",
"task_name": "HFL_linear_regression_predict"
},
"role_params": {
"Bob": {
"data_set": "regression_hfl_test_client1",
"model_path": "data/result/Bob_model.pkl",
"predict_path": "data/result/Bob_predict.csv"
}
}
}
}
\ No newline at end of file
{
"party_info": {
"task_manager": "127.0.0.1:50050"
},
"component_params": {
"roles": {
"host": "Bob",
"guest": [
"Charlie"
],
"coordinator": "David"
},
"common_params": {
"model": "VFL_linear_regression",
"method": "CKKS",
"process": "train",
"task_name": "VFL_linear_regression_ckks_train",
"learning_rate": 1e-1,
"alpha": 1e-4,
"epoch": 10,
"shuffle_seed": 0,
"batch_size": 100,
"print_metrics": true
},
"role_params": {
"Bob": {
"data_set": "regression_vfl_train_host",
"selected_column": null,
"id": "id",
"label": "y",
"model_path": "data/result/host_model.pkl",
"metric_path": "data/result/metrics.json"
},
"Charlie": {
"data_set": "regression_vfl_train_guest",
"selected_column": null,
"id": "id",
"model_path": "data/result/guest_model.pkl"
},
"David": {
"data_set": "fl_fake_data"
}
}
}
}
\ No newline at end of file
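In this vertical setup, each party holds different columns of the same rows: judging by the CSV files above, the host (Bob) holds the label y together with AGE..LSTAT, while the guest (Charlie) holds CRIM..RM. Since the joint model is linear, the full prediction is just the sum of the two partial scores; a toy sketch with illustrative zero-initialized weights:

```python
import numpy as np

# one aligned sample (id 79 in the CSVs above), split by party
x_host = np.array([[36.6, 4.5026, 5, 398.0, 18.7, 396.06, 9.1]])  # AGE..LSTAT
x_guest = np.array([[0.08387, 0.0, 12.83, 0, 0.437, 5.874]])      # CRIM..RM

w_host = np.zeros(x_host.shape[1])    # illustrative parameters
w_guest = np.zeros(x_guest.shape[1])
bias = 0.0                            # kept by the host, which owns the label

# full linear prediction = sum of the per-party partial predictions
z = x_host @ w_host + x_guest @ w_guest + bias
```

With the CKKS method, the intermediate values the parties exchange during training are protected by the homomorphic scheme wrapped in crypto/ckks.py below.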
{
"party_info": {
"task_manager": "127.0.0.1:50050"
},
"component_params": {
"roles": {
"host": "Bob",
"guest": [
"Charlie"
]
},
"common_params": {
"model": "VFL_linear_regression",
"method": "Plaintext",
"process": "train",
"task_name": "VFL_linear_regression_plaintext_train",
"learning_rate": 1e-1,
"alpha": 1e-4,
"epoch": 10,
"shuffle_seed": 0,
"batch_size": 100,
"print_metrics": true
},
"role_params": {
"Bob": {
"data_set": "regression_vfl_train_host",
"selected_column": null,
"id": "id",
"label": "y",
"model_path": "data/result/host_model.pkl",
"metric_path": "data/result/metrics.json"
},
"Charlie": {
"data_set": "regression_vfl_train_guest",
"selected_column": null,
"id": "id",
"model_path": "data/result/guest_model.pkl"
}
}
}
}
\ No newline at end of file
{
"party_info": {
"task_manager": "127.0.0.1:50050"
},
"component_params": {
"roles": {
"host": "Bob",
"guest": [
"Charlie"
]
},
"common_params": {
"model": "VFL_linear_regression",
"process": "predict",
"task_name": "VFL_linear_regression_predict"
},
"role_params": {
"Bob": {
"data_set": "regression_vfl_test_host",
"model_path": "data/result/host_model.pkl",
"predict_path": "data/result/host_predict.csv"
},
"Charlie": {
"data_set": "regression_vfl_test_guest",
"model_path": "data/result/guest_model.pkl"
}
}
}
}
\ No newline at end of file
import tenseal as ts
class CKKS:
def __init__(self, context):
if isinstance(context, bytes):
context = ts.context_from(context)
self.context = context
self.multiply_depth = context.data.seal_context().first_context_data().chain_index()
def encrypt_vector(self, vector, context=None):
if context:
return ts.ckks_vector(context, vector)
else:
return ts.ckks_vector(self.context, vector)
def encrypt_tensor(self, tensor, context=None):
if context:
return ts.ckks_tensor(context, tensor)
else:
return ts.ckks_tensor(self.context, tensor)
def decrypt(self, ciphertext, secret_key=None):
if ciphertext.context().has_secret_key():
return ciphertext.decrypt()
else:
return ciphertext.decrypt(secret_key)
def load_vector(self, vector):
return ts.ckks_vector_from(self.context, vector)
def load_tensor(self, tensor):
return ts.ckks_tensor_from(self.context, tensor)
\ No newline at end of file
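A quick sketch of driving this wrapper locally; the TenSEAL context parameters below are common illustrative choices, not values mandated by the code:

```python
import tenseal as ts

context = ts.context(ts.SCHEME_TYPE.CKKS,
                     poly_modulus_degree=8192,
                     coeff_mod_bit_sizes=[60, 40, 40, 60])
context.global_scale = 2 ** 40
context.generate_galois_keys()

ckks = CKKS(context)
enc = ckks.encrypt_vector([1.0, 2.0, 3.0])
print(ckks.decrypt(enc))             # approximately [1.0, 2.0, 3.0]

# serialized ciphertexts can be reloaded by a party holding the same context
restored = ckks.load_vector(enc.serialize())
```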
class Paillier:
def __init__(self, public_key, private_key):
self.public_key = public_key
self.private_key = private_key
def decrypt_scalar(self, cipher_scalar):
return self.private_key.decrypt(cipher_scalar)
def decrypt_vector(self, cipher_vector):
return [self.private_key.decrypt(i) for i in cipher_vector]
def decrypt_matrix(self, cipher_matrix):
return [[self.private_key.decrypt(i) for i in cv] for cv in cipher_matrix]
def encrypt_scalar(self, plain_scalar):
return self.public_key.encrypt(plain_scalar)
def encrypt_vector(self, plain_vector):
return [self.public_key.encrypt(i) for i in plain_vector]
def encrypt_matrix(self, plain_matrix):
        return [[self.public_key.encrypt(i) for i in pv] for pv in plain_matrix]
\ No newline at end of file
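A short sketch of using this wrapper with a keypair from the `phe` library (the same library the HFL server imports below):

```python
from phe import paillier

public_key, private_key = paillier.generate_paillier_keypair(n_length=2048)
he = Paillier(public_key, private_key)

cipher = he.encrypt_vector([0.5, -1.2])
print(he.decrypt_vector(cipher))  # [0.5, -1.2]
```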
import numpy as np
class LinearRegression:
# l2 regularization by default, alpha is the penalty parameter
def __init__(self, x, learning_rate=0.2, alpha=0.0001):
self.learning_rate = learning_rate
self.alpha = alpha
self.weight = np.zeros(x.shape[1])
self.bias = np.zeros(1)
def get_theta(self):
return np.hstack((self.bias, self.weight))
def set_theta(self, theta):
if not isinstance(theta, np.ndarray):
theta = np.array(theta)
self.bias = theta[[0]]
self.weight = theta[1:]
def compute_grad(self, x, y):
error = self.compute_error(x, y)
dw = x.T.dot(error) / x.shape[0] + self.alpha * self.weight
db = error.mean(axis=0, keepdims=True)
return dw, db
def compute_error(self, x, y):
z = self.predict(x)
return z - y
def gradient_descent(self, x, y):
dw, db = self.compute_grad(x, y)
self.weight -= self.learning_rate * dw
self.bias -= self.learning_rate * db
def fit(self, x, y):
self.gradient_descent(x, y)
def predict(self, x):
return x.dot(self.weight) + self.bias
class LinearRegression_DPSGD(LinearRegression):
def __init__(self, x, learning_rate=0.2, alpha=0.0001,
noise_multiplier=1.0, l2_norm_clip=1.0, secure_mode=True):
super().__init__(x, learning_rate, alpha)
self.noise_multiplier = noise_multiplier
self.l2_norm_clip = l2_norm_clip
self.secure_mode = secure_mode
def set_noise_multiplier(self, noise_multiplier):
self.noise_multiplier = noise_multiplier
def set_l2_norm_clip(self, l2_norm_clip):
self.l2_norm_clip = l2_norm_clip
def add_noise(self, x, n=2):
# add gaussian noise
if self.secure_mode:
noise = np.zeros(x.shape)
for _ in range(2 * n):
noise += np.random.normal(
0, self.l2_norm_clip * self.noise_multiplier, x.shape)
noise /= np.sqrt(2 * n)
else:
noise = np.random.normal(
0, self.l2_norm_clip * self.noise_multiplier, x.shape)
return x + noise
def compute_grad(self, x, y):
# compute & clip per-example gradient
error = self.compute_error(x, y)
batch_db = error
batch_dw = x * np.expand_dims(error, axis=1)
batch_grad_l2_norm = np.sqrt((batch_dw**2).sum(axis=1) + batch_db**2)
clip = np.maximum(1., batch_grad_l2_norm / self.l2_norm_clip)
dw = (batch_dw / np.expand_dims(clip, axis=1)).sum(axis=0)
db = (batch_db / clip).sum(axis=0)
# add gaussian noise
dw = self.add_noise(dw) / x.shape[0] + self.alpha * self.weight
db = self.add_noise(db) / x.shape[0]
return dw, db
class LinearRegression_Paillier(LinearRegression):
def gradient_descent(self, x, y):
error = self.compute_error(x, y)
factor = -self.learning_rate / x.shape[0]
self.weight += (factor * x).T.dot(error) + \
(-self.learning_rate * self.alpha) * self.weight
self.bias += factor * error.sum(keepdims=True)
\ No newline at end of file
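A minimal local sanity check of the plaintext model above: `fit` performs a single full-batch gradient step, so convergence comes from calling it repeatedly (the data and step count here are illustrative):

```python
import numpy as np

rng = np.random.default_rng(0)
x = rng.normal(size=(100, 3))
y = x @ np.array([1.0, -2.0, 0.5]) + 3.0

model = LinearRegression(x, learning_rate=0.2, alpha=0.0001)
for _ in range(200):      # each fit() call is one gradient-descent update
    model.fit(x, y)
print(model.get_theta())  # approaches [3.0, 1.0, -2.0, 0.5] (bias first)
```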
from primihub.FL.utils.net_work import GrpcClient
from primihub.FL.utils.base import BaseModel
from primihub.FL.utils.file import check_directory_exist
from primihub.FL.utils.dataset import read_data,\
DataLoader,\
DPDataLoader
from primihub.utils.logger_util import logger
from primihub.FL.crypto.paillier import Paillier
import pickle
import pandas as pd
import dp_accounting
from sklearn import metrics
from .base import LinearRegression,\
LinearRegression_DPSGD,\
LinearRegression_Paillier
class LinearRegressionClient(BaseModel):
def __init__(self, **kwargs):
super().__init__(**kwargs)
def run(self):
process = self.common_params['process']
logger.info(f"process: {process}")
if process == 'train':
self.train()
elif process == 'predict':
self.predict()
else:
error_msg = f"Unsupported process: {process}"
logger.error(error_msg)
raise RuntimeError(error_msg)
def train(self):
# setup communication channels
remote_party = self.roles[self.role_params['others_role']]
server_channel = GrpcClient(local_party=self.role_params['self_name'],
remote_party=remote_party,
node_info=self.node_info,
task_info=self.task_info)
# load dataset
selected_column = self.common_params['selected_column']
id = self.common_params['id']
x = read_data(data_info=self.role_params['data'],
selected_column=selected_column,
id=id)
label = self.common_params['label']
y = x.pop(label).values
x = x.values
# client init
method = self.common_params['method']
if method == 'Plaintext':
client = Plaintext_Client(x,
self.common_params['learning_rate'],
self.common_params['alpha'],
server_channel)
elif method == 'DPSGD':
client = DPSGD_Client(x,
self.common_params['learning_rate'],
self.common_params['alpha'],
self.common_params['noise_multiplier'],
self.common_params['l2_norm_clip'],
self.common_params['secure_mode'],
server_channel)
elif method == 'Paillier':
client = Paillier_Client(x, y,
self.common_params['learning_rate'],
self.common_params['alpha'],
server_channel)
else:
error_msg = f"Unsupported method: {method}"
logger.error(error_msg)
raise RuntimeError(error_msg)
# data preprocessing
# minmaxscaler
data_max = x.max(axis=0)
data_min = x.min(axis=0)
server_channel.send('data_max', data_max)
server_channel.send('data_min', data_min)
data_max = server_channel.recv('data_max')
data_min = server_channel.recv('data_min')
x = (x - data_min) / (data_max - data_min)
# client training
num_examples = client.num_examples
batch_size = min(num_examples, self.common_params['batch_size'])
if method == 'DPSGD':
# DP data loader: Poisson sampling
train_dataloader = DPDataLoader(x, y,
batch_size)
else:
train_dataloader = DataLoader(x, y,
batch_size,
shuffle=True)
logger.info("-------- start training --------")
global_epoch = self.common_params['global_epoch']
for i in range(global_epoch):
logger.info(f"-------- global epoch {i+1} / {global_epoch} --------")
local_epoch = self.common_params['local_epoch']
for j in range(local_epoch):
logger.info(f"-------- local epoch {j+1} / {local_epoch} --------")
for batch_x, batch_y in train_dataloader:
client.model.fit(batch_x, batch_y)
client.train()
# print metrics
if self.common_params['print_metrics']:
client.print_metrics(x, y)
logger.info("-------- finish training --------")
# send final epsilon when using DPSGD
if method == 'DPSGD':
delta = self.common_params['delta']
steps = global_epoch * local_epoch * num_examples // batch_size
eps = client.compute_epsilon(steps, batch_size, delta)
server_channel.send("eps", eps)
logger.info(f"For delta={delta}, the current epsilon is {eps}")
# receive plaintext model when using Paillier
elif method == 'Paillier':
client.model.set_theta(server_channel.recv("server_model"))
# send final metrics
client.send_metrics(x, y)
# save model for prediction
modelFile = {
"selected_column": selected_column,
"id": id,
"label": label,
"data_max": data_max,
"data_min": data_min,
"model": client.model
}
model_path = self.role_params['model_path']
check_directory_exist(model_path)
logger.info(f"model path: {model_path}")
with open(model_path, 'wb') as file_path:
pickle.dump(modelFile, file_path)
def predict(self):
# load model for prediction
model_path = self.role_params['model_path']
logger.info(f"model path: {model_path}")
with open(model_path, 'rb') as file_path:
modelFile = pickle.load(file_path)
# load dataset
origin_data = read_data(data_info=self.role_params['data'])
x = origin_data.copy()
selected_column = modelFile['selected_column']
if selected_column:
x = x[selected_column]
id = modelFile['id']
if id in x.columns:
x.pop(id)
label = modelFile['label']
if label in x.columns:
y = x.pop(label).values
x = x.values
# data preprocessing
# minmaxscaler
data_max = modelFile['data_max']
data_min = modelFile['data_min']
x = (x - data_min) / (data_max - data_min)
# test data prediction
model = modelFile['model']
pred_y = model.predict(x)
result = pd.DataFrame({
'pred_y': pred_y
})
data_result = pd.concat([origin_data, result], axis=1)
predict_path = self.role_params['predict_path']
check_directory_exist(predict_path)
logger.info(f"predict path: {predict_path}")
data_result.to_csv(predict_path, index=False)
class Plaintext_Client:
def __init__(self, x, learning_rate, alpha, server_channel):
self.model = LinearRegression(x, learning_rate, alpha)
self.param_init(x, server_channel)
def param_init(self, x, server_channel):
self.server_channel = server_channel
self.num_examples = x.shape[0]
self.send_params()
def train(self):
self.server_channel.send("client_model", self.model.get_theta())
self.model.set_theta(self.server_channel.recv("server_model"))
def send_params(self):
self.server_channel.send('num_examples', self.num_examples)
def send_metrics(self, x, y):
y_hat = self.model.predict(x)
mse = metrics.mean_squared_error(y, y_hat)
mae = metrics.mean_absolute_error(y, y_hat)
logger.info(f"mse={mse}, mae={mae}")
self.server_channel.send('mse', mse)
self.server_channel.send('mae', mae)
def print_metrics(self, x, y):
self.send_metrics(x, y)
class DPSGD_Client(Plaintext_Client):
def __init__(self, x, learning_rate, alpha,
noise_multiplier, l2_norm_clip, secure_mode,
server_channel):
self.model = LinearRegression_DPSGD(x, learning_rate, alpha,
noise_multiplier, l2_norm_clip, secure_mode)
self.param_init(x, server_channel)
def compute_epsilon(self, steps, batch_size, delta):
if self.model.noise_multiplier == 0.0:
return float('inf')
orders = [1 + x / 10. for x in range(1, 100)] + list(range(12, 64))
accountant = dp_accounting.rdp.RdpAccountant(orders)
sampling_probability = batch_size / self.num_examples
event = dp_accounting.SelfComposedDpEvent(
dp_accounting.PoissonSampledDpEvent(
sampling_probability,
dp_accounting.GaussianDpEvent(self.model.noise_multiplier)), steps)
accountant.compose(event)
if delta >= 1. / self.num_examples:
logger.error(f"delta {delta} should be set less than 1 / {self.num_examples}")
return accountant.get_epsilon(target_delta=delta)
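# a minimal standalone sketch of the same RDP accounting, assuming the
# dp_accounting package is available; the numbers are illustrative only
#
# import dp_accounting
# orders = [1 + x / 10. for x in range(1, 100)] + list(range(12, 64))
# accountant = dp_accounting.rdp.RdpAccountant(orders)
# accountant.compose(dp_accounting.SelfComposedDpEvent(
#     dp_accounting.PoissonSampledDpEvent(
#         0.01,                                 # batch_size / num_examples
#         dp_accounting.GaussianDpEvent(1.1)),  # noise_multiplier
#     1000))                                    # steps
# print(accountant.get_epsilon(target_delta=1e-5))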
class Paillier_Client(Plaintext_Client, Paillier):
def __init__(self, x, y, learning_rate, alpha,
server_channel):
self.model = LinearRegression_Paillier(x, learning_rate, alpha)
self.param_init(x, server_channel)
self.public_key = server_channel.recv("public_key")
self.model.set_theta(self.encrypt_vector(self.model.get_theta()))
def print_metrics(self, x, y):
logger.info('No metrics while using Paillier')
\ No newline at end of file
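# a hypothetical common_params block for the HFL client above; the key
# names follow the code, the values are illustrative only
#
# common_params = {
#     'process': 'train',
#     'method': 'DPSGD',         # or 'Plaintext' / 'Paillier'
#     'learning_rate': 0.01,
#     'alpha': 0.0001,           # L2 regularization strength
#     'noise_multiplier': 1.0,   # DPSGD only
#     'l2_norm_clip': 1.0,       # DPSGD only
#     'secure_mode': True,       # DPSGD only
#     'delta': 1e-3,             # DPSGD only
#     'batch_size': 100,
#     'global_epoch': 10,
#     'local_epoch': 1,
#     'print_metrics': True,
#     'selected_column': None,
#     'id': 'id',
#     'label': 'y',
# }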
from primihub.FL.utils.net_work import MultiGrpcClients
from primihub.FL.utils.base import BaseModel
from primihub.FL.utils.file import check_directory_exist
from primihub.utils.logger_util import logger
from primihub.FL.crypto.paillier import Paillier
import json
import numpy as np
from phe import paillier
class LinearRegressionServer(BaseModel):
def __init__(self, **kwargs):
super().__init__(**kwargs)
def run(self):
process = self.common_params['process']
logger.info(f"process: {process}")
if process == 'train':
self.train()
else:
error_msg = f"Unsupported process: {process}"
logger.error(error_msg)
raise RuntimeError(error_msg)
def train(self):
# setup communication channels
remote_parties = self.roles[self.role_params['others_role']]
client_channel = MultiGrpcClients(local_party=self.role_params['self_name'],
remote_parties=remote_parties,
node_info=self.node_info,
task_info=self.task_info)
# server init
method = self.common_params['method']
if method == 'Plaintext' or method == 'DPSGD':
server = Plaintext_DPSGD_Server(self.common_params['alpha'],
client_channel)
elif method == 'Paillier':
server = Paillier_Server(self.common_params['alpha'],
self.common_params['n_length'],
client_channel)
else:
error_msg = f"Unsupported method: {method}"
logger.error(error_msg)
raise RuntimeError(error_msg)
# data preprocessing
# minmaxscaler
data_max = client_channel.recv_all('data_max')
data_min = client_channel.recv_all('data_min')
data_max = np.array(data_max).max(axis=0)
data_min = np.array(data_min).min(axis=0)
client_channel.send_all('data_max', data_max)
client_channel.send_all('data_min', data_min)
# server training
logger.info("-------- start training --------")
global_epoch = self.common_params['global_epoch']
for i in range(global_epoch):
logger.info(f"-------- global epoch {i+1} / {global_epoch} --------")
server.train()
# print metrics
if self.common_params['print_metrics']:
server.print_metrics()
logger.info("-------- finish training --------")
# receive final epsilons when using DPSGD
if method == 'DPSGD':
delta = self.common_params['delta']
eps = client_channel.recv_all("eps")
logger.info(f"For delta={delta}, the current epsilon is {max(eps)}")
# send plaintext model when using Paillier
elif method == 'Paillier':
server.plaintext_server_model_broadcast()
# receive final metrics
trainMetrics = server.get_metrics()
metric_path = self.common_params['metric_path']
check_directory_exist(metric_path)
logger.info(f"metric path: {metric_path}")
with open(metric_path, 'w') as file_path:
file_path.write(json.dumps(trainMetrics))
class Plaintext_DPSGD_Server:
def __init__(self, alpha, client_channel):
self.alpha = alpha
self.client_channel = client_channel
self.theta = None
self.num_examples_weights = None
self.recv_params()
def recv_params(self):
self.num_examples_weights = self.client_channel.recv_all('num_examples')
def client_model_aggregate(self):
client_models = self.client_channel.recv_all("client_model")
self.theta = np.average(client_models,
weights=self.num_examples_weights,
axis=0)
def server_model_broadcast(self):
self.client_channel.send_all("server_model", self.theta)
def train(self):
self.client_model_aggregate()
self.server_model_broadcast()
def get_scalar_metrics(self, metrics_name):
metrics_name = metrics_name.lower()
supported_metrics = ['mse', 'mae']
if metrics_name not in supported_metrics:
error_msg = f"""Unsupported metrics {metrics_name},
use {supported_metrics} instead"""
logger.error(error_msg)
raise RuntimeError(error_msg)
client_metrics = self.client_channel.recv_all(metrics_name)
return np.average(client_metrics,
weights=self.num_examples_weights)
def get_metrics(self):
server_metrics = {}
mse = self.get_scalar_metrics('mse')
server_metrics["train_mse"] = mse
mae = self.get_scalar_metrics('mae')
server_metrics["train_mae"] = mae
logger.info(f"mse={mse}, mae={mae}")
return server_metrics
def print_metrics(self):
self.get_metrics()
class Paillier_Server(Plaintext_DPSGD_Server, Paillier):
def __init__(self, alpha, n_length, client_channel):
Plaintext_DPSGD_Server.__init__(self, alpha, client_channel)
self.public_key,\
self.private_key = paillier.generate_paillier_keypair(n_length=n_length)
self.public_key_broadcast()
def public_key_broadcast(self):
self.client_channel.send_all("public_key", self.public_key)
def client_model_aggregate(self):
client_models = self.client_channel.recv_all("client_model")
self.theta = np.mean(client_models, axis=0)
self.theta = np.array(self.encrypt_vector(self.decrypt_vector(self.theta)))
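# note: the average above is taken over Paillier ciphertexts; the
# server then decrypts and freshly re-encrypts the aggregate, so it is
# the only party that sees the plaintext global model during training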
def plaintext_server_model_broadcast(self):
self.theta = np.array(self.decrypt_vector(self.theta))
self.server_model_broadcast()
def print_metrics(self):
logger.info('No metrics while using Paillier')
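# a minimal sketch of the weighted aggregation in
# Plaintext_DPSGD_Server.client_model_aggregate, assuming two clients
# holding 100 and 300 examples
#
# import numpy as np
# client_models = [np.array([1.0, 2.0]), np.array([3.0, 4.0])]
# theta = np.average(client_models, weights=[100, 300], axis=0)
# # -> [2.5, 3.5], i.e. (100 * m1 + 300 * m2) / 400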
import numpy as np
from primihub.utils.logger_util import logger
from .base import LinearRegression
class LinearRegression_Host_Plaintext(LinearRegression):
def compute_z(self, x, guest_z):
z = x.dot(self.weight) + self.bias
z += np.array(guest_z).sum(axis=0)
return z
def compute_error(self, y, z):
return z - y
def compute_grad(self, x, error):
dw = x.T.dot(error) / x.shape[0] + self.alpha * self.weight
db = error.mean(axis=0, keepdims=True)
return dw, db
def gradient_descent(self, x, error):
dw, db = self.compute_grad(x, error)
self.weight -= self.learning_rate * dw
self.bias -= self.learning_rate * db
def fit(self, x, error):
self.gradient_descent(x, error)
class LinearRegression_Host_CKKS(LinearRegression_Host_Plaintext):
def compute_enc_z(self, x, guest_z):
z = self.weight.mm(x.T) + self.bias
z += sum(guest_z)
return z
def gradient_descent(self, x, error):
factor = -self.learning_rate / x.shape[0]
self.bias += error.sum() * factor
self.weight += error.mm(factor * x) \
+ (-self.learning_rate * self.alpha) * self.weight
class LinearRegression_Guest_Plaintext:
def __init__(self, x, learning_rate=0.2, alpha=0.0001):
self.learning_rate = learning_rate
self.alpha = alpha
self.weight = np.zeros(x.shape[1])
def compute_z(self, x):
return x.dot(self.weight)
def compute_grad(self, x, error):
dw = x.T.dot(error) / x.shape[0] + self.alpha * self.weight
return dw
def gradient_descent(self, x, error):
dw = self.compute_grad(x, error)
self.weight -= self.learning_rate * dw
def fit(self, x, error):
self.gradient_descent(x, error)
class LinearRegression_Guest_CKKS(LinearRegression_Guest_Plaintext):
def compute_enc_z(self, x):
return self.weight.mm(x.T)
def gradient_descent(self, x, error):
factor = -self.learning_rate / x.shape[0]
self.weight += error.mm(factor * x) + \
(-self.learning_rate * self.alpha) * self.weight
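# a minimal numpy sketch of how the vertically partitioned prediction
# is assembled, assuming one host (features, bias, label) and one
# guest; the data is illustrative only
#
# import numpy as np
# x_host, w_host, b = np.array([[1., 2.]]), np.array([0.5, 0.5]), 0.1
# x_guest, w_guest = np.array([[3.]]), np.array([2.0])
# guest_z = x_guest.dot(w_guest)        # the guest shares only its partial score
# z = x_host.dot(w_host) + b + guest_z  # the host combines the partial scores
# # -> [7.6]; raw features never leave their owner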
from primihub.FL.utils.net_work import GrpcClient, MultiGrpcClients
from primihub.FL.utils.base import BaseModel
from primihub.utils.logger_util import logger
from primihub.FL.crypto.ckks import CKKS
import math
import numpy as np
import tenseal as ts
class LinearRegressionCoordinator(BaseModel):
def __init__(self, **kwargs):
super().__init__(**kwargs)
def run(self):
process = self.common_params['process']
logger.info(f"process: {process}")
if process == 'train':
self.train()
else:
error_msg = f"Unsupported process: {process}"
logger.error(error_msg)
raise RuntimeError(error_msg)
def train(self):
# setup communication channels
host_channel = GrpcClient(local_party=self.role_params['self_name'],
remote_party=self.roles['host'],
node_info=self.node_info,
task_info=self.task_info)
guest_channel = MultiGrpcClients(local_party=self.role_params['self_name'],
remote_parties=self.roles['guest'],
node_info=self.node_info,
task_info=self.task_info)
# coordinator init
method = self.common_params['method']
if method == 'CKKS':
batch_size = host_channel.recv('batch_size')
coordinator = CKKSCoordinator(batch_size,
host_channel,
guest_channel)
else:
error_msg = f"Unsupported method: {method}"
logger.error(error_msg)
raise RuntimeError(error_msg)
# coordinator training
logger.info("-------- start training --------")
epoch = self.common_params['epoch']
for i in range(epoch):
logger.info(f"-------- epoch {i+1} / {epoch} --------")
coordinator.train()
# print metrics
if self.common_params['print_metrics']:
coordinator.compute_loss()
logger.info("-------- finish training --------")
# decrypt & send plaintext model
coordinator.update_plaintext_model()
class CKKSCoordinator(CKKS):
def __init__(self, batch_size, host_channel, guest_channel):
self.t = 0
self.host_channel = host_channel
self.guest_channel = guest_channel
# set CKKS params
# use larger poly_mod_degree to support more encrypted multiplications
poly_mod_degree = 8192
# minimum number of ciphertext multiplications per iteration of gradient descent
# supporting more multiplications leads to a larger context size
multiply_per_iter = 2
self.max_iter = 1
multiply_depth = multiply_per_iter * self.max_iter
# sum(coeff_mod_bit_sizes) <= max coeff_modulus bit-length
fe_bits_scale = 60
bits_scale = 49
# 60 * 2 + 49 * 2 = 218 <= 218 (the max coeff_modulus bit-length for N = 8192 & 128-bit security)
coeff_mod_bit_sizes = [fe_bits_scale] + \
[bits_scale] * multiply_depth + \
[fe_bits_scale]
# create TenSEALContext
logger.info('create CKKS TenSEAL context')
secret_context = ts.context(ts.SCHEME_TYPE.CKKS,
poly_modulus_degree=poly_mod_degree,
coeff_mod_bit_sizes=coeff_mod_bit_sizes)
secret_context.global_scale = pow(2, bits_scale)
secret_context.generate_galois_keys()
context = secret_context.copy()
context.make_context_public()
super().__init__(context)
self.secret_context = secret_context
self.send_public_context()
self.num_examples = host_channel.recv('num_examples')
self.iter_per_epoch = math.ceil(self.num_examples / batch_size)
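# note: with multiply_depth = 2 and 2 multiplications per iteration the
# budget allows max_iter = 1, i.e. the encrypted model must be
# refreshed (decrypted & re-encrypted) after every iteration; train()
# below does this bookkeeping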
def send_public_context(self):
serialize_context = self.context.serialize()
self.host_channel.send("public_context", serialize_context)
self.guest_channel.send_all("public_context", serialize_context)
def recv_model(self):
host_weight = self.load_vector(self.host_channel.recv('host_weight'))
host_bias = self.load_vector(self.host_channel.recv('host_bias'))
guest_weight = self.guest_channel.recv_all('guest_weight')
guest_weight = [self.load_vector(weight) for weight in guest_weight]
return host_weight, host_bias, guest_weight
def send_model(self, host_weight, host_bias, guest_weight):
self.host_channel.send('host_weight', host_weight)
self.host_channel.send('host_bias', host_bias)
# send n sub-lists to n parties separately
self.guest_channel.send_seperately('guest_weight', guest_weight)
def decrypt_model(self, host_weight, host_bias, guest_weight):
host_weight = self.decrypt(host_weight, self.secret_context.secret_key())
host_bias = self.decrypt(host_bias, self.secret_context.secret_key())
guest_weight = [self.decrypt(weight, self.secret_context.secret_key()) \
for weight in guest_weight]
return host_weight, host_bias, guest_weight
def encrypt_model(self, host_weight, host_bias, guest_weight):
host_weight = self.encrypt_vector(host_weight)
host_bias = self.encrypt_vector(host_bias)
guest_weight = [self.encrypt_vector(weight) for weight in guest_weight]
return host_weight, host_bias, guest_weight
def update_ciphertext_model(self):
host_weight, host_bias, guest_weight = self.recv_model()
host_weight, host_bias, guest_weight = self.decrypt_model(
host_weight, host_bias, guest_weight)
host_weight, host_bias, guest_weight = self.encrypt_model(
host_weight, host_bias, guest_weight)
host_weight = host_weight.serialize()
host_bias = host_bias.serialize()
guest_weight = [weight.serialize() for weight in guest_weight]
self.send_model(host_weight, host_bias, guest_weight)
def update_plaintext_model(self):
host_weight, host_bias, guest_weight = self.recv_model()
host_weight, host_bias, guest_weight = self.decrypt_model(
host_weight, host_bias, guest_weight)
# list to numpy ndarray
host_weight = np.array(host_weight)
host_bias = np.array(host_bias)
guest_weight = [np.array(weight) for weight in guest_weight]
self.send_model(host_weight, host_bias, guest_weight)
def train(self):
logger.info(f'iteration {self.t} / {self.max_iter}')
# t counts iterations since the last ciphertext refresh; the model
# must be re-encrypted every max_iter iterations
self.t += self.iter_per_epoch
num_dec = self.t // self.max_iter
self.t = self.t % self.max_iter
if self.t == 0:
# defer the refresh at an exact boundary: host & guest only
# refresh at the start of their next iteration
num_dec -= 1
self.t = self.max_iter
for i in range(num_dec):
logger.warning(f'decrypt model #{i+1}')
self.update_ciphertext_model()
def compute_loss(self):
logger.info(f'iteration {self.t} / {self.max_iter}')
if self.t >= self.max_iter:
self.t = 0
logger.warning('decrypt model')
self.update_ciphertext_model()
mse = self.load_vector(self.host_channel.recv('mse'))
mse = self.decrypt(mse, self.secret_context.secret_key())[0]
mse /= self.num_examples
logger.info(f'mse={mse}')
\ No newline at end of file
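# a minimal TenSEAL round trip with the same CKKS parameters as above,
# assuming tenseal is installed; the vector is illustrative only
#
# import tenseal as ts
# ctx = ts.context(ts.SCHEME_TYPE.CKKS,
#                  poly_modulus_degree=8192,
#                  coeff_mod_bit_sizes=[60, 49, 49, 60])
# ctx.global_scale = 2 ** 49
# ctx.generate_galois_keys()
# enc = ts.ckks_vector(ctx, [1.0, 2.0, 3.0])
# print((enc + enc).decrypt())  # ~[2.0, 4.0, 6.0]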
from primihub.FL.utils.net_work import GrpcClient
from primihub.FL.utils.base import BaseModel
from primihub.FL.utils.file import check_directory_exist
from primihub.FL.utils.dataset import read_data, DataLoader
from primihub.utils.logger_util import logger
from primihub.FL.crypto.ckks import CKKS
import pickle
from sklearn.preprocessing import StandardScaler
from .vfl_base import LinearRegression_Guest_Plaintext,\
LinearRegression_Guest_CKKS
class LinearRegressionGuest(BaseModel):
def __init__(self, **kwargs):
super().__init__(**kwargs)
def run(self):
process = self.common_params['process']
logger.info(f"process: {process}")
if process == 'train':
self.train()
elif process == 'predict':
self.predict()
else:
error_msg = f"Unsupported process: {process}"
logger.error(error_msg)
raise RuntimeError(error_msg)
def train(self):
# setup communication channels
host_channel = GrpcClient(local_party=self.role_params['self_name'],
remote_party=self.roles['host'],
node_info=self.node_info,
task_info=self.task_info)
method = self.common_params['method']
if method == 'CKKS':
coordinator_channel = GrpcClient(local_party=self.role_params['self_name'],
remote_party=self.roles['coordinator'],
node_info=self.node_info,
task_info=self.task_info)
# load dataset
selected_column = self.role_params['selected_column']
id = self.role_params['id']
x = read_data(data_info=self.role_params['data'],
selected_column=selected_column,
id=id)
x = x.values
# guest init
batch_size = min(x.shape[0], self.common_params['batch_size'])
if method == 'Plaintext':
guest = Plaintext_Guest(x,
self.common_params['learning_rate'],
self.common_params['alpha'],
host_channel)
elif method == 'CKKS':
guest = CKKS_Guest(x,
self.common_params['learning_rate'],
self.common_params['alpha'],
host_channel,
coordinator_channel)
else:
error_msg = f"Unsupported method: {method}"
logger.error(error_msg)
raise RuntimeError(error_msg)
# data preprocessing
# StandardScaler
scaler = StandardScaler()
x = scaler.fit_transform(x)
# guest training
train_dataloader = DataLoader(dataset=x,
label=None,
batch_size=batch_size,
shuffle=True,
seed=self.common_params['shuffle_seed'])
logger.info("-------- start training --------")
epoch = self.common_params['epoch']
for i in range(epoch):
logger.info(f"-------- epoch {i+1} / {epoch} --------")
for batch_x in train_dataloader:
guest.train(batch_x)
# print metrics
if self.common_params['print_metrics']:
guest.compute_metrics(x)
logger.info("-------- finish training --------")
# receive plaintext model
if method == 'CKKS':
guest.update_plaintext_model()
# compute final metrics
guest.compute_final_metrics(x)
# save model for prediction
modelFile = {
"selected_column": selected_column,
"id": id,
"transformer": scaler,
"model": guest.model
}
model_path = self.role_params['model_path']
check_directory_exist(model_path)
logger.info(f"model path: {model_path}")
with open(model_path, 'wb') as file_path:
pickle.dump(modelFile, file_path)
def predict(self):
# setup communication channels
remote_party = self.roles[self.role_params['others_role']]
host_channel = GrpcClient(local_party=self.role_params['self_name'],
remote_party=remote_party,
node_info=self.node_info,
task_info=self.task_info)
# load model for prediction
model_path = self.role_params['model_path']
logger.info(f"model path: {model_path}")
with open(model_path, 'rb') as file_path:
modelFile = pickle.load(file_path)
# load dataset
x = read_data(data_info=self.role_params['data'])
selected_column = modelFile['selected_column']
if selected_column:
x = x[selected_column]
id = modelFile['id']
if id in x.columns:
x.pop(id)
x = x.values
# data preprocessing
transformer = modelFile['transformer']
x = transformer.transform(x)
# test data prediction
model = modelFile['model']
guest_z = model.compute_z(x)
host_channel.send('guest_z', guest_z)
class Plaintext_Guest:
def __init__(self, x, learning_rate, alpha, host_channel):
self.model = LinearRegression_Guest_Plaintext(x,
learning_rate,
alpha)
self.host_channel = host_channel
def send_z(self, x):
guest_z = self.model.compute_z(x)
self.host_channel.send('guest_z', guest_z)
def train(self, x):
self.send_z(x)
error = self.host_channel.recv('error')
self.model.fit(x, error)
def compute_metrics(self, x):
self.send_z(x)
def compute_final_metrics(self, x):
self.compute_metrics(x)
class CKKS_Guest(Plaintext_Guest, CKKS):
def __init__(self, x, learning_rate, alpha,
host_channel, coordinator_channel):
self.t = 0
self.model = LinearRegression_Guest_CKKS(x,
learning_rate,
alpha)
self.host_channel = host_channel
self.recv_public_context(coordinator_channel)
CKKS.__init__(self, self.context)
multiply_per_iter = 2
self.max_iter = self.multiply_depth // multiply_per_iter
self.encrypt_model()
def recv_public_context(self, coordinator_channel):
self.coordinator_channel = coordinator_channel
self.context = coordinator_channel.recv('public_context')
def encrypt_model(self):
self.model.weight = self.encrypt_vector(self.model.weight)
def update_ciphertext_model(self):
self.coordinator_channel.send('guest_weight',
self.model.weight.serialize())
self.model.weight = self.load_vector(
self.coordinator_channel.recv('guest_weight'))
def update_plaintext_model(self):
self.coordinator_channel.send('guest_weight',
self.model.weight.serialize())
self.model.weight = self.coordinator_channel.recv('guest_weight')
def send_enc_z(self, x):
guest_z = self.model.compute_enc_z(x)
self.host_channel.send('guest_z', guest_z.serialize())
def train(self, x):
logger.info(f'iteration {self.t} / {self.max_iter}')
if self.t >= self.max_iter:
self.t = 0
logger.warning('decrypt model')
self.update_ciphertext_model()
self.t += 1
self.send_enc_z(x)
error = self.load_vector(self.host_channel.recv('error'))
self.model.fit(x, error)
def compute_metrics(self, x):
logger.info(f'iteration {self.t} / {self.max_iter}')
if self.t >= self.max_iter:
self.t = 0
logger.warning('decrypt model')
self.update_ciphertext_model()
self.send_enc_z(x)
logger.info('View metrics at coordinator while using CKKS')
def compute_final_metrics(self, x):
super().compute_metrics(x)
\ No newline at end of file
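# a hypothetical role_params block for the VFL guest above; the key
# names follow the code, the values are illustrative only
#
# role_params = {
#     'self_name': 'guest0',
#     'data': {...},            # dataset info consumed by read_data
#     'selected_column': None,
#     'id': 'id',
#     'model_path': 'model/vfl_linear_regression_guest.pkl',
#     'others_role': 'host',    # used to locate the host when predicting
# }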
from primihub.FL.utils.net_work import GrpcClient, MultiGrpcClients
from primihub.FL.utils.base import BaseModel
from primihub.FL.utils.file import check_directory_exist
from primihub.FL.utils.dataset import read_data, DataLoader
from primihub.utils.logger_util import logger
from primihub.FL.crypto.ckks import CKKS
import pickle
import json
import pandas as pd
from sklearn import metrics
from sklearn.preprocessing import StandardScaler
from .vfl_base import LinearRegression_Host_Plaintext,\
LinearRegression_Host_CKKS
class LinearRegressionHost(BaseModel):
def __init__(self, **kwargs):
super().__init__(**kwargs)
def run(self):
process = self.common_params['process']
logger.info(f"process: {process}")
if process == 'train':
self.train()
elif process == 'predict':
self.predict()
else:
error_msg = f"Unsupported process: {process}"
logger.error(error_msg)
raise RuntimeError(error_msg)
def train(self):
# setup communication channels
guest_channel = MultiGrpcClients(local_party=self.role_params['self_name'],
remote_parties=self.roles['guest'],
node_info=self.node_info,
task_info=self.task_info)
method = self.common_params['method']
if method == 'CKKS':
coordinator_channel = GrpcClient(local_party=self.role_params['self_name'],
remote_party=self.roles['coordinator'],
node_info=self.node_info,
task_info=self.task_info)
# load dataset
selected_column = self.role_params['selected_column']
id = self.role_params['id']
x = read_data(data_info=self.role_params['data'],
selected_column=selected_column,
id=id)
label = self.role_params['label']
y = x.pop(label).values
x = x.values
# host init
batch_size = min(x.shape[0], self.common_params['batch_size'])
if method == 'Plaintext':
host = Plaintext_Host(x,
self.common_params['learning_rate'],
self.common_params['alpha'],
guest_channel)
elif method == 'CKKS':
coordinator_channel.send('batch_size', batch_size)
host = CKKS_Host(x,
self.common_params['learning_rate'],
self.common_params['alpha'],
guest_channel,
coordinator_channel)
else:
error_msg = f"Unsupported method: {method}"
logger.error(error_msg)
raise RuntimeError(error_msg)
# data preprocessing
# StandardScaler
scaler = StandardScaler()
x = scaler.fit_transform(x)
# host training
train_dataloader = DataLoader(dataset=x,
label=y,
batch_size=batch_size,
shuffle=True,
seed=self.common_params['shuffle_seed'])
logger.info("-------- start training --------")
epoch = self.common_params['epoch']
for i in range(epoch):
logger.info(f"-------- epoch {i+1} / {epoch} --------")
for batch_x, batch_y in train_dataloader:
host.train(batch_x, batch_y)
# print metrics
if self.common_params['print_metrics']:
host.compute_metrics(x, y)
logger.info("-------- finish training --------")
# receive plaintext model
if method == 'CKKS':
host.update_plaintext_model()
# compute final metrics
trainMetrics = host.compute_final_metrics(x, y)
metric_path = self.role_params['metric_path']
check_directory_exist(metric_path)
logger.info(f"metric path: {metric_path}")
with open(metric_path, 'w') as file_path:
file_path.write(json.dumps(trainMetrics))
# save model for prediction
modelFile = {
"selected_column": selected_column,
"id": id,
"label": label,
"transformer": scaler,
"model": host.model
}
model_path = self.role_params['model_path']
check_directory_exist(model_path)
logger.info(f"model path: {model_path}")
with open(model_path, 'wb') as file_path:
pickle.dump(modelFile, file_path)
def predict(self):
# setup communication channels
remote_parties = self.roles[self.role_params['others_role']]
guest_channel = MultiGrpcClients(local_party=self.role_params['self_name'],
remote_parties=remote_parties,
node_info=self.node_info,
task_info=self.task_info)
# load model for prediction
model_path = self.role_params['model_path']
logger.info(f"model path: {model_path}")
with open(model_path, 'rb') as file_path:
modelFile = pickle.load(file_path)
# load dataset
origin_data = read_data(data_info=self.role_params['data'])
x = origin_data.copy()
selected_column = modelFile['selected_column']
if selected_column:
x = x[selected_column]
id = modelFile['id']
if id in x.columns:
x.pop(id)
label = modelFile['label']
if label in x.columns:
y = x.pop(label).values
x = x.values
# data preprocessing
transformer = modelFile['transformer']
x = transformer.transform(x)
# test data prediction
model = modelFile['model']
guest_z = guest_channel.recv_all('guest_z')
z = model.compute_z(x, guest_z)
result = pd.DataFrame({
'pred_y': z
})
data_result = pd.concat([origin_data, result], axis=1)
predict_path = self.role_params['predict_path']
check_directory_exist(predict_path)
logger.info(f"predict path: {predict_path}")
data_result.to_csv(predict_path, index=False)
class Plaintext_Host:
def __init__(self, x, learning_rate, alpha, guest_channel):
self.model = LinearRegression_Host_Plaintext(x,
learning_rate,
alpha)
self.guest_channel = guest_channel
def compute_z(self, x):
guest_z = self.guest_channel.recv_all('guest_z')
return self.model.compute_z(x, guest_z)
def train(self, x, y):
z = self.compute_z(x)
error = self.model.compute_error(y, z)
self.guest_channel.send_all('error', error)
self.model.fit(x, error)
def compute_metrics(self, x, y):
z = self.compute_z(x)
mse = metrics.mean_squared_error(y, z)
mae = metrics.mean_absolute_error(y, z)
logger.info(f"mse={mse}, mae={mae}")
return {
'train_mse': mse,
'train_mae': mae
}
def compute_final_metrics(self, x, y):
return self.compute_metrics(x, y)
class CKKS_Host(Plaintext_Host, CKKS):
def __init__(self, x, learning_rate, alpha,
guest_channel, coordinator_channel):
self.t = 0
self.model = LinearRegression_Host_CKKS(x,
learning_rate,
alpha)
self.guest_channel = guest_channel
self.recv_public_context(coordinator_channel)
coordinator_channel.send('num_examples', x.shape[0])
CKKS.__init__(self, self.context)
multiply_per_iter = 2
self.max_iter = self.multiply_depth // multiply_per_iter
self.encrypt_model()
def recv_public_context(self, coordinator_channel):
self.coordinator_channel = coordinator_channel
self.context = coordinator_channel.recv('public_context')
def encrypt_model(self):
self.model.weight = self.encrypt_vector(self.model.weight)
self.model.bias = self.encrypt_vector(self.model.bias)
def update_ciphertext_model(self):
self.coordinator_channel.send('host_weight',
self.model.weight.serialize())
self.coordinator_channel.send('host_bias',
self.model.bias.serialize())
self.model.weight = self.load_vector(
self.coordinator_channel.recv('host_weight'))
self.model.bias = self.load_vector(
self.coordinator_channel.recv('host_bias'))
def update_plaintext_model(self):
self.coordinator_channel.send('host_weight',
self.model.weight.serialize())
self.coordinator_channel.send('host_bias',
self.model.bias.serialize())
self.model.weight = self.coordinator_channel.recv('host_weight')
self.model.bias = self.coordinator_channel.recv('host_bias')
def compute_enc_z(self, x):
guest_z = self.guest_channel.recv_all('guest_z')
guest_z = [self.load_vector(z) for z in guest_z]
return self.model.compute_enc_z(x, guest_z)
def train(self, x, y):
logger.info(f'iteration {self.t} / {self.max_iter}')
if self.t >= self.max_iter:
self.t = 0
logger.warning('decrypt model')
self.update_ciphertext_model()
self.t += 1
z = self.compute_enc_z(x)
error = self.model.compute_error(y, z)
self.guest_channel.send_all('error', error.serialize())
self.model.fit(x, error)
def compute_metrics(self, x, y):
logger.info(f'iteration {self.t} / {self.max_iter}')
if self.t >= self.max_iter:
self.t = 0
logger.warning('decrypt model')
self.update_ciphertext_model()
z = self.compute_enc_z(x)
mse = ((z - y) ** 2).sum()
self.coordinator_channel.send('mse', mse.serialize())
logger.info('View metrics at coordinator while using CKKS')
def compute_final_metrics(self, x, y):
return super().compute_metrics(x, y)
\ No newline at end of file
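# note: under CKKS the host computes the squared-error sum directly on
# ciphertexts and sends it to the coordinator, which decrypts it and
# divides by num_examples, so the coordinator only ever sees the
# aggregate mse, never per-example predictions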
......@@ -122,7 +122,7 @@ class LogisticRegression_DPSGD(LogisticRegression):
return x + noise
def compute_grad(self, x, y):
# compute & clip per-example gradient
if self.multiclass:
error = self.predict_prob(x)
......@@ -149,32 +149,7 @@ class LogisticRegression_DPSGD(LogisticRegression):
# add gaussian noise
dw = self.add_noise(dw) / x.shape[0] + self.alpha * self.weight
db = self.add_noise(db) / x.shape[0]
return dw, db
class Paillier:
def __init__(self, public_key, private_key):
self.public_key = public_key
self.private_key = private_key
def decrypt_scalar(self, cipher_scalar):
return self.private_key.decrypt(cipher_scalar)
def decrypt_vector(self, cipher_vector):
return [self.private_key.decrypt(i) for i in cipher_vector]
def decrypt_matrix(self, cipher_matrix):
return [[self.private_key.decrypt(i) for i in cv] for cv in cipher_matrix]
def encrypt_scalar(self, plain_scalar):
return self.public_key.encrypt(plain_scalar)
def encrypt_vector(self, plain_vector):
return [self.public_key.encrypt(i) for i in plain_vector]
def encrypt_matrix(self, plain_matrix):
return [[self.public_key.encrypt(i) for i in pv] for pv in plain_matrix]
return dw, db
class LogisticRegression_Paillier(LogisticRegression):
......
......@@ -5,6 +5,7 @@ from primihub.FL.utils.dataset import read_data,\
DataLoader,\
DPDataLoader
from primihub.utils.logger_util import logger
from primihub.FL.crypto.paillier import Paillier
import pickle
import pandas as pd
......@@ -15,8 +16,7 @@ from primihub.FL.metrics.hfl_metrics import ks_from_fpr_tpr,\
auc_from_fpr_tpr
from .base import LogisticRegression,\
LogisticRegression_DPSGD,\
LogisticRegression_Paillier,\
Paillier
LogisticRegression_Paillier
class LogisticRegressionClient(BaseModel):
......
......@@ -2,6 +2,7 @@ from primihub.FL.utils.net_work import MultiGrpcClients
from primihub.FL.utils.base import BaseModel
from primihub.FL.utils.file import check_directory_exist
from primihub.utils.logger_util import logger
from primihub.FL.crypto.paillier import Paillier
import json
import numpy as np
......@@ -9,7 +10,6 @@ from phe import paillier
from primihub.FL.metrics.hfl_metrics import roc_vertical_avg,\
ks_from_fpr_tpr,\
auc_from_fpr_tpr
from .base import Paillier
class LogisticRegressionServer(BaseModel):
......
from primihub.FL.utils.net_work import GrpcClient, MultiGrpcClients
from primihub.FL.utils.base import BaseModel
from primihub.utils.logger_util import logger
from primihub.FL.crypto.ckks import CKKS
import math
import numpy as np
......@@ -59,39 +60,6 @@ class LogisticRegressionCoordinator(BaseModel):
# decrypt & send plaintext model
coordinator.update_plaintext_model()
class CKKS:
def __init__(self, context):
if isinstance(context, bytes):
context = ts.context_from(context)
self.context = context
self.multiply_depth = context.data.seal_context().first_context_data().chain_index()
def encrypt_vector(self, vector, context=None):
if context:
return ts.ckks_vector(context, vector)
else:
return ts.ckks_vector(self.context, vector)
def encrypt_tensor(self, tensor, context=None):
if context:
return ts.ckks_tensor(context, tensor)
else:
return ts.ckks_tensor(self.context, tensor)
def decrypt(self, ciphertext, secret_key=None):
if ciphertext.context().has_secret_key():
return ciphertext.decrypt()
else:
return ciphertext.decrypt(secret_key)
def load_vector(self, vector):
return ts.ckks_vector_from(self.context, vector)
def load_tensor(self, tensor):
return ts.ckks_tensor_from(self.context, tensor)
class CKKSCoordinator(CKKS):
......
......@@ -3,13 +3,13 @@ from primihub.FL.utils.base import BaseModel
from primihub.FL.utils.file import check_directory_exist
from primihub.FL.utils.dataset import read_data, DataLoader
from primihub.utils.logger_util import logger
from primihub.FL.crypto.ckks import CKKS
import pickle
from sklearn.preprocessing import StandardScaler
from .vfl_base import LogisticRegression_Guest_Plaintext,\
LogisticRegression_Guest_CKKS
from .vfl_coordinator import CKKS
class LogisticRegressionGuest(BaseModel):
......
......@@ -3,6 +3,7 @@ from primihub.FL.utils.base import BaseModel
from primihub.FL.utils.file import check_directory_exist
from primihub.FL.utils.dataset import read_data, DataLoader
from primihub.utils.logger_util import logger
from primihub.FL.crypto.ckks import CKKS
import pickle
import json
......@@ -15,7 +16,6 @@ from sklearn.preprocessing import StandardScaler
from .vfl_base import LogisticRegression_Host_Plaintext,\
LogisticRegression_Host_CKKS
from .vfl_coordinator import CKKS
class LogisticRegressionHost(BaseModel):
......@@ -360,7 +360,7 @@ class CKKS_Host(Plaintext_Host, CKKS):
regular_loss = self.compute_enc_regular_loss()
loss = self.model.loss(y, z, regular_loss)
self.coordinator_channel.send('loss', loss.serialize())
logger.info('View metrics at coordinator while using Paillier')
logger.info('View metrics at coordinator while using CKKS')
def compute_final_metrics(self, x, y):
return super().compute_metrics(x, y)
\ No newline at end of file
......@@ -12,6 +12,15 @@
"host": "primihub.FL.logistic_regression.vfl_host.LogisticRegressionHost",
"coordinator": "primihub.FL.logistic_regression.vfl_coordinator.LogisticRegressionCoordinator"
},
"HFL_linear_regression": {
"client": "primihub.FL.linear_regression.hfl_client.LinearRegressionClient",
"server": "primihub.FL.linear_regression.hfl_server.LinearRegressionServer"
},
"VFL_linear_regression": {
"guest": "primihub.FL.linear_regression.vfl_guest.LinearRegressionGuest",
"host": "primihub.FL.linear_regression.vfl_host.LinearRegressionHost",
"coordinator": "primihub.FL.linear_regression.vfl_coordinator.LinearRegressionCoordinator"
},
"HFL_neural_network": {
"client": "primihub.FL.neural_network.hfl_client.NeuralNetworkClient",
"server": "primihub.FL.neural_network.hfl_server.NeuralNetworkServer"
......