...
 
Commits (9)

* update docker local build (#530), fuxingbit <zjning95@126.com>, 2023-06-26T13:58:32+08:00
  https://gitcode.net/primihub/primihub/-/commit/1f5fd4059057ffa8f3cd6ee0207438f631928b23
  - update docker local build
* enable kill task from client (#533), phoenix20162016 <cuibo20062006@163.com>, 2023-06-26T14:39:28+08:00
  https://gitcode.net/primihub/primihub/-/commit/24dd6c714e8ca39624fa5ccc09ce886913bbf6b0
* update data loader & throw exception after error (#534), Xuefeng Xu <xuxf100@qq.com>, 2023-06-27T18:45:32+08:00
  https://gitcode.net/primihub/primihub/-/commit/c85e5e9fc610a39d93ec1473bc704938e45c1635
  - update data loader
  - throw exception after error
  - add error message
* fix check meta service start success or not (#536), phoenix20162016 <cuibo20062006@163.com>, 2023-06-28T18:07:59+08:00
  https://gitcode.net/primihub/primihub/-/commit/bab45fec8bd7ff77677651fe7b5ac05468bdd822
* compile cmake repo using multi cpu core (#539), phoenix20162016 <cuibo20062006@163.com>, 2023-06-29T10:53:47+08:00
  https://gitcode.net/primihub/primihub/-/commit/afbdeacdff871e848e84dd193be64ae99e68e413
* read data set by specified schema (#537), phoenix20162016 <cuibo20062006@163.com>, 2023-06-29T11:04:42+08:00
  https://gitcode.net/primihub/primihub/-/commit/278fc90a9bb027653d212de46b7367fbb73aa479
* refactor VFL logistic regression & support multiclass classification (#538), Xuefeng Xu <xuxf100@qq.com>, 2023-06-29T11:21:54+08:00
  https://gitcode.net/primihub/primihub/-/commit/7f86988be5f0ff7e722508f6cf2a7a8b2d1f1b39
  - update dataset for FL
  - update VFL logistic regression
  - update vfl json
  - update epoch
  - update data loader
  - throw exception after error
* fork does not sync to gitee (#540), fuxingbit <zjning95@126.com>, 2023-06-29T15:07:56+08:00
  https://gitcode.net/primihub/primihub/-/commit/a1ab00d7b38ab8f5efbc860a9ebde7b6d9dd6506
* PIR read data using dataset driver (#541), phoenix20162016 <cuibo20062006@163.com>, 2023-06-29T16:17:49+08:00
  https://gitcode.net/primihub/primihub/-/commit/2f3bfa80fd9324f76e023d199ce484a3b416b2ec
...
@@ -9,6 +9,8 @@ jobs:
   sync:
     runs-on: ubuntu-latest
+    if: github.event.repository.fork == 'false'
     steps:
       - name: Sync to Gitee
...
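Note: the new `if:` guard (commit a1ab00d7, "fork does not sync to gitee") conditions the whole sync job, so forks skip the Gitee mirror push; as written it compares the flag against the string 'false'. A hedged way to inspect the flag that `github.event.repository.fork` carries, via the public GitHub API (the jq filter is tooling of our own, not part of the change):

    # Print the repository's fork flag; expect "false" for the upstream repo.
    curl -s https://api.github.com/repos/primihub/primihub | jq '.fork'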
...
@@ -38,7 +38,15 @@ RUN bash pre_build.sh \
     //:cli \
     //src/primihub/pybind_warpper:opt_paillier_c2py \
     //src/primihub/pybind_warpper::linkcontext \
-  && tar zcf /opt/bazel-bin.tar.gz --exclude=*_objs ./bazel-bin/*
+  && tar zcf bazel-bin.tar.gz bazel-bin/cli \
+    bazel-bin/node \
+    bazel-bin/py_main \
+    bazel-bin/src/primihub/pybind_warpper/opt_paillier_c2py.so \
+    bazel-bin/src/primihub/pybind_warpper/linkcontext.so \
+    python \
+    config \
+    example \
+    data
 FROM ubuntu:20.04 as runner
@@ -50,23 +58,15 @@ RUN apt-get update \
   && rm -rf /var/lib/apt/lists/*
 COPY --from=builder /opt/bazel-bin.tar.gz /opt/bazel-bin.tar.gz
-# Copy test data files to /tmp/
-COPY --from=builder /src/data /app/data
-# Copy all test config files to /app/config
-COPY --from=builder /src/config /app/config
-# Copy primihub python sources to /app and setup to system python3
-COPY --from=builder /src/python /app/python
 COPY --from=builder /src/src/primihub/protos/ /app/src/primihub/protos/
 WORKDIR /app
-RUN tar zxf /opt/bazel-bin.tar.gz \
-  && mkdir log
 # Copy opt_paillier_c2py.so linkcontext.so to /app/python, this enable setup.py find it.
-RUN mv $TARGET_PATH/src/primihub/pybind_warpper/opt_paillier_c2py.so /app/python/ \
-  && mv $TARGET_PATH/src/primihub/pybind_warpper/linkcontext.so /app/python/ \
-  && ln -s bazel-bin/node primihub-node && ln -s bazel-bin/cli primihub-cli
+RUN tar zxf /opt/bazel-bin.tar.gz \
+  && mkdir log \
+  && ln -s bazel-bin/node primihub-node \
+  && ln -s bazel-bin/cli primihub-cli
 WORKDIR /app/python
...
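The builder stage now packs only the needed bazel outputs plus the python, config, example and data trees into one tarball, instead of tarring all of bazel-bin with an exclude pattern. A minimal sketch of the equivalent shell step, assuming it runs from a source checkout where bazel has already built the targets:

    # Pack exactly what the runner stage needs; everything else in bazel-bin is skipped.
    tar zcf bazel-bin.tar.gz \
        bazel-bin/cli bazel-bin/node bazel-bin/py_main \
        bazel-bin/src/primihub/pybind_warpper/opt_paillier_c2py.so \
        bazel-bin/src/primihub/pybind_warpper/linkcontext.so \
        python config example data
    tar tzf bazel-bin.tar.gz | head   # verify the archive contents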
...
@@ -10,27 +10,16 @@ RUN apt-get update \
   && ln -fs /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
   && rm -rf /var/lib/apt/lists/*
-ARG TARGET_PATH=/root/.cache/bazel/_bazel_root/17a1cd4fb136f9bc7469e0db6305b35a/execroot/__main__/bazel-out/k8-fastbuild/bin
-WORKDIR $TARGET_PATH
-# Copy binaries to TARGET_PATH
-COPY . ./
 # Change WorkDir to /app
 WORKDIR /app
+ADD bazel-bin.tar.gz ./
+COPY src/primihub/protos/ src/primihub/protos/
 # Make symlink to primihub-node & primihub-cli
 RUN mkdir log \
-  && ln -s $TARGET_PATH/node /app/primihub-node && ln -s $TARGET_PATH/cli /app/primihub-cli
-COPY ./example ./example
-# Copy all test config files to /app
-COPY ./data ./data
-COPY ./config ./config
-# Copy primihub python sources to /app and setup to system python3
-COPY ./python ./python
-COPY ./src/primihub/protos/ ./src/primihub/protos/
-RUN cp $TARGET_PATH/src/primihub/pybind_warpper/opt_paillier_c2py.so /app/python/
-RUN cp $TARGET_PATH/src/primihub/pybind_warpper/linkcontext.so /app/python/
+  && ln -s bazel-bin/node primihub-node \
+  && ln -s bazel-bin/cli primihub-cli
 WORKDIR /app/python
 RUN python3 -m pip install --upgrade pip \
   && python3 -m pip install -r requirements.txt -i https://pypi.douban.com/simple/ \
...
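`ADD bazel-bin.tar.gz ./` relies on Docker auto-extracting local tar archives, which is why the hardcoded bazel cache path, the staging COPY steps and the explicit `tar zxf` all disappear: the image gets bazel-bin/, python/, config/, example/ and data/ in one instruction. A hedged check of the resulting layout (the image tag is hypothetical):

    # List /app inside a built image; expect bazel-bin, python, config, example, data, log.
    docker run --rm primihub/primihub:local ls /app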
...
@@ -11,19 +11,13 @@ RUN apt-get update \
 WORKDIR /app
 COPY primihub-linux-amd64.tar.gz primihub-linux-arm64.tar.gz /opt/
-COPY example ./example
-COPY data/ ./data
-COPY config/ ./config
-COPY python/ ./python
 COPY src/primihub/protos/ src/primihub/protos/
 RUN ARCH=`arch | sed s/aarch64/arm64/ | sed s/x86_64/amd64/` \
   && tar zxf /opt/primihub-linux-${ARCH}.tar.gz \
   && mkdir log \
-  && mv bazel-bin/src/primihub/pybind_warpper/opt_paillier_c2py.so python/ \
-  && mv bazel-bin/src/primihub/pybind_warpper/linkcontext.so python/ \
-  && ln -s bazel-bin/node primihub-node && ln -s bazel-bin/cli primihub-cli
+  && ln -s bazel-bin/node primihub-node \
+  && ln -s bazel-bin/cli primihub-cli
 WORKDIR /app/python
...
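The arch mapping picks the right release tarball, so one Dockerfile serves both amd64 and arm64 images. The detection line, sketched standalone:

    # Map uname-style machine names to the naming convention used by the tarballs.
    ARCH=$(arch | sed s/aarch64/arm64/ | sed s/x86_64/amd64/)
    echo "primihub-linux-${ARCH}.tar.gz"   # primihub-linux-amd64.tar.gz on x86_64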
...
@@ -423,11 +423,14 @@ new_git_repository(
 )
 http_archive(
     name = "com_github_gmp",
     build_file = "//bazel:gmp.BUILD",
     #sha256 = "87b565e89a9a684fe4ebeeddb8399dce2599f9c9049854ca8c0dfbdea0e21912",
     strip_prefix = "gmp-6.2.1",
-    urls = ["https://gmplib.org/download/gmp/gmp-6.2.1.tar.xz"],
+    urls = [
+        "https://primihub.oss-cn-beijing.aliyuncs.com/tools/gmp-6.2.1.tar.xz",
+        "https://gmplib.org/download/gmp/gmp-6.2.1.tar.xz"
+    ],
 )
 # Google dense_hash_set
...
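Bazel's http_archive tries the urls list in order, so the Aliyun OSS mirror is preferred and gmplib.org stays as a fallback; with sha256 still commented out the archive is not checksum-pinned. A hedged availability check for the two sources:

    # Both URLs should serve the same gmp-6.2.1.tar.xz; bazel downloads the first that responds.
    curl -sI https://primihub.oss-cn-beijing.aliyuncs.com/tools/gmp-6.2.1.tar.xz | head -n1
    curl -sI https://gmplib.org/download/gmp/gmp-6.2.1.tar.xz | head -n1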
...
@@ -8,13 +8,16 @@ filegroup(
 )
 cmake(
     name = "eigen",
     cache_entries = {
         "CMAKE_C_FLAGS": "-fPIC",
     },
+    build_args = [
+        "-j4",
+    ],
     lib_source = ":src",
     out_headers_only = True,
     includes = ["eigen3/",],
     install = True,
     visibility = ["//visibility:public"],
-)
\ No newline at end of file
+)
...
@@ -9,13 +9,16 @@ filegroup(
 )
 cmake(
     name = "kuku",
     cache_entries = {
         "CMAKE_INSTALL_LIBDIR": "lib",
     },
+    build_args = [
+        "-j4",
+    ],
     lib_source = ":src",
     out_include_dir = "include/Kuku-2.1",
     out_static_libs = ["libkuku-2.1.a"],
     visibility = ["//visibility:public"],
     tags = ["requires-network"],
 )
...
@@ -7,21 +7,24 @@ filegroup(
 )
 cmake(
     name = "leveldb",
     visibility = ["//visibility:public"],
     cache_entries = {
         "CMAKE_BUILD_TYPE": "Release",
         "BUILD_SHARED_LIBS": "OFF",
         # Turning off building tests and benchmarks as those would
         # requires first pulling down those git submodules (which
         # would also require using 'git_repository' instead of
         # 'http_archive'.
         "LEVELDB_BUILD_TESTS": "OFF",
         "LEVELDB_BUILD_BENCHMARKS": "OFF",
         "CMAKE_INSTALL_LIBDIR": "lib",
     },
+    build_args = [
+        "-j4",
+    ],
     lib_source = ":all",
     out_static_libs = ["libleveldb.a"],
 )
 #cc_library(
...
...
@@ -11,6 +11,9 @@ cmake(
     cache_entries = {
         "CMAKE_INSTALL_LIBDIR": "lib",
     },
+    build_args = [
+        "-j4",
+    ],
     linkopts = [
         "-lpthread",
     ],
...
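All four cmake() targets gain build_args = ["-j4"], which rules_foreign_cc forwards to the underlying build invocation (commit afbdeacd, "compile cmake repo using multi cpu core"). Roughly the effect, sketched as a plain CMake session; the exact forwarded command line is an assumption about rules_foreign_cc internals:

    # Configure, then build with 4 parallel jobs instead of the default single job.
    cmake -S . -B build -DCMAKE_INSTALL_LIBDIR=lib
    cmake --build build -j4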
...
@@ -17,7 +17,6 @@ bash pre_build.sh
 ARCH=`arch`
 bazel build --config=linux_$ARCH --define enable_mysql_driver=true //:node \
     //:py_main \
     //:cli \
...
@@ -29,30 +28,18 @@ if [ $? -ne 0 ]; then
   exit
 fi
-BASE_DIR=`ls -l | grep bazel-bin | awk '{print $11}'`
-if [ ! -d "$BASE_DIR" ]; then
-  echo "BASE_DIR IS NULL"
-  exit
-fi
-key_word="ARG TARGET_PATH="
-row_num=$(sed -n "/${key_word}/=" Dockerfile.local)
-sed -i "${row_num}s#.*#ARG TARGET_PATH="${BASE_DIR}"#" Dockerfile.local
-rm -rf $BASE_DIR/python $BASE_DIR/config $BASE_DIR/example
-rm -f $BASE_DIR/Dockerfile.local
-rm -f $BASE_DIR/.dockerignore
-rm -rf $BASE_DIR/data
-cp -r ./example $BASE_DIR/
-cp -r ./data $BASE_DIR/
-cp -r ./python $BASE_DIR/
-cp -r ./config $BASE_DIR/
-cp ./Dockerfile.local $BASE_DIR/
-cp -r ./src $BASE_DIR/
-cd $BASE_DIR
-find ./ -name "_objs" > .dockerignore
+git rev-parse --abbrev-ref HEAD >> commit.txt
+git rev-parse HEAD >> commit.txt
+tar zcf bazel-bin.tar.gz bazel-bin/cli \
+  bazel-bin/node \
+  bazel-bin/py_main \
+  bazel-bin/src/primihub/pybind_warpper/opt_paillier_c2py.so \
+  bazel-bin/src/primihub/pybind_warpper/linkcontext.so \
+  python \
+  config \
+  example \
+  data \
+  commit.txt
 docker build -t $IMAGE_NAME:$TAG . -f Dockerfile.local
\ No newline at end of file
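The local build script no longer locates bazel's cache directory, rewrites TARGET_PATH inside Dockerfile.local, or stages sources into the output tree; it records the branch and commit into commit.txt, packs one tarball, and lets docker build consume it. A hedged invocation, assuming the script name and that IMAGE_NAME and TAG come from the environment (both names are assumptions, not confirmed by the diff):

    # Hypothetical run of the simplified local build flow.
    IMAGE_NAME=primihub/primihub TAG=local bash build_local.sh
    cat commit.txt   # branch name on the first line, commit hash on the second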
...
@@ -52,8 +52,8 @@ datasets:
     model: "csv"
     source: "data/falcon/dataset/MNIST/input_1"
-  # FL fake dataset for hfl server
-  - description: "hfl_server_fake_data"
+  # FL fake dataset
+  - description: "fl_fake_data"
     model: "csv"
     source: "data/FL/fake.csv"
...
@@ -75,6 +75,12 @@ datasets:
     source: "data/server_e.db3"
   # FL binclass
+  - description: "binclass_vfl_train_host"
+    model: "csv"
+    source: "data/FL/binclass/vfl/train/host.csv"
+  - description: "binclass_vfl_test_host"
+    model: "csv"
+    source: "data/FL/binclass/vfl/test/host.csv"
   - description: "binclass_hfl_train_client1"
     model: "csv"
     source: "data/FL/binclass/hfl/train/client1.csv"
...
@@ -83,6 +89,12 @@ datasets:
     source: "data/FL/binclass/hfl/test/client1.csv"
   # FL multiclass
+  - description: "multiclass_vfl_train_host"
+    model: "csv"
+    source: "data/FL/multiclass/vfl/train/host.csv"
+  - description: "multiclass_vfl_test_host"
+    model: "csv"
+    source: "data/FL/multiclass/vfl/test/host.csv"
   - description: "multiclass_hfl_train_client1"
     model: "csv"
     source: "data/FL/multiclass/hfl/train/client1.csv"
...
...
@@ -54,6 +54,12 @@ datasets:
     source: "data/falcon/dataset/MNIST/input_0"
   # FL binclass
+  - description: "binclass_vfl_train_guest"
+    model: "csv"
+    source: "data/FL/binclass/vfl/train/guest.csv"
+  - description: "binclass_vfl_test_guest"
+    model: "csv"
+    source: "data/FL/binclass/vfl/test/guest.csv"
   - description: "binclass_hfl_train_client2"
     model: "csv"
     source: "data/FL/binclass/hfl/train/client2.csv"
...
@@ -62,6 +68,12 @@ datasets:
     source: "data/FL/binclass/hfl/test/client2.csv"
   # FL multiclass
+  - description: "multiclass_vfl_train_guest"
+    model: "csv"
+    source: "data/FL/multiclass/vfl/train/guest.csv"
+  - description: "multiclass_vfl_test_guest"
+    model: "csv"
+    source: "data/FL/multiclass/vfl/test/guest.csv"
   - description: "multiclass_hfl_train_client2"
     model: "csv"
     source: "data/FL/multiclass/hfl/train/client2.csv"
...
...
@@ -34,8 +34,8 @@ datasets:
     model: "csv"
     source: "data/falcon/dataset/MNIST/input_1"
-  # FL fake dataset for hfl server
-  - description: "hfl_server_fake_data"
+  # FL fake dataset
+  - description: "fl_fake_data"
     model: "csv"
     source: "data/FL/fake.csv"
...
@@ -55,6 +55,12 @@ datasets:
     source: "data/server_e.db3"
   # FL binclass
+  - description: "binclass_vfl_train_host"
+    model: "csv"
+    source: "data/FL/binclass/vfl/train/host.csv"
+  - description: "binclass_vfl_test_host"
+    model: "csv"
+    source: "data/FL/binclass/vfl/test/host.csv"
   - description: "binclass_hfl_train_client1"
     model: "csv"
     source: "data/FL/binclass/hfl/train/client1.csv"
...
@@ -63,6 +69,12 @@ datasets:
     source: "data/FL/binclass/hfl/test/client1.csv"
   # FL multiclass
+  - description: "multiclass_vfl_train_host"
+    model: "csv"
+    source: "data/FL/multiclass/vfl/train/host.csv"
+  - description: "multiclass_vfl_test_host"
+    model: "csv"
+    source: "data/FL/multiclass/vfl/test/host.csv"
   - description: "multiclass_hfl_train_client1"
     model: "csv"
     source: "data/FL/multiclass/hfl/train/client1.csv"
...
...
@@ -35,6 +35,12 @@ datasets:
     source: "data/falcon/dataset/MNIST/input_0"
   # FL binclass
+  - description: "binclass_vfl_train_guest"
+    model: "csv"
+    source: "data/FL/binclass/vfl/train/guest.csv"
+  - description: "binclass_vfl_test_guest"
+    model: "csv"
+    source: "data/FL/binclass/vfl/test/guest.csv"
   - description: "binclass_hfl_train_client2"
     model: "csv"
     source: "data/FL/binclass/hfl/train/client2.csv"
...
@@ -43,6 +49,12 @@ datasets:
     source: "data/FL/binclass/hfl/test/client2.csv"
   # FL multiclass
+  - description: "multiclass_vfl_train_guest"
+    model: "csv"
+    source: "data/FL/multiclass/vfl/train/guest.csv"
+  - description: "multiclass_vfl_test_guest"
+    model: "csv"
+    source: "data/FL/multiclass/vfl/test/guest.csv"
   - description: "multiclass_hfl_train_client2"
     model: "csv"
     source: "data/FL/multiclass/hfl/train/client2.csv"
...
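Each node config registers the new VFL train/test splits under a description key that tasks use to look the dataset up; host-side files go into one node's config and guest-side files into another's. A hedged sanity check that every newly registered path exists, relative to the node working directory:

    # Verify the eight new VFL sources referenced by the configs above.
    for f in data/FL/{binclass,multiclass}/vfl/{train,test}/{host,guest}.csv; do
      [ -f "$f" ] && echo "ok       $f" || echo "missing  $f"
    done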
id,mean_radius,mean_texture,mean_perimeter,mean_area,mean_smoothness,mean_compactness,mean_concavity,mean_concave_points,mean_symmetry,mean_fractal_dimension,radius_error,texture_error,perimeter_error,area_error
231,11.32,27.08,71.76,395.7,0.06883,0.03813,0.01633,0.003125,0.1869,0.05628,0.121,0.8927,1.059,8.605
110,9.777,16.99,62.5,290.2,0.1037,0.08404,0.04334,0.01778,0.1584,0.07065,0.403,1.424,2.747,22.87
327,12.03,17.93,76.09,446.0,0.07683,0.03892,0.001546,0.005592,0.1382,0.0607,0.2335,0.9097,1.466,16.97
374,13.69,16.07,87.84,579.1,0.08302,0.06374,0.02556,0.02031,0.1872,0.05669,0.1705,0.5066,1.372,14.0
511,14.81,14.7,94.66,680.7,0.08472,0.05016,0.03416,0.02541,0.1659,0.05348,0.2182,0.6232,1.677,20.72
259,15.53,33.56,103.7,744.9,0.1063,0.1639,0.1751,0.08399,0.2091,0.0665,0.2419,1.278,1.903,23.02
514,15.05,19.07,97.26,701.9,0.09215,0.08597,0.07486,0.04335,0.1561,0.05915,0.386,1.198,2.63,38.49
201,17.54,19.32,115.1,951.6,0.08968,0.1198,0.1036,0.07488,0.1506,0.05491,0.3971,0.8282,3.088,40.73
528,13.94,13.17,90.31,594.2,0.1248,0.09755,0.101,0.06615,0.1976,0.06457,0.5461,2.635,4.091,44.74
390,10.26,12.22,65.75,321.6,0.09996,0.07542,0.01923,0.01968,0.18,0.06569,0.1911,0.5477,1.348,11.88
28,15.3,25.27,102.4,732.4,0.1082,0.1697,0.1683,0.08751,0.1926,0.0654,0.439,1.012,3.498,43.5
346,12.06,18.9,76.66,445.3,0.08386,0.05794,0.00751,0.008488,0.1555,0.06048,0.243,1.152,1.559,18.02
206,9.876,17.27,62.92,295.4,0.1089,0.07232,0.01756,0.01952,0.1934,0.06285,0.2137,1.342,1.517,12.33
428,11.13,16.62,70.47,381.1,0.08151,0.03834,0.01369,0.0137,0.1511,0.06148,0.1415,0.9671,0.968,9.704
277,18.81,19.98,120.9,1102.0,0.08923,0.05884,0.0802,0.05843,0.155,0.04996,0.3283,0.828,2.363,36.74
224,13.27,17.02,84.55,546.4,0.08445,0.04994,0.03554,0.02456,0.1496,0.05674,0.2927,0.8907,2.044,24.68
443,10.57,18.32,66.82,340.9,0.08142,0.04462,0.01993,0.01111,0.2372,0.05768,0.1818,2.542,1.277,13.12
11,15.78,17.89,103.6,781.0,0.0971,0.1292,0.09954,0.06606,0.1842,0.06082,0.5058,0.9849,3.564,54.16
56,19.21,18.57,125.5,1152.0,0.1053,0.1267,0.1323,0.08994,0.1917,0.05961,0.7275,1.193,4.837,102.5
497,12.47,17.31,80.45,480.1,0.08928,0.0763,0.03609,0.02369,0.1526,0.06046,0.1532,0.781,1.253,11.91
345,10.26,14.71,66.2,321.6,0.09882,0.09159,0.03581,0.02037,0.1633,0.07005,0.338,2.509,2.394,19.33
4,20.29,14.34,135.1,1297.0,0.1003,0.1328,0.198,0.1043,0.1809,0.05883,0.7572,0.7813,5.438,94.44
99,14.42,19.77,94.48,642.5,0.09752,0.1141,0.09388,0.05839,0.1879,0.0639,0.2895,1.851,2.376,26.85
86,14.48,21.46,94.25,648.2,0.09444,0.09947,0.1204,0.04938,0.2075,0.05636,0.4204,2.22,3.301,38.87
122,24.25,20.2,166.2,1761.0,0.1447,0.2867,0.4268,0.2012,0.2655,0.06877,1.509,3.12,9.807,233.0
145,11.9,14.65,78.11,432.8,0.1152,0.1296,0.0371,0.03003,0.1995,0.07839,0.3962,0.6538,3.021,25.03
401,11.93,10.91,76.14,442.7,0.08872,0.05242,0.02606,0.01796,0.1601,0.05541,0.2522,1.045,1.649,18.95
409,12.27,17.92,78.41,466.1,0.08685,0.06526,0.03211,0.02653,0.1966,0.05597,0.3342,1.781,2.079,25.79
338,10.05,17.53,64.41,310.8,0.1007,0.07326,0.02511,0.01775,0.189,0.06331,0.2619,2.015,1.778,16.85
15,14.54,27.54,96.73,658.8,0.1139,0.1595,0.1639,0.07364,0.2303,0.07077,0.37,1.033,2.879,32.55
71,8.888,14.64,58.79,244.0,0.09783,0.1531,0.08606,0.02872,0.1902,0.0898,0.5262,0.8522,3.168,25.44
119,17.95,20.01,114.2,982.0,0.08402,0.06722,0.07293,0.05596,0.2129,0.05025,0.5506,1.214,3.357,54.04
458,13.0,25.13,82.61,520.2,0.08369,0.05073,0.01206,0.01762,0.1667,0.05449,0.2621,1.232,1.657,21.19
51,13.64,16.34,87.21,571.8,0.07685,0.06059,0.01857,0.01723,0.1353,0.05953,0.1872,0.9234,1.449,14.55
257,15.32,17.27,103.2,713.3,0.1335,0.2284,0.2448,0.1242,0.2398,0.07596,0.6592,1.059,4.061,59.46
378,13.66,15.15,88.27,580.6,0.08268,0.07548,0.04249,0.02471,0.1792,0.05897,0.1402,0.5417,1.101,11.35
63,9.173,13.86,59.2,260.9,0.07721,0.08751,0.05988,0.0218,0.2341,0.06963,0.4098,2.265,2.608,23.52
475,12.83,15.73,82.89,506.9,0.0904,0.08269,0.05835,0.03078,0.1705,0.05913,0.1499,0.4875,1.195,11.64
407,12.85,21.37,82.63,514.5,0.07551,0.08316,0.06126,0.01867,0.158,0.06114,0.4993,1.798,2.552,41.24
220,13.65,13.16,87.88,568.9,0.09646,0.08711,0.03888,0.02563,0.136,0.06344,0.2102,0.4336,1.391,17.4
413,14.99,22.11,97.53,693.7,0.08515,0.1025,0.06859,0.03876,0.1944,0.05913,0.3186,1.336,2.31,28.51
424,9.742,19.12,61.93,289.7,0.1075,0.08333,0.008934,0.01967,0.2538,0.07029,0.6965,1.747,4.607,43.52
441,17.27,25.42,112.4,928.8,0.08331,0.1109,0.1204,0.05736,0.1467,0.05407,0.51,1.679,3.283,58.38
18,19.81,22.15,130.0,1260.0,0.09831,0.1027,0.1479,0.09498,0.1582,0.05395,0.7582,1.017,5.865,112.4
315,12.49,16.85,79.19,481.6,0.08511,0.03834,0.004473,0.006423,0.1215,0.05673,0.1716,0.7151,1.047,12.69
225,14.34,13.47,92.51,641.2,0.09906,0.07624,0.05724,0.04603,0.2075,0.05448,0.522,0.8121,3.763,48.29
470,9.667,18.49,61.49,289.1,0.08946,0.06258,0.02948,0.01514,0.2238,0.06413,0.3776,1.35,2.569,22.73
451,19.59,25.0,127.7,1191.0,0.1032,0.09871,0.1655,0.09063,0.1663,0.05391,0.4674,1.375,2.916,56.18
152,9.731,15.34,63.78,300.2,0.1072,0.1599,0.4108,0.07857,0.2548,0.09296,0.8245,2.664,4.073,49.85
222,10.18,17.53,65.12,313.1,0.1061,0.08502,0.01768,0.01915,0.191,0.06908,0.2467,1.217,1.641,15.05
487,19.44,18.82,128.1,1167.0,0.1089,0.1448,0.2256,0.1194,0.1823,0.06115,0.5659,1.408,3.631,67.74
364,13.4,16.95,85.48,552.4,0.07937,0.05696,0.02181,0.01473,0.165,0.05701,0.1584,0.6124,1.036,13.22
564,21.56,22.39,142.0,1479.0,0.111,0.1159,0.2439,0.1389,0.1726,0.05623,1.176,1.256,7.673,158.7
193,12.34,26.86,81.15,477.4,0.1034,0.1353,0.1085,0.04562,0.1943,0.06937,0.4053,1.809,2.642,34.44
376,10.57,20.22,70.15,338.3,0.09073,0.166,0.228,0.05941,0.2188,0.0845,0.1115,1.231,2.363,7.228
174,10.66,15.15,67.49,349.6,0.08792,0.04302,0.0,0.0,0.1928,0.05975,0.3309,1.925,2.155,21.98
566,16.6,28.08,108.3,858.1,0.08455,0.1023,0.09251,0.05302,0.159,0.05648,0.4564,1.075,3.425,48.55
301,12.46,19.89,80.43,471.3,0.08451,0.1014,0.0683,0.03099,0.1781,0.06249,0.3642,1.04,2.579,28.32
233,20.51,27.81,134.4,1319.0,0.09159,0.1074,0.1554,0.0834,0.1448,0.05592,0.524,1.189,3.767,70.01
506,12.22,20.04,79.47,453.1,0.1096,0.1152,0.08175,0.02166,0.2124,0.06894,0.1811,0.7959,0.9857,12.58
478,11.49,14.59,73.99,404.9,0.1046,0.08228,0.05308,0.01969,0.1779,0.06574,0.2034,1.166,1.567,14.34
512,13.4,20.52,88.64,556.7,0.1106,0.1469,0.1445,0.08172,0.2116,0.07325,0.3906,0.9306,3.093,33.67
466,13.14,20.74,85.98,536.9,0.08675,0.1089,0.1085,0.0351,0.1562,0.0602,0.3152,0.7884,2.312,27.4
462,14.4,26.99,92.25,646.1,0.06995,0.05223,0.03476,0.01737,0.1707,0.05433,0.2315,0.9112,1.727,20.52
557,9.423,27.88,59.26,271.3,0.08123,0.04971,0.0,0.0,0.1742,0.06059,0.5375,2.927,3.618,29.11
192,9.72,18.22,60.73,288.1,0.0695,0.02344,0.0,0.0,0.1653,0.06447,0.3539,4.885,2.23,21.69
489,16.69,20.2,107.1,857.6,0.07497,0.07112,0.03649,0.02307,0.1846,0.05325,0.2473,0.5679,1.775,22.95
555,10.29,27.61,65.67,321.4,0.0903,0.07658,0.05999,0.02738,0.1593,0.06127,0.2199,2.239,1.437,14.46
249,11.52,14.93,73.87,406.3,0.1013,0.07808,0.04328,0.02929,0.1883,0.06168,0.2562,1.038,1.686,18.62
493,12.46,12.83,78.83,477.3,0.07372,0.04043,0.007173,0.01149,0.1613,0.06013,0.3276,1.486,2.108,24.6
425,10.03,21.28,63.19,307.3,0.08117,0.03912,0.00247,0.005159,0.163,0.06439,0.1851,1.341,1.184,11.6
385,14.6,23.29,93.97,664.7,0.08682,0.06636,0.0839,0.05271,0.1627,0.05416,0.4157,1.627,2.914,33.01
482,13.47,14.06,87.32,546.3,0.1071,0.1155,0.05786,0.05266,0.1779,0.06639,0.1588,0.5733,1.102,12.84
532,13.68,16.33,87.76,575.5,0.09277,0.07255,0.01752,0.0188,0.1631,0.06155,0.2047,0.4801,1.373,17.25
1,20.57,17.77,132.9,1326.0,0.08474,0.07864,0.0869,0.07017,0.1812,0.05667,0.5435,0.7339,3.398,74.08
286,11.94,20.76,77.87,441.0,0.08605,0.1011,0.06574,0.03791,0.1588,0.06766,0.2742,1.39,3.198,21.91
329,16.26,21.88,107.5,826.8,0.1165,0.1283,0.1799,0.07981,0.1869,0.06532,0.5706,1.457,2.961,57.72
70,18.94,21.31,123.6,1130.0,0.09009,0.1029,0.108,0.07951,0.1582,0.05461,0.7888,0.7975,5.486,96.05
6,18.25,19.98,119.6,1040.0,0.09463,0.109,0.1127,0.074,0.1794,0.05742,0.4467,0.7732,3.18,53.91
102,12.18,20.52,77.22,458.7,0.08013,0.04038,0.02383,0.0177,0.1739,0.05677,0.1924,1.571,1.183,14.68
547,10.26,16.58,65.85,320.8,0.08877,0.08066,0.04358,0.02438,0.1669,0.06714,0.1144,1.023,0.9887,7.326
362,12.76,18.84,81.87,496.6,0.09676,0.07952,0.02688,0.01781,0.1759,0.06183,0.2213,1.285,1.535,17.26
278,13.59,17.84,86.24,572.3,0.07948,0.04052,0.01997,0.01238,0.1573,0.0552,0.258,1.166,1.683,22.22
195,12.91,16.33,82.53,516.4,0.07941,0.05366,0.03873,0.02377,0.1829,0.05667,0.1942,0.9086,1.493,15.75
47,13.17,18.66,85.98,534.6,0.1158,0.1231,0.1226,0.0734,0.2128,0.06777,0.2871,0.8937,1.897,24.25
29,17.57,15.05,115.0,955.1,0.09847,0.1157,0.09875,0.07953,0.1739,0.06149,0.6003,0.8225,4.655,61.1
65,14.78,23.94,97.4,668.3,0.1172,0.1479,0.1267,0.09029,0.1953,0.06654,0.3577,1.281,2.45,35.24
508,16.3,15.7,104.7,819.8,0.09427,0.06712,0.05526,0.04563,0.1711,0.05657,0.2067,0.4706,1.146,20.67
69,12.78,16.49,81.37,502.5,0.09831,0.05234,0.03653,0.02864,0.159,0.05653,0.2368,0.8732,1.471,18.33
498,18.49,17.52,121.3,1068.0,0.1012,0.1317,0.1491,0.09183,0.1832,0.06697,0.7923,1.045,4.851,95.77
556,10.16,19.59,64.73,311.7,0.1003,0.07504,0.005025,0.01116,0.1791,0.06331,0.2441,2.09,1.648,16.8
426,10.48,14.98,67.49,333.6,0.09816,0.1013,0.06335,0.02218,0.1925,0.06915,0.3276,1.127,2.564,20.77
412,9.397,21.68,59.75,268.8,0.07969,0.06053,0.03735,0.005128,0.1274,0.06724,0.1186,1.182,1.174,6.802
402,12.96,18.29,84.18,525.2,0.07351,0.07899,0.04057,0.01883,0.1874,0.05899,0.2357,1.299,2.397,20.21
507,11.06,17.12,71.25,366.5,0.1194,0.1071,0.04063,0.04268,0.1954,0.07976,0.1779,1.03,1.318,12.3
279,13.85,15.18,88.99,587.4,0.09516,0.07688,0.04479,0.03711,0.211,0.05853,0.2479,0.9195,1.83,19.41
330,16.03,15.51,105.8,793.2,0.09491,0.1371,0.1204,0.07041,0.1782,0.05976,0.3371,0.7476,2.629,33.27
545,13.62,23.23,87.19,573.2,0.09246,0.06747,0.02974,0.02443,0.1664,0.05801,0.346,1.336,2.066,31.24
232,11.22,33.81,70.79,386.8,0.0778,0.03574,0.004967,0.006434,0.1845,0.05828,0.2239,1.647,1.489,15.46
333,11.25,14.78,71.38,390.0,0.08306,0.04458,0.0009737,0.002941,0.1773,0.06081,0.2144,0.9961,1.529,15.07
290,14.41,19.73,96.03,651.0,0.08757,0.1676,0.1362,0.06602,0.1714,0.07192,0.8811,1.77,4.36,77.11
299,10.51,23.09,66.85,334.2,0.1015,0.06797,0.02495,0.01875,0.1695,0.06556,0.2868,1.143,2.289,20.56
87,19.02,24.59,122.0,1076.0,0.09029,0.1206,0.1468,0.08271,0.1953,0.05629,0.5495,0.6636,3.055,57.65
294,12.72,13.78,81.78,492.1,0.09667,0.08393,0.01288,0.01924,0.1638,0.061,0.1807,0.6931,1.34,13.38
477,13.9,16.62,88.97,599.4,0.06828,0.05319,0.02224,0.01339,0.1813,0.05536,0.1555,0.5762,1.392,14.03
27,18.61,20.25,122.1,1094.0,0.0944,0.1066,0.149,0.07731,0.1697,0.05699,0.8529,1.849,5.632,93.54
84,12.0,15.65,76.95,443.3,0.09723,0.07165,0.04151,0.01863,0.2079,0.05968,0.2271,1.255,1.441,16.16
234,9.567,15.91,60.21,279.6,0.08464,0.04087,0.01652,0.01667,0.1551,0.06403,0.2152,0.8301,1.215,12.64
368,21.71,17.25,140.9,1546.0,0.09384,0.08562,0.1168,0.08465,0.1717,0.05054,1.207,1.051,7.733,224.1
305,11.6,24.49,74.23,417.2,0.07474,0.05688,0.01974,0.01313,0.1935,0.05878,0.2512,1.786,1.961,18.21
5,12.45,15.7,82.57,477.1,0.1278,0.17,0.1578,0.08089,0.2087,0.07613,0.3345,0.8902,2.217,27.19
408,17.99,20.66,117.8,991.7,0.1036,0.1304,0.1201,0.08824,0.1992,0.06069,0.4537,0.8733,3.061,49.81
238,14.22,27.85,92.55,623.9,0.08223,0.1039,0.1103,0.04408,0.1342,0.06129,0.3354,2.324,2.105,29.96
242,11.3,18.19,73.93,389.4,0.09592,0.1325,0.1548,0.02854,0.2054,0.07669,0.2428,1.642,2.369,16.39
id,smoothness_error,compactness_error,concavity_error,concave_points_error,symmetry_error,fractal_dimension_error,worst_radius,worst_texture,worst_perimeter,worst_area,worst_smoothness,worst_compactness,worst_concavity,worst_concave_points,worst_symmetry,worst_fractal_dimension,y
231,0.003653,0.01647,0.01633,0.003125,0.01537,0.002052,12.08,33.75,79.82,452.3,0.09203,0.1432,0.1089,0.02083,0.2849,0.07087,1
110,0.01385,0.02932,0.02722,0.01023,0.03281,0.004638,11.05,21.47,71.68,367.0,0.1467,0.1765,0.13,0.05334,0.2533,0.08468,1
327,0.004729,0.006887,0.001184,0.003951,0.01466,0.001755,13.07,22.25,82.74,523.4,0.1013,0.0739,0.007732,0.02796,0.2171,0.07037,1
374,0.00423,0.01587,0.01169,0.006335,0.01943,0.002177,14.84,20.21,99.16,670.6,0.1105,0.2096,0.1346,0.06987,0.3323,0.07701,1
511,0.006708,0.01197,0.01482,0.01056,0.0158,0.001779,15.61,17.58,101.7,760.2,0.1139,0.1011,0.1101,0.07955,0.2334,0.06142,1
259,0.005345,0.02556,0.02889,0.01022,0.009947,0.003359,18.49,49.54,126.3,1035.0,0.1883,0.5564,0.5703,0.2014,0.3512,0.1204,0
514,0.004952,0.0163,0.02967,0.009423,0.01152,0.001718,17.58,28.06,113.8,967.0,0.1246,0.2101,0.2866,0.112,0.2282,0.06954,0
201,0.00609,0.02569,0.02713,0.01345,0.01594,0.002658,20.42,25.84,139.5,1239.0,0.1381,0.342,0.3508,0.1939,0.2928,0.07867,0
528,0.01004,0.03247,0.04763,0.02853,0.01715,0.005528,14.62,15.38,94.52,653.3,0.1394,0.1364,0.1559,0.1015,0.216,0.07253,1
390,0.005682,0.01365,0.008496,0.006929,0.01938,0.002371,11.38,15.65,73.23,394.5,0.1343,0.165,0.08615,0.06696,0.2937,0.07722,1
28,0.005233,0.03057,0.03576,0.01083,0.01768,0.002967,20.27,36.71,149.3,1269.0,0.1641,0.611,0.6335,0.2024,0.4027,0.09876,0
346,0.00718,0.01096,0.005832,0.005495,0.01982,0.002754,13.64,27.06,86.54,562.6,0.1289,0.1352,0.04506,0.05093,0.288,0.08083,1
206,0.009719,0.01249,0.007975,0.007527,0.0221,0.002472,10.42,23.22,67.08,331.6,0.1415,0.1247,0.06213,0.05588,0.2989,0.0738,1
428,0.005883,0.006263,0.009398,0.006189,0.02009,0.002377,11.68,20.29,74.35,421.1,0.103,0.06219,0.0458,0.04044,0.2383,0.07083,1
277,0.007571,0.01114,0.02623,0.01463,0.0193,0.001676,19.96,24.3,129.0,1236.0,0.1243,0.116,0.221,0.1294,0.2567,0.05737,0
224,0.006032,0.01104,0.02259,0.009057,0.01482,0.002496,15.14,23.6,98.84,708.8,0.1276,0.1311,0.1786,0.09678,0.2506,0.07623,1
443,0.01072,0.01331,0.01993,0.01111,0.01717,0.004492,10.94,23.31,69.35,366.3,0.09794,0.06542,0.03986,0.02222,0.2699,0.06736,1
11,0.005771,0.04061,0.02791,0.01282,0.02008,0.004144,20.42,27.28,136.5,1299.0,0.1396,0.5609,0.3965,0.181,0.3792,0.1048,0
56,0.006458,0.02306,0.02945,0.01538,0.01852,0.002608,26.14,28.14,170.1,2145.0,0.1624,0.3511,0.3879,0.2091,0.3537,0.08294,0
497,0.003796,0.01371,0.01346,0.007096,0.01536,0.001541,14.06,24.34,92.82,607.3,0.1276,0.2506,0.2028,0.1053,0.3035,0.07661,1
345,0.01736,0.04671,0.02611,0.01296,0.03675,0.006758,10.88,19.48,70.89,357.1,0.136,0.1636,0.07162,0.04074,0.2434,0.08488,1
4,0.01149,0.02461,0.05688,0.01885,0.01756,0.005115,22.54,16.67,152.2,1575.0,0.1374,0.205,0.4,0.1625,0.2364,0.07678,0
99,0.008005,0.02895,0.03321,0.01424,0.01462,0.004452,16.33,30.86,109.5,826.4,0.1431,0.3026,0.3194,0.1565,0.2718,0.09353,0
86,0.009369,0.02983,0.05371,0.01761,0.02418,0.003249,16.21,29.25,108.4,808.9,0.1306,0.1976,0.3349,0.1225,0.302,0.06846,0
122,0.02333,0.09806,0.1278,0.01822,0.04547,0.009875,26.02,23.99,180.9,2073.0,0.1696,0.4244,0.5803,0.2248,0.3222,0.08009,0
145,0.01017,0.04741,0.02789,0.0111,0.03127,0.009423,13.15,16.51,86.26,509.6,0.1424,0.2517,0.0942,0.06042,0.2727,0.1036,1
401,0.006175,0.01204,0.01376,0.005832,0.01096,0.001857,13.8,20.14,87.64,589.5,0.1374,0.1575,0.1514,0.06876,0.246,0.07262,1
409,0.005888,0.0231,0.02059,0.01075,0.02578,0.002267,14.1,28.88,89.0,610.2,0.124,0.1795,0.1377,0.09532,0.3455,0.06896,1
338,0.007803,0.01449,0.0169,0.008043,0.021,0.002778,11.16,26.84,71.98,384.0,0.1402,0.1402,0.1055,0.06499,0.2894,0.07664,1
15,0.005607,0.0424,0.04741,0.0109,0.01857,0.005466,17.46,37.13,124.1,943.2,0.1678,0.6577,0.7026,0.1712,0.4218,0.1341,0
71,0.01721,0.09368,0.05671,0.01766,0.02541,0.02193,9.733,15.67,62.56,284.4,0.1207,0.2436,0.1434,0.04786,0.2254,0.1084,1
119,0.004024,0.008422,0.02291,0.009863,0.05014,0.001902,20.58,27.83,129.2,1261.0,0.1072,0.1202,0.2249,0.1185,0.4882,0.06111,0
458,0.006054,0.008974,0.005681,0.006336,0.01215,0.001514,14.34,31.88,91.06,628.5,0.1218,0.1093,0.04462,0.05921,0.2306,0.06291,1
51,0.004477,0.01177,0.01079,0.007956,0.01325,0.002551,14.67,23.19,96.08,656.7,0.1089,0.1582,0.105,0.08586,0.2346,0.08025,1
257,0.01015,0.04588,0.04983,0.02127,0.01884,0.00866,17.73,22.66,119.8,928.8,0.1765,0.4503,0.4429,0.2229,0.3258,0.1191,0
378,0.005212,0.02984,0.02443,0.008356,0.01818,0.004868,14.54,19.64,97.96,657.0,0.1275,0.3104,0.2569,0.1054,0.3387,0.09638,1
63,0.008738,0.03938,0.04312,0.0156,0.04192,0.005822,10.01,19.23,65.59,310.1,0.09836,0.1678,0.1397,0.05087,0.3282,0.0849,1
475,0.004873,0.01796,0.03318,0.00836,0.01601,0.002289,14.09,19.35,93.22,605.8,0.1326,0.261,0.3476,0.09783,0.3006,0.07802,1
407,0.006011,0.0448,0.05175,0.01341,0.02669,0.007731,14.4,27.01,91.63,645.8,0.09402,0.1936,0.1838,0.05601,0.2488,0.08151,1
220,0.004133,0.01695,0.01652,0.006659,0.01371,0.002735,15.34,16.35,99.71,706.2,0.1311,0.2474,0.1759,0.08056,0.238,0.08718,1
413,0.004449,0.02808,0.03312,0.01196,0.01906,0.004015,16.76,31.55,110.2,867.1,0.1077,0.3345,0.3114,0.1308,0.3163,0.09251,1
424,0.01307,0.01885,0.006021,0.01052,0.031,0.004225,11.21,23.17,71.79,380.9,0.1398,0.1352,0.02085,0.04589,0.3196,0.08009,1
441,0.008109,0.04308,0.04942,0.01742,0.01594,0.003739,20.38,35.46,132.8,1284.0,0.1436,0.4122,0.5036,0.1739,0.25,0.07944,0
18,0.006494,0.01893,0.03391,0.01521,0.01356,0.001997,27.32,30.88,186.8,2398.0,0.1512,0.315,0.5372,0.2388,0.2768,0.07615,0
315,0.004928,0.003012,0.00262,0.00339,0.01393,0.001344,13.34,19.71,84.48,544.2,0.1104,0.04953,0.01938,0.02784,0.1917,0.06174,1
225,0.007089,0.01428,0.0236,0.01286,0.02266,0.001463,16.77,16.9,110.4,873.2,0.1297,0.1525,0.1632,0.1087,0.3062,0.06072,1
470,0.007501,0.01989,0.02714,0.009883,0.0196,0.003913,11.14,25.62,70.88,385.2,0.1234,0.1542,0.1277,0.0656,0.3174,0.08524,1
451,0.0119,0.01929,0.04907,0.01499,0.01641,0.001807,21.44,30.96,139.8,1421.0,0.1528,0.1845,0.3977,0.1466,0.2293,0.06091,0
152,0.01097,0.09586,0.396,0.05279,0.03546,0.02984,11.02,19.49,71.04,380.5,0.1292,0.2772,0.8216,0.1571,0.3108,0.1259,1
222,0.007899,0.014,0.008534,0.007624,0.02637,0.003761,11.17,22.84,71.94,375.6,0.1406,0.144,0.06572,0.05575,0.3055,0.08797,1
487,0.005288,0.02833,0.04256,0.01176,0.01717,0.003211,23.96,30.39,153.9,1740.0,0.1514,0.3725,0.5936,0.206,0.3266,0.09009,0
364,0.004394,0.0125,0.01451,0.005484,0.01291,0.002074,14.73,21.7,93.76,663.5,0.1213,0.1676,0.1364,0.06987,0.2741,0.07582,1
564,0.0103,0.02891,0.05198,0.02454,0.01114,0.004239,25.45,26.4,166.1,2027.0,0.141,0.2113,0.4107,0.2216,0.206,0.07115,0
193,0.009098,0.03845,0.03763,0.01321,0.01878,0.005672,15.65,39.34,101.7,768.9,0.1785,0.4706,0.4425,0.1459,0.3215,0.1205,0
376,0.008499,0.07643,0.1535,0.02919,0.01617,0.0122,10.85,22.82,76.51,351.9,0.1143,0.3619,0.603,0.1465,0.2597,0.12,1
174,0.008713,0.01017,0.0,0.0,0.03265,0.001002,11.54,19.2,73.2,408.3,0.1076,0.06791,0.0,0.0,0.271,0.06164,1
566,0.005903,0.03731,0.0473,0.01557,0.01318,0.003892,18.98,34.12,126.7,1124.0,0.1139,0.3094,0.3403,0.1418,0.2218,0.0782,0
301,0.00653,0.03369,0.04712,0.01403,0.0274,0.004651,13.46,23.07,88.13,551.3,0.105,0.2158,0.1904,0.07625,0.2685,0.07764,1
233,0.00502,0.02062,0.03457,0.01091,0.01298,0.002887,24.47,37.38,162.7,1872.0,0.1223,0.2761,0.4146,0.1563,0.2437,0.08328,0
506,0.006272,0.02198,0.03966,0.009894,0.0132,0.003813,13.16,24.17,85.13,515.3,0.1402,0.2315,0.3535,0.08088,0.2709,0.08839,1
478,0.004957,0.02114,0.04156,0.008038,0.01843,0.003614,12.4,21.9,82.04,467.6,0.1352,0.201,0.2596,0.07431,0.2941,0.0918,1
512,0.005414,0.02265,0.03452,0.01334,0.01705,0.004005,16.41,29.66,113.3,844.4,0.1574,0.3856,0.5106,0.2051,0.3585,0.1109,0
466,0.007295,0.03179,0.04615,0.01254,0.01561,0.00323,14.8,25.46,100.9,689.1,0.1351,0.3549,0.4504,0.1181,0.2563,0.08174,1
462,0.005356,0.01679,0.01971,0.00637,0.01414,0.001892,15.4,31.98,100.4,734.6,0.1017,0.146,0.1472,0.05563,0.2345,0.06464,1
557,0.01159,0.01124,0.0,0.0,0.03004,0.003324,10.49,34.24,66.5,330.6,0.1073,0.07158,0.0,0.0,0.2475,0.06969,1
192,0.001713,0.006736,0.0,0.0,0.03799,0.001688,9.968,20.83,62.25,303.8,0.07117,0.02729,0.0,0.0,0.1909,0.06559,1
489,0.002667,0.01446,0.01423,0.005297,0.01961,0.0017,19.18,26.56,127.3,1084.0,0.1009,0.292,0.2477,0.08737,0.4677,0.07623,0
555,0.01205,0.02736,0.04804,0.01721,0.01843,0.004938,10.84,34.91,69.57,357.6,0.1384,0.171,0.2,0.09127,0.2226,0.08283,1
249,0.006662,0.01228,0.02105,0.01006,0.01677,0.002784,12.65,21.19,80.88,491.8,0.1389,0.1582,0.1804,0.09608,0.2664,0.07809,1
493,0.01039,0.01003,0.006416,0.007895,0.02869,0.004821,13.19,16.36,83.24,534.0,0.09439,0.06477,0.01674,0.0268,0.228,0.07028,1
425,0.005724,0.005697,0.002074,0.003527,0.01445,0.002411,11.11,28.94,69.92,376.3,0.1126,0.07094,0.01235,0.02579,0.2349,0.08061,1
385,0.008312,0.01742,0.03389,0.01576,0.0174,0.002871,15.79,31.71,102.2,758.2,0.1312,0.1581,0.2675,0.1359,0.2477,0.06836,0
482,0.00445,0.01452,0.01334,0.008791,0.01698,0.002787,14.83,18.32,94.94,660.2,0.1393,0.2499,0.1848,0.1335,0.3227,0.09326,1
532,0.003828,0.007228,0.007078,0.005077,0.01054,0.001697,15.85,20.2,101.6,773.4,0.1264,0.1564,0.1206,0.08704,0.2806,0.07782,1
1,0.005225,0.01308,0.0186,0.0134,0.01389,0.003532,24.99,23.41,158.8,1956.0,0.1238,0.1866,0.2416,0.186,0.275,0.08902,0
286,0.006719,0.05156,0.04387,0.01633,0.01872,0.008015,13.24,27.29,92.2,546.1,0.1116,0.2813,0.2365,0.1155,0.2465,0.09981,1
329,0.01056,0.03756,0.05839,0.01186,0.04022,0.006187,17.73,25.21,113.7,975.2,0.1426,0.2116,0.3344,0.1047,0.2736,0.07953,0
70,0.004444,0.01652,0.02269,0.0137,0.01386,0.001698,24.86,26.58,165.9,1866.0,0.1193,0.2336,0.2687,0.1789,0.2551,0.06589,0
6,0.004314,0.01382,0.02254,0.01039,0.01369,0.002179,22.88,27.66,153.2,1606.0,0.1442,0.2576,0.3784,0.1932,0.3063,0.08368,0
102,0.00508,0.006098,0.01069,0.006797,0.01447,0.001532,13.34,32.84,84.58,547.8,0.1123,0.08862,0.1145,0.07431,0.2694,0.06878,1
547,0.01027,0.03084,0.02613,0.01097,0.02277,0.00589,10.83,22.04,71.08,357.4,0.1461,0.2246,0.1783,0.08333,0.2691,0.09479,1
362,0.005608,0.01646,0.01529,0.009997,0.01909,0.002133,13.75,25.99,87.82,579.7,0.1298,0.1839,0.1255,0.08312,0.2744,0.07238,1
278,0.003741,0.005274,0.01065,0.005044,0.01344,0.001126,15.5,26.1,98.91,739.1,0.105,0.07622,0.106,0.05185,0.2335,0.06263,1
195,0.005298,0.01587,0.02321,0.00842,0.01853,0.002152,13.88,22.0,90.81,600.6,0.1097,0.1506,0.1764,0.08235,0.3024,0.06949,1
47,0.006532,0.02336,0.02905,0.01215,0.01743,0.003643,15.67,27.95,102.8,759.4,0.1786,0.4166,0.5006,0.2088,0.39,0.1179,0
29,0.005627,0.03033,0.03407,0.01354,0.01925,0.003742,20.01,19.52,134.9,1227.0,0.1255,0.2812,0.2489,0.1456,0.2756,0.07919,0
65,0.006703,0.0231,0.02315,0.01184,0.019,0.003224,17.31,33.39,114.6,925.1,0.1648,0.3416,0.3024,0.1614,0.3321,0.08911,0
508,0.007394,0.01203,0.0247,0.01431,0.01344,0.002569,17.32,17.76,109.8,928.2,0.1354,0.1361,0.1947,0.1357,0.23,0.0723,1
69,0.007962,0.005612,0.01585,0.008662,0.02254,0.001906,13.46,19.76,85.67,554.9,0.1296,0.07061,0.1039,0.05882,0.2383,0.0641,1
498,0.007974,0.03214,0.04435,0.01573,0.01617,0.005255,22.75,22.88,146.4,1600.0,0.1412,0.3089,0.3533,0.1663,0.251,0.09445,0
556,0.01291,0.02222,0.004174,0.007082,0.02572,0.002278,10.65,22.88,67.88,347.3,0.1265,0.12,0.01005,0.02232,0.2262,0.06742,1
426,0.007364,0.03867,0.05263,0.01264,0.02161,0.00483,12.13,21.57,81.41,440.4,0.1327,0.2996,0.2939,0.0931,0.302,0.09646,1
412,0.005515,0.02674,0.03735,0.005128,0.01951,0.004583,9.965,27.99,66.61,301.0,0.1086,0.1887,0.1868,0.02564,0.2376,0.09206,1
402,0.003629,0.03713,0.03452,0.01065,0.02632,0.003705,14.13,24.61,96.31,621.9,0.09329,0.2318,0.1604,0.06608,0.3207,0.07247,1
507,0.01262,0.02348,0.018,0.01285,0.0222,0.008313,11.69,20.74,76.08,411.1,0.1662,0.2031,0.1256,0.09514,0.278,0.1168,1
279,0.004235,0.01541,0.01457,0.01043,0.01528,0.001593,14.98,21.74,98.37,670.0,0.1185,0.1724,0.1456,0.09993,0.2955,0.06912,1
330,0.005839,0.03245,0.03715,0.01459,0.01467,0.003121,18.76,21.98,124.3,1070.0,0.1435,0.4478,0.4956,0.1981,0.3019,0.09124,0
545,0.005868,0.02099,0.02021,0.009064,0.02087,0.002583,15.35,29.09,97.58,729.8,0.1216,0.1517,0.1049,0.07174,0.2642,0.06953,1
232,0.004359,0.006813,0.003223,0.003419,0.01916,0.002534,12.36,41.78,78.44,470.9,0.09994,0.06885,0.02318,0.03002,0.2911,0.07307,1
333,0.005617,0.007124,0.0009737,0.002941,0.017,0.00203,12.76,22.06,82.08,492.7,0.1166,0.09794,0.005518,0.01667,0.2815,0.07418,1
290,0.007762,0.1064,0.0996,0.02771,0.04077,0.02286,15.77,22.13,101.7,767.3,0.09983,0.2472,0.222,0.1021,0.2272,0.08799,1
299,0.01017,0.01443,0.01861,0.0125,0.03464,0.001971,10.93,24.22,70.1,362.7,0.1143,0.08614,0.04158,0.03125,0.2227,0.06777,1
87,0.003872,0.01842,0.0371,0.012,0.01964,0.003337,24.56,30.41,152.9,1623.0,0.1249,0.3206,0.5755,0.1956,0.3956,0.09288,0
294,0.006064,0.0118,0.006564,0.007978,0.01374,0.001392,13.5,17.48,88.54,553.7,0.1298,0.1472,0.05233,0.06343,0.2369,0.06922,1
477,0.003308,0.01315,0.009904,0.004832,0.01316,0.002095,15.14,21.8,101.2,718.9,0.09384,0.2006,0.1384,0.06222,0.2679,0.07698,1
27,0.01075,0.02722,0.05081,0.01911,0.02293,0.004217,21.31,27.26,139.9,1403.0,0.1338,0.2117,0.3446,0.149,0.2341,0.07421,0
84,0.005969,0.01812,0.02007,0.007027,0.01972,0.002607,13.67,24.9,87.78,567.9,0.1377,0.2003,0.2267,0.07632,0.3379,0.07924,1
234,0.01164,0.0104,0.01186,0.009623,0.02383,0.00354,10.51,19.16,65.74,335.9,0.1504,0.09515,0.07161,0.07222,0.2757,0.08178,1
368,0.005568,0.01112,0.02096,0.01197,0.01263,0.001803,30.75,26.44,199.5,3143.0,0.1363,0.1628,0.2861,0.182,0.251,0.06494,0
305,0.006122,0.02337,0.01596,0.006998,0.03194,0.002211,12.44,31.62,81.39,476.5,0.09545,0.1361,0.07239,0.04815,0.3244,0.06745,1
5,0.00751,0.03345,0.03672,0.01137,0.02165,0.005082,15.47,23.75,103.4,741.6,0.1791,0.5249,0.5355,0.1741,0.3985,0.1244,0
408,0.007231,0.02772,0.02509,0.0148,0.01414,0.003336,21.08,25.41,138.1,1349.0,0.1482,0.3735,0.3301,0.1974,0.306,0.08503,0
238,0.006307,0.02845,0.0385,0.01011,0.01185,0.003589,15.75,40.54,102.5,764.0,0.1081,0.2426,0.3064,0.08219,0.189,0.07796,1
242,0.006663,0.05914,0.0888,0.01314,0.01995,0.008675,12.58,27.96,87.16,472.9,0.1347,0.4848,0.7436,0.1218,0.3308,0.1297,1
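The two CSVs above are a vertically partitioned breast-cancer split, presumably the files behind the binclass_vfl_* entries: both sides share the id key, the feature columns are disjoint, and only the second file carries the label y, which the refactored VFL logistic regression consumes. A hedged alignment check (the file names are placeholders for the pair shown above):

    # Confirm the two partitions align on id and that only one side has the label.
    head -n1 partition_a.csv | tr ',' '\n' | head -n3   # id plus leading feature columns
    head -n1 partition_b.csv | tr ',' '\n' | tail -n1   # -> y (label on one side only)
    comm -3 <(cut -d, -f1 partition_a.csv | sort) \
            <(cut -d, -f1 partition_b.csv | sort)       # empty output if the ids match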
This diff is collapsed.
This diff is collapsed.
...
-fake0
-0
\ No newline at end of file
+0,1
+0,1
\ No newline at end of file
id,battery_power,blue,clock_speed,dual_sim,fc,four_g,int_memory,m_dep,mobile_wt
1725,1117,1,0.5,1,2,0,21,0.1,177
1103,1375,1,1.3,1,2,0,12,0.1,105
1386,610,1,1.6,1,9,0,52,0.4,90
390,1836,1,1.7,1,3,0,26,0.1,134
1642,936,0,0.5,1,14,1,14,0.1,164
996,774,0,0.5,1,2,1,10,0.5,188
278,538,0,0.8,0,12,1,2,0.8,177
135,1213,1,0.5,0,0,0,16,0.5,108
339,1560,0,2.3,0,0,1,22,0.8,169
1975,1157,1,0.8,0,7,0,27,0.1,88
1617,634,1,0.8,0,5,0,41,0.2,192
1854,1571,0,2.9,0,1,0,30,0.9,150
1047,576,0,1.2,0,1,0,30,0.8,162
103,1939,1,2.8,1,0,1,55,0.6,199
1788,1424,0,0.5,1,16,1,23,0.6,80
1606,557,0,0.5,1,3,0,17,0.2,89
1387,1533,1,1.1,1,18,1,17,0.3,160
1293,1702,0,1.0,0,7,0,36,0.1,177
1013,1694,0,1.8,0,0,0,53,1.0,186
1978,1483,1,2.2,0,3,1,53,0.7,169
1477,1150,1,2.7,0,0,0,39,0.4,151
1585,1992,1,0.6,1,11,0,2,0.1,141
1858,1615,1,2.8,0,3,0,9,0.9,117
1773,1514,1,1.4,0,3,1,12,0.6,138
1332,614,0,2.9,1,3,1,24,0.2,94
1025,1081,1,2.5,1,13,0,49,1.0,99
1971,538,0,1.1,1,0,1,25,0.3,163
304,1177,0,0.9,0,1,1,29,0.4,191
1905,1027,1,0.8,0,7,0,18,1.0,146
749,802,1,2.4,1,1,0,45,0.8,157
1718,637,1,0.8,1,2,0,28,0.6,194
650,1315,1,2.6,0,7,0,64,0.8,128
969,1345,1,3.0,0,7,1,38,0.1,162
1328,507,1,1.9,1,0,1,39,0.7,142
277,1281,1,2.8,1,3,1,8,0.3,144
1041,1727,0,2.1,0,2,1,63,0.6,85
344,618,0,2.1,0,2,0,6,0.3,86
1208,989,1,0.7,0,13,1,51,0.4,132
184,1502,1,1.2,0,0,0,54,0.2,138
133,668,0,2.9,1,7,0,30,0.2,195
846,1489,0,2.4,1,8,0,32,0.6,200
488,1949,0,1.8,0,0,0,20,0.8,186
1670,1569,1,1.6,1,4,1,54,0.9,190
88,668,0,0.5,1,0,0,3,0.1,155
1320,1538,1,2.3,0,6,1,13,0.2,121
1756,1864,0,1.9,1,2,0,57,0.1,181
997,1068,0,0.5,1,0,1,19,0.9,197
1373,1616,1,2.3,1,3,0,52,0.1,100
1536,1412,0,0.6,0,9,0,57,0.2,86
1752,1938,1,0.5,1,5,0,44,0.5,121
735,771,1,0.5,0,3,0,25,1.0,86
1494,574,1,2.0,1,0,1,26,0.5,118
1192,1030,1,0.5,0,4,1,37,0.7,147
535,1185,0,1.9,0,0,0,31,0.4,152
343,1034,1,2.7,1,6,0,37,0.7,120
550,1408,1,2.1,1,10,0,45,0.9,89
883,1918,0,2.8,0,1,0,22,0.3,139
1274,1602,0,2.7,0,4,1,46,0.6,118
153,1485,0,1.0,0,2,1,32,0.7,84
1523,1650,0,2.3,0,0,1,38,0.7,89
457,1128,1,1.4,0,12,0,53,0.4,84
1902,730,0,1.5,0,9,0,9,0.7,122
110,783,0,1.8,1,0,1,43,1.0,106
933,1991,1,1.8,0,11,0,9,0.7,94
580,1164,0,1.0,0,4,1,19,0.9,95
1938,1749,1,0.5,1,7,1,49,0.8,178
1493,1183,1,0.5,1,3,1,48,0.1,120
1541,823,0,1.0,1,9,1,18,0.2,182
1026,606,0,2.5,0,1,1,53,0.2,146
539,525,1,0.5,1,5,0,51,0.5,137
4,1821,1,1.2,0,13,1,44,0.6,141
698,1940,1,0.9,1,4,0,17,0.7,93
8,1445,1,0.5,0,0,0,53,0.7,174
207,1265,0,1.5,0,7,0,49,0.7,182
1484,1689,1,0.5,0,11,0,9,0.5,150
777,1786,0,0.5,0,9,1,58,0.7,161
1323,712,0,0.5,0,6,0,27,0.5,86
1039,504,1,0.5,1,2,1,46,0.9,172
1156,1732,0,0.8,0,2,0,61,0.3,172
1593,1864,0,2.2,0,0,1,7,0.1,142
164,1441,0,2.1,1,11,1,3,0.1,118
53,1457,0,1.9,1,1,1,16,0.3,102
11,1520,1,2.2,0,5,1,33,0.5,177
568,1288,0,2.8,1,15,0,50,0.1,121
45,1514,0,2.9,0,0,0,27,0.2,118
1876,864,0,1.6,1,0,1,53,0.3,88
18,1131,1,0.5,1,11,0,49,0.6,101
1749,1485,1,0.9,1,0,1,28,0.8,158
1903,1361,1,1.4,0,1,0,47,1.0,169
780,1042,0,2.2,0,15,1,11,0.6,139
1463,1147,0,0.8,1,11,0,28,0.7,158
82,1510,1,0.9,1,2,0,45,0.9,180
1554,1957,0,1.2,1,18,1,36,0.8,151
641,518,0,0.8,1,0,0,3,0.1,178
963,1861,1,0.9,0,4,1,55,0.5,148
743,1159,1,2.8,0,5,1,18,0.5,83
649,709,1,2.0,1,0,0,35,0.1,136
1859,649,1,1.4,0,4,1,19,0.8,190
836,902,1,0.6,1,0,0,63,0.7,122
767,1853,0,0.5,1,1,0,27,0.3,197
161,961,1,1.5,1,10,0,51,0.3,152
761,558,1,2.8,0,1,1,49,0.8,142
985,1068,0,0.7,1,9,0,51,0.5,124
821,1883,1,2.7,1,6,1,30,0.5,95
435,954,1,0.5,0,0,0,50,0.3,134
302,768,1,0.9,0,16,1,56,0.1,113
1682,1996,1,2.8,1,0,1,7,0.1,138
1198,708,0,1.9,0,0,0,26,0.3,116
1592,826,0,0.6,0,3,0,62,0.7,187
1077,1569,0,0.8,1,8,0,59,0.9,111
1771,1230,1,1.6,0,0,1,48,0.7,111
186,1266,0,0.5,0,0,1,5,0.1,95
815,1512,1,0.5,0,1,0,52,0.1,131
1427,1489,0,2.1,1,0,0,9,0.4,169
1327,948,0,2.3,0,14,0,22,0.1,173
1145,1635,0,2.6,1,0,1,2,0.6,182
405,1454,1,0.5,1,1,0,34,0.7,83
134,1652,1,1.1,1,1,0,36,0.7,89
377,930,1,1.3,1,1,1,14,0.4,97
1935,798,0,0.6,1,6,1,21,0.5,110
1817,1541,1,1.5,1,6,1,5,0.5,109
1078,1312,1,0.8,1,0,0,24,0.9,156
1175,563,0,2.0,1,3,0,47,0.3,115
1045,531,0,1.1,0,10,0,63,0.7,189
1134,1447,1,2.5,1,2,0,9,0.2,155
257,507,1,0.5,1,1,0,32,0.5,141
773,1760,0,1.9,0,1,1,50,0.8,198
1216,1158,0,0.7,1,1,1,29,0.7,123
1758,581,0,0.6,0,5,1,56,0.2,180
1441,599,1,2.9,0,0,1,64,0.3,186
1108,808,0,2.3,0,3,0,45,0.8,161
1058,643,0,1.0,0,9,1,18,0.2,187
533,1217,1,0.5,0,3,0,17,0.5,182
601,1595,1,0.8,1,0,1,44,0.7,85
1500,1225,0,0.7,1,6,0,60,0.1,107
39,560,0,0.5,1,15,0,50,0.3,159
977,1130,1,2.5,0,4,1,58,0.1,132
1185,1136,0,0.5,0,0,1,9,0.9,150
155,1692,0,2.3,1,4,1,46,0.9,139
446,1991,1,2.7,0,5,1,64,0.9,145
710,663,0,0.8,1,0,0,25,0.6,183
365,763,0,1.7,1,8,1,2,0.5,92
810,691,1,2.7,1,4,1,33,0.9,108
1910,1872,1,1.9,0,5,0,35,0.6,111
139,523,0,2.6,1,1,0,14,0.1,155
1870,1234,0,1.7,1,8,0,17,0.9,174
1020,1119,0,0.5,1,0,1,23,0.4,164
67,1866,0,1.4,0,0,0,30,0.5,182
239,1799,1,0.5,0,0,0,49,0.4,107
1624,555,1,3.0,1,5,1,38,0.8,193
1095,775,1,2.5,1,1,1,10,0.1,185
1018,1000,1,0.5,0,12,0,63,0.7,179
689,705,1,0.5,0,5,1,57,0.9,155
1439,1006,0,2.2,1,4,1,22,0.8,135
899,1112,0,0.5,0,0,1,12,0.9,190
1820,665,1,0.7,0,0,1,31,0.1,185
1034,1949,1,1.4,0,7,0,57,0.3,145
1144,1285,0,2.4,0,1,1,51,1.0,140
477,601,0,2.1,0,4,0,26,0.3,101
937,764,1,1.2,1,1,0,13,1.0,152
747,1648,1,0.5,0,1,1,17,0.9,142
851,757,0,2.3,1,5,1,58,0.2,163
486,1089,1,0.9,1,12,1,2,0.7,145
399,1868,0,2.0,1,0,1,42,0.2,169
1297,956,0,1.9,1,1,0,42,0.1,133
697,1567,0,0.5,1,3,0,22,0.5,119
1927,1023,1,0.7,0,2,1,63,0.7,153
48,578,1,2.6,1,2,1,57,0.2,162
1875,868,1,0.9,0,5,0,5,0.4,137
1946,590,1,1.2,1,0,0,32,0.4,141
629,1914,0,0.7,0,1,0,35,0.6,118
1799,1250,1,1.5,0,14,1,20,0.4,167
1449,602,0,0.9,0,9,0,50,0.9,141
573,1900,1,0.5,1,0,1,55,0.9,171
1767,1136,1,1.6,0,15,0,32,0.5,150
1649,1307,0,1.8,0,2,0,7,0.6,88
974,672,1,0.5,1,0,0,54,0.9,95
928,1349,1,2.2,1,5,0,55,0.1,105
463,965,1,1.5,1,5,0,13,1.0,109
1385,1805,1,0.5,0,0,0,29,0.2,182
1007,1963,1,1.0,0,16,1,34,0.1,157
1279,1602,0,0.6,0,12,0,58,0.4,170
270,1066,0,0.6,0,2,1,7,0.9,114
224,1686,1,2.8,1,3,0,38,0.1,166
1084,1836,0,1.1,1,0,0,14,0.7,104
895,1790,1,2.3,1,3,1,49,0.5,100
758,1264,0,2.9,1,2,1,34,0.2,171
919,1531,0,2.0,0,0,1,60,0.4,162
208,664,1,1.1,1,1,1,51,0.1,110
1691,1603,1,0.5,1,1,1,17,0.5,114
458,664,0,1.2,1,0,0,28,0.9,106
73,1038,0,1.2,0,3,0,43,0.7,141
1929,1602,0,1.6,1,0,1,35,0.1,99
359,1960,1,2.3,1,1,1,61,0.3,86
1307,831,0,1.7,1,7,1,26,0.7,177
99,593,0,0.5,0,6,0,31,0.4,156
578,1195,0,2.0,1,1,1,23,0.1,82
406,1007,0,2.0,0,0,0,45,0.1,95
1801,1035,1,1.9,1,8,0,11,1.0,173
51,1760,0,1.4,1,5,0,63,0.8,127
72,605,0,1.0,0,8,1,9,0.1,142
1556,875,1,1.4,1,1,1,43,0.6,104
1565,1062,0,0.5,0,3,1,39,0.9,124
214,903,0,1.0,1,0,1,50,0.4,91
1526,963,0,0.5,0,1,1,60,0.8,156
1634,986,1,0.8,0,13,1,28,0.8,182
1603,984,1,2.2,1,0,1,39,0.5,119
1823,1493,0,1.5,1,13,0,53,0.5,102
1254,1083,0,1.0,0,6,0,52,0.9,126
1710,548,0,1.0,1,0,1,56,0.4,146
1954,686,1,2.7,1,0,0,36,0.1,194
1893,721,1,2.7,1,1,1,15,0.2,102
401,1042,0,2.7,1,16,1,50,0.8,93
925,1048,1,1.5,0,2,1,29,1.0,182
148,1216,0,3.0,0,4,0,47,0.4,199
1369,818,0,0.5,1,2,1,23,0.2,192
1172,618,1,0.5,1,2,1,57,0.2,88
332,1730,0,2.0,1,0,1,20,0.1,143
786,774,1,2.8,0,7,1,55,0.9,124
1482,520,0,2.3,0,0,1,31,0.8,145
1266,1020,0,1.5,0,13,1,7,0.7,96
1559,671,0,2.9,0,7,1,2,0.1,95
658,1926,1,1.1,0,13,1,50,0.2,179
509,1872,1,2.3,0,6,0,44,0.7,134
807,1312,0,1.3,0,5,1,10,0.6,163
1004,965,1,0.5,0,15,0,53,1.0,131
256,601,1,1.4,0,4,1,4,0.1,154
1436,514,1,2.9,1,5,0,18,0.7,141
802,1591,1,2.2,1,4,1,52,1.0,161
790,911,1,2.9,0,0,0,46,0.1,181
1778,1492,1,1.5,0,5,0,11,0.9,85
384,625,1,1.9,0,12,1,33,0.2,191
1728,1065,0,2.8,1,0,1,21,0.1,158
1644,1175,1,1.7,1,1,1,45,0.6,167
525,1589,1,1.4,0,3,1,26,0.1,100
1608,1129,1,0.8,1,9,1,21,0.6,116
541,793,0,1.2,1,1,1,38,0.6,176
945,832,0,1.1,0,0,1,46,0.3,93
822,839,0,2.0,1,0,0,14,0.4,175
560,1168,0,1.3,0,9,0,61,0.1,90
978,1359,0,0.6,0,4,0,59,0.3,86
614,539,1,0.6,1,9,0,21,0.1,145
94,1322,0,1.7,1,6,0,7,0.8,140
378,1058,0,1.5,0,2,0,19,0.4,166
1534,622,1,1.9,1,12,0,11,0.8,193
656,1583,1,2.1,1,11,0,14,0.7,148
1432,630,0,2.0,0,9,1,14,0.5,180
1985,1829,1,2.1,0,8,0,59,0.1,91
1782,1436,1,2.4,1,3,1,23,0.3,106
480,1417,1,2.7,0,2,1,56,0.9,114
77,1234,1,1.6,1,1,0,33,0.6,172
796,1204,1,2.6,0,0,0,10,0.3,199
1012,536,1,1.4,0,0,1,53,0.7,135
1667,1967,0,0.5,0,0,1,46,0.6,199
430,1887,1,3.0,1,6,1,17,0.2,126
1280,1426,0,1.6,0,4,1,12,0.7,174
504,1661,1,1.4,0,0,0,29,0.3,119
561,1229,0,0.5,1,15,0,14,0.8,191
1015,1371,0,0.5,0,4,0,52,0.5,167
1516,1081,1,1.9,1,13,1,42,0.1,101
1384,1163,1,1.9,0,6,0,2,0.9,133
1106,1266,0,2.6,0,1,0,18,0.4,86
891,1652,0,0.6,1,4,1,40,1.0,198
586,704,1,2.7,0,8,1,29,0.2,80
227,1151,1,2.9,1,0,0,31,0.6,128
1937,1396,1,0.6,0,0,0,37,0.7,134
529,1821,0,0.9,0,9,1,12,0.3,114
223,1452,1,0.5,1,1,1,25,0.9,123
730,1049,0,0.6,1,8,1,2,0.3,172
131,1790,0,2.5,1,9,0,2,0.1,184
102,543,0,0.5,0,0,0,57,0.7,192
1347,1332,0,1.3,1,5,1,7,0.8,168
238,1330,0,0.5,0,0,0,38,1.0,183
947,907,0,1.4,1,3,0,42,0.2,151
242,822,1,0.5,0,0,1,51,0.3,157
892,912,0,0.5,1,0,0,58,0.3,107
163,1193,0,0.9,0,11,0,26,0.5,166
1780,909,1,0.7,1,7,0,47,0.1,146
86,1496,1,2.0,1,4,0,42,0.5,182
15,775,0,1.0,0,3,0,46,0.7,159
1292,1365,0,0.6,0,0,1,31,0.3,125
503,1438,0,2.2,0,0,1,32,0.6,176
1560,1872,0,0.5,0,5,1,26,0.5,172
1832,897,0,1.4,1,1,0,17,0.4,171
1764,1811,1,2.0,0,3,1,22,0.2,82
1022,1099,0,0.5,0,13,1,61,0.3,146
682,804,1,0.8,1,12,1,41,0.9,89
507,512,1,0.5,1,7,0,15,0.9,83
922,874,0,1.3,0,5,0,8,0.5,185
1683,1083,1,2.0,0,3,0,41,0.7,158
204,1472,1,2.3,0,6,0,61,0.1,168
262,1431,1,0.5,0,0,1,56,0.6,191
847,1843,1,1.8,1,13,1,16,0.7,134
1417,1987,1,1.9,0,0,0,50,0.7,119
19,682,1,0.5,0,4,0,19,1.0,121
1688,673,1,1.9,0,13,1,35,0.4,142
200,1703,1,0.5,0,0,1,22,0.6,192
244,1891,0,2.8,0,7,1,37,0.4,129
33,1310,1,2.2,1,0,1,51,0.6,100
1578,1856,0,0.5,1,0,1,24,0.1,155
797,1320,0,2.1,1,0,1,41,0.6,144
1625,1000,0,2.7,0,1,1,58,1.0,135
916,816,1,0.5,0,4,0,14,0.9,198
1475,1154,0,1.1,1,2,1,5,0.2,98
752,1620,0,2.6,1,6,0,5,0.9,186
1537,733,0,0.8,0,7,0,16,0.4,138
724,696,0,0.5,0,6,0,51,0.3,197
1569,1494,0,2.4,0,6,1,3,0.9,93
1822,877,0,1.5,1,0,1,17,0.1,195
1942,1279,1,2.5,0,5,1,7,0.5,105
1775,574,1,3.0,0,1,1,30,0.9,126
1878,608,1,0.5,1,0,0,64,0.9,94
157,1224,0,0.5,0,16,1,6,0.4,109
654,666,1,0.5,1,7,1,54,0.4,81
1252,917,1,0.5,0,1,1,31,0.5,88
1988,1547,1,2.9,0,2,0,57,0.4,114
671,852,0,1.0,1,4,1,62,0.7,104
795,1442,0,2.2,0,0,1,56,0.7,145
1900,1619,1,1.9,0,2,1,21,0.4,138
733,720,1,1.4,1,0,1,61,0.6,96
549,1002,0,0.8,1,0,1,35,0.5,83
1944,544,1,0.5,1,2,1,7,0.2,129
1230,697,0,2.7,1,5,1,20,0.4,173
905,989,1,2.0,0,4,0,17,0.2,166
170,1270,1,0.6,1,2,1,32,0.5,101
439,1469,0,2.1,0,0,0,8,0.7,123
1738,511,0,0.9,1,15,1,24,0.6,136
1119,1537,0,2.9,0,11,1,46,0.8,158
1741,946,1,1.4,0,9,0,26,0.1,186
1675,1630,1,2.8,1,0,1,32,0.9,80
890,1132,0,1.0,0,0,0,8,0.1,157
861,525,0,2.4,1,11,0,35,0.8,190
1795,527,0,0.7,1,4,0,53,0.9,128
1125,1106,1,1.9,0,4,0,54,0.7,107
679,675,0,2.3,0,10,0,60,0.9,144
1869,1278,1,1.0,0,7,1,24,0.8,170
531,532,1,0.8,1,3,0,8,0.1,193
448,1589,0,2.5,0,5,1,51,0.8,195
955,1745,0,1.5,1,4,1,57,0.1,159
720,1702,1,2.3,0,12,1,52,0.5,145
308,1530,0,2.0,0,11,0,42,0.8,177
1619,1641,0,1.1,0,6,0,48,0.7,97
774,969,0,2.4,0,0,0,45,0.3,124
1790,648,1,0.5,1,9,1,54,0.4,109
1201,1481,1,2.0,1,0,0,35,0.5,105
954,922,1,0.7,0,0,1,47,1.0,157
1476,1638,0,1.0,0,6,0,2,0.2,189
1126,635,1,0.6,1,1,1,50,0.3,97
287,1563,0,2.4,0,0,1,24,0.2,170
1092,846,1,2.6,0,0,1,58,0.1,123
105,832,0,0.7,1,2,1,39,0.7,103
1613,1676,0,1.6,0,2,0,37,0.2,145
440,1310,1,2.3,1,6,0,57,0.9,135
1229,1215,0,1.0,0,4,0,16,0.1,130
520,1875,1,0.5,1,11,1,55,0.7,91
1210,805,1,1.7,1,0,1,58,0.9,168
266,1876,0,0.5,0,10,1,40,0.3,86
1922,764,0,1.0,1,9,1,45,0.5,124
1754,1086,1,1.7,1,0,1,43,0.2,111
1629,1232,0,1.2,0,12,0,30,0.9,131
374,1334,1,2.4,0,5,1,42,0.6,115
1149,1396,0,1.5,1,0,1,44,0.1,118
920,934,0,2.0,1,2,0,30,0.7,182
1405,1973,1,0.5,1,3,0,21,0.8,95
1311,643,1,0.7,1,3,1,23,0.8,156
684,706,0,0.5,0,13,1,31,0.1,108
876,1064,0,2.9,1,1,1,34,0.2,184
483,1476,0,0.5,0,0,1,43,0.7,148
1826,925,1,2.1,0,9,0,56,0.2,196
809,972,1,1.3,0,8,0,44,0.7,82
678,1793,0,1.8,1,10,0,12,0.5,169
1037,1104,0,1.7,0,1,1,60,0.4,199
670,638,0,1.7,1,11,0,27,0.5,102
1879,983,0,1.8,0,0,1,53,0.9,187
546,1350,1,0.5,1,0,1,50,0.8,181
1966,1583,0,1.2,1,0,0,14,0.2,148
1021,1699,0,0.5,1,1,0,59,0.5,97
263,767,0,1.5,0,4,0,28,0.7,175
138,536,0,2.4,1,12,1,3,0.3,182
293,1517,0,0.5,0,6,1,48,0.6,143
247,1273,0,1.0,0,3,1,48,0.4,99
1836,1715,0,1.3,0,0,1,12,0.3,94
75,819,0,0.6,1,8,1,42,0.9,188
816,805,1,0.7,0,0,0,64,0.1,97
313,645,0,1.2,0,6,1,41,0.7,184
1602,1494,0,1.6,1,4,1,24,0.2,101
92,885,0,2.3,1,0,1,15,0.4,103
784,1892,1,0.7,0,5,0,47,0.4,110
1591,1830,0,0.5,0,0,0,7,0.9,191
1884,1369,1,1.7,1,3,0,28,0.6,108
1174,673,1,2.2,1,2,0,3,0.4,84
915,1265,0,0.6,1,4,1,49,0.5,90
1178,909,1,0.5,1,9,0,30,0.4,97
1073,1189,1,2.3,0,13,0,8,0.5,198
721,808,0,0.5,1,0,0,8,0.1,192
305,1348,0,2.0,0,18,0,52,0.3,98
253,835,0,1.1,1,2,0,7,0.2,101
1572,895,0,0.5,1,6,0,16,0.3,164
166,748,1,1.7,0,0,1,27,0.9,150
213,1827,0,1.7,0,4,1,39,0.5,128
1070,1659,1,2.5,0,2,0,41,0.1,88
757,554,1,1.3,0,0,0,12,0.1,105
499,869,0,1.3,1,3,0,34,0.1,104
868,981,1,1.9,1,0,0,2,0.1,136
1157,1587,1,0.5,0,0,0,50,0.9,148
29,851,0,0.5,0,3,0,21,0.4,200
489,1138,0,1.1,0,5,1,48,0.6,109
1237,1558,1,2.5,0,2,1,32,0.1,114
173,1577,0,0.5,1,0,1,42,0.3,197
1958,1976,1,2.6,1,6,0,45,1.0,136
1132,1524,1,1.8,1,0,0,10,0.6,174
1375,1604,1,1.9,1,0,1,63,0.6,93
972,1330,0,0.9,0,6,1,52,0.8,84
1513,1343,0,2.4,1,5,1,14,0.9,81
58,1757,0,0.5,0,8,0,49,0.5,180
309,1997,1,1.8,1,1,0,40,0.5,185
1473,541,0,1.0,0,0,1,10,0.4,127
592,1263,0,0.5,0,12,0,64,0.3,175
1248,1935,0,0.5,0,0,1,29,0.6,124
1745,926,1,0.8,0,6,1,59,0.8,161
572,1701,1,0.5,0,13,1,46,0.9,115
1374,1263,1,1.4,1,1,1,32,0.4,103
518,1606,0,1.4,0,3,0,57,0.1,154
566,1170,1,1.2,0,4,0,30,0.4,83
152,1692,1,2.1,0,3,1,61,0.9,176
431,1772,1,0.5,1,3,0,64,0.7,116
648,1112,1,0.5,1,1,0,53,0.6,121
1044,1218,1,2.8,1,3,0,39,0.8,150
281,1429,0,1.5,1,0,1,40,0.9,100
1909,955,1,0.5,0,2,0,2,0.6,155
1785,1354,1,0.7,1,3,1,26,0.5,136
1443,1180,1,2.5,0,0,1,27,0.1,90
1679,1470,1,1.7,1,6,0,45,0.1,121
1102,514,1,0.7,0,4,0,57,0.1,186
591,1658,1,1.4,0,0,1,39,0.7,182
1709,1922,0,0.5,0,15,1,42,0.6,153
547,1117,1,0.6,1,0,0,31,0.2,146
734,1536,0,0.8,0,12,0,13,0.3,105
1261,1562,0,0.5,1,1,1,20,0.8,172
1177,1031,1,2.1,1,2,1,11,0.1,121
1797,873,1,0.5,1,6,0,8,0.6,152
1761,1329,1,1.7,0,5,1,23,0.6,198
28,1453,0,1.6,1,12,1,52,0.3,96
420,1456,1,0.5,1,7,0,7,0.4,105
221,660,0,0.5,0,5,1,40,0.5,111
981,1046,1,2.8,1,0,1,58,0.2,100
517,1059,0,2.4,0,8,0,61,0.1,134
1344,1429,0,0.6,0,8,0,42,0.9,151
409,1726,1,0.9,1,2,0,51,0.6,193
1183,1744,0,2.8,0,0,1,62,0.7,158
1979,1614,0,1.2,0,1,1,9,0.1,161
765,733,0,2.1,0,5,1,7,0.2,174
1396,539,0,1.5,0,6,1,26,0.2,137
808,1852,0,1.4,0,4,1,10,0.7,196
897,1549,1,0.5,0,4,0,8,0.4,158
1547,1611,0,0.5,1,4,1,27,0.9,199
1228,1820,1,1.7,1,0,1,64,0.6,161
1886,1082,0,2.3,1,11,0,2,1.0,101
1918,772,0,1.0,1,0,1,23,0.6,97
635,701,1,0.7,0,7,0,15,0.5,160
1941,1522,0,2.2,0,4,0,21,0.7,162
1488,959,1,1.3,1,0,0,36,0.4,196
423,1681,1,2.5,0,2,0,11,0.4,158
1391,794,0,2.5,1,15,0,14,0.6,197
1993,1467,1,0.5,0,0,0,18,0.6,122
1061,1522,1,0.7,1,4,0,28,0.2,124
1721,1174,1,0.9,0,7,0,30,1.0,147
1413,1303,1,2.1,0,6,1,10,0.4,109
944,674,1,2.8,1,0,1,8,0.2,93
801,1422,0,1.0,0,15,0,3,0.1,112
1831,623,1,0.8,0,6,1,26,0.3,197
1251,1317,0,1.8,1,2,0,43,0.5,113
1421,667,0,1.3,1,4,1,6,0.1,149
1747,618,0,1.1,0,11,1,45,0.2,188
1726,990,1,2.7,1,3,0,15,0.9,153
1167,622,0,0.7,0,0,1,9,0.3,127
195,1526,0,2.1,0,1,1,23,0.2,117
793,1347,0,0.9,1,8,1,53,0.8,108
609,1212,0,2.6,0,1,1,46,0.3,102
574,1974,0,1.0,0,4,0,24,0.3,80
1517,911,0,1.4,1,3,0,14,0.7,199
1241,1221,0,2.3,0,7,1,35,0.4,152
314,1163,0,2.8,1,0,0,49,0.6,155
739,1303,0,1.0,1,0,1,15,0.1,138
363,879,0,1.5,1,0,1,21,0.8,187
1267,896,0,1.0,1,3,1,4,0.3,111
551,1900,1,2.0,1,1,0,41,0.5,190
1295,1347,0,1.5,1,10,0,56,0.3,95
1253,712,0,2.5,1,1,0,49,0.6,99
1802,1289,0,0.8,1,3,1,16,0.9,161
203,671,0,0.9,1,10,0,30,0.7,105
1043,1948,0,2.8,1,5,1,18,0.2,135
1549,1772,1,1.6,0,17,1,45,0.5,159
1335,511,1,3.0,1,5,1,34,0.9,125
776,827,0,1.1,1,2,1,18,0.9,160
1980,930,1,1.0,1,4,1,4,0.9,144
623,644,1,1.3,1,7,1,30,0.5,114
1158,1097,0,0.8,0,10,1,21,0.1,160
869,1236,1,2.8,0,6,1,32,0.3,143
352,1604,1,1.7,0,2,0,33,1.0,191
1595,1206,0,3.0,1,10,1,30,0.6,127
91,1977,1,2.0,1,7,1,54,1.0,171
34,644,1,2.7,0,0,0,22,0.7,157
43,1656,0,1.0,0,5,1,34,0.1,166
151,742,0,2.2,1,4,0,56,0.2,190
449,1844,1,2.3,1,1,0,51,0.7,158
327,1995,1,0.5,0,6,0,26,0.2,129
695,560,1,2.0,1,6,1,57,0.3,81
280,1733,1,2.8,0,3,1,36,1.0,119
1164,860,1,1.3,1,8,1,40,0.7,170
1816,584,1,1.7,1,1,1,19,0.4,199
389,1712,0,1.4,0,1,0,63,0.5,137
1310,1095,1,2.6,1,0,1,44,0.7,103
1036,688,0,2.5,1,0,0,57,0.8,131
864,1296,0,1.4,1,3,0,11,0.2,129
1660,1559,1,1.3,1,10,1,10,0.3,114
687,768,0,0.5,1,0,0,19,0.9,159
1262,1433,1,2.3,0,8,1,27,0.9,153
172,625,1,1.2,1,5,1,10,0.2,183
1088,1655,1,0.5,1,6,1,4,0.1,105
1416,1448,0,0.5,1,18,0,2,0.2,100
1627,865,0,2.9,0,5,0,16,0.1,89
1673,1504,0,1.9,1,7,1,25,0.3,152
982,1035,0,0.6,1,2,1,44,0.5,99
1091,1709,1,0.8,0,8,1,58,0.8,136
1376,539,0,2.0,1,1,0,24,0.5,86
467,1433,0,1.6,0,7,0,4,0.7,133
660,568,0,2.0,0,1,0,36,0.3,179
1623,1862,0,1.5,0,1,0,62,1.0,182
41,1646,1,2.3,0,8,1,41,0.2,185
55,1936,0,2.1,1,10,1,46,0.6,104
32,1319,1,0.9,0,3,1,41,0.9,107
1574,1628,1,1.8,1,11,0,56,0.1,136
1050,718,0,0.7,1,0,1,21,0.8,88
1478,1254,1,2.3,0,0,1,44,0.2,149
1703,942,0,1.4,1,8,0,27,0.5,91
1663,1416,1,1.8,1,0,0,20,0.1,93
1594,586,1,0.6,0,16,1,42,0.3,121
1309,1433,1,2.3,0,2,1,5,0.1,129
988,1413,1,0.5,1,4,1,45,0.4,104
1086,1869,0,2.6,0,0,1,30,0.4,152
677,1260,0,0.6,0,5,1,13,1.0,106
964,648,0,1.9,1,4,0,8,1.0,91
880,1554,0,2.7,1,3,1,47,0.7,185
217,1336,0,0.9,0,5,1,17,0.2,194
426,1190,1,2.2,1,9,0,47,0.3,186
1029,1658,1,1.4,1,0,1,38,0.3,159
973,516,1,0.7,1,1,0,30,0.9,138
165,517,0,1.4,1,3,1,33,0.8,183
1296,1644,0,2.8,0,0,1,44,1.0,137
334,1882,1,1.3,1,8,0,47,0.2,140
1665,1003,1,2.9,0,16,1,16,0.6,119
295,1162,0,2.0,0,0,1,56,0.7,114
1964,1753,0,0.7,0,4,0,40,0.6,191
953,852,1,1.6,1,6,1,54,0.4,147
1169,763,1,0.5,0,9,1,51,0.3,104
498,1926,1,1.7,1,1,1,33,0.4,172
116,909,1,1.4,0,0,0,53,0.3,82
1583,964,0,2.0,0,0,1,22,0.7,86
1940,1600,0,0.5,0,1,1,16,0.3,94
1588,1880,1,2.0,0,11,0,35,0.1,200
372,1703,1,1.5,1,17,1,55,0.7,138
1395,1242,1,2.4,0,8,0,28,0.2,166
1621,962,1,1.1,1,3,0,17,0.1,161
125,1659,0,2.8,0,5,0,16,0.6,89
194,640,0,0.6,0,0,0,20,0.1,163
1472,1595,0,1.9,1,1,0,63,0.9,88
1339,1988,1,0.5,0,0,1,16,0.1,94
1168,1250,1,1.4,0,8,0,44,0.2,182
1186,1039,1,2.7,0,11,0,56,0.3,125
548,641,1,1.1,0,0,1,7,0.9,192
1672,1379,1,1.5,0,6,1,9,0.8,118
1398,1540,0,0.7,1,0,1,29,0.1,157
174,534,0,0.5,1,0,1,16,1.0,189
1154,987,0,2.0,1,1,0,20,0.8,192
557,1519,0,2.1,0,0,0,32,0.7,200
1410,856,0,1.9,1,0,1,14,0.6,115
521,1128,1,1.1,0,9,1,46,0.9,135
1984,797,0,2.2,1,0,0,37,0.9,144
209,1277,1,0.5,1,0,1,35,0.4,142
60,1484,0,3.0,0,3,0,12,0.6,134
540,504,1,0.5,1,5,0,16,0.1,166
576,587,1,2.3,1,4,0,6,0.7,170
466,1653,0,0.5,1,2,1,37,0.9,176
1430,1672,1,1.7,1,1,1,44,0.5,172
1892,1179,1,0.5,0,7,1,32,0.3,182
900,1005,0,0.5,0,0,0,46,0.4,133
683,1713,0,2.5,1,1,0,27,0.9,127
168,1572,0,0.6,0,0,1,58,0.2,157
9,509,1,0.6,1,2,1,9,0.1,93
1969,640,0,1.3,1,5,1,6,0.6,143
1242,989,1,1.6,1,11,1,24,0.6,156
236,1133,0,1.4,0,3,0,42,0.1,199
645,1841,1,1.3,1,6,0,14,0.6,167
1359,1949,1,0.5,1,2,1,31,0.1,145
771,550,1,0.5,0,16,1,23,0.1,145
1202,1068,0,2.7,1,0,1,45,0.6,194
1046,1057,0,2.1,0,4,0,5,0.5,154
112,867,0,1.4,1,0,1,4,0.7,135
818,1320,0,0.5,1,7,0,48,0.2,168
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
"noise_multiplier": 2.0, "noise_multiplier": 2.0,
"l2_norm_clip": 1.0, "l2_norm_clip": 1.0,
"secure_mode": true, "secure_mode": true,
"learning_rate": "optimal", "learning_rate": 1e-0,
"alpha": 0.0001, "alpha": 1e-4,
"batch_size": 100, "batch_size": 100,
"global_epoch": 10, "global_epoch": 10,
"local_epoch": 1, "local_epoch": 1,
...@@ -40,7 +40,7 @@ ...@@ -40,7 +40,7 @@
"model_path": "data/result/Charlie_model.pkl" "model_path": "data/result/Charlie_model.pkl"
}, },
"Alice": { "Alice": {
"data_set": "hfl_server_fake_data" "data_set": "fl_fake_data"
} }
} }
} }
......
...@@ -16,8 +16,8 @@ ...@@ -16,8 +16,8 @@
"process": "train", "process": "train",
"task_name": "HFL_logistic_regression_binclass_paillier_train", "task_name": "HFL_logistic_regression_binclass_paillier_train",
"n_length": 2048, "n_length": 2048,
"learning_rate": "optimal", "learning_rate": 1e-0,
"alpha": 0.01, "alpha": 1e-4,
"batch_size": 100, "batch_size": 100,
"global_epoch": 10, "global_epoch": 10,
"local_epoch": 1, "local_epoch": 1,
...@@ -37,7 +37,7 @@ ...@@ -37,7 +37,7 @@
"model_path": "data/result/Charlie_model.pkl" "model_path": "data/result/Charlie_model.pkl"
}, },
"Alice": { "Alice": {
"data_set": "hfl_server_fake_data" "data_set": "fl_fake_data"
} }
} }
} }
......
...@@ -15,8 +15,8 @@ ...@@ -15,8 +15,8 @@
"method": "Plaintext", "method": "Plaintext",
"process": "train", "process": "train",
"task_name": "HFL_logistic_regression_binclass_plaintext_train", "task_name": "HFL_logistic_regression_binclass_plaintext_train",
"learning_rate": "optimal", "learning_rate": 1e-0,
"alpha": 0.0001, "alpha": 1e-4,
"batch_size": 100, "batch_size": 100,
"global_epoch": 10, "global_epoch": 10,
"local_epoch": 1, "local_epoch": 1,
...@@ -36,7 +36,7 @@ ...@@ -36,7 +36,7 @@
"model_path": "data/result/Charlie_model.pkl" "model_path": "data/result/Charlie_model.pkl"
}, },
"Alice": { "Alice": {
"data_set": "hfl_server_fake_data" "data_set": "fl_fake_data"
} }
} }
} }
......
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
"noise_multiplier": 2.0, "noise_multiplier": 2.0,
"l2_norm_clip": 1.0, "l2_norm_clip": 1.0,
"secure_mode": true, "secure_mode": true,
"learning_rate": "optimal", "learning_rate": 1e-1,
"alpha": 0.0005, "alpha": 1e-4,
"batch_size": 100, "batch_size": 100,
"global_epoch": 10, "global_epoch": 10,
"local_epoch": 1, "local_epoch": 1,
...@@ -40,7 +40,7 @@ ...@@ -40,7 +40,7 @@
"model_path": "data/result/Charlie_model.pkl" "model_path": "data/result/Charlie_model.pkl"
}, },
"Alice": { "Alice": {
"data_set": "hfl_server_fake_data" "data_set": "fl_fake_data"
} }
} }
} }
......
...@@ -15,8 +15,8 @@ ...@@ -15,8 +15,8 @@
"method": "Plaintext", "method": "Plaintext",
"process": "train", "process": "train",
"task_name": "HFL_logistic_regression_multiclass_plaintext_train", "task_name": "HFL_logistic_regression_multiclass_plaintext_train",
"learning_rate": "optimal", "learning_rate": 1e-1,
"alpha": 0.005, "alpha": 1e-4,
"batch_size": 100, "batch_size": 100,
"global_epoch": 10, "global_epoch": 10,
"local_epoch": 1, "local_epoch": 1,
...@@ -36,7 +36,7 @@ ...@@ -36,7 +36,7 @@
"model_path": "data/result/Charlie_model.pkl" "model_path": "data/result/Charlie_model.pkl"
}, },
"Alice": { "Alice": {
"data_set": "hfl_server_fake_data" "data_set": "fl_fake_data"
} }
} }
} }
......
...@@ -4,33 +4,37 @@ ...@@ -4,33 +4,37 @@
}, },
"component_params": { "component_params": {
"roles": { "roles": {
"host": "Bob",
"guest": [ "guest": [
"Charlie" "Charlie"
],
"host": [
"Bob"
] ]
}, },
"common_params": { "common_params": {
"model": "HeteroLRInfer", "model": "VFL_logistic_regression",
"task_name": "predict", "method": "Plaintext",
"metric_path": "data/result/metrics.json", "process": "train",
"model_pred": "data/result/pred.csv" "task_name": "VFL_logistic_regression_binclass_plaintext_train",
"learning_rate": 1e-0,
"alpha": 1e-4,
"epoch": 10,
"shuffle_seed": 0,
"batch_size": 100,
"print_metrics": true
}, },
"role_params": { "role_params": {
"Bob": { "Bob": {
"data_set": "test_hetero_xgb_host", "data_set": "binclass_vfl_train_host",
"id": "id",
"selected_column": null, "selected_column": null,
"id": "id",
"label": "y", "label": "y",
"model_path": "data/result/host_model.pkl" "model_path": "data/result/host_model.pkl",
"metric_path": "data/result/metrics.json"
}, },
"Charlie": { "Charlie": {
"data_set": "test_hetero_xgb_guest", "data_set": "binclass_vfl_train_guest",
"id": "id",
"model_path": "data/result/guest_model.pkl",
"selected_column": null, "selected_column": null,
"label": null "id": "id",
"model_path": "data/result/guest_model.pkl"
} }
} }
} }
......
{
"party_info": {
"task_manager": "127.0.0.1:50050"
},
"component_params": {
"roles": {
"host": "Bob",
"guest": [
"Charlie"
]
},
"common_params": {
"model": "VFL_logistic_regression",
"process": "predict",
"task_name": "VFL_logistic_regression_binclass_predict"
},
"role_params": {
"Bob": {
"data_set": "binclass_vfl_test_host",
"model_path": "data/result/host_model.pkl",
"predict_path": "data/result/host_predict.csv"
},
"Charlie": {
"data_set": "binclass_vfl_test_guest",
"model_path": "data/result/guest_model.pkl"
}
}
}
}
\ No newline at end of file
...@@ -4,49 +4,37 @@ ...@@ -4,49 +4,37 @@
}, },
"component_params": { "component_params": {
"roles": { "roles": {
"host": "Bob",
"guest": [ "guest": [
"Charlie" "Charlie"
],
"host": [
"Bob"
] ]
}, },
"common_params": { "common_params": {
"model": "HeteroLR", "model": "VFL_logistic_regression",
"task_name": "train", "method": "Plaintext",
"learning_rate": 0.01, "process": "train",
"alpha": 0.0001, "task_name": "VFL_logistic_regression_multiclass_plaintext_train",
"epochs": 50, "learning_rate": 1e-0,
"penalty": "l2", "alpha": 1e-4,
"optimal_method": "momentum", "epoch": 10,
"momentum": 0.7, "shuffle_seed": 0,
"random_state": 2023, "batch_size": 100,
"scale_type": "z-score", "print_metrics": true
"batch_size": 512,
"sample_method": "random",
"sample_ratio": 0.3,
"loss_type": "log",
"prev_grad": 0,
"metric_path": "data/result/metrics.json",
"model_pred": "data/result/pred.csv"
}, },
"role_params": { "role_params": {
"Bob": { "Bob": {
"data_set": "train_hetero_xgb_host", "data_set": "multiclass_vfl_train_host",
"id": "id",
"selected_column": null, "selected_column": null,
"add_noise": "regular", "id": "id",
"tol": 0.001,
"label": "y", "label": "y",
"model_path": "data/result/host_model.pkl", "model_path": "data/result/host_model.pkl",
"n_iter_no_change": 5 "metric_path": "data/result/metrics.json"
}, },
"Charlie": { "Charlie": {
"data_set": "train_hetero_xgb_guest", "data_set": "multiclass_vfl_train_guest",
"id": "id",
"model_path": "data/result/guest_model.pkl",
"selected_column": null, "selected_column": null,
"label": null "id": "id",
"model_path": "data/result/guest_model.pkl"
} }
} }
} }
......
{
"party_info": {
"task_manager": "127.0.0.1:50050"
},
"component_params": {
"roles": {
"host": "Bob",
"guest": [
"Charlie"
]
},
"common_params": {
"model": "VFL_logistic_regression",
"process": "predict",
"task_name": "VFL_logistic_regression_multiclass_predict"
},
"role_params": {
"Bob": {
"data_set": "multiclass_vfl_test_host",
"model_path": "data/result/host_model.pkl",
"predict_path": "data/result/host_predict.csv"
},
"Charlie": {
"data_set": "multiclass_vfl_test_guest",
"model_path": "data/result/guest_model.pkl"
}
}
}
}
\ No newline at end of file
...@@ -41,7 +41,7 @@ ...@@ -41,7 +41,7 @@
"model_path": "data/result/Charlie_model.pkl" "model_path": "data/result/Charlie_model.pkl"
}, },
"Alice": { "Alice": {
"data_set": "hfl_server_fake_data" "data_set": "fl_fake_data"
} }
} }
} }
......
...@@ -38,7 +38,7 @@ ...@@ -38,7 +38,7 @@
"model_path": "data/result/Charlie_model.pkl" "model_path": "data/result/Charlie_model.pkl"
}, },
"Alice": { "Alice": {
"data_set": "hfl_server_fake_data" "data_set": "fl_fake_data"
} }
} }
} }
......
...@@ -37,7 +37,7 @@ ...@@ -37,7 +37,7 @@
"model_path": "data/result/Charlie_model.pkl" "model_path": "data/result/Charlie_model.pkl"
}, },
"Alice": { "Alice": {
"data_set": "hfl_server_fake_data" "data_set": "fl_fake_data"
} }
} }
} }
......
...@@ -34,7 +34,7 @@ ...@@ -34,7 +34,7 @@
"model_path": "data/result/Charlie_model.pkl" "model_path": "data/result/Charlie_model.pkl"
}, },
"Alice": { "Alice": {
"data_set": "hfl_server_fake_data" "data_set": "fl_fake_data"
} }
} }
} }
......
...@@ -41,7 +41,7 @@ ...@@ -41,7 +41,7 @@
"model_path": "data/result/Charlie_model.pkl" "model_path": "data/result/Charlie_model.pkl"
}, },
"Alice": { "Alice": {
"data_set": "hfl_server_fake_data" "data_set": "fl_fake_data"
} }
} }
} }
......
...@@ -38,7 +38,7 @@ ...@@ -38,7 +38,7 @@
"model_path": "data/result/Charlie_model.pkl" "model_path": "data/result/Charlie_model.pkl"
}, },
"Alice": { "Alice": {
"data_set": "hfl_server_fake_data" "data_set": "fl_fake_data"
} }
} }
} }
......
...@@ -41,7 +41,7 @@ ...@@ -41,7 +41,7 @@
"model_path": "data/result/Charlie_model.pkl" "model_path": "data/result/Charlie_model.pkl"
}, },
"Alice": { "Alice": {
"data_set": "hfl_server_fake_data" "data_set": "fl_fake_data"
} }
} }
} }
......
...@@ -38,7 +38,7 @@ ...@@ -38,7 +38,7 @@
"model_path": "data/result/Charlie_model.pkl" "model_path": "data/result/Charlie_model.pkl"
}, },
"Alice": { "Alice": {
"data_set": "hfl_server_fake_data" "data_set": "fl_fake_data"
} }
} }
} }
......
import numpy as np import numpy as np
import logging from primihub.utils.logger_util import logger
class LogisticRegression: class LogisticRegression:
# l2 regularization by default, alpha is the penalty parameter # l2 regularization by default, alpha is the penalty parameter
def __init__(self, x, y, learning_rate=0.2, alpha=0.0001): def __init__(self, x, y, learning_rate=0.2, alpha=0.0001):
self.learning_rate = learning_rate self.learning_rate = learning_rate
self.alpha = alpha # regularization parameter self.alpha = alpha
self.t = 0 # iteration number, used for learning rate decay
if max(y) == 1: max_y = max(y)
if max_y == 1:
self.theta = np.zeros(x.shape[1] + 1) self.theta = np.zeros(x.shape[1] + 1)
self.multiclass = False self.multiclass = False
else: else:
self.theta = np.zeros((x.shape[1] + 1, max(y) + 1)) self.theta = np.zeros((x.shape[1] + 1, max_y + 1))
self.multiclass = True self.multiclass = True
# 'optimal' learning rate refer to sklearn SGDClassifier
def dloss(p, y):
z = p * y
if z > 18.0:
return np.exp(-z) * -y
if z < -18.0:
return -y
return -y / (np.exp(z) + 1.0)
if self.learning_rate == 'optimal':
typw = np.sqrt(1.0 / np.sqrt(alpha))
# computing eta0, the initial learning rate
initial_eta0 = typw / max(1.0, dloss(-typw, 1.0))
# initialize t such that eta at first sample equals eta0
self.optimal_init = 1.0 / (initial_eta0 * alpha)
def sigmoid(self, x): def sigmoid(self, x):
return 1.0 / (1.0 + np.exp(-x)) return 1.0 / (1.0 + np.exp(-x))
...@@ -80,19 +65,9 @@ class LogisticRegression: ...@@ -80,19 +65,9 @@ class LogisticRegression:
def gradient_descent(self, x, y): def gradient_descent(self, x, y):
grad = self.compute_grad(x, y) grad = self.compute_grad(x, y)
self.theta -= self.learning_rate * grad self.theta -= self.learning_rate * grad
def gradient_descent_olr(self, x, y):
# optimal learning rate: 1.0 / (alpha * (t0 + t))
grad = self.compute_grad(x, y)
learning_rate = 1.0 / (self.alpha * (self.optimal_init + self.t))
self.t += 1
self.theta -= learning_rate * grad
def fit(self, x, y): def fit(self, x, y):
if self.learning_rate == 'optimal': self.gradient_descent(x, y)
self.gradient_descent_olr(x, y)
else:
self.gradient_descent(x, y)
def predict_prob(self, x): def predict_prob(self, x):
z = x.dot(self.theta[1:]) + self.theta[0] z = x.dot(self.theta[1:]) + self.theta[0]
...@@ -198,7 +173,7 @@ class LogisticRegression_Paillier(LogisticRegression, PaillierFunc): ...@@ -198,7 +173,7 @@ class LogisticRegression_Paillier(LogisticRegression, PaillierFunc):
def compute_grad(self, x, y): def compute_grad(self, x, y):
if self.multiclass: if self.multiclass:
logging.error("Paillier method doesn't support multiclass classification") logger.error("Paillier method doesn't support multiclass classification")
else: else:
# Approximate gradient # Approximate gradient
# First order of taylor expansion: sigmoid(x) = 0.5 + 0.25 * (x.dot(w) + b) # First order of taylor expansion: sigmoid(x) = 0.5 + 0.25 * (x.dot(w) + b)
...@@ -212,4 +187,4 @@ class LogisticRegression_Paillier(LogisticRegression, PaillierFunc): ...@@ -212,4 +187,4 @@ class LogisticRegression_Paillier(LogisticRegression, PaillierFunc):
return (0.5 - y).dot(x.dot(self.theta[1:] + self.theta[0])) / x.shape[0] return (0.5 - y).dot(x.dot(self.theta[1:] + self.theta[0])) / x.shape[0]
def CELoss(self, x, y, eps=1e-20): def CELoss(self, x, y, eps=1e-20):
logging.error("Paillier method doesn't support multiclass classification") logger.error("Paillier method doesn't support multiclass classification")
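For reference, the 'optimal' learning-rate schedule deleted in this diff followed sklearn's SGDClassifier: the step size decays as eta_t = 1.0 / (alpha * (t0 + t)), with t0 chosen so the first step matches a heuristic initial rate derived from the log-loss derivative. Below is a minimal standalone sketch reproducing the removed behavior, which the configs' new fixed numeric learning rates replace; the function names here are illustrative, not part of the current codebase.

import numpy as np

def dloss(p, y):
    # derivative of the log loss, as in the removed code
    z = p * y
    if z > 18.0:
        return np.exp(-z) * -y
    if z < -18.0:
        return -y
    return -y / (np.exp(z) + 1.0)

def optimal_schedule(alpha, n_steps):
    """Reproduce the deleted 'optimal' learning-rate schedule."""
    typw = np.sqrt(1.0 / np.sqrt(alpha))
    initial_eta0 = typw / max(1.0, dloss(-typw, 1.0))
    t0 = 1.0 / (initial_eta0 * alpha)  # eta at step 0 equals initial_eta0
    return [1.0 / (alpha * (t0 + t)) for t in range(n_steps)]

print(optimal_schedule(1e-4, 3))  # first steps ~ [10.0, 9.99, 9.98] for alpha=1e-4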
import pickle
import json
import pandas as pd
import numpy as np
from sklearn import metrics
from primihub.FL.utils.net_work import GrpcClient
from primihub.utils.evaluation import evaluate_ks_and_roc_auc, plot_lift_and_gain, eval_acc
from primihub.FL.utils.base import BaseModel
from primihub.FL.utils.dataset import read_csv
from primihub.FL.utils.file import check_directory_exist
class HeteroLrHostInfer(BaseModel):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.set_inputs()
remote_party = self.roles[self.role_params['others_role']][0]
self.channel = GrpcClient(local_party=self.role_params['self_name'],
remote_party=remote_party,
node_info=self.node_info,
task_info=self.task_info)
def set_inputs(self):
# set common params
self.model = self.common_params['model']
self.task_name = self.common_params['task_name']
self.metric_path = self.common_params['metric_path']
self.model_pred = self.common_params['model_pred']
# set role params
self.data_set = self.role_params['data_set']
self.id = self.role_params['id']
self.selected_column = self.role_params['selected_column']
self.label = self.role_params['label']
self.model_path = self.role_params['model_path']
# read from data path
data_path = self.role_params['data']['data_path']
self.data = read_csv(data_path, selected_column=None, id=None)
def load_dict(self):
with open(self.model_path, "rb") as current_model:
model_dict = pickle.load(current_model)
self.weights = model_dict['weights']
self.bias = model_dict['bias']
self.col_names = model_dict['columns']
self.std = model_dict['std']
def preprocess(self):
if self.id in self.data.columns:
self.data.pop(self.id)
if self.label in self.data.columns:
self.y = self.data.pop(self.label).values
if len(self.col_names) > 0:
self.data = self.data[self.col_names].values
if self.std is not None:
self.data = self.std.transform(self.data)
def predict_raw(self, x):
host_part = np.dot(x, self.weights) + self.bias
guest_part = self.channel.recv("guest_part")
h = host_part + guest_part
return h
def sigmoid(self, x):
return 1.0 / (1 + np.exp(-x))
def run(self):
origin_data = self.data.copy()
self.load_dict()
self.preprocess()
y_hat = self.predict_raw(self.data)
pred_prob = self.sigmoid(y_hat)
pred_y = (pred_prob > 0.5).astype('int')
pred_df = pd.DataFrame({'pred_prob': pred_prob,
'pred_y': pred_y})
data_result = pd.concat([origin_data, pred_df], axis=1)
check_directory_exist(self.model_pred)
data_result.to_csv(self.model_pred, index=False)
# if self.label is not None:
# acc = sum((pred_y == self.y).astype('int')) / self.data.shape[0]
# ks, auc = evaluate_ks_and_roc_auc(self.y, self.sigmoid(y_hat))
# fpr, tpr, threshold = metrics.roc_curve(self.y, self.sigmoid(y_hat))
# evals = {
# "test_acc": acc,
# "test_ks": ks,
# "test_auc": auc,
# "test_fpr": fpr.tolist(),
# "test_tpr": tpr.tolist()
# }
# metrics_buff = json.dumps(evals)
# check_directory_exist(self.metric_path)
# with open(self.metric_path, 'w') as filePath:
# filePath.write(metrics_buff)
# print("test acc is", evals)
def get_summary(self):
return {}
def get_outputs(self):
return {}
def get_status(self):
return {}
class HeteroLrGuestInfer(BaseModel):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.set_inputs()
remote_party = self.roles[self.role_params['others_role']][0]
self.channel = GrpcClient(local_party=self.role_params['self_name'],
remote_party=remote_party,
node_info=self.node_info,
task_info=self.task_info)
def set_inputs(self):
self.model = self.common_params['model']
self.task_name = self.common_params['task_name']
# set role params
self.data_set = self.role_params['data_set']
self.id = self.role_params['id']
self.selected_column = self.role_params['selected_column']
self.label = self.role_params['label']
self.model_path = self.role_params['model_path']
# read from data path
data_path = self.role_params['data']['data_path']
self.data = read_csv(data_path, selected_column=None, id=None)
def load_dict(self):
with open(self.model_path, "rb") as current_model:
model_dict = pickle.load(current_model)
self.weights = model_dict['weights']
self.bias = model_dict['bias']
self.col_names = model_dict['columns']
self.std = model_dict['std']
def preprocess(self):
if self.id in self.data.columns:
self.data.pop(self.id)
if self.label in self.data.columns:
self.y = self.data.pop(self.label).values
if len(self.col_names) > 0:
self.data = self.data[self.col_names].values
if self.std is not None:
self.data = self.std.transform(self.data)
def predict_raw(self, x):
guest_part = np.dot(x, self.weights) + self.bias
self.channel.send("guest_part", guest_part)
def run(self):
self.load_dict()
self.preprocess()
self.predict_raw(self.data)
def get_summary(self):
return {}
def get_outputs(self):
return {}
def get_status(self):
return {}
\ No newline at end of file
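The deleted hetero_lr_infer.py split inference across two parties: the guest computed its share of the linear predictor and sent it over the gRPC channel, and the host added its own share plus bias before applying the sigmoid. A self-contained numpy sketch of that aggregation, with made-up arrays standing in for real data and the channel plumbing omitted:

import numpy as np

rng = np.random.default_rng(0)
x_host, x_guest = rng.normal(size=(5, 3)), rng.normal(size=(5, 2))
w_host, b_host = rng.normal(size=3), 0.1
w_guest, b_guest = rng.normal(size=2), -0.2

guest_part = x_guest @ w_guest + b_guest   # what the guest sent via channel.send("guest_part", ...)
host_part = x_host @ w_host + b_host       # computed locally on the host
prob = 1.0 / (1.0 + np.exp(-(host_part + guest_part)))
pred = (prob > 0.5).astype(int)            # binary prediction, as in HeteroLrHostInfer.run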
from primihub.FL.utils.net_work import GrpcClient from primihub.FL.utils.net_work import GrpcClient
from primihub.FL.utils.base import BaseModel from primihub.FL.utils.base import BaseModel
from primihub.FL.utils.file import check_directory_exist from primihub.FL.utils.file import check_directory_exist
from primihub.FL.utils.dataset import read_csv,\ from primihub.FL.utils.dataset import read_data,\
DataLoader,\ DataLoader,\
DPDataLoader DPDataLoader
from primihub.utils.logger_util import logger from primihub.utils.logger_util import logger
...@@ -19,6 +19,7 @@ from .base import LogisticRegression,\ ...@@ -19,6 +19,7 @@ from .base import LogisticRegression,\
class LogisticRegressionClient(BaseModel): class LogisticRegressionClient(BaseModel):
def __init__(self, **kwargs): def __init__(self, **kwargs):
super().__init__(**kwargs) super().__init__(**kwargs)
...@@ -37,10 +38,11 @@ class LogisticRegressionClient(BaseModel): ...@@ -37,10 +38,11 @@ class LogisticRegressionClient(BaseModel):
task_info=self.task_info) task_info=self.task_info)
# load dataset # load dataset
data_path = self.role_params['data']['data_path']
selected_column = self.common_params['selected_column'] selected_column = self.common_params['selected_column']
id = self.common_params['id'] id = self.common_params['id']
x = read_csv(data_path, selected_column, id) x = read_data(data_info=self.role_params['data'],
selected_column=selected_column,
id=id)
label = self.common_params['label'] label = self.common_params['label']
y = x.pop(label).values y = x.pop(label).values
x = x.values x = x.values
...@@ -66,7 +68,9 @@ class LogisticRegressionClient(BaseModel): ...@@ -66,7 +68,9 @@ class LogisticRegressionClient(BaseModel):
self.common_params['alpha'], self.common_params['alpha'],
server_channel) server_channel)
else: else:
logger.error(f"Not supported method: {method}") error_msg = f"Unsupported method: {method}"
logger.error(error_msg)
raise RuntimeError(error_msg)
# data preprocessing # data preprocessing
# minmaxscaler # minmaxscaler
...@@ -148,8 +152,7 @@ class LogisticRegressionClient(BaseModel): ...@@ -148,8 +152,7 @@ class LogisticRegressionClient(BaseModel):
modelFile = pickle.load(file_path) modelFile = pickle.load(file_path)
# load dataset # load dataset
data_path = self.role_params['data']['data_path'] origin_data = read_data(data_info=self.role_params['data'])
origin_data = read_csv(data_path, selected_column=None, id=None)
x = origin_data.copy() x = origin_data.copy()
selected_column = modelFile['selected_column'] selected_column = modelFile['selected_column']
......
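Across these diffs the handling of unsupported options changes from log-only to log-then-raise, so a misconfigured task now fails immediately instead of continuing without a model. A generic sketch of the pattern; the dispatcher function and supported set below are illustrative, not the actual primihub API:

import logging

logger = logging.getLogger(__name__)

def create_trainer(method: str):
    """Illustrative dispatcher: return something for known methods, fail fast otherwise."""
    supported = {'Plaintext', 'DPSGD', 'Paillier'}
    if method in supported:
        return f"<{method} trainer>"  # stand-in for the real trainer object
    error_msg = f"Unsupported method: {method}"
    logger.error(error_msg)           # keep the message in the task log
    raise RuntimeError(error_msg)     # and abort instead of running with no model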
...@@ -13,6 +13,7 @@ from .base import PaillierFunc ...@@ -13,6 +13,7 @@ from .base import PaillierFunc
class LogisticRegressionServer(BaseModel): class LogisticRegressionServer(BaseModel):
def __init__(self, **kwargs): def __init__(self, **kwargs):
super().__init__(**kwargs) super().__init__(**kwargs)
...@@ -38,7 +39,9 @@ class LogisticRegressionServer(BaseModel): ...@@ -38,7 +39,9 @@ class LogisticRegressionServer(BaseModel):
self.common_params['n_length'], self.common_params['n_length'],
client_channel) client_channel)
else: else:
logger.error(f"Not supported method: {method}") error_msg = f"Unsupported method: {method}"
logger.error(error_msg)
raise RuntimeError(error_msg)
# data preprocessing # data preprocessing
# minmaxscaler # minmaxscaler
...@@ -146,8 +149,10 @@ class Plaintext_DPSGD_Server: ...@@ -146,8 +149,10 @@ class Plaintext_DPSGD_Server:
metrics_name = metrics_name.lower() metrics_name = metrics_name.lower()
supported_metrics = ['loss', 'acc', 'auc'] supported_metrics = ['loss', 'acc', 'auc']
if metrics_name not in supported_metrics: if metrics_name not in supported_metrics:
logger.error(f"""Not supported metrics {metrics_name}, error_msg = f"""Unsupported metrics {metrics_name},
use {supported_metrics} instead""") use {supported_metrics} instead"""
logger.error(error_msg)
raise RuntimeError(error_msg)
client_metrics = self.client_channel.recv_all(metrics_name) client_metrics = self.client_channel.recv_all(metrics_name)
......
import numpy as np
from primihub.utils.logger_util import logger
from .base import LogisticRegression
class LogisticRegression_Host_Plaintext(LogisticRegression):
def __init__(self, x, y, learning_rate=0.2, alpha=0.0001):
super().__init__(x, y, learning_rate, alpha)
def compute_z(self, x, guest_z):
z = x.dot(self.theta[1:]) + self.theta[0]
z += np.array(guest_z).sum(axis=0)
return z
def predict_prob(self, z):
if self.multiclass:
return self.softmax(z)
else:
return self.sigmoid(z)
def compute_error(self, y, z):
if self.multiclass:
error = self.predict_prob(z)
idx = np.arange(len(y))
error[idx, y] -= 1
else:
error = self.predict_prob(z) - y
return error
def compute_regular_loss(self, guest_regular_loss):
return 0.5 * self.alpha * (self.theta ** 2).sum() \
+ sum(guest_regular_loss)
def BCELoss(self, y, z, regular_loss):
return (np.maximum(z, 0.).sum() - y.dot(z) +
np.log1p(np.exp(-np.abs(z))).sum()) / z.shape[0] \
+ regular_loss
def CELoss(self, y, z, regular_loss, eps=1e-20):
prob = self.predict_prob(z)
return -np.sum(np.log(np.clip(prob[np.arange(len(y)), y], eps, 1.))) \
/ z.shape[0] + regular_loss
def loss(self, y, z, regular_loss):
if self.multiclass:
return self.CELoss(y, z, regular_loss)
else:
return self.BCELoss(y, z, regular_loss)
def compute_grad(self, x, error):
if self.multiclass:
return np.vstack((error.sum(axis=0, keepdims=True), x.T.dot(error))) \
/ x.shape[0] + self.alpha * self.theta
else:
return np.hstack((error.sum(keepdims=True), x.T.dot(error))) \
/ x.shape[0] + self.alpha * self.theta
def gradient_descent(self, x, error):
grad = self.compute_grad(x, error)
self.theta -= self.learning_rate * grad
def fit(self, x, error):
self.gradient_descent(x, error)
class LogisticRegression_Guest_Plaintext(LogisticRegression_Host_Plaintext):
def __init__(self, x, learning_rate=0.2, alpha=0.0001, output_dim=1):
self.learning_rate = learning_rate
self.alpha = alpha
if output_dim > 2:
self.theta = np.zeros((x.shape[1], output_dim))
self.multiclass = True
else:
self.theta = np.zeros(x.shape[1])
self.multiclass = False
def compute_z(self, x):
return x.dot(self.theta)
def compute_regular_loss(self):
return 0.5 * self.alpha * (self.theta ** 2).sum()
def compute_grad(self, x, error):
return x.T.dot(error) / x.shape[0] + self.alpha * self.theta
\ No newline at end of file
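Putting the two plaintext classes together: the guest computes only its share x·theta of the linear predictor, the host adds its own share plus the intercept, forms the shared error, and each party then applies its own regularized gradient step. A self-contained numpy sketch of one such step on made-up data (single process, no channels):

import numpy as np

rng = np.random.default_rng(42)
x_host, x_guest = rng.normal(size=(4, 3)), rng.normal(size=(4, 2))
y = np.array([0, 1, 1, 0])
theta_host = np.zeros(3 + 1)   # host keeps the intercept at index 0
theta_guest = np.zeros(2)      # guest model has no intercept

# forward pass: host aggregates every party's share of the linear predictor
z = x_host @ theta_host[1:] + theta_host[0] + x_guest @ theta_guest
error = 1.0 / (1.0 + np.exp(-z)) - y   # binary case, computed on the host

# each party updates its own parameters from the shared error
alpha, lr = 1e-4, 1e-0                 # matches the configs above
grad_host = np.hstack((error.sum(keepdims=True), x_host.T @ error)) / len(y) \
    + alpha * theta_host
grad_guest = x_guest.T @ error / len(y) + alpha * theta_guest
theta_host -= lr * grad_host
theta_guest -= lr * grad_guest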
This diff is collapsed.
This diff is collapsed.
...@@ -7,6 +7,10 @@ ...@@ -7,6 +7,10 @@
"client": "primihub.FL.logistic_regression.hfl_client.LogisticRegressionClient", "client": "primihub.FL.logistic_regression.hfl_client.LogisticRegressionClient",
"server": "primihub.FL.logistic_regression.hfl_server.LogisticRegressionServer" "server": "primihub.FL.logistic_regression.hfl_server.LogisticRegressionServer"
}, },
"VFL_logistic_regression": {
"guest": "primihub.FL.logistic_regression.vfl_guest.LogisticRegressionGuest",
"host": "primihub.FL.logistic_regression.vfl_host.LogisticRegressionHost"
},
"HFL_neural_network": { "HFL_neural_network": {
"client": "primihub.FL.neural_network.hfl_client.NeuralNetworkClient", "client": "primihub.FL.neural_network.hfl_client.NeuralNetworkClient",
"server": "primihub.FL.neural_network.hfl_server.NeuralNetworkServer" "server": "primihub.FL.neural_network.hfl_server.NeuralNetworkServer"
...@@ -27,14 +31,6 @@ ...@@ -27,14 +31,6 @@
"guest": "primihub.FL.xgboost.gbt.VGBGuestInfer", "guest": "primihub.FL.xgboost.gbt.VGBGuestInfer",
"host": "primihub.FL.xgboost.gbt.VGBTHostInfer" "host": "primihub.FL.xgboost.gbt.VGBTHostInfer"
}, },
"HeteroLR": {
"guest": "primihub.FL.logistic_regression.hetero_lr.HeteroLrGuest",
"host": "primihub.FL.logistic_regression.hetero_lr.HeteroLrHost"
},
"HeteroLRInfer": {
"guest": "primihub.FL.logistic_regression.hetero_lr_infer.HeteroLrGuestInfer",
"host": "primihub.FL.logistic_regression.hetero_lr_infer.HeteroLrHostInfer"
},
"HeteroIV": { "HeteroIV": {
"guest": "primihub.FL.preprocessing.woe_iv.IVGuest", "guest": "primihub.FL.preprocessing.woe_iv.IVGuest",
"host": "primihub.FL.preprocessing.woe_iv.IVHost" "host": "primihub.FL.preprocessing.woe_iv.IVHost"
......
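This registry maps a (model, role) pair from the task config to a dotted class path. A plausible lookup sketch, assuming the map is loaded from this JSON; the resolve helper is illustrative, not the actual primihub dispatcher:

import importlib

def resolve(model_map: dict, model: str, role: str):
    """Import and return the class registered for (model, role)."""
    dotted = model_map[model][role]  # e.g. "primihub.FL.logistic_regression.vfl_host.LogisticRegressionHost"
    module_path, class_name = dotted.rsplit('.', 1)
    return getattr(importlib.import_module(module_path), class_name)

# model_map = json.loads(...)  # the JSON above
# host_cls = resolve(model_map, "VFL_logistic_regression", "host")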
...@@ -7,12 +7,15 @@ from .cnn import NeuralNetwork as CNN ...@@ -7,12 +7,15 @@ from .cnn import NeuralNetwork as CNN
def create_model(method, output_dim, device, nn_model='mlp'): def create_model(method, output_dim, device, nn_model='mlp'):
# select model # select model
nn_model = nn_model.lower()
if nn_model == 'mlp': if nn_model == 'mlp':
model = MLP(output_dim) model = MLP(output_dim)
elif nn_model == 'cnn': elif nn_model == 'cnn':
model = CNN(output_dim) model = CNN(output_dim)
else: else:
logger.error(f"Unsupported NN model: {nn_model}") error_msg = f"Unsupported NN model: {nn_model}"
logger.error(error_msg)
raise RuntimeError(error_msg)
# validate model (DPSGD) # validate model (DPSGD)
if method == 'DPSGD': if method == 'DPSGD':
...@@ -25,6 +28,7 @@ def create_model(method, output_dim, device, nn_model='mlp'): ...@@ -25,6 +28,7 @@ def create_model(method, output_dim, device, nn_model='mlp'):
def choose_loss_fn(output_dim, task): def choose_loss_fn(output_dim, task):
task = task.lower()
if task == 'classification': if task == 'classification':
if output_dim == 1: if output_dim == 1:
return torch.nn.BCEWithLogitsLoss() return torch.nn.BCEWithLogitsLoss()
...@@ -33,10 +37,13 @@ def choose_loss_fn(output_dim, task): ...@@ -33,10 +37,13 @@ def choose_loss_fn(output_dim, task):
if task == 'regression': if task == 'regression':
return torch.nn.MSELoss() return torch.nn.MSELoss()
else: else:
logger.error(f"Not supported task: {task}") error_msg = f"Unsupported task: {task}"
logger.error(error_msg)
raise RuntimeError(error_msg)
def choose_optimizer(model, optimizer, learning_rate, alpha): def choose_optimizer(model, optimizer, learning_rate, alpha):
optimizer = optimizer.lower()
if optimizer == 'adadelta': if optimizer == 'adadelta':
return torch.optim.Adadelta(model.parameters(), return torch.optim.Adadelta(model.parameters(),
lr=learning_rate, lr=learning_rate,
...@@ -78,4 +85,6 @@ def choose_optimizer(model, optimizer, learning_rate, alpha): ...@@ -78,4 +85,6 @@ def choose_optimizer(model, optimizer, learning_rate, alpha):
lr=learning_rate, lr=learning_rate,
weight_decay=alpha) weight_decay=alpha)
else: else:
logger.error(f"Not supported optimizer: {optimizer}") error_msg = f"Unsupported optimizer: {optimizer}"
\ No newline at end of file logger.error(error_msg)
raise RuntimeError(error_msg)
\ No newline at end of file
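With the lowercase normalization added above, config values like 'MLP', 'Adam', or 'Classification' now resolve the same as their lowercase forms, and anything unrecognized raises instead of being logged and ignored. A standalone sketch of the normalize-then-dispatch idea with real torch optimizers; the demo function is an assumption-laden stand-in, not the module's own API:

import torch

def choose_optimizer_demo(model, optimizer: str, learning_rate: float, alpha: float):
    # normalize once so 'Adam' and 'adam' behave identically
    optimizer = optimizer.lower()
    if optimizer == 'adam':
        return torch.optim.Adam(model.parameters(), lr=learning_rate, weight_decay=alpha)
    if optimizer == 'sgd':
        return torch.optim.SGD(model.parameters(), lr=learning_rate, weight_decay=alpha)
    raise RuntimeError(f"Unsupported optimizer: {optimizer}")

model = torch.nn.Linear(4, 1)
opt = choose_optimizer_demo(model, 'Adam', 1e-3, 1e-4)  # works despite mixed case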
...@@ -41,7 +41,9 @@ class CNNServer(BaseModel): ...@@ -41,7 +41,9 @@ class CNNServer(BaseModel):
device, device,
client_channel) client_channel)
else: else:
logger.error(f"Not supported method: {method}") error_msg = f"Unsupported method: {method}"
logger.error(error_msg)
raise RuntimeError(error_msg)
# model training # model training
logger.info("-------- start training --------") logger.info("-------- start training --------")
......
...@@ -7,12 +7,16 @@ class PreprocessBase: ...@@ -7,12 +7,16 @@ class PreprocessBase:
if FL_type in ['V', 'H']: if FL_type in ['V', 'H']:
self.FL_type = FL_type self.FL_type = FL_type
else: else:
logger.error(f"Unsupported FL type: {FL_type}") error_msg = f"Unsupported FL type: {FL_type}"
logger.error(error_msg)
raise RuntimeError(error_msg)
if role in ['host', 'guest', 'client', 'server']: if role in ['host', 'guest', 'client', 'server']:
self.role = role self.role = role
else: else:
logger.error(f"Unsupported role: {role}") error_msg = f"Unsupported role: {role}"
logger.error(error_msg)
raise RuntimeError(error_msg)
self.channel = channel self.channel = channel
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.