提交 c8d9bfeb 编写于 作者: weixin_46662084's avatar weixin_46662084

init v1.0 version

上级
# Bazel build options; select a platform with `bazel build --config=<name>`.
# Linux
build:linux --cxxopt=-std=c++17
build:linux --host_cxxopt=-std=c++17
# -w silences all compiler warnings.
build:linux --copt=-w
# libstdc++ filesystem library is separate on older GCC (needed for gcc-8).
build:linux --linkopt=-lstdc++fs
# Enable remote execution so actions are performed on the remote systems.
# NOTE(review): hardcoded public IP for the remote executor — verify this
# endpoint is still valid and trusted before using --config=remote.
build:remote --remote_executor=grpc://118.190.39.100:27941
# windows
# NOTE(review): Windows pins C++14 while Linux/macOS use C++17 — confirm intended.
build:windows --cxxopt=/std:c++14
build:windows --host_cxxopt=/std:c++14
build:windows --copt=/w
# For using M_* math constants on Windows with MSVC.
build:windows --copt=/D_USE_MATH_DEFINES
build:windows --host_copt=/D_USE_MATH_DEFINES
# macOS
build:macos --cxxopt=-std=c++17
build:macos --host_cxxopt=-std=c++17
build:macos --copt=-w
# NOTE(review): -force_flat_namespace is normally a linker flag; passing it
# via --copt may be ignored — confirm it shouldn't be --linkopt.
build:macos --copt=-force_flat_namespace
# Sets the default Apple platform to macOS.
build --apple_platform_type=macos
# Allow debugging with XCODE
build --apple_generate_dsym
# MacOS configs.
build:darwin_x86_64 --apple_platform_type=macos
build:darwin_x86_64 --macos_minimum_os=10.16
build:darwin_x86_64 --cpu=darwin_x86_64
# MacOS Big Sur with Apple Silicon M1
build:darwin_arm64 --apple_platform_type=macos
build:darwin_arm64 --macos_minimum_os=10.16
build:darwin_arm64 --cpu=darwin_arm64
# MacOS Monterey with Apple M1
build:darwin --apple_platform_type=macos
build:darwin --macos_minimum_os=12.3.1
build:darwin --cpu=darwin
# Ignore rules merged from several tool ecosystems: C++/CMake/Bazel, LaTeX,
# Visual Studio, Node.js, and Python.
# NOTE(review): several patterns are duplicated below (*.log, .*.swp, *.o,
# *.pyc, .idea, .DS_Store) — harmless, but could be deduplicated.
*.DS_Store
*.csv
.idea/
*cbp
# LaTeX build artifacts
*.aux
*.bbl
*.blg
*.log
*.out
*.toc
.*.swp
*.synctex
*.pdf
*.dvi
*.a
*.exe
*.rdb
*.0
.vscode/
data/
# CMake / build output
/lib/
/bin/
*/CMakeFiles/
/CMakeFiles/
*.cmake
CMakeCache.txt
out/
build/
# Bazel convenience symlinks at the workspace root
bazel-bin
bazel-out
bazel-primihub
bazel-testlogs
log/
*.pptx
*.xlsx
*.sty
*.gz
*.dtx
# Visual Studio artifacts
x64/*
.vs/*
*/x64/*
*/.vs/*
*/*.user
log.txt
*/log.txt
*.bin
*.o
*.psess
*.vspx
frontend/My Amplifier Results - frontend/
frontend/My Advisor Results - frontend/
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# IDEA
.idea
# macOS cache
.DS_Store
# Fedlearner related
config.mk
*.tfrecord
*.o
# Generated protobuf / gRPC sources
*.pb.cc
*.pb.h
*.pyc
*_pb2.py
*_pb2_grpc.py
.*.swp
.*.swo
server.config.js
es.match_phrase.js
es.match_phrase.json
web_console/config
example/**/data
example/**/model
example/**/exp
output
# Keep the checked-in generated test stubs despite the *_pb2*.py rules above.
!test/channel/greeter_pb2*.py
# python
*.egg-info/
*.pyc
\ No newline at end of file
此差异已折叠。
# Copyright 2022 Primihub
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ---------- Build stage: compile primihub-node and primihub-cli with Bazel ----------
FROM ubuntu:18.04 AS builder

ENV LANG=C.UTF-8
# Build-time only (ARG, not ENV): keeps apt non-interactive during the build
# without leaking the setting into any image derived from this stage.
ARG DEBIAN_FRONTEND=noninteractive

# Install all build dependencies in one layer (update + install + cleanup
# together so stale apt lists are never cached). npm is needed for bazelisk.
RUN apt-get update \
    && apt-get install -y \
        automake \
        ca-certificates \
        curl \
        g++-8 \
        gcc-8 \
        git \
        libtool \
        m4 \
        make \
        ninja-build \
        npm \
        patch \
        pkg-config \
        python-dev \
        python3 \
        python3-dev \
        unzip \
        wget \
        zip \
    && update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-8 800 --slave /usr/bin/g++ g++ /usr/bin/g++-8 \
    && rm -rf /var/lib/apt/lists/*

# Install CMake 3.20.2 from a prebuilt tarball.
# NOTE(review): download is not checksum-verified — consider pinning a sha256.
RUN wget https://primihub.oss-cn-beijing.aliyuncs.com/cmake-3.20.2-linux-x86_64.tar.gz \
    && tar -zxf cmake-3.20.2-linux-x86_64.tar.gz \
    && chmod +x cmake-3.20.2-linux-x86_64/bin/cmake \
    && ln -s `pwd`/cmake-3.20.2-linux-x86_64/bin/cmake /usr/bin/cmake \
    && rm -rf cmake-3.20.2-linux-x86_64.tar.gz

# Install bazelisk (resolves and fetches the appropriate Bazel on first use).
RUN npm install -g @bazel/bazelisk

WORKDIR /src
# COPY rather than ADD: plain local files, no tar-extraction/URL behavior needed.
COPY . /src

# Generate test data files (consumed from /tmp by the runtime stage below).
RUN cd test/primihub/script/ \
    && /bin/bash gen_logistic_data.sh

# Update pybind11 link options in BUILD.bazel.
# NOTE(review): patching by absolute line number ("354c") is fragile — it will
# silently corrupt BUILD.bazel if the file shifts; prefer a marker-based sed
# or a proper patch file.
RUN CONFIG=`python3-config --ldflags` \
    && NEWLINE="\ \ \ \ linkopts = LINK_OPTS + [\"${CONFIG}\"]," \
    && sed -i "354c ${NEWLINE}" BUILD.bazel

# Bazel build primihub-node & primihub-cli
RUN bazel build --config=linux :node :cli

# ---------- Runtime stage: minimal image with the built binaries ----------
FROM ubuntu:18.04 AS runner

RUN apt-get update \
    && apt-get install -y python3 python3-dev \
    && rm -rf /var/lib/apt/lists/*

# The Bazel output tree is reproduced at the same absolute path as in the
# builder — presumably because the binaries reference files at this location.
# NOTE(review): this hash-based cache path is brittle; it changes with the
# Bazel/workspace version — confirm it matches the builder's actual output root.
ARG TARGET_PATH=/root/.cache/bazel/_bazel_root/f8087e59fd95af1ae29e8fcb7ff1a3dc/execroot/__main__/bazel-out/k8-fastbuild/bin
WORKDIR $TARGET_PATH

# Copy binaries to TARGET_PATH
COPY --from=builder $TARGET_PATH ./
# Copy generated test data files to /tmp/
COPY --from=builder /tmp/ /tmp/

# Make symlinks to primihub-node & primihub-cli
RUN mkdir /app && ln -s $TARGET_PATH/node /app/primihub-node && ln -s $TARGET_PATH/cli /app/primihub-cli

# Change WorkDir to /app
WORKDIR /app
# Copy all test config files to /app
COPY --from=builder /src/config ./

# NOTE(review): the image runs as root; consider adding a non-root USER once the
# node's filesystem and port requirements are confirmed.
# EXPOSE is documentation only; ports are published at `docker run` time.
# gRPC server port
EXPOSE 50050
# Cryptool ports
EXPOSE 12120
EXPOSE 12121
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
\ No newline at end of file
# Primihub
## Feature
Primihub is a platform that supports Multi-Party Computation (MPC), Federated Learning, Private Set Intersection (PSI), and Private Information Retrieval (PIR), and supports extensions for data source access, data consumption, access applications, syntax, semantics, and security protocols. For details, see Primihub [Core Features](http://docs.primihub.com/docs/category/%E6%A0%B8%E5%BF%83%E7%89%B9%E6%80%A7)
## Quick start
Run a Multi-Party Computing application in 5 minutes
Install [docker](https://docs.docker.com/install/overview/) and [docker-compose](https://docs.docker.com/compose/install/)
Download the code and switch to the code root path
```
$ git clone https://github.com/primihub/primihub.git
$ cd primihub
```
## Run an MPC case
![Deployment](doc/tutorial-depolyment.jpg)
### Start the test nodes
Start three docker containers using docker-compose.
The container includes: one simple bootstrap node, three nodes
```bash
$ docker-compose up
```
Check out the running docker container
```bash
$ docker ps
```
```
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
cf875c1280be primihub-node:1.0.3 "/bin/bash -c './pri…" 11 minutes ago Up 11 minutes 0.0.0.0:12120-12121->12120-12121/tcp, 0.0.0.0:8052->50050/tcp node2_primihub
6a822ff5c6f7 primihub-node:1.0.3 "/bin/bash -c './pri…" 11 minutes ago Up 11 minutes 0.0.0.0:10120->12120/tcp, 0.0.0.0:10121->12121/tcp, 0.0.0.0:8050->50050/tcp node0_primihub
11d55ce06ff0 primihub-node:1.0.3 "/bin/bash -c './pri…" 11 minutes ago Up 11 minutes 0.0.0.0:11120->12120/tcp, 0.0.0.0:11121->12121/tcp, 0.0.0.0:8051->50050/tcp node1_primihub
68befa6ab2a5 primihub/simple-bootstrap-node:1.0 "/app/simple-bootstr…" 11 minutes ago Up 11 minutes 0.0.0.0:4001->4001/tcp simple_bootstrap_node
```
### Create an MPC task
*** Let three nodes jointly perform a logistic regression task of multi-party secure computation (MPC) ***
```bash
$ docker run -it primihub/primihub-node "primihub-cli --server=127.0.0.1:8050"
```
> 💡 The node that serves the task request
>
> You can request computing tasks from any node in the computing cluster
>
> 💡 Available task parameters
>
> The following parameters can be specified through primihub-cli:
> 1. Which node is requested to start the task.
> 2. Which shared datasets are used.
> 3. What kind of private computing tasks to do.
In this example, primihub-cli will use the default parameters to request an ABY3 tripartite logistic regression test task from *** node 0 ***. For the parameters that can be specified by cli, please refer to *** [Create task](http://docs.primihub.com/docs/advance-usage/create-tasks) ***
## Advanced use
To learn how to start from native applications and how to use Primihub features to implement more applications, see [Advanced Usage](http://docs.primihub.com/docs/category/%E8%BF%9B%E9%98%B6%E4%BD%BF%E7%94%A8)
## Developer
* For how to build, see [Build](http://docs.primihub.com/docs/developer-docs/build)
* For how to contribute code, see [Primihub Open Source Community Governance](http://docs.primihub.com/docs/primihub-community)
# Primihub
## 特性
Primihub是一个支持多方计算、联邦学习、隐私求交(PSI)、隐私查询(PIR)特性的平台,支持数据源接入、数据消费、接入应用、语法、语义、安全协议多方面的扩展。 具体请见 Primihub [核心特性](http://docs.primihub.com/docs/category/%E6%A0%B8%E5%BF%83%E7%89%B9%E6%80%A7)
## 快速开始
5分钟运行起来一个MPC应用
安装 [docker](https://docs.docker.com/install/overview/) 和 [docker-compose](https://docs.docker.com/compose/install/)
下载代码并切换到代码根路径
```
$ git clone https://github.com/primihub/primihub.git
$ cd primihub
```
## 运行一个MPC案例
![Deployment](doc/tutorial-depolyment.jpg)
### 启动节点
*** 启动测试用的节点 ***
使用docker-compose 启动三个docker容器。
容器包括:启动点、三个节点
```bash
$ docker-compose up
```
查看运行起来的docker容器:
```bash
$ docker ps
```
```
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
cf875c1280be primihub-node:1.0.3 "/bin/bash -c './pri…" 11 minutes ago Up 11 minutes 0.0.0.0:12120-12121->12120-12121/tcp, 0.0.0.0:8052->50050/tcp node2_primihub
6a822ff5c6f7 primihub-node:1.0.3 "/bin/bash -c './pri…" 11 minutes ago Up 11 minutes 0.0.0.0:10120->12120/tcp, 0.0.0.0:10121->12121/tcp, 0.0.0.0:8050->50050/tcp node0_primihub
11d55ce06ff0 primihub-node:1.0.3 "/bin/bash -c './pri…" 11 minutes ago Up 11 minutes 0.0.0.0:11120->12120/tcp, 0.0.0.0:11121->12121/tcp, 0.0.0.0:8051->50050/tcp node1_primihub
68befa6ab2a5 primihub/simple-bootstrap-node:1.0 "/app/simple-bootstr…" 11 minutes ago Up 11 minutes 0.0.0.0:4001->4001/tcp simple_bootstrap_node
```
### 创建一个MPC任务
*** 让三个节点共同执行一个多方安全计算(MPC)的逻辑回归任务 ***
```bash
$ docker run -it primihub/primihub-node "primihub-cli --server=127.0.0.1:8050"
```
> 💡 请求任务的节点
>
> 你可以向计算集群中任意一个节点请求计算任务
>
> 💡 可用的任务参数
>
> 通过primihub-cli可以指定以下参数
> 1. 请求哪个节点启动任务
> 2. 使用哪些共享数据集
> 3. 做什么样的隐私计算任务
在这个例子中primihub-cli会使用默认参数向 *** node 0 *** 请求一个ABY3的三方逻辑回归测试任务,关于cli可以指定的参数请见 *** [创建任务](http://docs.primihub.com/docs/advance-usage/create-tasks) ***
## 进阶使用
想了解如何从原生应用启动以及关于如何利用Primihub的特性,实现更多应用,见 [进阶使用](http://docs.primihub.com/docs/category/%E8%BF%9B%E9%98%B6%E4%BD%BF%E7%94%A8)
## 开发者
* 关于如何编译,请见[编译](http://docs.primihub.com/docs/developer-docs/build)
* 关于如何贡献代码,请见 [Primihub开源社区治理](http://docs.primihub.com/docs/primihub-community)
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository", "new_git_repository")
all_content = """filegroup(name = "all", srcs = glob(["**"]), visibility = ["//visibility:public"])"""
http_archive(
name = "rules_foreign_cc",
sha256 = "33a5690733c5cc2ede39cb62ebf89e751f2448e27f20c8b2fbbc7d136b166804",
strip_prefix = "rules_foreign_cc-0.5.1",
urls = [
"https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/0.5.1.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/rules_foreign_cc-0.5.1.tar.gz"
]
)
load("@rules_foreign_cc//foreign_cc:repositories.bzl", "rules_foreign_cc_dependencies")
rules_foreign_cc_dependencies()
#openssl-1.1.1L , need by libp2p, must be before boost_deps()
load("//bazel:repos.bzl", openssl_repos="repos")
openssl_repos()
load("//bazel:deps.bzl", openssl_deps="deps")
openssl_deps()
git_repository(
name = "com_github_nelhage_rules_boost",
# commit = "1e3a69bf2d5cd10c34b74f066054cd335d033d71",
branch = "master",
remote = "https://github.com/primihub/rules_boost.git",
# shallow_since = "1591047380 -0700",
)
load("@com_github_nelhage_rules_boost//:boost/boost.bzl", "boost_deps")
boost_deps()
new_git_repository(
name = "toolkit_relic",
build_file = "//bazel:BUILD.relic",
remote = "https://github.com/relic-toolkit/relic.git",
commit = "3f616ad64c3e63039277b8c90915607b6a2c504c",
shallow_since = "1581106153 -0800",
)
new_git_repository(
name = "eigen",
build_file = "//bazel:BUILD.eigen",
remote = "https://gitlab.com/libeigen/eigen.git",
commit = "3dc3a0ea2d0773af4c0ffd7bbcb21c608e28fcef",
shallow_since = "1497510620 +0200",
)
new_git_repository(
name = "lib_function2",
build_file = "//bazel:BUILD.function2",
remote = "https://github.com/Naios/function2.git",
commit = "b8cf935d096a87a645534e5c1015ee80960fe4de",
shallow_since = "1616573746 +0100",
)
new_git_repository(
name = "arrow",
build_file = "//bazel:BUILD.arrow",
patch_cmds = [
# TODO: Remove the fowllowing once arrow issue is resolved.
"""sed -i.bak 's/type_traits/std::max<int16_t>(sizeof(int16_t), type_traits/g' cpp/src/parquet/column_reader.cc""",
"""sed -i.bak 's/value_byte_size/value_byte_size)/g' cpp/src/parquet/column_reader.cc""",
],
branch = "release-4.0.0",
remote = "https://github.com/primihub/arrow.git",
)
# grpc with openssl
git_repository(
name = "com_github_grpc_grpc",
remote = "https://github.com/primihub/grpc.git",
commit = "b115c27f91c7ecee03838ab625032a50b9434678",
)
load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps")
# Includes boringssl, and other dependencies.
grpc_deps()
load("@com_github_grpc_grpc//bazel:grpc_extra_deps.bzl", "grpc_extra_deps")
# Loads transitive dependencies of GRPC.
grpc_extra_deps()
http_archive(
name = "com_github_glog_glog",
# sha256 = "cbba86b5a63063999e0fc86de620a3ad22d6fd2aa5948bff4995dcd851074a0b",
strip_prefix = "glog-c8f8135a5720aee7de8328b42e4c43f8aa2e60aa",
urls = ["https://github.com/google/glog/archive/c8f8135a5720aee7de8328b42e4c43f8aa2e60aa.zip"],
)
http_archive(
name = "com_github_google_flatbuffers",
url = "https://github.com/google/flatbuffers/archive/refs/tags/v2.0.0.tar.gz",
strip_prefix = "flatbuffers-2.0.0",
sha256 = "9ddb9031798f4f8754d00fca2f1a68ecf9d0f83dfac7239af1311e4fd9a565c4",
)
# gflags Needed for glog
http_archive(
name = "com_github_gflags_gflags",
sha256 = "34af2f15cf7367513b352bdcd2493ab14ce43692d2dcd9dfc499492966c64dcf",
strip_prefix = "gflags-2.2.2",
urls = [
"https://github.com/gflags/gflags/archive/v2.2.2.tar.gz",
],
)
# Abseil C++ libraries
git_repository(
name = "com_google_absl",
remote = "https://github.com/abseil/abseil-cpp.git",
commit = "0f3bb466b868b523cf1dc9b2aaaed65c77b28862",
shallow_since = "1603283562 -0400",
)
# googletest
http_archive(
name = "com_google_googletest",
urls = ["https://github.com/google/googletest/archive/refs/tags/release-1.10.0.zip"],
sha256 = "94c634d499558a76fa649edb13721dce6e98fb1e7018dfaeba3cd7a083945e91",
strip_prefix = "googletest-release-1.10.0",
)
http_archive(
name = "bazel_common",
url = "https://github.com/google/bazel-common/archive/refs/heads/master.zip",
strip_prefix = "bazel-common-master",
sha256 = "b7a8e1a4ad843df69c9714377f023276cd15c3b706a46b6e5a1dc7e101fec419",
)
http_archive(
name = "bazel_skylib",
strip_prefix = None,
url = "https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.2/bazel-skylib-1.0.2.tar.gz",
sha256 = "97e70364e9249702246c0e9444bccdc4b847bed1eb03c5a3ece4f83dfe6abc44",
)
# json
http_archive(
name = "nlohmann_json",
strip_prefix = "json-3.9.1",
urls = ["https://github.com/nlohmann/json/archive/v3.9.1.tar.gz"],
sha256 = "4cf0df69731494668bdd6460ed8cb269b68de9c19ad8c27abc24cd72605b2d5b",
build_file = "//bazel:BUILD.nlohmann_json",
)
# pybind11 , bazel ref:https://github.com/pybind/pybind11_bazel
# _PYBIND11_COMMIT = "72cbbf1fbc830e487e3012862b7b720001b70672"
http_archive(
name = "pybind11_bazel",
strip_prefix = "pybind11_bazel-72cbbf1fbc830e487e3012862b7b720001b70672",
urls = [
"https://github.com/pybind/pybind11_bazel/archive/72cbbf1fbc830e487e3012862b7b720001b70672.zip",
"https://primihub.oss-cn-beijing.aliyuncs.com/pybind11_bazel-72cbbf1fbc830e487e3012862b7b720001b70672.zip"
],
)
# We still require the pybind library.
http_archive(
name = "pybind11",
build_file = "@pybind11_bazel//:pybind11.BUILD",
strip_prefix = "pybind11-2.9.2",
urls = ["https://github.com/pybind/pybind11/archive/refs/tags/v2.9.2.tar.gz"],
)
load("@pybind11_bazel//:python_configure.bzl", "python_configure")
python_configure(name = "local_config_python")
# ======== arrow dependencies start ========
http_archive(
name = "brotli",
build_file = "//bazel:brotli.BUILD",
sha256 = "4c61bfb0faca87219ea587326c467b95acb25555b53d1a421ffa3c8a9296ee2c",
strip_prefix = "brotli-1.0.7",
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/google/brotli/archive/v1.0.7.tar.gz",
"https://github.com/google/brotli/archive/refs/tags/v1.0.7.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/brotli-1.0.7.tar.gz"
],
)
http_archive(
name = "bzip2",
build_file = "//bazel:bzip2.BUILD",
sha256 = "ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269",
strip_prefix = "bzip2-1.0.8",
urls = [
"https://sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz",
"https://fossies.org/linux/misc/bzip2-1.0.8.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/bzip2-1.0.8.tar.gz"
],
)
http_archive(
name = "double-conversion",
sha256 = "a63ecb93182134ba4293fd5f22d6e08ca417caafa244afaa751cbfddf6415b13",
strip_prefix = "double-conversion-3.1.5",
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/google/double-conversion/archive/v3.1.5.tar.gz",
"https://github.com/google/double-conversion/archive/v3.1.5.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/double-conversion-3.1.5.tar.gz",
],
)
http_archive(
name = "lz4",
build_file = "//bazel:lz4.BUILD",
patch_cmds = [
"""sed -i.bak 's/__attribute__ ((__visibility__ ("default")))//g' lib/lz4frame.h """,
],
sha256 = "658ba6191fa44c92280d4aa2c271b0f4fbc0e34d249578dd05e50e76d0e5efcc",
strip_prefix = "lz4-1.9.2",
urls = [
# "https://storage.googleapis.com/mirror.tensorflow.org/github.com/lz4/lz4/archive/v1.9.2.tar.gz",
"https://github.com/lz4/lz4/archive/v1.9.2.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/lz4-1.9.2.tar.gz"
],
)
http_archive(
name = "rapidjson",
build_file = "//bazel:rapidjson.BUILD",
sha256 = "30bd2c428216e50400d493b38ca33a25efb1dd65f79dfc614ab0c957a3ac2c28",
strip_prefix = "rapidjson-418331e99f859f00bdc8306f69eba67e8693c55e",
urls = [
"https://github.com/miloyip/rapidjson/archive/418331e99f859f00bdc8306f69eba67e8693c55e.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/rapidjson-418331e99f859f00bdc8306f69eba67e8693c55e.tar.gz"
],
)
# Note: snappy is placed earlier as tensorflow's snappy does not include snappy-c
http_archive(
name = "snappy",
build_file = "//bazel:snappy.BUILD",
sha256 = "16b677f07832a612b0836178db7f374e414f94657c138e6993cbfc5dcc58651f",
strip_prefix = "snappy-1.1.8",
urls = [
"https://github.com/google/snappy/archive/1.1.8.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/snappy-1.1.8.tar.gz"
],
)
http_archive(
name = "thrift",
build_file = "//bazel:thrift.BUILD",
sha256 = "5da60088e60984f4f0801deeea628d193c33cec621e78c8a43a5d8c4055f7ad9",
strip_prefix = "thrift-0.13.0",
urls = [
"https://github.com/apache/thrift/archive/v0.13.0.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/thrift-0.13.0.tar.gz"
],
)
http_archive(
name = "xsimd",
build_file = "//bazel:xsimd.BUILD",
sha256 = "45337317c7f238fe0d64bb5d5418d264a427efc53400ddf8e6a964b6bcb31ce9",
strip_prefix = "xsimd-7.5.0",
urls = [
"https://github.com/xtensor-stack/xsimd/archive/refs/tags/7.5.0.tar.gz",
],
)
http_archive(
name = "zlib",
build_file = "//bazel:zlib.BUILD",
sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1",
strip_prefix = "zlib-1.2.11",
urls = [
"https://zlib.net/zlib-1.2.11.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/zlib-1.2.11.tar.gz"
],
)
http_archive(
name = "zstd",
build_file = "//bazel:zstd.BUILD",
sha256 = "a364f5162c7d1a455cc915e8e3cf5f4bd8b75d09bc0f53965b0c9ca1383c52c8",
strip_prefix = "zstd-1.4.4",
urls = [
"https://github.com/facebook/zstd/archive/v1.4.4.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/zstd-1.4.4.tar.gz"
],
)
http_archive(
name = "xz",
build_file = "//bazel:xz.BUILD",
sha256 = "0d2b89629f13dd1a0602810529327195eff5f62a0142ccd65b903bc16a4ac78a",
strip_prefix = "xz-5.2.5",
urls = [
"https://github.com/xz-mirror/xz/archive/v5.2.5.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/xz-5.2.5.tar.gz"
],
)
http_archive(
name = "io_opentelemetry_cpp",
# sha256 = "<sha256>",
strip_prefix = "opentelemetry-cpp-1.0.1",
urls = [
"https://github.com/open-telemetry/opentelemetry-cpp/archive/refs/tags/v1.0.1.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/opentelemetry-cpp-1.0.1.tar.gz"
],
)
# Load OpenTelemetry dependencies after load.
load("@io_opentelemetry_cpp//bazel:repository.bzl", "opentelemetry_cpp_deps")
opentelemetry_cpp_deps()
http_archive(
name = "com_google_protobuf",
# sha256 = "<sha256>",
# strip_prefix = "opentelemetry-cpp-1.0.1",
urls = [
"https://github.com/protocolbuffers/protobuf/archive/refs/tags/v3.20.0.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/protobuf-3.20.0.tar.gz"
],
)
git_repository(
name = "cares-bazel",
branch = "master",
remote = "https://github.com/hobo0cn/cares-bazel.git",
patch_cmds = [
"git submodule update --init --recursive",
],
)
# fmt bazle, ref: https://fossies.org/linux/fmt/support/bazel/README.md
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
new_git_repository(
name = "fmt",
build_file = "//bazel:fmt.BUILD",
remote = "https://github.com/fmtlib/fmt.git",
tag = "6.1.2",
)
new_git_repository(
name = "libp2p",
build_file = "//bazel:libp2p.BUILD",
remote = "https://github.com/primihub/cpp-libp2p.git",
branch="master",
)
# soralog , need by libp2p
# TODO need change to glog
new_git_repository(
name = "com_openmpc_soralog",
build_file = "//bazel:soralog.BUILD",
remote = "https://github.com/primihub/soralog.git",
branch="master",
)
# sqlite, need by libp2p
http_archive(
name = "com_github_soramitsu_sqlite",
build_file = "//bazel:sqlite.BUILD",
strip_prefix = "libp2p-sqlite-modern-cpp-3.2",
urls = [
"https://github.com/soramitsu/libp2p-sqlite-modern-cpp/archive/refs/tags/v3.2.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/libp2p-sqlite-modern-cpp-3.2.tar.gz"],
)
#yaml-cpp, need by libp2p
git_repository(
name = "com_github_jbeder_yaml_cpp",
remote = "https://github.com/jbeder/yaml-cpp.git",
tag="yaml-cpp-0.7.0",
)
# hat_trie , need by libp2p
new_git_repository(
name = "com_github_masterjedy_hat_hrie",
build_file = "//bazel:hat_trie.BUILD",
remote = "https://github.com/masterjedy/hat-trie.git",
branch="master",
)
# boost di, used by libp2p
http_archive(
name = "com_github_masterjedy_di",
build_file = "//bazel:di.BUILD",
strip_prefix = "di-1.1.0",
urls = [
"https://github.com/boost-ext/di/archive/refs/tags/v1.1.0.tar.gz",
"https://primihub.oss-cn-beijing.aliyuncs.com/di-1.1.0.tar.gz"
],
)
# OpenMined Private Set Intersection (primihub fork) plus its transitive deps.
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
git_repository(
 name = "org_openmined_psi",
 remote = "https://github.com/primihub/PSI.git",
 branch = "master",
 init_submodules = True,
)
# PSI requires preload to run before its dependency macro.
load("@org_openmined_psi//private_set_intersection:preload.bzl", "psi_preload")
psi_preload()
load("@org_openmined_psi//private_set_intersection:deps.bzl", "psi_deps")
psi_deps()
# OpenMined Private Information Retrieval (primihub fork), same load pattern.
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
git_repository(
 name = "org_openmined_pir",
 remote = "https://github.com/primihub/PIR.git",
 branch = "master",
 init_submodules = True,
)
load("@org_openmined_pir//pir:preload.bzl", "pir_preload")
pir_preload()
load("@org_openmined_pir//pir:deps.bzl", "pir_deps")
pir_deps()
# Description:
# Apache Arrow library (external BUILD file applied to the Arrow source tree).
package(default_visibility = ["//visibility:public"])
licenses(["notice"]) # Apache 2.0
exports_files(["LICENSE.txt"])
load("@com_github_google_flatbuffers//:build_defs.bzl", "flatbuffer_cc_library")
# Generate C++ code for Arrow's FlatBuffers IPC wire formats.
flatbuffer_cc_library(
 name = "arrow_format",
 srcs = [
 "cpp/src/arrow/ipc/feather.fbs",
 "format/File.fbs",
 "format/Message.fbs",
 "format/Schema.fbs",
 "format/SparseTensor.fbs",
 "format/Tensor.fbs",
 ],
 flatc_args = [
 "--scoped-enums",
 "--gen-object-api",
 ],
 # Generated headers land where the Arrow sources expect them.
 out_prefix = "cpp/src/generated/",
)
# Emulate CMake's configure step: expand config.h.cmake with sed,
# hard-coding Arrow version 3.0.0 and disabling native int128 support.
genrule(
 name = "arrow_util_config",
 srcs = ["cpp/src/arrow/util/config.h.cmake"],
 outs = ["cpp/src/arrow/util/config.h"],
 cmd = ("sed " +
 "-e 's/@ARROW_VERSION_MAJOR@/3/g' " +
 "-e 's/@ARROW_VERSION_MINOR@/0/g' " +
 "-e 's/@ARROW_VERSION_PATCH@/0/g' " +
 "-e 's/cmakedefine ARROW_USE_NATIVE_INT128/undef ARROW_USE_NATIVE_INT128/g' " +
 "-e 's/cmakedefine/define/g' " +
 "$< >$@"),
)
# Same technique for Parquet's version header (hard-coded 1.5.1).
genrule(
 name = "parquet_version_h",
 srcs = ["cpp/src/parquet/parquet_version.h.in"],
 outs = ["cpp/src/parquet/parquet_version.h"],
 cmd = ("sed " +
 "-e 's/@PARQUET_VERSION_MAJOR@/1/g' " +
 "-e 's/@PARQUET_VERSION_MINOR@/5/g' " +
 "-e 's/@PARQUET_VERSION_PATCH@/1/g' " +
 "$< >$@"),
)
# Monolithic Arrow + Parquet library: core arrays, CSV/JSON/IPC readers,
# local filesystem, and a curated subset of the compute kernels.
# HDFS, fuzzers, tests, benchmarks and AVX2/AVX512 bit-packing are excluded.
cc_library(
 name = "arrow",
 srcs = glob(
 [
 "cpp/src/arrow/*.cc",
 "cpp/src/arrow/array/*.cc",
 "cpp/src/arrow/csv/*.cc",
 "cpp/src/arrow/io/*.cc",
 "cpp/src/arrow/ipc/*.cc",
 "cpp/src/arrow/json/*.cc",
 "cpp/src/arrow/tensor/*.cc",
 "cpp/src/arrow/util/*.cc",
 "cpp/src/arrow/filesystem/util_internal.cc",
 "cpp/src/arrow/filesystem/localfs.cc",
 "cpp/src/arrow/filesystem/filesystem.cc",
 "cpp/src/arrow/filesystem/mockfs.cc",
 "cpp/src/arrow/filesystem/path_util.cc",
 "cpp/src/arrow/compute/cast.cc",
 "cpp/src/arrow/compute/exec.cc",
 "cpp/src/arrow/compute/kernel.cc",
 "cpp/src/arrow/compute/function.cc",
 "cpp/src/arrow/compute/registry.cc",
 "cpp/src/arrow/compute/kernels/scalar_*.cc",
 "cpp/src/arrow/compute/api_*.cc",
 "cpp/src/arrow/compute/kernels/codegen_*.cc",
 "cpp/src/arrow/compute/kernels/vector_*.cc",
 "cpp/src/arrow/compute/kernels/util_*.cc",
 "cpp/src/arrow/compute/kernels/aggregate_*.cc",
 "cpp/src/arrow/compute/kernels/hash_*.cc",
 "cpp/src/arrow/vendored/musl/strptime.c",
 "cpp/src/arrow/vendored/optional.hpp",
 "cpp/src/arrow/vendored/base64.cpp",
 "cpp/src/arrow/vendored/base64.h",
 "cpp/src/arrow/vendored/string_view.hpp",
 "cpp/src/arrow/vendored/variant.hpp",
 "cpp/src/arrow/vendored/uriparser/*.c",
 "cpp/src/arrow/**/*.h",
 "cpp/src/parquet/**/*.h",
 "cpp/src/parquet/**/*.cc",
 "cpp/src/generated/*.h",
 "cpp/src/generated/*.cpp",
 ],
 exclude = [
 "cpp/src/**/*_benchmark.cc",
 "cpp/src/**/*_main.cc",
 "cpp/src/**/*_nossl.cc",
 "cpp/src/**/*_test.cc",
 "cpp/src/**/test_*.cc",
 "cpp/src/**/*hdfs*.cc",
 "cpp/src/**/*fuzz*.cc",
 "cpp/src/**/file_to_stream.cc",
 "cpp/src/**/stream_to_file.cc",
 "cpp/src/arrow/util/bpacking_avx2.cc",
 "cpp/src/arrow/util/bpacking_avx512.cc",
 ],
 ),
 hdrs = [
 # declare header from above genrule
 "cpp/src/arrow/util/config.h",
 "cpp/src/parquet/parquet_version.h",
 "cpp/src/arrow/filesystem/localfs.h",
 "cpp/src/arrow/filesystem/filesystem.h",
 ],
 copts = [],
 # Compression codecs enabled at compile time; *_STATIC/*_EXPORT= neutralize
 # the DLL export annotations for a static build.
 defines = [
 "ARROW_WITH_BROTLI",
 "ARROW_WITH_SNAPPY",
 "ARROW_WITH_LZ4",
 "ARROW_WITH_ZLIB",
 "ARROW_WITH_ZSTD",
 "ARROW_WITH_BZ2",
 "ARROW_STATIC",
 "ARROW_EXPORT=",
 "PARQUET_STATIC",
 "PARQUET_EXPORT=",
 "WIN32_LEAN_AND_MEAN",
 ],
 includes = [
 "cpp/src",
 "cpp/src/arrow/vendored/xxhash",
 ],
 # xxhash.c is #included (not compiled standalone), hence textual_hdrs.
 textual_hdrs = [
 "cpp/src/arrow/vendored/xxhash/xxhash.c",
 ],
 deps = [
 ":arrow_format",
 "@openssl",
 "@boost//:multiprecision",
 "@boost//:random",
 "@boost//:asio",
 "@boost//:filesystem",
 "@boost//:program_options",
 "@brotli",
 "@bzip2",
 "@double-conversion",
 "@lz4",
 "@rapidjson",
 "@snappy",
 "@thrift",
 "@xsimd",
 "@zlib",
 "@zstd",
 "@io_opentelemetry_cpp//api",
 "@io_opentelemetry_cpp//exporters/ostream:ostream_span_exporter",
 # "@io_opentelemetry_cpp//exporters/otlp:otlp_http_exporter",
 "@io_opentelemetry_cpp//sdk/src/trace",
 # "@liborc//:liborc",
 # "@com_google_protobuf//:protobuf",
 ],
)
# Eigen BUILD file: header-only linear-algebra library, MPL2 subset only.
licenses([
 # Note: Eigen is an MPL2 library that includes GPL v3 and LGPL v2.1+ code.
 # We've taken special care to not reference any restricted code.
 "reciprocal", # MPL2
 "notice", # Portions BSD
])
exports_files(["COPYING.MPL2"])
# Glob patterns for the MPL2-licensed public headers (core + selected
# unsupported modules: FFT, KroneckerProduct, MatrixFunctions, SpecialFunctions).
EIGEN_FILES = [
 "Eigen/**",
 "unsupported/Eigen/CXX11/**",
 "unsupported/Eigen/FFT",
 "unsupported/Eigen/KroneckerProduct",
 "unsupported/Eigen/src/FFT/**",
 "unsupported/Eigen/src/KroneckerProduct/**",
 "unsupported/Eigen/MatrixFunctions",
 "unsupported/Eigen/SpecialFunctions",
 "unsupported/Eigen/src/MatrixFunctions/**",
 "unsupported/Eigen/src/SpecialFunctions/**",
]
# List of files picked up by glob but actually part of another target.
EIGEN_EXCLUDE_FILES = [
 "Eigen/src/Core/arch/AVX/PacketMathGoogleTest.cc",
]
EIGEN_MPL2_HEADER_FILES = glob(
 EIGEN_FILES,
 exclude = EIGEN_EXCLUDE_FILES,
)
# Header-only target: no srcs, headers served from the repo root.
cc_library(
 name = "eigen",
 hdrs = EIGEN_MPL2_HEADER_FILES,
 includes = ["."],
 visibility = ["//visibility:public"],
)
package(default_visibility = ["//visibility:public"])
# function2: single-header std::function replacement, staged by the
# "function2-build" genrule below.
cc_library(
 name = "function2",
 hdrs = ["function2/function2.hpp"],
 # Bug fix: `includes` entries are DIRECTORIES added to the include search
 # path; the original listed the header file itself, which adds nothing
 # usable. "." exposes the package root so dependents can write
 # #include <function2/function2.hpp>.
 includes = ["."],
 visibility = ["//visibility:public"],
)
# Copy the single header out of the upstream source layout so the
# cc_library above can declare it as an output-rooted hdr.
genrule(
 name = "function2-build",
 srcs = glob(["**"]),
 outs = [
 "function2/function2.hpp",
 ],
 cmd = """
 set -x
 FUNCTION2_ROOT=$$(dirname $(location CMakeLists.txt))
 cp $$FUNCTION2_ROOT/include/function2/function2.hpp $(location function2/function2.hpp)
 """,
 visibility = ["//visibility:public"],
)
# Compiler flags for hiredis: platform-dependent (MSVC vs. GCC/Clang).
COPTS = [] + select({
 "@bazel_tools//src/conditions:windows": [
 "-D_CRT_DECLARE_NONSTDC_NAMES=0", # don't define off_t, to avoid conflicts
 ],
 "//conditions:default": [
 ],
}) + select({
 "@//:msvc-cl": [
 ],
 "//conditions:default": [
 # Old versions of GCC (e.g. 4.9.2) can fail to compile Redis's C without this.
 "-std=c99",
 ],
})
# This library is for internal hiredis use, because hiredis assumes a
# different include prefix for itself than external libraries do.
# dict.c is deliberately exposed as a header: hiredis #includes it textually.
cc_library(
 name = "_hiredis",
 hdrs = [
 "dict.c",
 ],
 copts = COPTS,
)
# Public hiredis client library; SSL and test sources are excluded.
cc_library(
 name = "hiredis",
 srcs = glob(
 [
 "*.c",
 "*.h",
 ],
 exclude =
 [
 "ssl.c",
 "test.c",
 ],
 ),
 hdrs = glob([
 "*.h",
 "adapters/*.h",
 ]),
 copts = COPTS,
 # Dependents include these as <hiredis/...>.
 include_prefix = "hiredis",
 deps = [
 ":_hiredis",
 ],
 visibility = ["//visibility:public"],
)
# nlohmann/json: header-only JSON library.
# NOTE(review): alwayslink has no effect without srcs — presumably a leftover.
cc_library(
 name = "nlohmann_json",
 hdrs = glob([
 "include/**/*.hpp",
 ]),
 includes = ["include"],
 visibility = ["//visibility:public"],
 alwayslink = 1,
)
# TODO (chenhongbo) use .bazelrc platform specific settings
# Matches macOS x86_64 builds; used to drop -lpthread/-ldl there.
config_setting(
 name = "darwin",
 values = {
 "cpu": "darwin_x86_64",
 },
)
# RELIC cryptographic toolkit, wrapped around the static archive produced by
# the cmake-driven genrule below.
cc_library(
 name = "relic",
 hdrs = glob(["include/*.h"]) + ["include/relic_conf.h"],
 srcs = ["librelic_s.a"],
 includes = ["include"],
 linkopts = select({
 ":darwin": [],
 "//conditions:default": ["-lpthread", "-ldl"],
 }),
 visibility = ["//visibility:public"],
)
# Non-hermetic escape hatch: runs cmake+make in the source tree and copies
# out the static library and generated config header.
genrule(
 name = "relic-build",
 srcs = glob(["**/*"],
 exclude=["bazel-*"]),
 outs = [
 "librelic_s.a",
 "include/relic_conf.h",
 ],
 cmd = """
 set -x
 RELIC_ROOT=$$(dirname $(location CMakeLists.txt))
 pushd $$RELIC_ROOT
 cmake -DARCH=ARM -DWSIZE=32 .
 make
 popd
 cp $$RELIC_ROOT/lib/librelic_s.a $(location librelic_s.a)
 cp $$RELIC_ROOT/include/relic_conf.h $(location include/relic_conf.h)
 """,
)
# Description:
# Brotli library (common + decoder + encoder compiled together).
licenses(["notice"]) # MIT license
exports_files(["LICENSE"])
cc_library(
 name = "brotli",
 srcs = glob([
 "c/common/*.c",
 "c/common/*.h",
 "c/dec/*.c",
 "c/dec/*.h",
 "c/enc/*.c",
 "c/enc/*.h",
 "c/include/brotli/*.h",
 ]),
 hdrs = [],
 defines = [],
 includes = [
 "c/dec",
 "c/include",
 ],
 linkopts = [],
 visibility = ["//visibility:public"],
)
\ No newline at end of file
# bzip2 compression library (library sources only, no bzip2 CLI).
package(default_visibility = ["//visibility:public"])
licenses(["notice"]) # BSD-like license
cc_library(
 name = "bzip2",
 srcs = [
 "blocksort.c",
 "bzlib.c",
 "bzlib_private.h",
 "compress.c",
 "crctable.c",
 "decompress.c",
 "huffman.c",
 "randtable.c",
 ],
 hdrs = [
 "bzlib.h",
 ],
 copts = [
 ],
 includes = ["."],
)
\ No newline at end of file
# Earlier attempt at building c-ares via rules_foreign_cc, kept for reference.
# load("@rules_foreign_cc//foreign_cc:defs.bzl", "configure_make")
# filegroup(
# name = "all_srcs",
# srcs = glob(["**"]),
# visibility = ["//visibility:public"],
# )
# configure_make(
# name = "cares",
# autoconf=True,
# lib_source = ":all_srcs",
# )
# c-ares built directly from an explicit source list; ares_config.h is chosen
# per-platform from checked-in config_darwin/ and config_linux/ directories.
cc_library(
 name = "ares",
 srcs = [
 "cares/ares__close_sockets.c",
 "cares/ares__get_hostent.c",
 "cares/ares__read_line.c",
 "cares/ares__timeval.c",
 "cares/ares_cancel.c",
 "cares/ares_create_query.c",
 "cares/ares_data.c",
 "cares/ares_destroy.c",
 "cares/ares_expand_name.c",
 "cares/ares_expand_string.c",
 "cares/ares_fds.c",
 "cares/ares_free_hostent.c",
 "cares/ares_free_string.c",
 "cares/ares_getenv.c",
 "cares/ares_gethostbyaddr.c",
 "cares/ares_gethostbyname.c",
 "cares/ares_getnameinfo.c",
 "cares/ares_getopt.c",
 "cares/ares_getsock.c",
 "cares/ares_init.c",
 "cares/ares_library_init.c",
 "cares/ares_llist.c",
 "cares/ares_mkquery.c",
 "cares/ares_nowarn.c",
 "cares/ares_options.c",
 "cares/ares_parse_a_reply.c",
 "cares/ares_parse_aaaa_reply.c",
 "cares/ares_parse_mx_reply.c",
 "cares/ares_parse_naptr_reply.c",
 "cares/ares_parse_ns_reply.c",
 "cares/ares_parse_ptr_reply.c",
 "cares/ares_parse_soa_reply.c",
 "cares/ares_parse_srv_reply.c",
 "cares/ares_parse_txt_reply.c",
 "cares/ares_platform.c",
 "cares/ares_process.c",
 "cares/ares_query.c",
 "cares/ares_search.c",
 "cares/ares_send.c",
 "cares/ares_strcasecmp.c",
 "cares/ares_strdup.c",
 "cares/ares_strerror.c",
 "cares/ares_timeout.c",
 "cares/ares_version.c",
 "cares/ares_writev.c",
 "cares/bitncmp.c",
 "cares/inet_net_pton.c",
 "cares/inet_ntop.c",
 "cares/windows_port.c",
 ],
 hdrs = [
 "ares_build.h",
 "cares/ares.h",
 "cares/ares_data.h",
 "cares/ares_dns.h",
 "cares/ares_getenv.h",
 "cares/ares_getopt.h",
 "cares/ares_inet_net_pton.h",
 "cares/ares_iphlpapi.h",
 "cares/ares_ipv6.h",
 "cares/ares_library_init.h",
 "cares/ares_llist.h",
 "cares/ares_nowarn.h",
 "cares/ares_platform.h",
 "cares/ares_private.h",
 "cares/ares_rules.h",
 "cares/ares_setup.h",
 "cares/ares_strcasecmp.h",
 "cares/ares_strdup.h",
 "cares/ares_version.h",
 "cares/bitncmp.h",
 "cares/config-win32.h",
 "cares/nameser.h",
 "cares/setup_once.h",
 ] + select({
 ":darwin": ["config_darwin/ares_config.h"],
 "//conditions:default": ["config_linux/ares_config.h"],
 }),
 includes = [
 ".",
 "cares"
 ] + select({
 ":darwin": ["config_darwin"],
 "//conditions:default": ["config_linux"],
 }),
 linkstatic = 1,
 visibility = [
 "//visibility:public",
 ],
 # HAVE_CONFIG_H makes the sources pick up the selected ares_config.h.
 copts = [
 "-D_GNU_SOURCE",
 "-D_HAS_EXCEPTIONS=0",
 "-DNOMINMAX",
 "-DHAVE_CONFIG_H",
 ],
)
\ No newline at end of file
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
load("@rules_foreign_cc//foreign_cc:repositories.bzl", "rules_foreign_cc_dependencies")
def deps(repo_mapping = {}):
 # Registers the archives needed to build OpenSSL from source: the openssl
 # tarball itself plus its build-time tools (nasm, perl, on Windows).
 # `maybe` makes each declaration a no-op if the repo already exists.
 rules_foreign_cc_dependencies()
 maybe(
 http_archive,
 name = "openssl",
 build_file = "@com_github_3rdparty_bazel_rules_openssl//:BUILD.openssl.bazel",
 # NOTE(review): sha256 is disabled — the openssl download is unverified.
 # Re-enable it (with the checksum matching the tar.gz actually served).
 # sha256 = "892a0875b9872acd04a9fde79b1f943075d5ea162415de3047c327df33fbaee5",
 strip_prefix = "openssl-1.1.1l",
 urls = [
 "https://www.openssl.org/source/openssl-1.1.1l.tar.gz",
 # "https://github.com/openssl/openssl/archive/OpenSSL_1_1_1l.tar.gz",
 ],
 repo_mapping = repo_mapping,
 )
 maybe(
 http_archive,
 name = "nasm",
 build_file = "@com_github_3rdparty_bazel_rules_openssl//:BUILD.nasm.bazel",
 sha256 = "f5c93c146f52b4f1664fa3ce6579f961a910e869ab0dae431bd871bdd2584ef2",
 strip_prefix = "nasm-2.15.05",
 urls = [
 "https://mirror.bazel.build/www.nasm.us/pub/nasm/releasebuilds/2.15.05/win64/nasm-2.15.05-win64.zip",
 "https://www.nasm.us/pub/nasm/releasebuilds/2.15.05/win64/nasm-2.15.05-win64.zip",
 ],
 repo_mapping = repo_mapping,
 )
 maybe(
 http_archive,
 name = "perl",
 build_file = "@com_github_3rdparty_bazel_rules_openssl//:BUILD.perl.bazel",
 sha256 = "aeb973da474f14210d3e1a1f942dcf779e2ae7e71e4c535e6c53ebabe632cc98",
 urls = [
 "https://mirror.bazel.build/strawberryperl.com/download/5.32.1.1/strawberry-perl-5.32.1.1-64bit.zip",
 "https://strawberryperl.com/download/5.32.1.1/strawberry-perl-5.32.1.1-64bit.zip",
 ],
 repo_mapping = repo_mapping,
 )
\ No newline at end of file
# Description:
# boost-ext/di library (header-only dependency injection).
package(default_visibility = ["//visibility:public"])
cc_library(
 name = "boost_di",
 # Header-only: srcs glob is intentionally empty.
 srcs = glob(
 [
 # "src/**/*.cpp",
 # "src/**/*.hpp",
 ],
 ),
 hdrs = glob([
 "include/**/*.hpp",
 ]
 ),
 textual_hdrs = [
 ],
 copts = ['--std=c++17'],
 includes = [
 "include/",
 ],
 # include_prefix = "src/common/",
 deps = [
 ],
)
\ No newline at end of file
# Description:
# fmt library (compiled variant: format.cc + posix.cc, fmt 6.x layout).
package(default_visibility = ["//visibility:public"])
cc_library(
 name = "fmt",
 srcs = [
 #"src/fmt.cc", # No C++ module support
 "src/format.cc",
 "src/posix.cc",
 ],
 hdrs = glob([
 "include/fmt/*.h"
 ]),
 includes = [
 "include",
 "src",
 ],
 # Dependents include headers as <fmt/...>.
 strip_include_prefix = "include",
 visibility = ["//visibility:public"],
)
\ No newline at end of file
# Description:
# hat-trie library (header-only HAT-trie containers).
package(default_visibility = ["//visibility:public"])
cc_library(
 name = "hat-trie",
 # Header-only: srcs glob is intentionally empty.
 srcs = glob(
 [
 # "src/**/*.cpp",
 # "src/**/*.hpp",
 ],
 ),
 hdrs = glob([
 "include/**/*.h",
 ]
 ),
 textual_hdrs = [
 # "src/common/gsl/assert",
 # "src/common/gsl/span",
 # "src/common/gsl/gsl_assert",
 # "src/common/gsl/gsl_util",
 # "src/common/gsl/gsl_byte",
 # "src/common/gsl/gsl_algorithm",
 # "src/common/gsl/gsl",
 # "src/common/gsl/pointers",
 # "src/common/gsl/multi_span",
 # "src/common/gsl/string_span",
 # "src/common/gsl/gls-lite.hpp",
 ],
 copts = ['--std=c++17'],
 includes = [
 "include/",
 ],
 # include_prefix = "src/common/",
 deps = [
 ],
)
\ No newline at end of file
# Apache ORC columnar file format library.
package(default_visibility = ["//visibility:public"])
licenses(["notice"]) # Apache 2.0
exports_files(["LICENSE"])
# Note: orc-proto-wrapper.cc includes orc_proto.pb.cc
# and prefix with Adaptor.hh. The Adaptor.hh
# was supposed to capture platform discrepancies.
# However, since orc_proto.pb.cc can be compiled
# with cc_proto_library successfully, there is no need
# for orc-proto-wrapper.cc
cc_library(
 name = "liborc",
 # OrcHdfsFile.cc is globally excluded, then re-added on non-Windows only.
 srcs = glob(
 [
 "c++/src/*.cc",
 "c++/src/*.hh",
 "c++/src/io/*.cc",
 "c++/src/io/*.hh",
 "c++/src/wrap/*.cc",
 "c++/src/wrap/*.hh",
 "c++/src/wrap/*.h",
 "c++/src/sargs/SearchArgument.cc",
 "c++/src/sargs/SearchArgument.hh",
 "c++/include/orc/*.hh",
 ],
 exclude = [
 "c++/src/wrap/orc-proto-wrapper.cc",
 "c++/src/OrcHdfsFile.cc",
 ],
 ) + select({
 "@bazel_tools//src/conditions:windows": [],
 "//conditions:default": [
 "c++/src/OrcHdfsFile.cc",
 ],
 }),
 hdrs = [
 "c++/include/orc/orc-config.hh",
 "c++/include/orc/sargs/SearchArgument.hh",
 "c++/src/wrap",
 ],
 copts = [],
 defines = [],
 includes = [
 "c++/include",
 "c++/src",
 "c++/src/io",
 "c++/src/wrap",
 "proto",
 ],
 linkopts = [],
 visibility = ["//visibility:public"],
 deps = [
 ":libhdfspp",
 ":orc_cc_proto",
 "@lz4",
 "@snappy",
 "@zlib",
 "@zstd",
 ],
)
# Header-only stub of the libhdfspp client API used by OrcHdfsFile.cc.
cc_library(
 name = "libhdfspp",
 srcs = glob(
 [
 "c++/libs/libhdfspp/include/hdfspp/*.h",
 ],
 exclude = [
 ],
 ),
 hdrs = [
 ],
 copts = [],
 defines = [],
 includes = [
 "c++/libs/libhdfspp/include",
 ],
 deps = [],
)
# ORC file metadata protobuf schema and its C++ bindings.
proto_library(
 name = "orc_proto",
 srcs = ["proto/orc_proto.proto"],
)
cc_proto_library(
 name = "orc_cc_proto",
 deps = [":orc_proto"],
)
# Expand orc-config.hh.in with sed, hard-coding ORC version 1.6.7.
genrule(
 name = "orc-config_hh",
 srcs = ["c++/include/orc/orc-config.hh.in"],
 outs = ["c++/include/orc/orc-config.hh"],
 cmd = ("sed " +
 "-e 's/@ORC_VERSION@/1.6.7/g' " +
 "-e 's/cmakedefine/define/g' " +
 "$< >$@"),
)
# Platform adaptor header: each branch undefs the features that platform
# lacks, then turns the remaining cmakedefine lines into defines.
genrule(
 name = "Adaptor_hh",
 srcs = ["c++/src/Adaptor.hh.in"],
 outs = ["c++/src/Adaptor.hh"],
 cmd = select({
 "@bazel_tools//src/conditions:windows": (
 "sed " +
 "-e 's/cmakedefine HAS_PREAD/undef HAS_PREAD/g' " +
 "-e 's/cmakedefine NEEDS_REDUNDANT_MOVE/undef NEEDS_REDUNDANT_MOVE/g' " +
 "-e 's/cmakedefine NEEDS_Z_PREFIX/undef NEEDS_Z_PREFIX/g' " +
 "-e 's/cmakedefine/define/g' " +
 "$< >$@"
 ),
 "@bazel_tools//src/conditions:darwin": (
 "sed " +
 "-e 's/cmakedefine NEEDS_REDUNDANT_MOVE/undef NEEDS_REDUNDANT_MOVE/g' " +
 "-e 's/cmakedefine NEEDS_Z_PREFIX/undef NEEDS_Z_PREFIX/g' " +
 "-e 's/cmakedefine/define/g' " +
 "$< >$@"
 ),
 "//conditions:default": (
 "sed " +
 "-e 's/cmakedefine INT64_IS_LL/undef INT64_IS_LL/g' " +
 "-e 's/cmakedefine HAS_POST_2038/undef HAS_POST_2038/g' " +
 "-e 's/cmakedefine NEEDS_REDUNDANT_MOVE/undef NEEDS_REDUNDANT_MOVE/g' " +
 "-e 's/cmakedefine NEEDS_Z_PREFIX/undef NEEDS_Z_PREFIX/g' " +
 "-e 's/cmakedefine/define/g' " +
 "$< >$@"
 ),
 }),
)
\ No newline at end of file
# Description:
# cpp-libp2p library
package(default_visibility = ["//visibility:public"])
load("@rules_cc//cc:defs.bzl", "cc_proto_library")
load("@rules_proto//proto:defs.bzl", "proto_library")
# Wire-format schemas for libp2p's protocols (kademlia, secio, noise, ...).
proto_library(
 name = "libp2p_protos",
 srcs = [
 "src/protocol/kademlia/protobuf/kademlia.proto",
 "src/crypto/protobuf/keys.proto",
 "src/security/plaintext/protobuf/plaintext.proto",
 "src/security/secio/protobuf/secio.proto",
 "src/security/noise/protobuf/noise.proto",
 "src/protocol/identify/protobuf/identify.proto",
 "src/protocol/gossip/protobuf/rpc.proto",
 ]
)
cc_proto_library(
 name = "libp2p_cc_pb",
 deps = [":libp2p_protos"],
)
# Main libp2p library; the sqlite-backed storage is compiled out (see the
# excluded sqlite.cpp/sqlite.hpp and the commented sqlite dep below).
cc_library(
 name = "p2p",
 srcs = glob(
 [
 "src/**/*.cpp",
 "src/**/*.hpp",
 ],
 exclude = [
 "src/storage/sqlite.cpp"
 ],
 ),
 hdrs = glob([
 "include/libp2p/**/*.hpp",
 "include/generated/*.h",
 ],
 exclude = [
 "include/libp2p/sqlite.hpp"
 ],
 ),
 # Vendored GSL headers have no extension, so they are textual_hdrs.
 textual_hdrs = [
 "src/common/gsl/span",
 "src/common/gsl/gsl_assert",
 "src/common/gsl/gsl_util",
 "src/common/gsl/gsl_byte",
 "src/common/gsl/gsl_algorithm",
 "src/common/gsl/gsl",
 "src/common/gsl/pointers",
 "src/common/gsl/multi_span",
 "src/common/gsl/string_span",
 ],
 copts = [
 "--std=c++17",
 "-O3",
 "-g",
 "-Wall",
 "-ggdb",
 "-Wno-reserved-user-defined-literal",
 "-Wno-narrowing"
 ],
 includes = [
 "include/",
 "src/common/",
 ],
 include_prefix = "src/common/",
 deps = [
 # "@boost//:asio",
 "@boost//:asio_ssl",
 "@boost//:random",
 "@openssl",
 "@boost//:beast",
 "@boost//:multiprecision",
 "@boost//:outcome",
 "@boost//:filesystem",
 "@boost//:program_options",
 "@boost//:signals2",
 "@boost//:format",
 "@com_github_cares_cares//:ares",
 "@fmt//:fmt",
 "@com_google_absl//absl/base",
 "@com_openmpc_soralog//:soralog",
 "@com_github_masterjedy_hat_hrie//:hat-trie",
 # "@com_github_soramitsu_sqlite//:sqlite-cpp",
 "@com_github_masterjedy_di//:boost_di",
 ":libp2p_cc_pb",
 ],
)
# Description:
# LZ4 library
licenses(["notice"]) # BSD license
exports_files(["LICENSE"])
cc_library(
 name = "lz4",
 srcs = glob([
 "lib/lz4.c",
 "lib/lz4.h",
 "lib/lz4frame.c",
 "lib/lz4frame.h",
 "lib/lz4hc.h",
 "lib/lz4hc.c",
 "lib/xxhash.h",
 ]),
 hdrs = [],
 defines = [
 "XXH_PRIVATE_API",
 "LZ4LIB_VISIBILITY=",
 ],
 includes = [
 "lib",
 ],
 linkopts = [],
 # xxhash.c is #included by other sources rather than compiled standalone.
 # NOTE(review): lib/lz4.c appears in both srcs and textual_hdrs — confirm
 # this does not cause duplicate symbols when lz4hc.c #includes it.
 textual_hdrs = [
 "lib/xxhash.c",
 "lib/lz4.c",
 ],
 visibility = ["//visibility:public"],
)
\ No newline at end of file
load("@com_github_google_flatbuffers//:build_defs.bzl", "flatbuffer_library_public")
load("@bazel_skylib//rules:copy_file.bzl", "copy_file")
load("@bazel_common//tools/maven:pom_file.bzl", "pom_file")
# Base compile flags shared by the C++ targets (opt marker, static-link
# define on Windows, clang-cl warning suppressions).
COPTS_WITHOUT_LOG = select({
 "//:opt": ["-DBAZEL_OPT"],
 "//conditions:default": [],
}) + select({
 "@bazel_tools//src/conditions:windows": [
 # TODO(mehrdadn): (How to) support dynamic linking?
 "-DRAY_STATIC",
 ],
 "//conditions:default": [
 ],
}) + select({
 "//:clang-cl": [
 "-Wno-builtin-macro-redefined", # To get rid of warnings caused by deterministic build macros (e.g. #define __DATE__ "redacted")
 "-Wno-microsoft-unqualified-friend", # This shouldn't normally be enabled, but otherwise we get: google/protobuf/map_field.h: warning: unqualified friend declaration referring to type outside of the nearest enclosing namespace is a Microsoft extension; add a nested name specifier (for: friend class DynamicMessage)
 ],
 "//conditions:default": [
 ],
})
COPTS = COPTS_WITHOUT_LOG
# Extra flags applied when compiling Cython-generated (.pyx) sources.
PYX_COPTS = select({
 "//:msvc-cl": [
 ],
 "//conditions:default": [
 # Ignore this warning since CPython and Cython have issue removing deprecated tp_print on MacOS
 "-Wno-deprecated-declarations",
 ],
}) + select({
 "@bazel_tools//src/conditions:windows": [
 "/FI" + "src/shims/windows/python-nondebug.h",
 ],
 "//conditions:default": [
 ],
})
# Extra sources force-included into .pyx builds on Windows.
PYX_SRCS = [] + select({
 "@bazel_tools//src/conditions:windows": [
 "src/shims/windows/python-nondebug.h",
 ],
 "//conditions:default": [
 ],
})
def flatbuffer_py_library(name, srcs, outs, out_prefix, includes = [], include_paths = []):
    """Generate Python bindings from FlatBuffer schemas.

    Thin convenience wrapper around flatbuffer_library_public with the
    Python language flag ("-p") preselected; all other arguments are
    forwarded unchanged.
    """
    flatbuffer_library_public(
        name = name,
        srcs = srcs,
        outs = outs,
        out_prefix = out_prefix,
        language_flag = "-p",
        includes = includes,
        include_paths = include_paths,
    )
def define_java_module(
 name,
 additional_srcs = [],
 exclude_srcs = [],
 additional_resources = [],
 define_test_lib = False,
 test_deps = [],
 **kwargs):
 # Declares the standard target set for a Java module `name`:
 # - java_library "io_ray_ray_<name>" from <name>/src/main/java
 # - optionally a "<...>_test" java_library from <name>/src/test/java
 # - a pom_file generated from <name>/pom_template.xml covering the above.
 # Extra kwargs are forwarded to the main java_library.
 lib_name = "io_ray_ray_" + name
 pom_file_targets = [lib_name]
 native.java_library(
 name = lib_name,
 srcs = additional_srcs + native.glob(
 [name + "/src/main/java/**/*.java"],
 exclude = exclude_srcs,
 ),
 resources = native.glob([name + "/src/main/resources/**"]) + additional_resources,
 **kwargs
 )
 if define_test_lib:
 test_lib_name = "io_ray_ray_" + name + "_test"
 pom_file_targets.append(test_lib_name)
 native.java_library(
 name = test_lib_name,
 srcs = native.glob([name + "/src/test/java/**/*.java"]),
 deps = test_deps,
 )
 pom_file(
 name = "io_ray_ray_" + name + "_pom",
 targets = pom_file_targets,
 template_file = name + "/pom_template.xml",
 substitutions = {
 "{auto_gen_header}": "<!-- This file is auto-generated by Bazel from pom_template.xml, do not modify it. -->",
 },
 )
def copy_to_workspace(name, srcs, dstdir = ""):
 # Copies `srcs` into the workspace directory `dstdir` (relative to the
 # workspace root) via a local genrule. The genrule's declared output is
 # just a timestamp file; the real effect is the side-effecting copy, so
 # the rule re-runs whenever requested. Parallel bash and Windows-batch
 # command templates are kept equivalent by hand.
 if dstdir.startswith("/") or dstdir.startswith("\\"):
 fail("Subdirectory must be a relative path: " + dstdir)
 src_locations = " ".join(["$(locations %s)" % (src,) for src in srcs])
 native.genrule(
 name = name,
 srcs = srcs,
 outs = [name + ".out"],
 # Keep this Bash script equivalent to the batch script below (or take out the batch script)
 cmd = r"""
 mkdir -p -- {dstdir}
 for f in {locations}; do
 rm -f -- {dstdir}$${{f##*/}}
 cp -f -- "$$f" {dstdir}
 done
 date > $@
 """.format(
 locations = src_locations,
 dstdir = "." + ("/" + dstdir.replace("\\", "/")).rstrip("/") + "/",
 ),
 # Keep this batch script equivalent to the Bash script above (or take out the batch script)
 cmd_bat = """
 (
 if not exist {dstdir} mkdir {dstdir}
 ) && (
 for %f in ({locations}) do @(
 (if exist {dstdir}%~nxf del /f /q {dstdir}%~nxf) &&
 copy /B /Y %f {dstdir} >NUL
 )
 ) && >$@ echo %TIME%
 """.replace("\r", "").replace("\n", " ").format(
 locations = src_locations,
 dstdir = "." + ("\\" + dstdir.replace("/", "\\")).rstrip("\\") + "\\",
 ),
 # local = 1 so the copy lands in the real workspace, not the sandbox.
 local = 1,
 )
def native_java_binary(module_name, name, native_binary_name):
    """Copy native binary file to different path based on operating systems.

    Declares one copy_file target per supported OS, staging the binary under
    <module>/src/main/resources/native/<os>/<name>, plus a filegroup `name`
    that resolves to the copy matching the build platform.
    """
    for os_name in ["darwin", "linux", "windows"]:
        copy_file(
            name = name + "_" + os_name,
            src = native_binary_name,
            out = module_name + "/src/main/resources/native/" + os_name + "/" + name,
        )
    native.filegroup(
        name = name,
        srcs = select({
            "@bazel_tools//src/conditions:darwin": [name + "_darwin"],
            "@bazel_tools//src/conditions:windows": [name + "_windows"],
            "//conditions:default": [name + "_linux"],
        }),
        visibility = ["//visibility:public"],
    )
def native_java_library(module_name, name, native_library_name):
    """Copy native library file to different path based on operating systems.

    Only macOS (lib<name>.dylib) and Linux (lib<name>.so) variants are
    staged; on Windows the selecting filegroup is empty.
    """
    for os_name, out_template in [("darwin", "lib{}.dylib"), ("linux", "lib{}.so")]:
        copy_file(
            name = name + "_" + os_name,
            src = native_library_name,
            out = module_name + "/src/main/resources/native/" + os_name + "/" + out_template.format(name),
        )
    native.filegroup(
        name = name,
        srcs = select({
            "@bazel_tools//src/conditions:darwin": [name + "_darwin"],
            "@bazel_tools//src/conditions:windows": [],
            "//conditions:default": [name + "_linux"],
        }),
        visibility = ["//visibility:public"],
    )
# RapidJSON: header-only JSON parser/serializer.
package(default_visibility = ["//visibility:public"])
licenses(["notice"]) # MIT/JSON license
cc_library(
 name = "rapidjson",
 srcs = glob([
 "include/**/*.h",
 ]),
 copts = [],
 includes = [
 "include",
 ],
)
\ No newline at end of file
"""Adds repositories/archives."""
########################################################################
# DO NOT EDIT THIS FILE unless you are inside the
# https://github.com/3rdparty/bazel-rules-openssl repository. If you
# encounter it anywhere else it is because it has been copied there in
# order to simplify adding transitive dependencies. If you want a
# different version of bazel-rules-openssl follow the Bazel build
# instructions at https://github.com/3rdparty/bazel-rules-openssl.
########################################################################
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
def repos(external = True, repo_mapping = {}):
maybe(
http_archive,
name = "rules_foreign_cc",
url = "https://github.com/bazelbuild/rules_foreign_cc/archive/0.5.1.tar.gz",
sha256 = "33a5690733c5cc2ede39cb62ebf89e751f2448e27f20c8b2fbbc7d136b166804",
strip_prefix = "rules_foreign_cc-0.5.1",
repo_mapping = repo_mapping,
)
if external:
maybe(
git_repository,
name = "com_github_3rdparty_bazel_rules_openssl",
remote = "https://github.com/3rdparty/bazel-rules-openssl",
commit = "fd41d1a19c75dd82979c76fc0b2aadd6c4393e89",
shallow_since = "1632212203 +0000",
repo_mapping = repo_mapping,
)
\ No newline at end of file
# Snappy compression library; the public stubs header is generated below.
package(default_visibility = ["//visibility:public"])
licenses(["notice"]) # BSD 3-Clause
exports_files(["COPYING"])
cc_library(
 name = "snappy",
 srcs = glob(
 [
 "*.cc",
 "*.h",
 ],
 exclude = [
 "*test.*",
 "*fuzzer.*",
 ],
 ),
 hdrs = [
 "snappy-stubs-public.h",
 ],
 copts = [],
 includes = ["."],
)
# Emulate CMake's configure step for the stubs header (version 1.1.8).
genrule(
 name = "snappy_stubs_public_h",
 srcs = ["snappy-stubs-public.h.in"],
 outs = ["snappy-stubs-public.h"],
 cmd = ("sed " +
 "-e 's/$${HAVE_SYS_UIO_H_01}/HAVE_SYS_UIO_H/g' " +
 "-e 's/$${PROJECT_VERSION_MAJOR}/1/g' " +
 "-e 's/$${PROJECT_VERSION_MINOR}/1/g' " +
 "-e 's/$${PROJECT_VERSION_PATCH}/8/g' " +
 "$< >$@"),
)
\ No newline at end of file
# Description:
# soralog library (structured logging, consumed by libp2p).
package(default_visibility = ["//visibility:public"])
cc_library(
 name = "soralog",
 # NOTE(review): "src/**/*.cpp" already matches "src/*.cpp"; the second
 # pattern is redundant but harmless.
 srcs = glob(
 [
 "src/**/*.cpp",
 "src/*.cpp",
 ]
 ),
 hdrs = glob([
 "include/soralog/*.hpp",
 "include/soralog/**/*.hpp",
 ]
 ),
 copts = ['--std=c++17'],
 includes = [
 "include/",
 ],
 include_prefix = "include",
 deps = [
 "@fmt",
 "@com_github_jbeder_yaml_cpp//:yaml-cpp",
 ],
)
\ No newline at end of file
# Description:
# sqlite modern-cpp wrapper library (header-only).
package(default_visibility = ["//visibility:public"])
cc_library(
 name = "sqlite-cpp",
 # Header-only: srcs glob is intentionally empty.
 srcs = glob(
 [
 # "src/**/*.cpp",
 # "src/**/*.hpp",
 ],
 ),
 hdrs = glob([
 "hdr/sqlite_modern_cpp/**/*.h",
 "hdr/sqlite_modern_cpp/*.h",
 ]
 ),
 copts = ['--std=c++17'],
 includes = [
 "hdr/sqlite_modern_cpp/",
 ],
 include_prefix = "hdr/",
 deps = [
 ],
)
\ No newline at end of file
# Description:
# Apache Thrift library — minimal subset needed by Parquet (binary/compact
# protocols plus buffer transports); version/config headers are generated.
package(default_visibility = ["//visibility:public"])
licenses(["notice"]) # Apache 2.0
exports_files(["LICENSE"])
cc_library(
 name = "thrift",
 srcs = glob([
 "lib/cpp/src/thrift/**/*.h",
 ]) + [
 "lib/cpp/src/thrift/protocol/TProtocol.cpp",
 "lib/cpp/src/thrift/transport/TBufferTransports.cpp",
 "lib/cpp/src/thrift/transport/TTransportException.cpp",
 ],
 hdrs = [
 "compiler/cpp/src/thrift/version.h",
 "lib/cpp/src/thrift/config.h",
 ],
 includes = [
 "lib/cpp/src",
 ],
 # .tcc template-implementation files are #included, not compiled.
 textual_hdrs = [
 "lib/cpp/src/thrift/protocol/TBinaryProtocol.tcc",
 "lib/cpp/src/thrift/protocol/TCompactProtocol.tcc",
 ],
 deps = [
 "@boost//:asio"
 ],
)
# Generate version.h, hard-coding Thrift 0.12.0.
genrule(
 name = "version_h",
 srcs = [
 "compiler/cpp/src/thrift/version.h.in",
 ],
 outs = [
 "compiler/cpp/src/thrift/version.h",
 ],
 cmd = "sed 's/@PACKAGE_VERSION@/0.12.0/g' $< > $@",
)
# Emulate CMake's configure step for config.h with sed substitutions.
genrule(
 name = "config_h",
 srcs = ["build/cmake/config.h.in"],
 outs = ["lib/cpp/src/thrift/config.h"],
 cmd = ("sed " +
 "-e 's/cmakedefine/define/g' " +
 "-e 's/$${PACKAGE}/thrift/g' " +
 "-e 's/$${PACKAGE_BUGREPORT}//g' " +
 "-e 's/$${PACKAGE_NAME}/thrift/g' " +
 "-e 's/$${PACKAGE_TARNAME}/thrift/g' " +
 "-e 's/$${PACKAGE_URL}//g' " +
 "-e 's/$${PACKAGE_VERSION}/0.12.0/g' " +
 "-e 's/$${PACKAGE_STRING}/thrift 0.12.0/g' " +
 "$< >$@"),
)
\ No newline at end of file
# xsimd: header-only SIMD abstraction library.
package(default_visibility = ["//visibility:public"])
licenses(["notice"]) # BSD 3-Clause
exports_files(["LICENSE"])
cc_library(
 name = "xsimd",
 srcs = [],
 hdrs = glob(
 [
 "include/xsimd/*.hpp",
 "include/xsimd/config/*.hpp",
 "include/xsimd/math/*.hpp",
 "include/xsimd/memory/*.hpp",
 "include/xsimd/stl/*.hpp",
 "include/xsimd/types/*.hpp",
 "include/xsimd/arch/**/*.hpp",
 ],
 exclude = [
 ],
 ),
 copts = [],
 defines = [],
 includes = [
 "include",
 ],
 linkopts = [],
 visibility = ["//visibility:public"],
 deps = [
 ],
)
\ No newline at end of file
# xz/liblzma BUILD prologue: CPU-based settings used to pick the source list
# (Apple Silicon excludes the x86-only encoder match-finder files below).
package(default_visibility = ["//visibility:public"])
licenses(["notice"]) # public domain
config_setting(
 name = "darwin_arm64",
 values = {
 "cpu": "darwin_arm64"
 }
)
config_setting(
 name = "darwin_x86_64",
 values = {
 "cpu": "darwin_x86_64"
 }
)
cc_library(
name = "lzma",
srcs = select ({
"darwin_arm64": [
"src/common/tuklib_cpucores.c",
"src/common/tuklib_physmem.c",
"src/liblzma/check/check.c",
"src/liblzma/check/crc32_fast.c",
"src/liblzma/check/crc32_table.c",
"src/liblzma/check/crc64_fast.c",
"src/liblzma/check/crc64_table.c",
"src/liblzma/check/sha256.c",
"src/liblzma/common/alone_decoder.c",
"src/liblzma/common/alone_encoder.c",
"src/liblzma/common/auto_decoder.c",
"src/liblzma/common/block_buffer_decoder.c",
"src/liblzma/common/block_buffer_encoder.c",
"src/liblzma/common/block_decoder.c",
"src/liblzma/common/block_encoder.c",
"src/liblzma/common/block_header_decoder.c",
"src/liblzma/common/block_header_encoder.c",
"src/liblzma/common/block_util.c",
"src/liblzma/common/common.c",
"src/liblzma/common/easy_buffer_encoder.c",
"src/liblzma/common/easy_decoder_memusage.c",
"src/liblzma/common/easy_encoder.c",
"src/liblzma/common/easy_encoder_memusage.c",
"src/liblzma/common/easy_preset.c",
"src/liblzma/common/filter_buffer_decoder.c",
"src/liblzma/common/filter_buffer_encoder.c",
"src/liblzma/common/filter_common.c",
"src/liblzma/common/filter_decoder.c",
"src/liblzma/common/filter_encoder.c",
"src/liblzma/common/filter_flags_decoder.c",
"src/liblzma/common/filter_flags_encoder.c",
"src/liblzma/common/hardware_cputhreads.c",
"src/liblzma/common/hardware_physmem.c",
"src/liblzma/common/index.c",
"src/liblzma/common/index_decoder.c",
"src/liblzma/common/index_encoder.c",
"src/liblzma/common/index_hash.c",
"src/liblzma/common/outqueue.c",
"src/liblzma/common/stream_buffer_decoder.c",
"src/liblzma/common/stream_buffer_encoder.c",
"src/liblzma/common/stream_decoder.c",
"src/liblzma/common/stream_encoder.c",
"src/liblzma/common/stream_encoder_mt.c",
"src/liblzma/common/stream_flags_common.c",
"src/liblzma/common/stream_flags_decoder.c",
"src/liblzma/common/stream_flags_encoder.c",
"src/liblzma/common/vli_decoder.c",
"src/liblzma/common/vli_encoder.c",
"src/liblzma/common/vli_size.c",
"src/liblzma/delta/delta_common.c",
"src/liblzma/delta/delta_decoder.c",
"src/liblzma/delta/delta_encoder.c",
"src/liblzma/lz/lz_decoder.c",
# "src/liblzma/lz/lz_encoder.c", # TODO only for x86
# "src/liblzma/lz/lz_encoder_mf.c", // TODO only for x86
"src/liblzma/lzma/fastpos_table.c",
"src/liblzma/lzma/lzma2_decoder.c",
"src/liblzma/lzma/lzma2_encoder.c",
"src/liblzma/lzma/lzma_decoder.c",
"src/liblzma/lzma/lzma_encoder.c",
# "src/liblzma/lzma/lzma_encoder_optimum_fast.c", // TODO only for x86
# "src/liblzma/lzma/lzma_encoder_optimum_normal.c",// TODO only for x86
"src/liblzma/lzma/lzma_encoder_presets.c",
"src/liblzma/rangecoder/price_table.c",
"src/liblzma/simple/arm.c",
"src/liblzma/simple/armthumb.c",
"src/liblzma/simple/ia64.c",
"src/liblzma/simple/powerpc.c",
"src/liblzma/simple/simple_coder.c",
"src/liblzma/simple/simple_decoder.c",
"src/liblzma/simple/simple_encoder.c",
"src/liblzma/simple/sparc.c",
"src/liblzma/simple/x86.c",
],
"//conditions:default": [
"src/common/tuklib_cpucores.c",
"src/common/tuklib_physmem.c",
"src/liblzma/check/check.c",
"src/liblzma/check/crc32_fast.c",
"src/liblzma/check/crc32_table.c",
"src/liblzma/check/crc64_fast.c",
"src/liblzma/check/crc64_table.c",
"src/liblzma/check/sha256.c",
"src/liblzma/common/alone_decoder.c",
"src/liblzma/common/alone_encoder.c",
"src/liblzma/common/auto_decoder.c",
"src/liblzma/common/block_buffer_decoder.c",
"src/liblzma/common/block_buffer_encoder.c",
"src/liblzma/common/block_decoder.c",
"src/liblzma/common/block_encoder.c",
"src/liblzma/common/block_header_decoder.c",
"src/liblzma/common/block_header_encoder.c",
"src/liblzma/common/block_util.c",
"src/liblzma/common/common.c",
"src/liblzma/common/easy_buffer_encoder.c",
"src/liblzma/common/easy_decoder_memusage.c",
"src/liblzma/common/easy_encoder.c",
"src/liblzma/common/easy_encoder_memusage.c",
"src/liblzma/common/easy_preset.c",
"src/liblzma/common/filter_buffer_decoder.c",
"src/liblzma/common/filter_buffer_encoder.c",
"src/liblzma/common/filter_common.c",
"src/liblzma/common/filter_decoder.c",
"src/liblzma/common/filter_encoder.c",
"src/liblzma/common/filter_flags_decoder.c",
"src/liblzma/common/filter_flags_encoder.c",
"src/liblzma/common/hardware_cputhreads.c",
"src/liblzma/common/hardware_physmem.c",
"src/liblzma/common/index.c",
"src/liblzma/common/index_decoder.c",
"src/liblzma/common/index_encoder.c",
"src/liblzma/common/index_hash.c",
"src/liblzma/common/outqueue.c",
"src/liblzma/common/stream_buffer_decoder.c",
"src/liblzma/common/stream_buffer_encoder.c",
"src/liblzma/common/stream_decoder.c",
"src/liblzma/common/stream_encoder.c",
"src/liblzma/common/stream_encoder_mt.c",
"src/liblzma/common/stream_flags_common.c",
"src/liblzma/common/stream_flags_decoder.c",
"src/liblzma/common/stream_flags_encoder.c",
"src/liblzma/common/vli_decoder.c",
"src/liblzma/common/vli_encoder.c",
"src/liblzma/common/vli_size.c",
"src/liblzma/delta/delta_common.c",
"src/liblzma/delta/delta_decoder.c",
"src/liblzma/delta/delta_encoder.c",
"src/liblzma/lz/lz_decoder.c",
"src/liblzma/lz/lz_encoder.c", # TODO only for x86
"src/liblzma/lz/lz_encoder_mf.c", # TODO only for x86
"src/liblzma/lzma/fastpos_table.c",
"src/liblzma/lzma/lzma2_decoder.c",
"src/liblzma/lzma/lzma2_encoder.c",
"src/liblzma/lzma/lzma_decoder.c",
"src/liblzma/lzma/lzma_encoder.c",
"src/liblzma/lzma/lzma_encoder_optimum_fast.c", # TODO only for x86
"src/liblzma/lzma/lzma_encoder_optimum_normal.c",# TODO only for x86
"src/liblzma/lzma/lzma_encoder_presets.c",
"src/liblzma/rangecoder/price_table.c",
"src/liblzma/simple/arm.c",
"src/liblzma/simple/armthumb.c",
"src/liblzma/simple/ia64.c",
"src/liblzma/simple/powerpc.c",
"src/liblzma/simple/simple_coder.c",
"src/liblzma/simple/simple_decoder.c",
"src/liblzma/simple/simple_encoder.c",
"src/liblzma/simple/sparc.c",
"src/liblzma/simple/x86.c",
]
}),
hdrs = glob(["src/**/*.h"]) + ["config.h"],
copts = select({
"@bazel_tools//src/conditions:windows": [],
"//conditions:default": [
"-std=c99",
],
}),
defines = [
"HAVE_CONFIG_H",
"LZMA_API_STATIC",
],
includes = [
".",
"src",
"src/common",
"src/liblzma",
"src/liblzma/api",
"src/liblzma/check",
"src/liblzma/common",
"src/liblzma/delta",
"src/liblzma/lz",
"src/liblzma/lzma",
"src/liblzma/rangecoder",
"src/liblzma/simple",
],
linkopts = ["-lpthread"],
visibility = ["//visibility:public"],
)
# Synthesises the autoconf-style config.h that liblzma expects when built
# with HAVE_CONFIG_H.  The header is written verbatim through a quoted
# heredoc; three preprocessor branches select platform feature macros:
# MSVC, Apple, and a POSIX/Linux default.
genrule(
    name = "configure",
    outs = ["config.h"],
    cmd = "\n".join([
        # 'EOF' is quoted, so the heredoc body is emitted literally; $@ is
        # expanded by Bazel to the declared output before the shell runs.
        "cat <<'EOF' >$@",
        "",
        # --- MSVC: minimal feature set, Vista-era threading ---
        "#if defined(_MSC_VER)",
        "",
        "#define HAVE_VISIBILITY 0",
        "#define MYTHREAD_VISTA 1",
        "",
        # --- Apple: BSD-style feature probes, sysctl CPU count ---
        "#elif defined(__APPLE__)",
        "",
        "#define TUKLIB_CPUCORES_SYSCTL 1",
        "#define ENABLE_NLS 1",
        "#define HAVE_CLOCK_GETTIME 1",
        "#define HAVE_DCGETTEXT 1",
        "#define HAVE_DECL_CLOCK_MONOTONIC 1",
        "#define HAVE_DECL_PROGRAM_INVOCATION_NAME 1",
        "#define HAVE_DLFCN_H 1",
        "#define HAVE_FCNTL_H 1",
        "#define HAVE_FUTIMENS 1",
        "#define HAVE_GETOPT_H 1",
        "#define HAVE_GETOPT_LONG 1",
        "#define HAVE_GETTEXT 1",
        "#define HAVE_IMMINTRIN_H 1",
        "#define HAVE_MBRTOWC 1",
        "#define HAVE_MEMORY_H 1",
        "#define HAVE_POSIX_FADVISE 1",
        "#define HAVE_PTHREAD_PRIO_INHERIT 1",
        "#define HAVE_STRINGS_H 1",
        "#define HAVE_SYS_TIME_H 1",
        "#define HAVE_STRUCT_STAT_ST_ATIM_TV_NSEC 1",
        "#define HAVE_SYS_PARAM_H 1",
        "#define HAVE_SYS_STAT_H 1",
        "#define HAVE_SYS_TYPES_H 1",
        "#define HAVE_UINTPTR_T 1",
        "#define HAVE_UNISTD_H 1",
        "#define HAVE_WCWIDTH 1",
        "#define HAVE__MM_MOVEMASK_EPI8 1",
        "#define HAVE_VISIBILITY 0",
        "#define MYTHREAD_POSIX 1",
        "",
        "#define _ALL_SOURCE 1",
        "#define _GNU_SOURCE 1",
        "#define _POSIX_PTHREAD_SEMANTICS 1",
        "#define _TANDEM_SOURCE 1",
        "#define __EXTENSIONS__ 1",
        "",
        "#define _DARWIN_USE_64_BIT_INODE 1",
        "",
        # --- default (Linux/POSIX): glibc byteswap, sched_getaffinity ---
        "#else",
        "",
        "#define TUKLIB_CPUCORES_SCHED_GETAFFINITY 1",
        "#define ENABLE_NLS 1",
        "#define HAVE_BSWAP_16 1",
        "#define HAVE_BSWAP_32 1",
        "#define HAVE_BSWAP_64 1",
        "#define HAVE_BYTESWAP_H 1",
        "#define HAVE_CLOCK_GETTIME 1",
        "#define HAVE_DCGETTEXT 1",
        "#define HAVE_DECL_CLOCK_MONOTONIC 1",
        "#define HAVE_DECL_PROGRAM_INVOCATION_NAME 1",
        "#define HAVE_DLFCN_H 1",
        "#define HAVE_FCNTL_H 1",
        "#define HAVE_FUTIMENS 1",
        "#define HAVE_GETOPT_H 1",
        "#define HAVE_GETOPT_LONG 1",
        "#define HAVE_GETTEXT 1",
        # immintrin.h is only present for x86-64 targets.
        "#if defined __x86_64__",
        "#define HAVE_IMMINTRIN_H 1",
        "#endif",
        "#define HAVE_MBRTOWC 1",
        "#define HAVE_MEMORY_H 1",
        "#define HAVE_POSIX_FADVISE 1",
        "#define HAVE_PTHREAD_PRIO_INHERIT 1",
        "#define HAVE_PTHREAD_CONDATTR_SETCLOCK 1",
        "#define HAVE_STRINGS_H 1",
        "#define HAVE_SYS_TIME_H 1",
        "#define HAVE_STRUCT_STAT_ST_ATIM_TV_NSEC 1",
        "#define HAVE_SYS_PARAM_H 1",
        "#define HAVE_SYS_STAT_H 1",
        "#define HAVE_SYS_TYPES_H 1",
        "#define HAVE_UINTPTR_T 1",
        "#define HAVE_UNISTD_H 1",
        "#define HAVE_WCWIDTH 1",
        "#define HAVE__MM_MOVEMASK_EPI8 1",
        "#define HAVE_VISIBILITY 0",
        "#define MYTHREAD_POSIX 1",
        "",
        "#define _ALL_SOURCE 1",
        "#define _GNU_SOURCE 1",
        "#define _POSIX_PTHREAD_SEMANTICS 1",
        "#define _TANDEM_SOURCE 1",
        "#define __EXTENSIONS__ 1",
        "",
        "#endif",
        "",
        # --- platform-independent feature set (coders, match finders, ids) ---
        "#define ASSUME_RAM 128",
        "#define HAVE_CHECK_CRC32 1",
        "#define HAVE_CHECK_CRC64 1",
        "#define HAVE_CHECK_SHA256 1",
        "#define HAVE_DECODERS 1",
        "#define HAVE_DECODER_ARM 1",
        "#define HAVE_DECODER_ARMTHUMB 1",
        "#define HAVE_DECODER_DELTA 1",
        "#define HAVE_DECODER_IA64 1",
        "#define HAVE_DECODER_LZMA1 1",
        "#define HAVE_DECODER_LZMA2 1",
        "#define HAVE_DECODER_POWERPC 1",
        "#define HAVE_DECODER_SPARC 1",
        "#define HAVE_DECODER_X86 1",
        "#define HAVE_ENCODERS 1",
        "#define HAVE_ENCODER_ARM 1",
        "#define HAVE_ENCODER_ARMTHUMB 1",
        "#define HAVE_ENCODER_DELTA 1",
        "#define HAVE_ENCODER_IA64 1",
        "#define HAVE_ENCODER_LZMA1 1",
        "#define HAVE_ENCODER_LZMA2 1",
        "#define HAVE_ENCODER_POWERPC 1",
        "#define HAVE_ENCODER_SPARC 1",
        "#define HAVE_ENCODER_X86 1",
        "#define HAVE_INTTYPES_H 1",
        "#define HAVE_LIMITS_H 1",
        "#define HAVE_MF_BT2 1",
        "#define HAVE_MF_BT3 1",
        "#define HAVE_MF_BT4 1",
        "#define HAVE_MF_HC3 1",
        "#define HAVE_MF_HC4 1",
        "#define HAVE_STDBOOL_H 1",
        "#define HAVE_STDINT_H 1",
        "#define HAVE_STDLIB_H 1",
        "#define HAVE_STRING_H 1",
        "#define HAVE__BOOL 1",
        "#define LT_OBJDIR \".libs/\"",
        "#define NDEBUG 1",
        "#define PACKAGE \"xz\"",
        "#define PACKAGE_BUGREPORT \"lasse.collin@tukaani.org\"",
        "#define PACKAGE_NAME \"XZ Utils\"",
        "#define PACKAGE_STRING \"XZ Utils 5.2.4\"",
        "#define PACKAGE_TARNAME \"xz\"",
        "#define PACKAGE_URL \"https://tukaani.org/xz/\"",
        "#define PACKAGE_VERSION \"5.2.4\"",
        "#define SIZEOF_SIZE_T 8",
        "#define STDC_HEADERS 1",
        "#define TUKLIB_FAST_UNALIGNED_ACCESS 1",
        "#define TUKLIB_PHYSMEM_SYSCONF 1",
        "#define VERSION \"5.2.4\"",
        "",
        "EOF",
    ]),
)
\ No newline at end of file
package(default_visibility = ["//visibility:public"])
licenses(["notice"]) # BSD/MIT-like license (for zlib)
# zlib together with the minizip contrib helpers (ioapi/unzip) as one
# static library.
cc_library(
    name = "zlib",
    # All top-level zlib sources and headers plus the minizip additions.
    srcs = glob([
        "*.c",
        "*.h",
    ]) + [
        "contrib/minizip/ioapi.c",
        "contrib/minizip/ioapi.h",
        "contrib/minizip/unzip.c",
        "contrib/minizip/unzip.h",
    ],
    # Only zlib.h is exported; every other header stays private.
    hdrs = [
        "zlib.h",
    ],
    copts = select({
        "@bazel_tools//src/conditions:windows": [],
        "//conditions:default": [
            # Silence a benign warning in the inflate code; unistd.h is
            # available on non-Windows hosts.
            "-Wno-shift-negative-value",
            "-DZ_HAVE_UNISTD_H",
        ],
    }),
    includes = [
        ".",
        "contrib/minizip",
    ],
)
\ No newline at end of file
# Description:
# Zstandard library
licenses(["notice"]) # BSD license
exports_files(["LICENSE"])
cc_library(
    name = "zstd",
    srcs = glob(
        [
            "lib/common/*.h",
            "lib/common/*.c",
            "lib/compress/*.c",
            "lib/compress/*.h",
            "lib/decompress/*.c",
            "lib/decompress/*.h",
        ],
        # xxhash.c is #included by other translation units, so it must not
        # be compiled on its own; it is shipped as a textual header below.
        exclude = [
            "lib/common/xxhash.c",
        ],
    ),
    hdrs = [
        "lib/zstd.h",
    ],
    # XXH_PRIVATE_API keeps xxhash symbols internal; the *_VISIBILITY macros
    # are blanked because a static library needs no export annotations.
    defines = [
        "XXH_PRIVATE_API",
        "ZSTDLIB_VISIBILITY=",
        "ZSTDERRORLIB_VISIBILITY=",
    ],
    includes = [
        "lib",
        "lib/common",
    ],
    linkopts = [],
    textual_hdrs = [
        "lib/common/xxhash.c",
    ],
    visibility = ["//visibility:public"],
)
\ No newline at end of file
# Copyright 2022 Primihub
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version: 1.0
node: "node0"
location: "127.0.0.1"
grpc_port: 50050
# node_keypair:
# public_key:
# private_key:
# load datasets
datasets:
- description: "train_party_0"
model: "csv"
source: "/tmp/train_party_0.csv"
- description: "test_party_0"
model: "csv"
source: "/tmp/test_party_0.csv"
# Dataset authorization
# authorization:
# - node:
# task:
# p2p parameters
p2p:
bootstrap_nodes:
- "/ip4/127.0.0.1/tcp/4001/ipfs/QmP2C45o2vZfy1JXWFZDUEzrQCigMtd4r3nesvArV8dFKd"
- "/ip4/127.0.0.1/tcp/4001/ipfs/QmdSyhb8eR9dDSR5jjnRoTDBwpBCSAjT7WueKJ9cQArYoA"
multi_addr: "/ip4/127.0.0.1/tcp/8886"
# Copyright 2022 Primihub
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version: 1.0
node: "node1"
location: "127.0.0.1"
grpc_port: 50051
# load datasets
datasets:
- description: "train_party_1"
model: "csv"
source: "/tmp/train_party_1.csv"
- description: "test_party_1"
model: "csv"
source: "/tmp/test_party_1.csv"
- description: "guest_dataset"
model: "csv"
source: "/tmp/test_party_1.csv"
# p2p parameters
p2p:
bootstrap_nodes:
- "/ip4/127.0.0.1/tcp/4001/ipfs/QmP2C45o2vZfy1JXWFZDUEzrQCigMtd4r3nesvArV8dFKd"
- "/ip4/127.0.0.1/tcp/4001/ipfs/QmdSyhb8eR9dDSR5jjnRoTDBwpBCSAjT7WueKJ9cQArYoA"
multi_addr: "/ip4/127.0.0.1/tcp/8888"
# Copyright 2022 Primihub
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version: 1.0
node: "node2"
location: "127.0.0.1"
grpc_port: 50052
# load datasets
datasets:
- description: "train_party_2"
model: "csv"
source: "/tmp/train_party_2.csv"
- description: "test_party_2"
model: "csv"
source: "/tmp/test_party_2.csv"
- description: "label_dataset"
model: "csv"
source: "/tmp/test_party_2.csv"
# p2p parameters
p2p:
bootstrap_nodes:
- "/ip4/127.0.0.1/tcp/4001/ipfs/QmP2C45o2vZfy1JXWFZDUEzrQCigMtd4r3nesvArV8dFKd"
- "/ip4/127.0.0.1/tcp/4001/ipfs/QmdSyhb8eR9dDSR5jjnRoTDBwpBCSAjT7WueKJ9cQArYoA"
multi_addr: "/ip4/127.0.0.1/tcp/8889"
version: 1.0
node: "node0"
location: "172.28.1.10"
grpc_port: 50050
datasets:
- description: "train_party_0"
model: "csv"
source: "/tmp/train_party_0.csv"
- description: "test_party_0"
model: "csv"
source: "/tmp/test_party_0.csv"
p2p:
bootstrap_nodes:
- "/ip4/172.28.1.13/tcp/4001/ipfs/QmP2C45o2vZfy1JXWFZDUEzrQCigMtd4r3nesvArV8dFKd"
- "/ip4/172.28.1.13/tcp/4001/ipfs/QmdSyhb8eR9dDSR5jjnRoTDBwpBCSAjT7WueKJ9cQArYoA"
multi_addr: "/ip4/172.28.1.10/tcp/8880"
\ No newline at end of file
version: 1.0
node: "node1"
location: "172.28.1.11"
grpc_port: 50050
datasets:
- description: "train_party_1"
model: "csv"
source: "/tmp/train_party_1.csv"
- description: "test_party_1"
model: "csv"
source: "/tmp/test_party_1.csv"
p2p:
bootstrap_nodes:
- "/ip4/172.28.1.13/tcp/4001/ipfs/QmP2C45o2vZfy1JXWFZDUEzrQCigMtd4r3nesvArV8dFKd"
- "/ip4/172.28.1.13/tcp/4001/ipfs/QmdSyhb8eR9dDSR5jjnRoTDBwpBCSAjT7WueKJ9cQArYoA"
multi_addr: "/ip4/172.28.1.11/tcp/8881"
\ No newline at end of file
version: 1.0
node: "node2"
location: "172.28.1.12"
grpc_port: 50050
datasets:
- description: "train_party_2"
model: "csv"
source: "/tmp/train_party_2.csv"
- description: "test_party_2"
model: "csv"
source: "/tmp/test_party_2.csv"
p2p:
bootstrap_nodes:
- "/ip4/172.28.1.13/tcp/4001/ipfs/QmP2C45o2vZfy1JXWFZDUEzrQCigMtd4r3nesvArV8dFKd"
- "/ip4/172.28.1.13/tcp/4001/ipfs/QmdSyhb8eR9dDSR5jjnRoTDBwpBCSAjT7WueKJ9cQArYoA"
multi_addr: "/ip4/172.28.1.12/tcp/8882"
\ No newline at end of file
version: '3.9'
services:
node0:
build:
context: .
dockerfile: Dockerfile
restart: "no"
hostname: node0_primihub
container_name: node0_primihub
ports:
- "8050:50050"
- "10120:12120"
- "10121:12121"
volumes:
- ./config:/app/config
extra_hosts:
- "node0_primihub:172.28.1.10"
- "node1_primihub:172.28.1.11"
- "node2_primihub:172.28.1.12"
working_dir: /app
entrypoint:
- "/bin/bash"
- "-c"
- "./primihub-node --service_port=50050 --node_id=node0 --config=/app/config/primihub_node0.yaml"
networks:
dev_net:
ipv4_address: 172.28.1.10
depends_on:
- simple_bootstrap_node
# node1
node1:
build:
context: .
dockerfile: Dockerfile
restart: "no"
hostname: node1_primihub
container_name: node1_primihub
ports:
- "8051:50050"
- "11120:12120"
- "11121:12121"
volumes:
- ./config:/app/config
extra_hosts:
- "node0_primihub:172.28.1.10"
- "node1_primihub:172.28.1.11"
- "node2_primihub:172.28.1.12"
working_dir: /app
entrypoint:
- "/bin/bash"
- "-c"
- "./primihub-node --service_port=50050 --node_id=node1 --config=/app/config/primihub_node1.yaml"
networks:
dev_net:
ipv4_address: 172.28.1.11
depends_on:
- simple_bootstrap_node
node2:
build:
context: .
dockerfile: Dockerfile
restart: "no"
hostname: node2_primihub
container_name: node2_primihub
ports:
- "8052:50050"
- "12120:12120"
- "12121:12121"
volumes:
- ./config:/app/config
extra_hosts:
- "node0_primihub:172.28.1.10"
- "node1_primihub:172.28.1.11"
- "node2_primihub:172.28.1.12"
working_dir: /app
entrypoint:
- "/bin/bash"
- "-c"
- "./primihub-node --service_port=50050 --node_id=node2 --config=/app/config/primihub_node2.yaml"
networks:
dev_net:
ipv4_address: 172.28.1.12
depends_on:
- simple_bootstrap_node
simple_bootstrap_node:
image: primihub/simple-bootstrap-node:1.0
container_name: simple_bootstrap_node
restart: "always"
ports:
- "4001:4001"
entrypoint:
- "/app/simple-bootstrap-node"
networks:
dev_net:
ipv4_address: 172.28.1.13
networks:
dev_net:
driver: bridge
ipam:
config:
- subnet: 172.28.0.0/16
gateway: 172.28.0.1
\ No newline at end of file
version: '3.9'
services:
node0:
image: primihub/primihub-node:1.0.3
restart: "no"
hostname: node0_primihub
container_name: node0_primihub
ports:
- "8050:50050"
- "10120:12120"
- "10121:12121"
volumes:
- ./config:/app/config
extra_hosts:
- "node0_primihub:172.28.1.10"
- "node1_primihub:172.28.1.11"
- "node2_primihub:172.28.1.12"
working_dir: /app
entrypoint:
- "/bin/bash"
- "-c"
- "./primihub-node --service_port=50050 --node_id=node0 --config=/app/config/primihub_node0.yaml"
networks:
testing_net:
ipv4_address: 172.28.1.10
depends_on:
- simple_bootstrap_node
node1:
image: primihub/primihub-node:1.0.3
restart: "no"
hostname: node1_primihub
container_name: node1_primihub
ports:
- "8051:50050"
- "11120:12120"
- "11121:12121"
volumes:
- ./config:/app/config
extra_hosts:
- "node0_primihub:172.28.1.10"
- "node1_primihub:172.28.1.11"
- "node2_primihub:172.28.1.12"
working_dir: /app
entrypoint:
- "/bin/bash"
- "-c"
- "./primihub-node --service_port=50050 --node_id=node1 --config=/app/config/primihub_node1.yaml"
networks:
testing_net:
ipv4_address: 172.28.1.11
depends_on:
- simple_bootstrap_node
node2:
image: primihub/primihub-node:1.0.3
restart: "no"
hostname: node2_primihub
container_name: node2_primihub
ports:
- "8052:50050"
- "12120:12120"
- "12121:12121"
volumes:
- ./config:/app/config
extra_hosts:
- "node0_primihub:172.28.1.10"
- "node1_primihub:172.28.1.11"
- "node2_primihub:172.28.1.12"
working_dir: /app
entrypoint:
- "/bin/bash"
- "-c"
- "./primihub-node --service_port=50050 --node_id=node2 --config=/app/config/primihub_node2.yaml"
networks:
testing_net:
ipv4_address: 172.28.1.12
depends_on:
- simple_bootstrap_node
simple_bootstrap_node:
image: primihub/simple-bootstrap-node:1.0
container_name: simple_bootstrap_node
restart: "always"
ports:
- "4001:4001"
entrypoint:
- "/app/simple-bootstrap-node"
networks:
testing_net:
ipv4_address: 172.28.1.13
networks:
testing_net:
driver: bridge
ipam:
config:
- subnet: 172.28.0.0/16
gateway: 172.28.0.1
\ No newline at end of file
package(
default_visibility = [
"//visibility:public",
],
)
load("@rules_foreign_cc//tools/build_defs:boost_build.bzl", "boost_build")
# Boost.Fiber built from the @boost source tree via rules_foreign_cc's
# b2 wrapper; it depends on Boost.Context for its stack switching.
boost_build(
    name = "boost_fiber",
    lib_source = "@boost//:all",
    static_libraries = ["libboost_fiber.a"],
    user_options = ["--with-fiber"],
    visibility = ["//visibility:public"],
    deps = [":boost_context"],
)
# Boost.Context, built separately so fiber (above) can link against it.
boost_build(
    name = "boost_context",
    lib_source = "@boost//:all",
    static_libraries = ["libboost_context.a"],
    user_options = ["--with-context"],
    visibility = ["//visibility:public"],
)
# Synthesises cryptoTools/Common/config.h (normally generated by CMake).
# Starlark expands the \r\n escapes inside the triple-quoted cmd string,
# so the echo below really emits one config line per CRLF pair.
# NOTE(review): ENABLE_CPP_14 and ENABLE_FULL_GSL are each defined twice in
# the emitted header; the duplicates are identical and therefore harmless,
# but presumably unintentional — confirm before cleaning up.
genrule(
    name = "cryptoTools_config_h",
    outs = [
        "cryptoTools/Common/config.h",
    ],
    cmd = """
set -x
tmpdir="cryptoTools.tmp"
mkdir -p "$${tmpdir}/cryptoTools/Common"
echo "#pragma once \r\n \
#define ENABLE_RELIC ON \r\n \
#define ENABLE_CIRCUITS ON \r\n \
#define ENABLE_FULL_GSL ON \r\n \
#define ENABLE_CPP_14 ON \r\n \
#define ENABLE_BOOST ON \r\n \
#define ENABLE_SSE ON \r\n \
#define ENABLE_CPP_14 ON \r\n \
#define ENABLE_FULL_GSL ON \r\n \
#define ENABLE_NET_LOG ON \r\n \
#define ENABLE_NASM ON \r\n \
#if (defined(_MSC_VER) || defined(__SSE2__)) && defined(ENABLE_SSE) \r\n \
#define ENABLE_SSE_BLAKE2 ON \r\n \
#define OC_ENABLE_SSE2 ON \r\n \
#endif \r\n \
#if (defined(_MSC_VER) || defined(__PCLMUL__)) && defined(ENABLE_SSE) \r\n \
#define OC_ENABLE_PCLMUL \r\n \
#endif \r\n \
#if (defined(_MSC_VER) || defined(__AES__)) && defined(ENABLE_SSE) \r\n \
#define OC_ENABLE_AESNI ON \r\n \
#else \r\n \
#define OC_ENABLE_PORTABLE_AES ON \r\n \
#endif \r\n \
">"$${tmpdir}"/cryptoTools/Common/config.h
ls -ltrh "$${tmpdir}"
mv "$${tmpdir}"/cryptoTools/Common/config.h $(location cryptoTools/Common/config.h)
rm -r -f -- "$${tmpdir}"
""",
    visibility = ["//visibility:public"],
)
# Core cryptoTools library: circuits, common utilities, crypto primitives
# (including blake2), and the Boost-based networking layer.
cc_library(
    name = "cryptoTools",
    srcs = glob(
        ["cryptoTools/Circuit/*.cpp",
        "cryptoTools/Common/*.cpp",
        "cryptoTools/Crypto/*.cpp",
        "cryptoTools/Crypto/blake2/c/*.c",
        "cryptoTools/Crypto/blake2/sse/*.c",
        "cryptoTools/Network/*.cpp"],
    ),
    # The generated config.h is exported alongside the checked-in headers.
    hdrs = [":cryptoTools_config_h"] + glob(
        ["cryptoTools/Circuit/*.h",
        "cryptoTools/Common/*.h",
        "cryptoTools/Crypto/*.h",
        "cryptoTools/Crypto/blake2/c/*.h",
        "cryptoTools/Crypto/blake2/sse/*.h",
        "cryptoTools/Network/*.h"],
    ),
    # The gsl headers carry no .h extension, so they are shipped textually.
    textual_hdrs = [
        "cryptoTools/gsl/span",
        "cryptoTools/gsl/gsl_assert",
        "cryptoTools/gsl/gsl_util",
        "cryptoTools/gsl/gsl_byte",
        "cryptoTools/gsl/gsl_algorithm",
        "cryptoTools/gsl/gsl",
        "cryptoTools/gsl/multi_span",
        "cryptoTools/gsl/string_span",
        "cryptoTools/gsl/gls-lite.hpp",
    ],
    # NOTE(review): ":cryptoTools_config_h" is a label, not a directory;
    # `includes` expects package-relative paths — confirm it has any effect.
    includes = [".", ":cryptoTools_config_h"],
    # BUG FIX: the include flag referenced "@tookit_relic" (typo) while deps
    # below use "@toolkit_relic".  Bazel tokenises each copts entry like a
    # Bourne shell, so this single string expands into individual flags.
    copts = ["-I@toolkit_relic//:relic/include -std=c++14 -O0 -g -ggdb -rdynamic -maes -msse2 -msse3 -msse4.1 -mpclmul -DENABLE_CIRCUITS=ON -DENABLE_RELIC=ON -DENABLE_BOOST=ON -DENABLE_SSE=ON -DRAND=HASHD -DBoost_USE_MULTITHREADED=ON"],
    linkopts = ["-pthread"],
    linkstatic = True,
    deps = [
        ":boost_fiber",
        "@toolkit_relic//:relic",
    ],
)
# Unit-test sources for cryptoTools, built as a library so frontends can
# register and invoke the test list.
cc_library(
    name = "tests_cryptoTools",
    srcs = glob(
        ["tests_cryptoTools/**/*.cpp"],
    ),
    hdrs = glob(
        ["tests_cryptoTools/**/*.h"],
    ),
    # Single space-separated string; Bazel tokenises copts entries like a
    # Bourne shell, so this expands into individual compiler flags.
    copts = ["-std=c++14 -O0 -g -ggdb -rdynamic -maes -msse2 -msse3 -msse4.1 -mpclmul -DENABLE_CIRCUITS=ON -DENABLE_RELIC=ON -DENABLE_BOOST=ON -DBoost_USE_MULTITHREADED=ON"],
    linkopts = ["-pthread"],
    linkstatic = True,
    deps = [
        ":cryptoTools",
        "@toolkit_relic//:relic",
    ],
)
# Frontend demo/driver sources built as a library so both the binary below
# and external consumers can link them.
cc_library(
    name = "lib_frontend_cryptoTools",
    srcs = glob([
        "frontend_cryptoTools/**/*.cpp"
    ],),
    hdrs = glob([
        "frontend_cryptoTools/**/*.h"
    ],),
    # Single string; Bazel tokenises copts entries like a Bourne shell.
    copts = ["-std=c++14 -O0 -g -ggdb -rdynamic -maes -msse2 -msse3 -msse4.1 -mpclmul -DENABLE_CIRCUITS=ON -DENABLE_RELIC=ON -DENABLE_BOOST=ON -DENABLE_SSE=ON -DBoost_USE_MULTITHREADED=ON -DENABLE_FULL_GSL=ON -DENABLE_CPP_14=ON"],
    linkopts = ["-pthread -lstdc++"],
    linkstatic = True,
    deps = [
        ":cryptoTools",
        ":tests_cryptoTools",
        "@toolkit_relic//:relic",
    ],
)
# BUG FIX: glob()'s second positional argument is `exclude`, so the original
# call glob(["**/*.cpp"], ["**/*.h"]) never added the headers to srcs (the
# exclude pattern cannot match a *.cpp include anyway).  Both patterns
# belong in the include list.
cc_binary(
    name = "frontend_cryptoTools",
    srcs = glob([
        "frontend_cryptoTools/**/*.cpp",
        "frontend_cryptoTools/**/*.h",
    ]),
    # NOTE(review): `includes` entries should be directories;
    # "Tutorials/Network.h" is a file — confirm this is intentional.
    includes = [".", "Tutorials/Network.h"],
    copts = ["-std=c++14 -O0 -g -ggdb -rdynamic -maes -msse2 -msse3 -msse4.1 -mpclmul -DENABLE_CIRCUITS=ON -DENABLE_RELIC=ON -DENABLE_BOOST=ON -DENABLE_SSE=ON"],
    linkopts = ["-pthread -lstdc++"],
    linkstatic = False,
    deps = [
        ":lib_frontend_cryptoTools",
        ":cryptoTools",
        ":tests_cryptoTools",
        "@toolkit_relic//:relic",
    ],
)
package(
default_visibility = [
"//visibility:public",
],
)
# Synthesises libOTe/config.h (normally generated by CMake).  Starlark
# expands the \r\n escapes inside the triple-quoted cmd string, so the echo
# emits one line per CRLF pair.  SimplestOT/Kyber are left commented out
# and force-undefined on MSVC where their assembly is unsupported.
genrule(
    name = "libOTe_config_h",
    outs = [
        "libOTe/config.h",
    ],
    cmd = """
set -x
tmpdir="libOTe.tmp"
mkdir -p "$${tmpdir}"
echo "#pragma once \r\n \
#include \\"libOTe/version.h\\" \r\n \
#define LIBOTE_VERSION (LIBOTE_VERSION_MAJOR * 10000 + LIBOTE_VERSION_MINOR * 100 + LIBOTE_VERSION_PATCH) \r\n \
/* #define ENABLE_SIMPLESTOT ON */ \r\n \
/* #define ENABLE_SIMPLESTOT_ASM ON */ \r\n \
#if defined(ENABLE_SIMPLESTOT_ASM) && defined(_MSC_VER) \r\n \
#undef ENABLE_SIMPLESTOT_ASM \r\n \
#pragma message(\\"ENABLE_SIMPLESTOT_ASM should not be defined on windows.\\") \r\n \
#endif \r\n \
#if defined(ENABLE_MR_KYBER) && defined(_MSC_VER) \r\n \
#undef ENABLE_MR_KYBER \r\n \
#pragma message(\\"ENABLE_MR_KYBER should not be defined on windows.\\") \r\n \
#endif \r\n \
">"$${tmpdir}"/config.h
ls -ltrh "$${tmpdir}"
mv "$${tmpdir}"/config.h $(location libOTe/config.h)
rm -r -f -- "$${tmpdir}"
""",
    visibility = ["//visibility:public"],
)
cc_library(
    name = "SimplestOT",
    # BUG FIX: glob()'s second positional argument is `exclude`, so the
    # original glob(["SimplestOT/*.c"], ["SimplestOT/*.s"]) silently dropped
    # the assembly sources.  Both patterns belong in the include list.
    srcs = glob([
        "SimplestOT/*.c",
        "SimplestOT/*.s",
    ]),
    hdrs = glob(
        ["SimplestOT/*.h"],
    ),
    # Pre-computed constant tables shipped as data files, included textually.
    textual_hdrs = glob(["SimplestOT/*.data"]),
    linkopts = ["-pthread"],
    linkstatic = True,
)
cc_library(
    name = "KyberOT",
    # BUG FIX: glob()'s second positional argument is `exclude`, so the
    # original call silently dropped the assembly sources.  Both patterns
    # belong in the include list.
    srcs = glob([
        "KyberOT/**/*.c",
        "KyberOT/**/*.s",
    ]),
    hdrs = glob(
        ["KyberOT/**/*.h"],
    ),
    # Assembly macro definitions, included textually by the .s files.
    textual_hdrs = glob(["KyberOT/**/*.macros"]),
    # Kyber's vectorised code paths require AVX2.
    copts = ["-mavx2"],
    linkopts = ["-pthread"],
    linkstatic = True,
)
cc_library(
    name = "libOTe",
    # BUG FIX: glob() takes (include, exclude); the second list in the
    # original call was treated as an exclude pattern, so the C sources
    # were never compiled.  Both patterns belong in the include list.
    srcs = glob([
        "libOTe/**/*.cpp",
        "libOTe/**/*.c",
    ]),
    # Export the generated config.h alongside the checked-in headers.
    hdrs = [":libOTe_config_h"] + glob(
        ["libOTe/**/*.h"],
    ),
    # NOTE(review): ":libOTe_config_h" is a label, not a directory;
    # `includes` expects package-relative paths — confirm it has any effect.
    includes = ["./", ":libOTe_config_h"],
    # Single string; Bazel tokenises copts entries like a Bourne shell.
    copts = ["-std=c++14 -O0 -g -ggdb -rdynamic -IlibOTe -maes -msse2 -msse3 -msse4.1 -mpclmul"],
    linkopts = ["-pthread"],
    linkstatic = True,
    deps = [
        "@cryptoTools//:cryptoTools",
    ],
)
cc_library(
    name = "libOTe_Tests",
    # BUG FIX: glob() takes (include, exclude); the second list in the
    # original call was treated as an exclude pattern, so any C sources
    # were never compiled.  Both patterns belong in the include list.
    srcs = glob([
        "libOTe_Tests/**/*.cpp",
        "libOTe_Tests/**/*.c",
    ]),
    hdrs = glob(
        ["libOTe_Tests/**/*.h"],
    ),
    # Single string; Bazel tokenises copts entries like a Bourne shell.
    copts = ["-std=c++14 -O0 -g -ggdb -rdynamic -IlibOTe -maes -msse2 -msse3 -msse4.1 -mpclmul"],
    linkopts = ["-pthread"],
    linkstatic = True,
    deps = [
        ":libOTe",
    ],
)
# Frontend demo/driver sources built as a library so both the binary below
# and external consumers can link them.
cc_library(
    name = "lib_frontend_libOTe",
    srcs = glob(["frontend/**/*.cpp"]),
    hdrs = glob(["frontend/**/*.h"]),
    # Single string; Bazel tokenises copts entries like a Bourne shell.
    copts = ["-std=c++14 -O0 -g -ggdb -rdynamic -maes -msse2 -msse3 -msse4.1 -mpclmul -DENABLE_CIRCUITS=ON -DENABLE_RELIC=ON -DENABLE_BOOST=ON -DENABLE_SSE=ON"],
    linkopts = ["-pthread -lstdc++"],
    deps = [
        "@cryptoTools//:tests_cryptoTools",
        ":libOTe_Tests",
        ":SimplestOT",
    ],
)
# BUG FIX: glob()'s second positional argument is `exclude`, so the original
# call never added the frontend headers to srcs.  Both patterns belong in
# the include list.
cc_binary(
    name = "frontend_libOTe",
    srcs = glob([
        "frontend/**/*.cpp",
        "frontend/**/*.h",
    ]) + ["@cryptoTools//:tests_cryptoTools/UnitTests.h"],
    # NOTE(review): "@cryptoTools/tests_cryptoTools/" is not a valid include
    # path, and the -I/-L flags below are not repo-resolved by Bazel —
    # confirm they are leftovers; the tests_cryptoTools dep already
    # propagates its headers.
    includes = ["./", "@cryptoTools/tests_cryptoTools/"],
    copts = ["-I@cryptoTools/tests_cryptoTools/ -std=c++14"],
    linkopts = ["-pthread",
        "-L@external/cryptoTools/tests_cryptoTools/",
    ],
    linkstatic = False,
    deps = [
        "@cryptoTools//:tests_cryptoTools",
        ":lib_frontend_libOTe",
        ":SimplestOT",
    ],
)
#!/bin/sh
# update local python LINK_OPTS
# Queries the local interpreter's linker flags via python3-config and
# rewrites the linkopts line of BUILD.bazel to include them.
# NOTE(review): the hard-coded "354c" sed address replaces *line 354*
# unconditionally and will silently corrupt BUILD.bazel if the file ever
# shifts by a line — confirm the target line before running.
CONFIG=`python3-config --ldflags` \
&& NEWLINE="\ \ \ \ linkopts = LINK_OPTS + [\"${CONFIG}\"]," \
&& sed -i "354c ${NEWLINE}" BUILD.bazel
echo "done"
\ No newline at end of file
# Primihub python library
\ No newline at end of file
import pandas as pd
import numpy as np
class XGB:
def __init__(self,
base_score = 0.5,
max_depth=3,
n_estimators=10,
learning_rate = 0.1,
reg_lambda = 1,
gamma = 0,
min_child_sample = None,
min_child_weight = 1,
objective = 'linear'):
self.base_score = base_score #最开始时给叶子节点权重所赋的值,默认0.5,迭代次数够多的话,结果对这个初值不敏感
self.max_depth = max_depth #最大数深度
self.n_estimators = n_estimators #树的个数
self.learning_rate = learning_rate #学习率,别和梯度下降里的学习率搞混了,这里是每棵树要乘以的权重系数
self.reg_lambda = reg_lambda #L2正则项的权重系数
self.gamma = gamma #正则项中,叶子节点数T的权重系数
self.min_child_sample = min_child_sample #每个叶子节点的样本数(自己加的)
self.min_child_weight = min_child_weight #每个叶子节点的Hessian矩阵和,下面代码会细讲
self.objective = objective #目标函数,可选linear和logistic
self.tree_structure = {} #用一个字典来存储每一颗树的树结构
def xgb_cart_tree(self, X, w, m_dpth):
'''
递归的方式构造XGB中的Cart树
X:训练数据集
w:每个样本的权重值,递归赋值
m_dpth:树的深度
'''
#边界条件:递归到指定最大深度后,跳出
if m_dpth > self.max_depth:
return
best_var, best_cut = None, None
#这里增益的初值一定要设置为0,相当于对树做剪枝,即如果算None出的增益小于0则不做分裂
max_gain = 0
G_left_best, G_right_best, H_left_best, H_right_best = 0,0,0,0
#遍历每个变量的每个切点,寻找分裂增益gain最大的切点并记录下来
for item in [x for x in X.columns if x not in ['g','h','y']]:
for cut in list(set(X[item])):
#这里如果指定了min_child_sample则限制分裂后叶子节点的样本数都不能小于指定值
if self.min_child_sample:
if (X.loc[X[item]<cut].shape[0]<self.min_child_sample)\
|(X.loc[X[item]>=cut].shape[0]<self.min_child_sample):
continue
G_left = X.loc[X[item]<cut,'g'].sum()
G_right = X.loc[X[item]>=cut,'g'].sum()
H_left = X.loc[X[item]<cut,'h'].sum()
H_right = X.loc[X[item]>=cut,'h'].sum()
#min_child_weight在这里起作用,指的是每个叶子节点上的H,即目标函数二阶导的加和
#当目标函数为linear,即1/2*(y-y_hat)**2时,它的二阶导是1,那min_child_weight就等价于min_child_sample
#当目标函数为logistic,其二阶导为sigmoid(y_hat)*(1-sigmoid(y_hat)),可理解为叶子节点的纯度,更详尽的解释可参看:
#https://stats.stackexchange.com/questions/317073/explanation-of-min-child-weight-in-xgboost-algorithm#
if self.min_child_weight:
if (H_left<self.min_child_weight)|(H_right<self.min_child_weight):
continue
gain = G_left**2/(H_left + self.reg_lambda) + \
G_right**2/(H_right + self.reg_lambda) - \
(G_left + G_right)**2/(H_left + H_right + self.reg_lambda)
gain = gain/2 - self.gamma
if gain > max_gain:
best_var, best_cut = item, cut
max_gain = gain
G_left_best, G_right_best, H_left_best, H_right_best = G_left, G_right, H_left, H_right
#如果遍历完找不到可分列的点,则返回None
if best_var is None:
return None
#给每个叶子节点上的样本分别赋上相应的权重值
id_left = X.loc[X[best_var]<best_cut].index.tolist()
w_left = - G_left_best / (H_left_best + self.reg_lambda)
id_right = X.loc[X[best_var]>=best_cut].index.tolist()
w_right = - G_right_best / (H_right_best + self.reg_lambda)
w[id_left] = w_left
w[id_right] = w_right
#用俄罗斯套娃式的json串把树的结构给存下来
tree_structure = {(best_var,best_cut):{}}
tree_structure[(best_var,best_cut)][('left',w_left)] = self.xgb_cart_tree(X.loc[id_left], w, m_dpth+1)
tree_structure[(best_var,best_cut)][('right',w_right)] = self.xgb_cart_tree(X.loc[id_right], w, m_dpth+1)
return tree_structure
def _grad(self, y_hat, Y):
'''
计算目标函数的一阶导
支持linear和logistic
'''
if self.objective == 'logistic':
y_hat = 1.0/(1.0+np.exp(-y_hat))
return y_hat - Y
elif self.objective == 'linear':
return y_hat - Y
else:
raise KeyError('objective must be linear or logistic!')
def _hess(self,y_hat, Y):
'''
计算目标函数的二阶导
支持linear和logistic
'''
if self.objective == 'logistic':
y_hat = 1.0/(1.0+np.exp(-y_hat))
return y_hat * (1.0 - y_hat)
elif self.objective == 'linear':
return np.array([1]*Y.shape[0])
else:
raise KeyError('objective must be linear or logistic!')
def fit(self, X:pd.DataFrame, Y):
'''
根据训练数据集X和Y训练出树结构和权重
'''
if X.shape[0]!=Y.shape[0]:
raise ValueError('X and Y must have the same length!')
X = X.reset_index(drop='True')
Y = Y.values
#这里根据base_score参数设定权重初始值
y_hat = np.array([self.base_score]*Y.shape[0])
for t in range(self.n_estimators):
print('fitting tree {}...'.format(t+1))
X['g'] = self._grad(y_hat, Y)
X['h'] = self._hess(y_hat, Y)
f_t = pd.Series([0]*Y.shape[0])
self.tree_structure[t+1] = self.xgb_cart_tree(X, f_t, 1)
y_hat = y_hat + self.learning_rate * f_t
print('tree {} fit done!'.format(t+1))
print(self.tree_structure)
def _get_tree_node_w(self, X, tree, w):
'''
以递归的方法,把树结构解构出来,把权重值赋到w上面
'''
if not tree is None:
k = list(tree.keys())[0]
var,cut = k[0],k[1]
X_left = X.loc[X[var]<cut]
id_left = X_left.index.tolist()
X_right = X.loc[X[var]>=cut]
id_right = X_right.index.tolist()
for kk in tree[k].keys():
if kk[0] == 'left':
tree_left = tree[k][kk]
w[id_left] = kk[1]
elif kk[0] == 'right':
tree_right = tree[k][kk]
w[id_right] = kk[1]
self._get_tree_node_w(X_left, tree_left, w)
self._get_tree_node_w(X_right, tree_right, w)
def predict_raw(self, X:pd.DataFrame):
'''
根据训练结果预测
返回原始预测值
'''
X = X.reset_index(drop='True')
Y = pd.Series([self.base_score]*X.shape[0])
for t in range(self.n_estimators):
tree = self.tree_structure[t+1]
y_t = pd.Series([0]*X.shape[0])
self._get_tree_node_w(X, tree, y_t)
Y = Y + self.learning_rate * y_t
return Y
def predict_prob(self, X:pd.DataFrame):
'''
当指定objective为logistic时,输出概率要做一个logistic转换
'''
Y = self.predict_raw(X)
sigmoid = lambda x:1/(1+np.exp(-x))
Y = Y.apply(sigmoid)
return Y
"""
Copyright 2022 Primihub
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import pandas as pd
from primihub.FL.model.xgboost.plain_xgb import XGB
class XGB_GUEST(XGB):
    """Guest party (feature holder without labels) of vertical federated
    XGBoost.

    Computes candidate split statistics (gradient/Hessian aggregates) over
    its local features; the label holder selects the winning split from
    these aggregates.
    """

    def get_GH(self, X):
        """Compute G_left/G_right/H_left/H_right for every candidate
        (feature, cut) pair in X.

        X must carry gradient column 'g' and Hessian column 'h'; every
        other column is treated as a feature.  Returns a DataFrame with one
        row per admissible candidate split.
        """
        GH = pd.DataFrame(columns=['G_left', 'G_right', 'H_left', 'H_right', 'var', 'cut'])
        i = 0
        for item in [x for x in X.columns if x not in ['g', 'h']]:
            # Low-cardinality features: enumerate every distinct value
            # (exact, greedy split search).
            if len(list(set(X[item]))) < 5:
                for cuts in list(set(X[item])):
                    # Skip cuts that leave too few samples on either side.
                    if self.min_child_sample:
                        if (X.loc[X[item] < cuts].shape[0] < self.min_child_sample) \
                                | (X.loc[X[item] >= cuts].shape[0] < self.min_child_sample):
                            continue
                    GH.loc[i, 'G_left'] = X.loc[X[item] < cuts, 'g'].sum()
                    GH.loc[i, 'G_right'] = X.loc[X[item] >= cuts, 'g'].sum()
                    GH.loc[i, 'H_left'] = X.loc[X[item] < cuts, 'h'].sum()
                    GH.loc[i, 'H_right'] = X.loc[X[item] >= cuts, 'h'].sum()
                    GH.loc[i, 'var'] = item
                    GH.loc[i, 'cut'] = cuts
                    i = i + 1
            # Higher-cardinality features: thin the distinct values to
            # roughly four candidate points (approximate split search).
            else:
                old_list = list(set(X[item]))
                new_list = []
                # four candidate points
                j = int(len(old_list) / 4)
                for z in range(0, len(old_list), j):
                    new_list.append(old_list[z])
                for cuts in new_list:
                    if self.min_child_sample:
                        if (X.loc[X[item] < cuts].shape[0] < self.min_child_sample) \
                                | (X.loc[X[item] >= cuts].shape[0] < self.min_child_sample):
                            continue
                    GH.loc[i, 'G_left'] = X.loc[X[item] < cuts, 'g'].sum()
                    GH.loc[i, 'G_right'] = X.loc[X[item] >= cuts, 'g'].sum()
                    GH.loc[i, 'H_left'] = X.loc[X[item] < cuts, 'h'].sum()
                    GH.loc[i, 'H_right'] = X.loc[X[item] >= cuts, 'h'].sum()
                    GH.loc[i, 'var'] = item
                    GH.loc[i, 'cut'] = cuts
                    i = i + 1
        return GH

    def find_split(self, GH):
        """Pick the row of GH with the largest split gain.

        Returns a one-row DataFrame with the winning aggregates, or an
        empty frame when no candidate beats zero gain.
        """
        GH_best = pd.DataFrame(columns=['G_left', 'G_right', 'H_left', 'H_right', 'var', 'cut'])
        max_gain = 0
        for item in GH.index:
            # Standard XGBoost gain.  TYPO FIX: the original had a
            # duplicated unary plus ("+ + self.reg_lambda"); the computed
            # value is unchanged.
            gain = GH.loc[item, 'G_left'] ** 2 / (GH.loc[item, 'H_left'] + self.reg_lambda) + \
                   GH.loc[item, 'G_right'] ** 2 / (GH.loc[item, 'H_right'] + self.reg_lambda) - \
                   (GH.loc[item, 'G_left'] + GH.loc[item, 'G_right']) ** 2 / (
                           GH.loc[item, 'H_left'] + GH.loc[item, 'H_right'] + self.reg_lambda)
            gain = gain / 2 - self.gamma
            if gain > max_gain:
                max_gain = gain
                GH_best.loc[0, 'G_left'] = GH.loc[item, 'G_left']
                GH_best.loc[0, 'G_right'] = GH.loc[item, 'G_right']
                GH_best.loc[0, 'H_left'] = GH.loc[item, 'H_left']
                GH_best.loc[0, 'H_right'] = GH.loc[item, 'H_right']
                GH_best.loc[0, 'var'] = GH.loc[item, 'var']
                GH_best.loc[0, 'cut'] = GH.loc[item, 'cut']
        return GH_best

    def split(self, X, best_var, best_cut, GH_best, w):
        """Assign leaf weights to both sides of the (best_var, best_cut)
        split and return (w, id_right, id_left, w_right, w_left).

        NOTE(review): this reads GH_best['G_left_best'] etc., but
        find_split() above produces columns named 'G_left'/'H_left'/... —
        confirm which caller builds GH_best with the *_best key names.
        """
        id_left = X.loc[X[best_var] < best_cut].index.tolist()
        w_left = -GH_best['G_left_best'] / (GH_best['H_left_best'] + self.reg_lambda)
        id_right = X.loc[X[best_var] >= best_cut].index.tolist()
        w_right = -GH_best['G_right_best'] / (GH_best['H_right_best'] + self.reg_lambda)
        w[id_left] = w_left
        w[id_right] = w_right
        return w, id_right, id_left, w_right, w_left
此差异已折叠。
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[packages]
pyarrow = "*"
pandas = "*"
pytest = "*"
numpy = "*"
# NOTE: "functools" is part of the Python standard library; the PyPI
# package of that name is unrelated and should not be installed.
# functools = "*"
[dev-packages]
[requires]
python_version = "3.6"
from .context import function, reg_dataset
from .dataset import dataset
# Package version as a (major, minor, patch) tuple; rendered into
# __version__ below.
VERSION = (0, 1, 0)
__author__ = 'Primihub.Inc'
__contact__ = "openmpc@primihub.com"
__homepage__ = "https://www.primihub.com"
# Dotted version string derived from VERSION, e.g. "0.1.0".
__version__ = ".".join(map(str, VERSION))
__license__ = "Apache 2.0"
__apidoc__ = "TODO"
# Names re-exported by `from <package> import *`.
__all__ = [
    "function",
    "reg_dataset",
    "dataset",
]
\ No newline at end of file
"""
Copyright 2022 Primihub
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Module-level registries shared by all MockSession instances:
# session_map maps each endpoint to the shared endpoint_map below;
# endpoint_map holds the in-flight "server"/"client" payloads.
session_map = dict()
endpoint_map = dict()
class MockIOService:
    """Stand-in for the real IO service; carries no state or behavior."""
class MockChannel:
    """In-process stand-in for a network channel.

    Messages travel through a shared session dict: ``send`` stores the
    payload under the "server" key and ``recv`` copies it to the
    "client" key before returning it.
    """

    def __init__(self, session):
        # Shared dict acting as the transport buffer.
        self.session = session

    def send(self, data):
        """Publish ``data`` on the shared session under the "server" key."""
        print("send data...")
        print(self.session)
        self.session["server"] = data

    def recv(self, data_size=0):
        """Return the last payload sent.

        ``data_size`` is accepted for API compatibility and ignored.
        The payload deliberately stays in the dict — consumption of the
        message is not simulated here.
        """
        print("recv message...")
        # Move the pending server payload to the client slot, then read
        # it back.
        self.session["client"] = self.session["server"]
        return self.session["client"]

    def close(self):
        """Log closure of the channel.

        NOTE(review): this assigns an *instance* attribute
        ``session_map`` rather than clearing the module-level registry —
        presumably a reset was intended; confirm with callers.
        """
        print("channel close")
        self.session_map = {}
class MockSession:
    """Mock of a communication session keyed by endpoint.

    Every instance registers the shared module-level ``endpoint_map``
    under its endpoint in ``session_map``, so all sessions exchange
    data through the same dict.
    """

    def __init__(self, io_service, address, session_mode, endpoint):
        self.io_server = io_service
        self.address = address
        self.session_mode = session_mode
        self.endpoint = endpoint
        # All endpoints point at the single shared endpoint_map.
        session_map[endpoint] = endpoint_map
        # Seed the slot for this party's role with an empty payload.
        if session_mode == "server":
            endpoint_map.update({"server": None})
        if session_mode == "client":
            endpoint_map.update({"client": None})
        self.session = session_map[endpoint]
        print("session: ", self.session)

    def addChannel(self, *args) -> MockChannel:
        """Create a MockChannel bound to this session's shared dict."""
        print(args)
        return MockChannel(self.session)
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
setuptools==41.0.0
pyarrow
pandas
pytest
numpy
# "functools" is a standard-library module; the PyPI package of the
# same name is unnecessary and breaks on Python 3 — do not install it.
# functools
dill==0.3.5.1
\ No newline at end of file
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
#include "src/primihub/algorithm/plainML.h"
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
// Copyright [2021] <primihub.com>
#include "src/primihub/common/config/config.h"
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册