提交 95034210 编写于 作者: D Dong Li

Merge remote-tracking branch 'apollo_1.5' into apollo

要显示的变更太多。

To preserve performance only 1000 of 1000+ files are displayed.
# Bazel # Bazel
bazel-apollo bazel-apollo
bazel-apollo_dev
bazel-bin bazel-bin
bazel-genfiles bazel-genfiles
bazel-out bazel-out
...@@ -7,6 +8,8 @@ bazel-testlogs ...@@ -7,6 +8,8 @@ bazel-testlogs
/Debug/ /Debug/
*.pyc *.pyc
WORKSPACE WORKSPACE
# temporary files
*.swp
# javascript # javascript
*bundle.js *bundle.js
...@@ -15,6 +18,7 @@ node_modules ...@@ -15,6 +18,7 @@ node_modules
# generated files # generated files
generatedViews generatedViews
generatedScripts generatedScripts
modules/common/data/ssl_keys
# database/log files # database/log files
*.sqlite *.sqlite
dreamview.log dreamview.log
...@@ -26,6 +30,9 @@ npm-debug.log ...@@ -26,6 +30,9 @@ npm-debug.log
.settings .settings
.classpath .classpath
# Vscode files
.vscode
# atom.io files # atom.io files
*.gch *.gch
...@@ -54,6 +61,24 @@ data/log ...@@ -54,6 +61,24 @@ data/log
data/core data/core
data/bag data/bag
data/cov data/cov
data/pcd
# Doxygen # Doxygen
docs/doxygen docs/doxygen
# Esd can lib
third_party/can_card_library/esd_can/include
third_party/can_card_library/esd_can/lib
# Map data files
modules/map/data
# python proto
py_proto
# gnss conf files
modules/drivers/gnss/conf/conf_beijing
modules/drivers/gnss/conf/conf_us
# calibration files
modules/calibration/data/mkz056
...@@ -19,7 +19,7 @@ Many build problems are related to the environment settings. ...@@ -19,7 +19,7 @@ Many build problems are related to the environment settings.
1. Run the script to get your environment: `bash scripts/env.sh >& env.txt` 1. Run the script to get your environment: `bash scripts/env.sh >& env.txt`
2. Provide the content of env.txt in your post. 2. Provide the content of env.txt in your post.
## Which ports need be white list to run Apollo in public cloud instance? ## Which ports must be whitelisted to run Apollo in a public cloud instance?
Use these ports for HMI and Dreamview: Use these ports for HMI and Dreamview:
- 8887: HMI - 8887: HMI
- 8888: Dreamview - 8888: Dreamview
......
...@@ -40,97 +40,6 @@ new_http_archive( ...@@ -40,97 +40,6 @@ new_http_archive(
url = "https://github.com/google/benchmark/archive/v1.1.0.tar.gz", url = "https://github.com/google/benchmark/archive/v1.1.0.tar.gz",
) )
# proto rules (Protobuf and GRPC)
http_archive(
name = "org_pubref_rules_protobuf",
sha256 = "646b39438d8eeba02d9af890dee444c7e4e9d08ae8611bc0e0621257010162db",
strip_prefix = "rules_protobuf-0.7.1",
url = "https://github.com/pubref/rules_protobuf/archive/v0.7.1.tar.gz",
)
load("@org_pubref_rules_protobuf//cpp:rules.bzl", "cpp_proto_repositories")
cpp_proto_repositories(
lang_deps = {
# Grpc repo is required by multiple languages but we put it here.
"com_github_grpc_grpc": {
"rule": "git_repository",
"remote": "https://github.com/grpc/grpc.git",
"init_submodules": True,
"commit": "3808b6efe66b87269d43847bc113e94e2d3d28fb",
#"tag": "v1.0.1",
},
# Hooray! The boringssl team provides a "chromium-stable-with-bazel" branch
# with all BUILD files ready to go.
"boringssl": {
"rule": "http_archive",
"url": "https://github.com/wanglei828/third-party/raw/master/chromium-stable-with-bazel.zip",
},
# libssl is required for c++ grpc where it is expected in
# //external:libssl. This can be either boringssl or openssl.
"libssl": {
"rule": "bind",
"actual": "@boringssl//boringssl-chromium-stable-with-bazel:ssl",
},
# C-library for zlib
"com_github_madler_zlib": {
"rule": "new_git_repository",
"remote": "https://github.com/madler/zlib",
"tag": "v1.2.8",
"build_file": "third_party/com_github_madler_zlib.BUILD",
},
# grpc++ expects //external:zlib
"zlib": {
"rule": "bind",
"actual": "@com_github_madler_zlib//:zlib",
},
# grpc++ expects "//external:protobuf_clib"
"protobuf_clib": {
"rule": "bind",
"actual": "@com_github_google_protobuf//:protobuf",
},
# grpc++ expects //external:nanopb
"nanopb": {
"rule": "bind",
"actual": "@com_github_grpc_grpc//third_party/nanopb",
},
# Bind the executable cc_binary grpc plugin into
# //external:protoc_gen_grpc_cpp. Expects
# //external:protobuf_compiler. TODO: is it really necessary to
# bind it in external?
"protoc_gen_grpc_cpp": {
"rule": "bind",
"actual": "@com_github_grpc_grpc//:grpc_cpp_plugin",
},
# Bind the protobuf proto_lib into //external. Required for
# compiling the protoc_gen_grpc plugin
"protobuf_compiler": {
"rule": "bind",
"actual": "@com_github_google_protobuf//:protoc_lib",
},
# GTest is for our own internal cc tests.
"gtest": {
"rule": "new_git_repository",
"remote": "https://github.com/google/googletest.git",
"commit": "ed9d1e1ff92ce199de5ca2667a667cd0a368482a",
"build_file": "third_party/protobuf_gtest.BUILD",
},
},
)
load("@org_pubref_rules_protobuf//python:rules.bzl", "py_proto_repositories")
py_proto_repositories()
# cpplint from google style guide # cpplint from google style guide
new_git_repository( new_git_repository(
name = "google_styleguide", name = "google_styleguide",
...@@ -171,5 +80,93 @@ new_http_archive( ...@@ -171,5 +80,93 @@ new_http_archive(
name = "ros", name = "ros",
build_file = "third_party/ros.BUILD", build_file = "third_party/ros.BUILD",
strip_prefix = "ros", strip_prefix = "ros",
url = "https://github.com/ApolloAuto/apollo-platform/releases/download/1.0.0/ros-indigo-apollo-1.0.0.MACHINE_ARCH.tar.gz", url = "https://github.com/ApolloAuto/apollo-platform/releases/download/1.5.0/ros-indigo-apollo-1.5.0-MACHINE_ARCH.tar.gz",
)
# OpenCV 2.4.13.2
new_http_archive(
name = "opencv2",
build_file = "third_party/opencv2.BUILD",
strip_prefix = "opencv-2.4.13.2",
url = "https://github.com/opencv/opencv/archive/2.4.13.2.zip",
)
# PCL 1.7
# =======
# This requires libpcl-dev to be installed in your Ubuntu/Debian.
new_local_repository(
name = "pcl",
build_file = "third_party/pcl.BUILD",
path = "/usr/local/include/pcl-1.7",
)
new_local_repository(
name = "glew",
build_file = "third_party/glew.BUILD",
path = "/usr/include",
)
new_local_repository(
name = "opengl",
build_file = "third_party/opengl.BUILD",
path = "/usr/include",
)
new_local_repository(
name = "glfw",
build_file = "third_party/glfw.BUILD",
path = "/usr/include",
)
new_local_repository(
name = "vtk",
build_file = "third_party/vtk.BUILD",
path = "/usr/include/vtk-5.8",
)
# Caffe
new_local_repository(
name = "caffe",
build_file = "third_party/caffe.BUILD",
path = "/usr/include/caffe",
)
# YAML-CPP
new_http_archive(
name = "yaml_cpp",
build_file = "third_party/yaml_cpp.BUILD",
strip_prefix = "yaml-cpp-yaml-cpp-0.5.3",
url = "https://github.com/jbeder/yaml-cpp/archive/yaml-cpp-0.5.3.zip",
)
# qpOASES
new_http_archive(
name = "qp_oases",
build_file = "third_party/qp_oases.BUILD",
sha256 = "ae15eee80455c26d0c26078498893582b67b1d71df18f14f12591023561e5f88",
strip_prefix = "qpOASES-3.2.1",
url = "https://www.coin-or.org/download/source/qpOASES/qpOASES-3.2.1.zip",
)
# Proj.4
new_http_archive(
name = "proj4",
build_file = "third_party/proj4.BUILD",
strip_prefix = "proj.4-4.9.3",
url = "https://github.com/OSGeo/proj.4/archive/4.9.3.zip",
)
# tinyxml2
new_http_archive(
name = "tinyxml2",
build_file = "third_party/tinyxml2.BUILD",
strip_prefix = "tinyxml2-5.0.1",
url = "https://github.com/leethomason/tinyxml2/archive/5.0.1.zip",
)
#protobuf 3.3
http_archive(
name = "com_google_protobuf",
strip_prefix = "protobuf-3.3.0",
url = "https://github.com/google/protobuf/releases/download/v3.3.0/protobuf-cpp-3.3.0.tar.gz",
) )
...@@ -82,51 +82,57 @@ function check_esd_files() { ...@@ -82,51 +82,57 @@ function check_esd_files() {
} }
function generate_build_targets() { function generate_build_targets() {
BUILD_TARGETS=$(bazel query //... | grep -v "_test$" | grep -v "third_party" \ BUILD_TARGETS=$(bazel query //...)
| grep -v "_cpplint$" | grep -v "release" | grep -v "kernel")
if [ $? -ne 0 ]; then if [ $? -ne 0 ]; then
fail 'Build failed!' fail 'Build failed!'
fi fi
if ! $USE_ESD_CAN; then if ! $USE_ESD_CAN; then
BUILD_TARGETS=$(echo $BUILD_TARGETS |tr ' ' '\n' | grep -v "hwmonitor" | grep -v "esd") BUILD_TARGETS=$(echo $BUILD_TARGETS |tr ' ' '\n' | grep -v "hwmonitor" | grep -v "esd")
fi fi
} }
function generate_test_targets() {
TEST_TARGETS=$(bazel query //... | grep "_test$" | grep -v "third_party" | grep -v "kernel")
if [ $? -ne 0 ]; then
fail 'Test failed!'
fi
if ! $USE_ESD_CAN; then
TEST_TARGETS=$(echo $TEST_TARGETS| tr ' ' '\n' | grep -v "hwmonitor" | grep -v "esd")
fi
}
#================================================= #=================================================
# Build functions # Build functions
#================================================= #=================================================
function apollo_build() { function build() {
START_TIME=$(get_now) START_TIME=$(get_now)
echo "Start building, please wait ..." echo "Start building, please wait ..."
generate_build_targets generate_build_targets
echo "Building on $MACHINE_ARCH, with targets:" echo "Building on $MACHINE_ARCH..."
echo "$BUILD_TARGETS" echo "$BUILD_TARGETS" | xargs bazel build $DEFINES -c $1
echo "$BUILD_TARGETS" | xargs bazel --batch --batch_cpu_scheduling build --jobs=10 --define ARCH="$MACHINE_ARCH" --define CAN_CARD=${CAN_CARD} --cxxopt=-DUSE_ESD_CAN=${USE_ESD_CAN} -c dbg
if [ $? -eq 0 ]; then if [ $? -eq 0 ]; then
success 'Build passed!' success 'Build passed!'
else else
fail 'Build failed!' fail 'Build failed!'
fi fi
find bazel-genfiles/* -type d -exec touch "{}/__init__.py" \;
# Build python proto
build_py_proto
}
function apollo_build_dbg() {
build "dbg"
}
function apollo_build_opt() {
build "opt"
}
function build_py_proto() {
if [ -d "./py_proto" ];then
rm -rf py_proto
fi
mkdir py_proto
PROTOC='./bazel-out/host/bin/external/com_google_protobuf/protoc'
find modules/ -name "*.proto" | grep -v gnss | xargs ${PROTOC} --python_out=py_proto
find py_proto/* -type d -exec touch "{}/__init__.py" \;
} }
function check() { function check() {
local check_start_time=$(get_now) local check_start_time=$(get_now)
apollo_build && run_test && run_lint apollo_build_dbg && run_test && run_lint
START_TIME=$check_start_time START_TIME=$check_start_time
if [ $? -eq 0 ]; then if [ $? -eq 0 ]; then
...@@ -157,16 +163,19 @@ function release() { ...@@ -157,16 +163,19 @@ function release() {
# modules # modules
MODULES_DIR=$ROOT_DIR/modules MODULES_DIR=$ROOT_DIR/modules
mkdir -p $MODULES_DIR mkdir -p $MODULES_DIR
for m in control canbus localization decision perception prediction planning for m in control canbus localization decision perception \
prediction planning routing calibration
do do
TARGET_DIR=$MODULES_DIR/$m TARGET_DIR=$MODULES_DIR/$m
mkdir -p $TARGET_DIR mkdir -p $TARGET_DIR
cp bazel-bin/modules/$m/$m $TARGET_DIR if [ -e bazel-bin/modules/$m/$m ]; then
cp bazel-bin/modules/$m/$m $TARGET_DIR
fi
if [ -d modules/$m/conf ];then if [ -d modules/$m/conf ];then
cp -r modules/$m/conf $TARGET_DIR cp -r modules/$m/conf $TARGET_DIR
fi fi
if [ -d modules/$m/data ];then if [ -d modules/$m/data ];then
cp -r modules/$m/conf $TARGET_DIR cp -r modules/$m/data $TARGET_DIR
fi fi
done done
...@@ -180,6 +189,7 @@ function release() { ...@@ -180,6 +189,7 @@ function release() {
# ros # ros
cp -Lr bazel-apollo/external/ros $ROOT_DIR/ cp -Lr bazel-apollo/external/ros $ROOT_DIR/
rm -f ${ROOT_DIR}/ros/*.tar.gz
# scripts # scripts
cp -r scripts $ROOT_DIR cp -r scripts $ROOT_DIR
...@@ -188,17 +198,32 @@ function release() { ...@@ -188,17 +198,32 @@ function release() {
cp -Lr bazel-bin/modules/dreamview/dreamview.runfiles/apollo/modules/dreamview $MODULES_DIR cp -Lr bazel-bin/modules/dreamview/dreamview.runfiles/apollo/modules/dreamview $MODULES_DIR
cp -r modules/dreamview/conf $MODULES_DIR/dreamview cp -r modules/dreamview/conf $MODULES_DIR/dreamview
# map
mkdir $MODULES_DIR/map
cp -r modules/map/data $MODULES_DIR/map
# common data # common data
mkdir $MODULES_DIR/common mkdir $MODULES_DIR/common
cp -r modules/common/data $MODULES_DIR/common cp -r modules/common/data $MODULES_DIR/common
# hmi # hmi
mkdir -p $MODULES_DIR/hmi/ros_node $MODULES_DIR/hmi/utils mkdir -p $MODULES_DIR/hmi/ros_bridge $MODULES_DIR/hmi/utils
cp bazel-bin/modules/hmi/ros_node/ros_node_service $MODULES_DIR/hmi/ros_node/ cp bazel-bin/modules/hmi/ros_bridge/ros_bridge $MODULES_DIR/hmi/ros_bridge/
cp -r modules/hmi/conf $MODULES_DIR/hmi cp -r modules/hmi/conf $MODULES_DIR/hmi
cp -r modules/hmi/web $MODULES_DIR/hmi cp -r modules/hmi/web $MODULES_DIR/hmi
cp -r modules/hmi/utils/*.py $MODULES_DIR/hmi/utils cp -r modules/hmi/utils/*.py $MODULES_DIR/hmi/utils
# perception
cp -r modules/perception/model/ $MODULES_DIR/perception
# gnss config
mkdir -p $MODULES_DIR/drivers/gnss
cp -r modules/drivers/gnss/conf/ $MODULES_DIR/drivers/gnss
# velodyne launch
mkdir -p $MODULES_DIR/drivers/velodyne/velodyne
cp -r modules/drivers/velodyne/velodyne/launch $MODULES_DIR/drivers/velodyne/velodyne
# lib # lib
LIB_DIR=$ROOT_DIR/lib LIB_DIR=$ROOT_DIR/lib
mkdir $LIB_DIR mkdir $LIB_DIR
...@@ -215,7 +240,8 @@ function release() { ...@@ -215,7 +240,8 @@ function release() {
mkdir -p $MODULES_DIR/monitor/hwmonitor/hw/tools/ mkdir -p $MODULES_DIR/monitor/hwmonitor/hw/tools/
cp bazel-bin/modules/monitor/hwmonitor/hw/tools/esdcan_test_app $MODULES_DIR/monitor/hwmonitor/hw/tools/ cp bazel-bin/modules/monitor/hwmonitor/hw/tools/esdcan_test_app $MODULES_DIR/monitor/hwmonitor/hw/tools/
fi fi
cp -r bazel-genfiles/* $LIB_DIR cp -r bazel-genfiles/external $LIB_DIR
cp -r py_proto/modules $LIB_DIR
# doc # doc
cp -r docs $ROOT_DIR cp -r docs $ROOT_DIR
...@@ -229,14 +255,13 @@ function release() { ...@@ -229,14 +255,13 @@ function release() {
} }
function gen_coverage() { function gen_coverage() {
START_TIME=$(get_now)
bazel clean bazel clean
generate_test_targets generate_build_targets
echo "$TEST_TARGETS" | xargs bazel test --define ARCH="$(uname -m)" --define CAN_CARD=${CAN_CARD} --cxxopt=-DUSE_ESD_CAN=${USE_ESD_CAN} -c dbg --config=coverage echo "$BUILD_TARGETS" | grep -v "cnn_segmentation_test" | xargs bazel test $DEFINES -c dbg --config=coverage
if [ $? -ne 0 ]; then if [ $? -ne 0 ]; then
fail 'run test failed!' fail 'run test failed!'
fi fi
COV_DIR=data/cov COV_DIR=data/cov
rm -rf $COV_DIR rm -rf $COV_DIR
files=$(find bazel-out/local-dbg/bin/modules/ -iname "*.gcda" -o -iname "*.gcno" | grep -v external) files=$(find bazel-out/local-dbg/bin/modules/ -iname "*.gcda" -o -iname "*.gcno" | grep -v external)
...@@ -245,6 +270,14 @@ function gen_coverage() { ...@@ -245,6 +270,14 @@ function gen_coverage() {
mkdir -p "$(dirname "$target")" mkdir -p "$(dirname "$target")"
cp "$f" "$target" cp "$f" "$target"
done done
files=$(find bazel-out/local-opt/bin/modules/ -iname "*.gcda" -o -iname "*.gcno" | grep -v external)
for f in $files; do
target="$COV_DIR/objs/modules/${f##*modules}"
mkdir -p "$(dirname "$target")"
cp "$f" "$target"
done
lcov --capture --directory "$COV_DIR/objs" --output-file "$COV_DIR/conv.info" lcov --capture --directory "$COV_DIR/objs" --output-file "$COV_DIR/conv.info"
if [ $? -ne 0 ]; then if [ $? -ne 0 ]; then
fail 'lcov failed!' fail 'lcov failed!'
...@@ -257,15 +290,19 @@ function gen_coverage() { ...@@ -257,15 +290,19 @@ function gen_coverage() {
"tools/*" \ "tools/*" \
-o $COV_DIR/stripped_conv.info -o $COV_DIR/stripped_conv.info
genhtml $COV_DIR/stripped_conv.info --output-directory $COV_DIR/report genhtml $COV_DIR/stripped_conv.info --output-directory $COV_DIR/report
echo "Generated coverage report in $COV_DIR/report/index.html"
success 'Generated coverage report in $COV_DIR/report/index.html'
} }
function run_test() { function run_test() {
START_TIME=$(get_now) START_TIME=$(get_now)
generate_test_targets generate_build_targets
echo "$TEST_TARGETS" | xargs bazel test --define "ARCH=$MACHINE_ARCH" --define CAN_CARD=${CAN_CARD} --config=unit_test --cxxopt=-DUSE_ESD_CAN=${USE_ESD_CAN} -c dbg --test_verbose_timeout_warnings if [ "$USE_GPU" == "1" ]; then
echo -e "${RED}Need GPU to run the tests.${NO_COLOR}"
echo "$BUILD_TARGETS" | xargs bazel test $DEFINES --config=unit_test -c dbg --test_verbose_timeout_warnings
else
echo "$BUILD_TARGETS" | grep -v "cnn_segmentation_test" | xargs bazel test $DEFINES --config=unit_test -c dbg --test_verbose_timeout_warnings
fi
if [ $? -eq 0 ]; then if [ $? -eq 0 ]; then
success 'Test passed!' success 'Test passed!'
return 0 return 0
...@@ -276,11 +313,12 @@ function run_test() { ...@@ -276,11 +313,12 @@ function run_test() {
} }
function run_cpp_lint() { function run_cpp_lint() {
bazel test --config=cpplint //... generate_build_targets
echo "$BUILD_TARGETS" | xargs bazel test --config=cpplint -c dbg
} }
function run_bash_lint() { function run_bash_lint() {
FILES=$(find "${APOLLO_ROOT_DIR}" -type f -name "*.sh" | grep -v ros | grep -v kernel) FILES=$(find "${APOLLO_ROOT_DIR}" -type f -name "*.sh" | grep -v ros)
echo "${FILES}" | xargs shellcheck echo "${FILES}" | xargs shellcheck
} }
...@@ -323,23 +361,9 @@ function buildify() { ...@@ -323,23 +361,9 @@ function buildify() {
rm ~/.buildifier rm ~/.buildifier
} }
function print_usage() { function build_fe() {
echo 'Usage: cd modules/dreamview/frontend
./apollo.sh [OPTION]' yarn build
echo 'Options:
build : run build only
buildify: fix style of BUILD files
check: run build/lint/test, please make sure it passes before checking in new code
clean: runs Bazel clean
config: run configurator tool
coverage: generate test coverage report
doc: generate doxygen document
lint: run code style check
print_usage: prints this menu
release: to build release version
test: run all the unit tests
version: current commit and date
'
} }
function gen_doc() { function gen_doc() {
...@@ -375,13 +399,13 @@ function build_gnss() { ...@@ -375,13 +399,13 @@ function build_gnss() {
protoc modules/drivers/gnss/proto/gnss.proto --cpp_out=./ protoc modules/drivers/gnss/proto/gnss.proto --cpp_out=./
protoc modules/drivers/gnss/proto/imu.proto --cpp_out=./ protoc modules/drivers/gnss/proto/imu.proto --cpp_out=./
protoc modules/drivers/gnss/proto/ins.proto --cpp_out=./ protoc modules/drivers/gnss/proto/ins.proto --cpp_out=./ --python_out=./
protoc modules/drivers/gnss/proto/config.proto --cpp_out=./ protoc modules/drivers/gnss/proto/config.proto --cpp_out=./
protoc modules/drivers/gnss/proto/gnss_status.proto --cpp_out=./ protoc modules/drivers/gnss/proto/gnss_status.proto --cpp_out=./ --python_out=./
protoc modules/drivers/gnss/proto/gpgga.proto --cpp_out=./ protoc modules/drivers/gnss/proto/gpgga.proto --cpp_out=./
cd modules cd modules
catkin_make_isolated --install --source drivers \ catkin_make_isolated --install --source drivers/gnss \
--install-space "${ROS_PATH}" -DCMAKE_BUILD_TYPE=Release \ --install-space "${ROS_PATH}" -DCMAKE_BUILD_TYPE=Release \
--cmake-args --no-warn-unused-cli --cmake-args --no-warn-unused-cli
find "${ROS_PATH}" -name "*.pyc" -print0 | xargs -0 rm -rf find "${ROS_PATH}" -name "*.pyc" -print0 | xargs -0 rm -rf
...@@ -391,6 +415,7 @@ function build_gnss() { ...@@ -391,6 +415,7 @@ function build_gnss() {
rm -rf modules/common/proto/*.pb.h rm -rf modules/common/proto/*.pb.h
rm -rf modules/drivers/gnss/proto/*.pb.cc rm -rf modules/drivers/gnss/proto/*.pb.cc
rm -rf modules/drivers/gnss/proto/*.pb.h rm -rf modules/drivers/gnss/proto/*.pb.h
rm -rf modules/drivers/gnss/proto/*_pb2.py
rm -rf modules/localization/proto/*.pb.cc rm -rf modules/localization/proto/*.pb.cc
rm -rf modules/localization/proto/*.pb.h rm -rf modules/localization/proto/*.pb.h
...@@ -399,22 +424,93 @@ function build_gnss() { ...@@ -399,22 +424,93 @@ function build_gnss() {
rm -rf modules/devel_isolated/ rm -rf modules/devel_isolated/
} }
function build_velodyne() {
CURRENT_PATH=$(pwd)
if [ -d "${CURRENT_PATH}/bazel-apollo/external/ros" ]; then
ROS_PATH="${CURRENT_PATH}/bazel-apollo/external/ros"
else
warning "ROS not found. Run apolllo.sh build first."
exit 1
fi
source "${ROS_PATH}/setup.bash"
cd modules
catkin_make_isolated --install --source drivers/velodyne \
--install-space "${ROS_PATH}" -DCMAKE_BUILD_TYPE=Release \
--cmake-args --no-warn-unused-cli
find "${ROS_PATH}" -name "*.pyc" -print0 | xargs -0 rm -rf
cd -
rm -rf modules/.catkin_workspace
rm -rf modules/build_isolated/
rm -rf modules/devel_isolated/
}
function config() { function config() {
${APOLLO_ROOT_DIR}/scripts/configurator.sh ${APOLLO_ROOT_DIR}/scripts/configurator.sh
} }
function print_usage() {
RED='\033[0;31m'
BLUE='\033[0;34m'
BOLD='\033[1m'
NONE='\033[0m'
echo -e "\n${RED}Usage${NONE}:
.${BOLD}/apollo.sh${NONE} [OPTION]"
echo -e "\n${RED}Options${NONE}:
${BLUE}build${NONE}: run build only
${BLUE}build_opt${NONE}: build optimized binary for the code
${BLUE}build_gpu${NONE}: run build only with Caffe GPU mode support
${BLUE}build_opt_gpu${NONE}: build optimized binary with Caffe GPU mode support
${BLUE}build_fe${NONE}: compile frontend javascript code, this requires all the node_modules to be installed already
${BLUE}buildify${NONE}: fix style of BUILD files
${BLUE}check${NONE}: run build/lint/test, please make sure it passes before checking in new code
${BLUE}clean${NONE}: run Bazel clean
${BLUE}config${NONE}: run configurator tool
${BLUE}coverage${NONE}: generate test coverage report
${BLUE}doc${NONE}: generate doxygen document
${BLUE}lint${NONE}: run code style check
${BLUE}usage${NONE}: print this menu
${BLUE}release${NONE}: build release version
${BLUE}test${NONE}: run all unit tests
${BLUE}version${NONE}: display current commit and date
"
}
function main() { function main() {
source_apollo_base source_apollo_base
apollo_check_system_config apollo_check_system_config
check_machine_arch check_machine_arch
check_esd_files check_esd_files
DEFINES="--define ARCH=${MACHINE_ARCH} --define CAN_CARD=${CAN_CARD} --cxxopt=-DUSE_ESD_CAN=${USE_ESD_CAN}"
case $1 in case $1 in
check) check)
DEFINES="${DEFINES} --cxxopt=-DCPU_ONLY"
check check
;; ;;
build) build)
apollo_build DEFINES="${DEFINES} --cxxopt=-DCPU_ONLY"
apollo_build_dbg
;;
build_opt)
DEFINES="${DEFINES} --cxxopt=-DCPU_ONLY"
apollo_build_opt
;;
build_gpu)
DEFINES="${DEFINES} --cxxopt=-DUSE_CAFFE_GPU"
apollo_build_dbg
;;
build_opt_gpu)
DEFINES="${DEFINES} --cxxopt=-DUSE_CAFFE_GPU"
apollo_build_opt
;;
build_fe)
build_fe
;; ;;
buildify) buildify)
buildify buildify
...@@ -422,6 +518,12 @@ function main() { ...@@ -422,6 +518,12 @@ function main() {
buildgnss) buildgnss)
build_gnss build_gnss
;; ;;
build_py)
build_py_proto
;;
buildvelodyne)
build_velodyne
;;
config) config)
config config
;; ;;
...@@ -432,6 +534,12 @@ function main() { ...@@ -432,6 +534,12 @@ function main() {
run_lint run_lint
;; ;;
test) test)
DEFINES="${DEFINES} --cxxopt=-DCPU_ONLY"
run_test
;;
test_gpu)
DEFINES="${DEFINES} --cxxopt=-DUSE_CAFFE_GPU"
USE_GPU="1"
run_test run_test
;; ;;
release) release)
...@@ -449,6 +557,9 @@ function main() { ...@@ -449,6 +557,9 @@ function main() {
version) version)
version version
;; ;;
usage)
print_usage
;;
*) *)
print_usage print_usage
;; ;;
......
...@@ -4,6 +4,8 @@ ...@@ -4,6 +4,8 @@
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "${DIR}" cd "${DIR}"
# the machine type, currently support x86_64, aarch64
MACHINE_ARCH=$(uname -m)
source ${DIR}/scripts/apollo_base.sh source ${DIR}/scripts/apollo_base.sh
TIME=$(date +%Y%m%d_%H%M) TIME=$(date +%Y%m%d_%H%M)
...@@ -12,23 +14,34 @@ if [ -z "${DOCKER_REPO}" ]; then ...@@ -12,23 +14,34 @@ if [ -z "${DOCKER_REPO}" ]; then
fi fi
function print_usage() { function print_usage() {
echo 'Usage: RED='\033[0;31m'
./apollo_docker.sh [OPTION]' BLUE='\033[0;34m'
echo 'Options: BOLD='\033[1m'
build : run build only NONE='\033[0m'
buildify: fix style of BUILD files
check: run build/lint/test, please make sure it passes before checking in new code echo -e "\n${RED}Usage${NONE}:
clean: runs Bazel clean .${BOLD}/apollo_docker.sh${NONE} [OPTION]"
coverage: generate test coverage report
doc: generate doxygen document echo -e "\n${RED}Options${NONE}:
push: pushes the images to Docker hub ${BLUE}build${NONE}: run build only
gen: release a docker release image ${BLUE}build_opt${NONE}: build optimized binary for the code
lint: run code style check ${BLUE}build_gpu${NONE}: run build only with Caffe GPU mode support
release: to build release version ${BLUE}build_opt_gpu${NONE}: build optimized binary with Caffe GPU mode support
test: run all the unit tests ${BLUE}build_fe${NONE}: compile frontend javascript code, this requires all the node_modules to be installed already
version: current commit and date ${BLUE}buildify${NONE}: fix style of BUILD files
print_usage: prints this menu ${BLUE}check${NONE}: run build/lint/test, please make sure it passes before checking in new code
' ${BLUE}clean${NONE}: run Bazel clean
${BLUE}config${NONE}: run configurator tool
${BLUE}coverage${NONE}: generate test coverage report
${BLUE}doc${NONE}: generate doxygen document
${BLUE}lint${NONE}: run code style check
${BLUE}usage${NONE}: print this menu
${BLUE}release${NONE}: build release version
${BLUE}test${NONE}: run all unit tests
${BLUE}version${NONE}: display current commit and date
${BLUE}push${NONE}: pushes the images to Docker hub
${BLUE}gen${NONE}: release a docker release image
"
} }
function start_build_docker() { function start_build_docker() {
...@@ -39,10 +52,10 @@ function start_build_docker() { ...@@ -39,10 +52,10 @@ function start_build_docker() {
} }
function gen_docker() { function gen_docker() {
IMG="apolloauto/apollo:run-env-20170712_1738" IMG="apolloauto/apollo:run-${MACHINE_ARCH}-20170917_1439"
RELEASE_DIR=${HOME}/.cache/release RELEASE_DIR=${HOME}/.cache/release
RELEASE_NAME="${DOCKER_REPO}:release-${TIME}" RELEASE_NAME="${DOCKER_REPO}:release-${MACHINE_ARCH}-${TIME}"
DEFAULT_NAME="${DOCKER_REPO}:release-latest" DEFAULT_NAME="${DOCKER_REPO}:release-${MACHINE_ARCH}-latest"
docker pull $IMG docker pull $IMG
docker ps -a --format "{{.Names}}" | grep 'apollo_release' 1>/dev/null docker ps -a --format "{{.Names}}" | grep 'apollo_release' 1>/dev/null
...@@ -66,8 +79,8 @@ function gen_docker() { ...@@ -66,8 +79,8 @@ function gen_docker() {
} }
function push() { function push() {
local DEFAULT_NAME="${DOCKER_REPO}:release-latest" local DEFAULT_NAME="${DOCKER_REPO}:release-${MACHINE_ARCH}-latest"
local RELEASE_NAME="${DOCKER_REPO}:release-${TIME}" local RELEASE_NAME="${DOCKER_REPO}:release-${MACHINE_ARCH}-${TIME}"
docker tag "$DEFAULT_NAME" "$RELEASE_NAME" docker tag "$DEFAULT_NAME" "$RELEASE_NAME"
docker push "$DEFAULT_NAME" docker push "$DEFAULT_NAME"
docker push "$RELEASE_NAME" docker push "$RELEASE_NAME"
......
...@@ -3,7 +3,6 @@ FROM aarch64/ubuntu:16.04 ...@@ -3,7 +3,6 @@ FROM aarch64/ubuntu:16.04
ENV DEBIAN_FRONTEND=noninteractive ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update RUN apt-get update
## the following commands are OK, skip running again
RUN apt-get install -y build-essential RUN apt-get install -y build-essential
RUN apt-get install -y apt-utils RUN apt-get install -y apt-utils
RUN apt-get install -y curl RUN apt-get install -y curl
...@@ -39,23 +38,23 @@ RUN apt-get install -y oracle-java8-installer ...@@ -39,23 +38,23 @@ RUN apt-get install -y oracle-java8-installer
RUN apt-get clean autoclean && apt-get autoremove -y RUN apt-get clean autoclean && apt-get autoremove -y
RUN rm -fr /var/lib/apt/lists/* RUN rm -fr /var/lib/apt/lists/*
## copy bazel to /usr/local/bin # Install protobuf 3.3.0
RUN mkdir -p /usr/local/bin
WORKDIR /usr/local/bin/
RUN wget https://github.com/startcode/bazel-arm64/releases/download/0.4.4/bazel-aarch64 && ln -rs bazel-aarch64 bazel
WORKDIR /tmp WORKDIR /tmp
## install protobuf 3.1.0 RUN wget https://github.com/google/protobuf/releases/download/v3.3.0/protobuf-cpp-3.3.0.tar.gz
RUN wget https://github.com/google/protobuf/releases/download/v3.1.0/protobuf-cpp-3.1.0.tar.gz RUN tar xzf protobuf-cpp-3.3.0.tar.gz
RUN tar xzf protobuf-cpp-3.1.0.tar.gz WORKDIR /tmp/protobuf-3.3.0
WORKDIR /tmp/protobuf-3.1.0
RUN ./configure --prefix=/usr RUN ./configure --prefix=/usr
RUN make RUN make
RUN make install RUN make install
RUN chmod 755 /usr/bin/protoc
# set up node v8.0.0 # Set up node v8.0.0
RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - WORKDIR /tmp
RUN apt-get install -y nodejs RUN wget https://github.com/tj/n/archive/v2.1.0.tar.gz
RUN tar xzf v2.1.0.tar.gz
WORKDIR /tmp/n-2.1.0
RUN make install
RUN n 8.0.0
## Install required python packages. ## Install required python packages.
WORKDIR /tmp WORKDIR /tmp
...@@ -67,12 +66,9 @@ RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - ...@@ -67,12 +66,9 @@ RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
RUN apt-get update && apt-get install -y yarn RUN apt-get update && apt-get install -y yarn
# Remove all temporary files.
RUN rm -fr /tmp/*
ENV ROSCONSOLE_FORMAT '${file}:${line} ${function}() [${severity}] [${time}]: ${message}' ENV ROSCONSOLE_FORMAT '${file}:${line} ${function}() [${severity}] [${time}]: ${message}'
# install dependency for ros build # Install dependency for ros build
RUN sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main" > /etc/apt/sources.list.d/ros-latest.list' RUN sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main" > /etc/apt/sources.list.d/ros-latest.list'
RUN apt-key adv --keyserver hkp://ha.pool.sks-keyservers.net:80 --recv-key 421C365BD9FF1F717815A3895523BAEEB01FA116 RUN apt-key adv --keyserver hkp://ha.pool.sks-keyservers.net:80 --recv-key 421C365BD9FF1F717815A3895523BAEEB01FA116
...@@ -92,3 +88,38 @@ RUN apt-get install -y zlib1g-dev ...@@ -92,3 +88,38 @@ RUN apt-get install -y zlib1g-dev
## https://stackoverflow.com/questions/25193161/chfn-pam-system-error-intermittently-in-docker-hub-builds ## https://stackoverflow.com/questions/25193161/chfn-pam-system-error-intermittently-in-docker-hub-builds
RUN ln -s -f /bin/true /usr/bin/chfn RUN ln -s -f /bin/true /usr/bin/chfn
# Install pcl and opencv, prerequisites for Caffe (CPU_ONLY mode)
RUN apt-get update
RUN apt-get install -y libatlas-base-dev
RUN apt-get install -y libflann-dev
RUN apt-get install -y libhdf5-serial-dev
RUN apt-get install -y libicu-dev
RUN apt-get install -y libleveldb-dev
RUN apt-get install -y liblmdb-dev
RUN apt-get install -y libopencv-dev
RUN apt-get install -y libopenni-dev
RUN apt-get install -y libqhull-dev
RUN apt-get install -y libsnappy-dev
RUN apt-get install -y libvtk5-dev
RUN apt-get install -y libvtk5-qt4-dev
RUN apt-get install -y mpi-default-dev
ENV CAFFE_ROOT=/apollo/bazel-genfiles/external/caffe
RUN echo "$CAFFE_ROOT/lib" >> /etc/ld.so.conf.d/caffe.conf && ldconfig
# Install Opengl
RUN echo "deb http://ppa.launchpad.net/keithw/glfw3/ubuntu trusty main" | tee -a /etc/apt/sources.list.d/fillwave_ext.list
RUN echo "deb-src http://ppa.launchpad.net/keithw/glfw3/ubuntu trusty main" | tee -a /etc/apt/sources.list.d/fillwave_ext.list
RUN apt-get update && apt-get install -y --force-yes libglfw3 libglfw3-dev freeglut3-dev
# Install GLEW
WORKDIR /tmp
RUN wget https://github.com/nigels-com/glew/releases/download/glew-2.0.0/glew-2.0.0.zip
RUN unzip glew-2.0.0.zip
WORKDIR /tmp/glew-2.0.0
RUN make && make install
# Remove all temporary files.
WORKDIR /
RUN rm -fr /tmp/*
...@@ -6,6 +6,7 @@ RUN apt-get update && apt-get install -y \ ...@@ -6,6 +6,7 @@ RUN apt-get update && apt-get install -y \
apt-transport-https \ apt-transport-https \
bc \ bc \
build-essential \ build-essential \
cmake \
cppcheck \ cppcheck \
curl \ curl \
debconf-utils \ debconf-utils \
...@@ -17,13 +18,14 @@ RUN apt-get update && apt-get install -y \ ...@@ -17,13 +18,14 @@ RUN apt-get update && apt-get install -y \
libcurl4-openssl-dev \ libcurl4-openssl-dev \
libfreetype6-dev \ libfreetype6-dev \
lsof \ lsof \
python-pip \
python-matplotlib \ python-matplotlib \
python-pip \
python-scipy \ python-scipy \
python-software-properties \ python-software-properties \
realpath \ realpath \
software-properties-common \ software-properties-common \
unzip \ unzip \
vim \
wget \ wget \
zip zip
...@@ -39,29 +41,23 @@ RUN rm -fr /var/lib/apt/lists/* ...@@ -39,29 +41,23 @@ RUN rm -fr /var/lib/apt/lists/*
COPY ./modules/tools/py27_requirements.txt /tmp/ COPY ./modules/tools/py27_requirements.txt /tmp/
WORKDIR /tmp WORKDIR /tmp
# install protobuf 3.1.0 # install protobuf 3.3.0
RUN wget https://github.com/google/protobuf/releases/download/v3.1.0/protobuf-cpp-3.1.0.tar.gz RUN wget https://github.com/google/protobuf/releases/download/v3.3.0/protobuf-cpp-3.3.0.tar.gz
RUN tar xzf protobuf-cpp-3.1.0.tar.gz RUN tar xzf protobuf-cpp-3.3.0.tar.gz
WORKDIR /tmp/protobuf-3.1.0 WORKDIR /tmp/protobuf-3.3.0
RUN ./configure --prefix=/usr RUN ./configure --prefix=/usr && make && make install
RUN make
RUN make install
WORKDIR /tmp
RUN wget https://github.com/google/protobuf/releases/download/v3.1.0/protoc-3.1.0-linux-x86_64.zip
RUN unzip protoc-3.1.0-linux-x86_64.zip -d protoc3
RUN mv protoc3/bin/protoc /usr/bin/
RUN chmod 755 /usr/bin/protoc RUN chmod 755 /usr/bin/protoc
# set up node v8.0.0 # Set up node v8.0.0
WORKDIR /tmp
RUN wget https://github.com/tj/n/archive/v2.1.0.tar.gz RUN wget https://github.com/tj/n/archive/v2.1.0.tar.gz
RUN tar xzf v2.1.0.tar.gz RUN tar xzf v2.1.0.tar.gz
WORKDIR /tmp/n-2.1.0 WORKDIR /tmp/n-2.1.0
RUN make install RUN make install
RUN n 8.0.0 RUN n 8.0.0
WORKDIR /tmp
# Install required python packages. # Install required python packages.
WORKDIR /tmp
RUN pip install -r py27_requirements.txt RUN pip install -r py27_requirements.txt
# Install yarn # Install yarn
...@@ -69,25 +65,22 @@ RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add - ...@@ -69,25 +65,22 @@ RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
RUN apt-get update && apt-get install -y yarn RUN apt-get update && apt-get install -y yarn
# Remove all temporary files.
RUN rm -fr /tmp/*
ENV ROSCONSOLE_FORMAT '${file}:${line} ${function}() [${severity}] [${time}]: ${message}' ENV ROSCONSOLE_FORMAT '${file}:${line} ${function}() [${severity}] [${time}]: ${message}'
# install dependency for ros build # Install dependency for ros build
RUN sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main" > /etc/apt/sources.list.d/ros-latest.list' RUN sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main" > /etc/apt/sources.list.d/ros-latest.list'
RUN apt-key adv --keyserver hkp://ha.pool.sks-keyservers.net:80 --recv-key 421C365BD9FF1F717815A3895523BAEEB01FA116 RUN apt-key adv --keyserver hkp://ha.pool.sks-keyservers.net:80 --recv-key 421C365BD9FF1F717815A3895523BAEEB01FA116
RUN apt-get update && apt-get install -y \ RUN apt-get update && apt-get install -y \
ros-indigo-catkin \
libbz2-dev \ libbz2-dev \
libconsole-bridge-dev \ libconsole-bridge-dev \
liblog4cxx10-dev \
libeigen3-dev \ libeigen3-dev \
liblog4cxx10-dev \
liblz4-dev \ liblz4-dev \
libpoco-dev \ libpoco-dev \
libproj-dev \ libproj-dev \
libtinyxml-dev \ libtinyxml-dev \
libyaml-cpp-dev \ libyaml-cpp-dev \
ros-indigo-catkin \
sip-dev \ sip-dev \
uuid-dev \ uuid-dev \
zlib1g-dev zlib1g-dev
...@@ -97,3 +90,55 @@ RUN apt-get update && apt-get install shellcheck ...@@ -97,3 +90,55 @@ RUN apt-get update && apt-get install shellcheck
# https://stackoverflow.com/questions/25193161/chfn-pam-system-error-intermittently-in-docker-hub-builds # https://stackoverflow.com/questions/25193161/chfn-pam-system-error-intermittently-in-docker-hub-builds
RUN ln -s -f /bin/true /usr/bin/chfn RUN ln -s -f /bin/true /usr/bin/chfn
# Install pcl and opencv, prerequisites for Caffe (CPU_ONLY mode)
RUN apt-get update && apt-get install -y \
libatlas-base-dev \
libflann-dev \
libhdf5-serial-dev \
libicu-dev \
libleveldb-dev \
liblmdb-dev \
libopencv-dev \
libopenni-dev \
libqhull-dev \
libsnappy-dev \
libvtk5-dev \
libvtk5-qt4-dev \
mpi-default-dev
# Install glog
WORKDIR /tmp
RUN wget https://github.com/google/glog/archive/v0.3.5.tar.gz
RUN tar xzf v0.3.5.tar.gz
WORKDIR /tmp/glog-0.3.5
RUN ./configure && make && make install
# Install gflags
WORKDIR /tmp
RUN wget https://github.com/gflags/gflags/archive/v2.2.0.tar.gz
RUN tar xzf v2.2.0.tar.gz
WORKDIR /tmp/gflags-2.2.0
RUN mkdir build
WORKDIR /tmp/gflags-2.2.0/build
RUN CXXFLAGS="-fPIC" cmake .. && make && make install
ENV CAFFE_ROOT=/apollo/bazel-genfiles/external/caffe
RUN echo "$CAFFE_ROOT/lib" >> /etc/ld.so.conf.d/caffe.conf && ldconfig
# Install Opengl
RUN echo "deb http://ppa.launchpad.net/keithw/glfw3/ubuntu trusty main" | sudo tee -a /etc/apt/sources.list.d/fillwave_ext.list
RUN echo "deb-src http://ppa.launchpad.net/keithw/glfw3/ubuntu trusty main" | sudo tee -a /etc/apt/sources.list.d/fillwave_ext.list
RUN apt-get update && apt-get install -y --force-yes libglfw3 libglfw3-dev freeglut3-dev
# Install GLEW
WORKDIR /tmp
RUN wget https://github.com/nigels-com/glew/releases/download/glew-2.0.0/glew-2.0.0.zip
RUN unzip glew-2.0.0.zip
WORKDIR /tmp/glew-2.0.0
RUN make && make install
RUN ln -s /usr/lib64/libGLEW.so /usr/lib/libGLEW.so
RUN ln -s /usr/lib64/libGLEW.so.2.0 /usr/lib/libGLEW.so.2.0
# Remove all temporary files.
RUN rm -fr /tmp/*
FROM ubuntu:14.04 FROM ubuntu:14.04
RUN apt-get update && apt-get install -y \ RUN apt-get update && apt-get install -y \
curl \
build-essential \ build-essential \
curl \
git \
libatlas-base-dev \
libboost-all-dev \ libboost-all-dev \
libconsole-bridge-dev \
libcurl4-openssl-dev \ libcurl4-openssl-dev \
libflann-dev \
libfreetype6-dev \ libfreetype6-dev \
libgflags-dev \
libgoogle-glog-dev \
libhdf5-serial-dev \
libicu-dev \
libleveldb-dev \
liblmdb-dev \
liblog4cxx10 \ liblog4cxx10 \
liblz4-dev \
libopencv-dev \
libopenni-dev \
libpoco-dev \
libproj-dev \
libpython2.7-dev \ libpython2.7-dev \
libqhull-dev \
libsnappy-dev \
libtinyxml-dev \
libvtk5-dev \
libvtk5-qt4-dev \
libyaml-cpp-dev \ libyaml-cpp-dev \
libyaml-dev \ libyaml-dev \
python-pip \ mpi-default-dev \
python-matplotlib \ python-matplotlib \
python-pip \
python-scipy \ python-scipy \
python-software-properties \ python-software-properties \
realpath \ realpath \
software-properties-common \
tmux \ tmux \
unzip \ unzip \
wget \ wget
libtinyxml-dev \
libpoco-dev \
libproj-dev \
liblz4-dev \
libconsole-bridge-dev \
git
RUN apt-get clean autoclean && apt-get autoremove -y RUN apt-get clean autoclean && apt-get autoremove -y
RUN rm -fr /var/lib/apt/lists/* RUN rm -fr /var/lib/apt/lists/*
COPY ./modules/tools/py27_requirements.txt /tmp/ COPY ./modules/tools/py27_requirements.txt /tmp/
WORKDIR /tmp WORKDIR /tmp
# install protobuf 3.1.0 # install protobuf 3.3.0
RUN wget https://github.com/google/protobuf/releases/download/v3.1.0/protobuf-cpp-3.1.0.tar.gz RUN wget https://github.com/google/protobuf/releases/download/v3.3.0/protobuf-cpp-3.3.0.tar.gz
RUN tar xzf protobuf-cpp-3.1.0.tar.gz RUN tar xzf protobuf-cpp-3.3.0.tar.gz
WORKDIR /tmp/protobuf-3.1.0 WORKDIR /tmp/protobuf-3.3.0
RUN ./configure --prefix=/usr RUN ./configure --prefix=/usr && make && make install
RUN make RUN chmod 755 /usr/bin/protoc
RUN make install
ENV ROSCONSOLE_FORMAT '${file}:${line} ${function}() [${severity}] [${time}]: ${message}' ENV ROSCONSOLE_FORMAT '${file}:${line} ${function}() [${severity}] [${time}]: ${message}'
WORKDIR /tmp
RUN wget https://github.com/google/protobuf/releases/download/v3.1.0/protoc-3.1.0-linux-x86_64.zip
RUN unzip protoc-3.1.0-linux-x86_64.zip -d protoc3
RUN mv protoc3/bin/protoc /usr/bin/
RUN chmod 755 /usr/bin/protoc
# set up node v8.0.0 # set up node v8.0.0
WORKDIR /tmp
RUN wget https://github.com/tj/n/archive/v2.1.0.tar.gz RUN wget https://github.com/tj/n/archive/v2.1.0.tar.gz
RUN tar xzf v2.1.0.tar.gz RUN tar xzf v2.1.0.tar.gz
WORKDIR /tmp/n-2.1.0 WORKDIR /tmp/n-2.1.0
...@@ -57,8 +67,21 @@ WORKDIR /tmp ...@@ -57,8 +67,21 @@ WORKDIR /tmp
# Install required python packages. # Install required python packages.
RUN pip install -r py27_requirements.txt RUN pip install -r py27_requirements.txt
# Remove all temporary files.
RUN rm -fr /tmp/*
# https://stackoverflow.com/questions/25193161/chfn-pam-system-error-intermittently-in-docker-hub-builds # https://stackoverflow.com/questions/25193161/chfn-pam-system-error-intermittently-in-docker-hub-builds
RUN ln -s -f /bin/true /usr/bin/chfn RUN ln -s -f /bin/true /usr/bin/chfn
# install Opengl
RUN echo "deb http://ppa.launchpad.net/keithw/glfw3/ubuntu trusty main" | sudo tee -a /etc/apt/sources.list.d/fillwave_ext.list
RUN echo "deb-src http://ppa.launchpad.net/keithw/glfw3/ubuntu trusty main" | sudo tee -a /etc/apt/sources.list.d/fillwave_ext.list
RUN apt-get update && apt-get install -y --force-yes libglfw3 libglfw3-dev
WORKDIR /tmp
RUN wget https://github.com/nigels-com/glew/releases/download/glew-2.0.0/glew-2.0.0.zip
RUN unzip glew-2.0.0.zip
WORKDIR /tmp/glew-2.0.0
RUN make && make install
RUN ln -s /usr/lib64/libGLEW.so /usr/lib/libGLEW.so
RUN ln -s /usr/lib64/libGLEW.so.2.0 /usr/lib/libGLEW.so.2.0
# Remove all temporary files.
RUN rm -fr /tmp/*
...@@ -16,13 +16,11 @@ ...@@ -16,13 +16,11 @@
# limitations under the License. # limitations under the License.
############################################################################### ###############################################################################
TIME=$(date +%Y%m%d_%H%M) TIME=$(date +%Y%m%d_%H%M)
if [ -z "${DOCKER_REPO}" ]; then if [ -z "${DOCKER_REPO}" ]; then
DOCKER_REPO=apolloauto/apollo DOCKER_REPO=apolloauto/apollo
fi fi
APOLLO_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../.." && pwd )" APOLLO_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../.." && pwd )"
ARCH=$(uname -m) ARCH=$(uname -m)
TAG="dev-${ARCH}-${TIME}" TAG="dev-${ARCH}-${TIME}"
...@@ -31,5 +29,3 @@ TAG="dev-${ARCH}-${TIME}" ...@@ -31,5 +29,3 @@ TAG="dev-${ARCH}-${TIME}"
docker build -t "${DOCKER_REPO}:${TAG}" \ docker build -t "${DOCKER_REPO}:${TAG}" \
-f "${APOLLO_ROOT}/docker/dev.${ARCH}.dockerfile" \ -f "${APOLLO_ROOT}/docker/dev.${ARCH}.dockerfile" \
"${APOLLO_ROOT}" "${APOLLO_ROOT}"
sed -i "s/dev-${ARCH}-.*\"/${TAG}\"/g" ${APOLLO_ROOT}/docker/scripts/dev_start.sh
...@@ -16,7 +16,6 @@ ...@@ -16,7 +16,6 @@
# limitations under the License. # limitations under the License.
############################################################################### ###############################################################################
xhost +local:root 1>/dev/null 2>&1 xhost +local:root 1>/dev/null 2>&1
docker exec \ docker exec \
-u $USER \ -u $USER \
......
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
VERSION="" VERSION=""
ARCH=$(uname -m) ARCH=$(uname -m)
VERSION_X86_64="dev-x86_64-20170707_1129" VERSION_X86_64="dev-x86_64-20170919_1058"
VERSION_AARCH64="dev-aarch64-20170712_1533" VERSION_AARCH64="dev-aarch64-20170712_1533"
if [[ $# == 1 ]];then if [[ $# == 1 ]];then
VERSION=$1 VERSION=$1
...@@ -36,38 +36,19 @@ if [ -z "${DOCKER_REPO}" ]; then ...@@ -36,38 +36,19 @@ if [ -z "${DOCKER_REPO}" ]; then
fi fi
IMG=${DOCKER_REPO}:$VERSION IMG=${DOCKER_REPO}:$VERSION
LOCAL_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../.." && pwd )" APOLLO_ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../.." && pwd )"
if [ ! -e "${LOCAL_DIR}/data/log" ]; then if [ ! -e /apollo ]; then
mkdir -p "${LOCAL_DIR}/data/log" sudo ln -sf ${APOLLO_ROOT_DIR} /apollo
fi
if [ ! -e "${LOCAL_DIR}/data/bag" ]; then
mkdir -p "${LOCAL_DIR}/data/bag"
fi
if [ ! -e "${LOCAL_DIR}/data/core" ]; then
mkdir -p "${LOCAL_DIR}/data/core"
fi fi
source ${LOCAL_DIR}/scripts/apollo_base.sh echo "/apollo/data/core/core_%e.%p" | sudo tee /proc/sys/kernel/core_pattern
function find_device() { source ${APOLLO_ROOT_DIR}/scripts/apollo_base.sh
# ${1} = device pattern
local device_list=$(find /dev -name "${1}")
if [ -z "${device_list}" ]; then
warning "Failed to find device with pattern \"${1}\" ..."
else
local devices=""
for device in $(find /dev -name "${1}"); do
ok "Found device: ${device}."
devices="${devices} --device ${device}:${device}"
done
echo "${devices}"
fi
}
function main(){ function main(){
docker pull $IMG docker pull $IMG
docker ps -a --format "{{.Names}}" | grep 'apollo_dev' 1>/dev/null docker ps -a --format "{{.Names}}" | grep 'apollo_dev' 1>/dev/null
if [ $? == 0 ]; then if [ $? == 0 ]; then
docker stop apollo_dev 1>/dev/null docker stop apollo_dev 1>/dev/null
...@@ -80,14 +61,7 @@ function main(){ ...@@ -80,14 +61,7 @@ function main(){
display="${DISPLAY}" display="${DISPLAY}"
fi fi
setup_device
# setup CAN device
if [ ! -e /dev/can0 ]; then
sudo mknod --mode=a+rw /dev/can0 c 52 0
fi
# enable coredump
echo "${LOCAL_DIR}/data/core/core_%e.%p" | sudo tee /proc/sys/kernel/core_pattern
local devices="" local devices=""
devices="${devices} $(find_device ttyUSB*)" devices="${devices} $(find_device ttyUSB*)"
...@@ -95,6 +69,7 @@ function main(){ ...@@ -95,6 +69,7 @@ function main(){
devices="${devices} $(find_device can*)" devices="${devices} $(find_device can*)"
devices="${devices} $(find_device ram*)" devices="${devices} $(find_device ram*)"
devices="${devices} $(find_device loop*)" devices="${devices} $(find_device loop*)"
devices="${devices} $(find_device nvidia*)"
USER_ID=$(id -u) USER_ID=$(id -u)
GRP=$(id -g -n) GRP=$(id -g -n)
GRP_ID=$(id -g) GRP_ID=$(id -g)
...@@ -108,6 +83,7 @@ function main(){ ...@@ -108,6 +83,7 @@ function main(){
fi fi
docker run -it \ docker run -it \
-d \ -d \
--privileged \
--name apollo_dev \ --name apollo_dev \
-e DISPLAY=$display \ -e DISPLAY=$display \
-e DOCKER_USER=$USER \ -e DOCKER_USER=$USER \
...@@ -116,7 +92,7 @@ function main(){ ...@@ -116,7 +92,7 @@ function main(){
-e DOCKER_GRP=$GRP \ -e DOCKER_GRP=$GRP \
-e DOCKER_GRP_ID=$GRP_ID \ -e DOCKER_GRP_ID=$GRP_ID \
-v /tmp/.X11-unix:/tmp/.X11-unix:rw \ -v /tmp/.X11-unix:/tmp/.X11-unix:rw \
-v $LOCAL_DIR:/apollo \ -v $APOLLO_ROOT_DIR:/apollo \
-v /media:/media \ -v /media:/media \
-v $HOME/.cache:${DOCKER_HOME}/.cache \ -v $HOME/.cache:${DOCKER_HOME}/.cache \
-v /etc/localtime:/etc/localtime:ro \ -v /etc/localtime:/etc/localtime:ro \
......
...@@ -16,7 +16,6 @@ ...@@ -16,7 +16,6 @@
# limitations under the License. # limitations under the License.
############################################################################### ###############################################################################
xhost +local:root 1>/dev/null 2>&1 xhost +local:root 1>/dev/null 2>&1
docker exec \ docker exec \
-u $USER \ -u $USER \
......
...@@ -17,33 +17,15 @@ ...@@ -17,33 +17,15 @@
############################################################################### ###############################################################################
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" APOLLO_ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}")/../.." && pwd )"
# the machine type, currently support x86_64, aarch64
if [ -e "$DIR/../../scripts/apollo_base.sh" ]; then MACHINE_ARCH=$(uname -m)
# run from source
APOLLO_ROOT_DIR=$(cd "${DIR}/../.." && pwd)
else
# run from script only
APOLLO_ROOT_DIR=~
fi
source $APOLLO_ROOT_DIR/scripts/apollo_base.sh source $APOLLO_ROOT_DIR/scripts/apollo_base.sh
export APOLLO_ROOT_DIR echo "/apollo/data/core/core_%e.%p" | sudo tee /proc/sys/kernel/core_pattern
if [ ! -e "${APOLLO_ROOT_DIR}/data/log" ]; then
mkdir -p "${APOLLO_ROOT_DIR}/data/log"
fi
if [ ! -e "${APOLLO_ROOT_DIR}/data/bag" ]; then
mkdir -p "${APOLLO_ROOT_DIR}/data/bag"
fi
if [ ! -e "${APOLLO_ROOT_DIR}/data/core" ]; then
mkdir -p "${APOLLO_ROOT_DIR}/data/core"
fi
echo "APOLLO_ROOT_DIR=$APOLLO_ROOT_DIR"
VERSION=release-20170712_1820 VERSION="release-${MACHINE_ARCH}-latest"
if [[ $# == 1 ]];then if [[ $# == 1 ]];then
VERSION=$1 VERSION=$1
fi fi
...@@ -52,21 +34,18 @@ if [ -z "${DOCKER_REPO}" ]; then ...@@ -52,21 +34,18 @@ if [ -z "${DOCKER_REPO}" ]; then
fi fi
IMG=${DOCKER_REPO}:$VERSION IMG=${DOCKER_REPO}:$VERSION
DATA_DIR="${HOME}/data"
if [ ! -e "${DATA_DIR}/log" ]; then
mkdir -p "${DATA_DIR}/log"
fi
if [ ! -e "${DATA_DIR}/bag" ]; then
mkdir -p "${DATA_DIR}/bag"
fi
function find_device() { if [ ! -e "${DATA_DIR}/core" ]; then
# ${1} = device pattern mkdir -p "${DATA_DIR}/core"
local device_list=$(find /dev -name "${1}") fi
if [ -z "${device_list}" ]; then
warning "Failed to find device with pattern \"${1}\" ..."
else
local devices=""
for device in $(find /dev -name "${1}"); do
ok "Found device: ${device}."
devices="${devices} --device ${device}:${device}"
done
echo "${devices}"
fi
}
function main() { function main() {
docker pull "$IMG" docker pull "$IMG"
...@@ -77,13 +56,7 @@ function main() { ...@@ -77,13 +56,7 @@ function main() {
docker rm -f apollo_release 1>/dev/null docker rm -f apollo_release 1>/dev/null
fi fi
# setup CAN device setup_device
if [ ! -e /dev/can0 ]; then
sudo mknod --mode=a+rw /dev/can0 c 52 0
fi
# enable coredump
echo "${APOLLO_ROOT_DIR}/data/core/core_%e.%p" | sudo tee /proc/sys/kernel/core_pattern
local devices="" local devices=""
devices="${devices} $(find_device ttyUSB*)" devices="${devices} $(find_device ttyUSB*)"
...@@ -91,6 +64,7 @@ function main() { ...@@ -91,6 +64,7 @@ function main() {
devices="${devices} $(find_device can*)" devices="${devices} $(find_device can*)"
devices="${devices} $(find_device ram*)" devices="${devices} $(find_device ram*)"
devices="${devices} $(find_device loop*)" devices="${devices} $(find_device loop*)"
devices="${devices} $(find_device nvidia*)"
local display="" local display=""
if [[ -z ${DISPLAY} ]];then if [[ -z ${DISPLAY} ]];then
display=":0" display=":0"
...@@ -113,7 +87,7 @@ function main() { ...@@ -113,7 +87,7 @@ function main() {
--name apollo_release \ --name apollo_release \
--net host \ --net host \
-v /media:/media \ -v /media:/media \
-v ${APOLLO_ROOT_DIR}/data:/apollo/data \ -v ${HOME}/data:/apollo/data \
-v /tmp/.X11-unix:/tmp/.X11-unix:rw \ -v /tmp/.X11-unix:/tmp/.X11-unix:rw \
-v /etc/localtime:/etc/localtime:ro \ -v /etc/localtime:/etc/localtime:ro \
-v $HOME/.cache:${DOCKER_HOME}/.cache \ -v $HOME/.cache:${DOCKER_HOME}/.cache \
...@@ -134,7 +108,11 @@ function main() { ...@@ -134,7 +108,11 @@ function main() {
$IMG $IMG
if [ "${USER}" != "root" ]; then if [ "${USER}" != "root" ]; then
docker exec apollo_release bash -c "/apollo/scripts/docker_adduser.sh" docker exec apollo_release bash -c "/apollo/scripts/docker_adduser.sh"
docker exec apollo_release bash -c "chown -R ${USER}:${GRP} /apollo" docker exec apollo_release bash -c "chown -R ${USER}:${GRP} /apollo/data"
docker exec apollo_release bash -c "chmod a+rw -R /apollo/ros/share/velodyne_pointcloud"
docker exec apollo_release bash -c "chmod a+rw -R /apollo/modules/common/data"
docker exec apollo_release bash -c "chmod a+rw -R /apollo/ros/share/gnss_driver"
docker exec apollo_release bash -c "chmod a+rw -R /apollo/ros/share/velodyne"
fi fi
docker exec -u ${USER} -it apollo_release "/apollo/scripts/hmi.sh" docker exec -u ${USER} -it apollo_release "/apollo/scripts/hmi.sh"
} }
......
...@@ -31,5 +31,3 @@ TAG="run-env-${TIME}" ...@@ -31,5 +31,3 @@ TAG="run-env-${TIME}"
docker build -t "${DOCKER_REPO}:${TAG}" \ docker build -t "${DOCKER_REPO}:${TAG}" \
-f "${APOLLO_ROOT}/docker/run_env.dockerfile" \ -f "${APOLLO_ROOT}/docker/run_env.dockerfile" \
"${APOLLO_ROOT}" "${APOLLO_ROOT}"
sed -i "s/run-env.*\"/${TAG}\"/g" ${APOLLO_ROOT}/apollo_docker.sh
# How to add a new evaluator in prediction module
## Introduction
The evaluator extracts features from the raw information of obstacles and the ego vehicle, and applies a pre-trained deep learning model to produce the model output.
## Steps to add a new evaluator
Please follow the steps to add a new evaluator named `NewEvaluator`.
* Add a field in proto
* Define a class that inherits `Evaluator`
* Implement the class `NewEvaluator`
* Update prediction conf
* Update evaluator manager
### Step 1: Add a field in proto
Assume the new evaluation result is named `new_output` and its type is `int32`. If the output is related directly to the obstacles, you can add it into `modules/prediction/proto/feature.proto` like this:
```cpp
message Feature {
// Other existing features
optional int32 new_output = 1000;
}
```
If the output is related to the lane sequences, please add it into `modules/prediction/proto/lane_graph.proto` like this:
```cpp
message LaneSequence {
// Other existing features
optional int32 new_output = 1000;
}
```
### Step 2: Define a class that inherits `Evaluator`
Create a new file named `new_evaluator.h` in the folder `modules/prediction/evaluator/vehicle`. And define it like this:
```cpp
#include "modules/prediction/evaluator/evaluator.h"
namespace apollo {
namespace prediction {
class NewEvaluator : public Evaluator {
 public:
  NewEvaluator();
  virtual ~NewEvaluator();
void Evaluate(Obstacle* obstacle_ptr) override;
// Other useful functions and fields.
};
} // namespace prediction
} // namespace apollo
```
### Step 3 Implement the class `NewEvaluator`
Create a new file named `new_evaluator.cc` in the same folder of `new_evaluator.h`. Implement it like this:
```cpp
#include "modules/prediction/evaluator/vehicle/new_evaluator.h"
namespace apollo {
namespace prediction {
NewEvaluator::NewEvaluator() {
// Implement
}
NewEvaluator::~NewEvaluator() {
// Implement
}
void NewEvaluator::Evaluate(Obstacle* obstacle_ptr) {
// Extract features
// Compute new_output by applying pre-trained model
}
// Other functions
} // namespace prediction
} // namespace apollo
```
### Step 4: Update prediction conf
In the file `modules/prediction/conf/prediction_conf.pb.txt`, update the field `evaluator_type` like this:
```
obstacle_conf {
obstacle_type: VEHICLE
obstacle_status: ON_LANE
evaluator_type: NEW_EVALUATOR
predictor_type: NEW_PREDICTOR
}
```
### Step 5: Update evaluator manager
Update `vehicle_on_lane_evaluator_` in `modules/prediction/evaluator/evaluator_manager.h` like this:
```cpp
ObstacleConf::EvaluatorType vehicle_on_lane_evaluator_ =
ObstacleConf::NEW_EVALUATOR;
```
After this procedure, the new evaluator will be created.
# How to add a new predictor in prediction module
## Introduction
Predictor generates the predicted trajectory for each obstacle. Here assume we want to add a new predictor for vehicle, for other types of obstacles, the procedure is very similar.
## Steps to add a new predictor
Please follow the steps to add a new predictor named `NewPredictor`.
### Step 1: Define a class that inherits `Predictor`
Create a new file named `new_predictor.h` in the folder `modules/prediction/predictor/vehicle`. And define it like this:
```cpp
#include "modules/prediction/predictor/predictor.h"
namespace apollo {
namespace prediction {
class NewPredictor : public Predictor {
public:
void Predict(Obstacle* obstacle) override;
// Other useful functions and fields.
};
} // namespace prediction
} // namespace apollo
```
### Step 2 Implement the class `NewPredictor`
Create a new file named `new_predictor.cc` in the same folder of `new_predictor.h`. Implement it like this:
```cpp
#include "modules/prediction/predictor/vehicle/new_predictor.h"
namespace apollo {
namespace prediction {
void NewPredictor::Predict(Obstacle* obstacle) {
// Get the results from evaluator
// Generate the predicted trajectory
}
// Other functions
} // namespace prediction
} // namespace apollo
```
### Step 3: Update prediction conf
In the file `modules/prediction/conf/prediction_conf.pb.txt`, update the field `predictor_type` like this:
```
obstacle_conf {
obstacle_type: VEHICLE
obstacle_status: ON_LANE
evaluator_type: NEW_EVALUATOR
predictor_type: NEW_PREDICTOR
}
```
### Step 4: Update predictor manager
Update `vehicle_on_lane_predictor_` in `modules/prediction/predictor/predictor_manager.h` like this:
```cpp
ObstacleConf::PredictorType vehicle_on_lane_predictor_ =
ObstacleConf::NEW_PREDICTOR;
```
After this procedure, the new predictor will be created.
...@@ -25,14 +25,14 @@ _Apollo 快速入门指南 1.0_ 提供了所有关于了解、安装以及构建 ...@@ -25,14 +25,14 @@ _Apollo 快速入门指南 1.0_ 提供了所有关于了解、安装以及构建
下表列出了本文档中使用的归约: 下表列出了本文档中使用的归约:
| **Icon** | **描述** | | **Icon** | **描述** |
| ----------------------------------- | ---------------------------------------- | | ----------------------------------- | ---------------------------------------- |
| **粗体** | 重要 | | **粗体** | 重要 |
| `等宽字体` | 代码,类型数据 | | `等宽字体` | 代码,类型数据 |
| _斜体_ | 文件标题,章节和标题使用的术语 | | _斜体_ | 文件标题,章节和标题使用的术语 |
| ![info](images/info_icon.png) | **Info** 包含可能有用的信息。忽略信息图标没有消极的后果 | | ![info](images/info_icon.png) | **Info** 包含可能有用的信息。忽略信息图标没有消极的后果 |
| ![tip](images/tip_icon.png) | **Tip**. 包括有用的提示或可能有助于您完成任务的捷径。 | | ![tip](images/tip_icon.png) | **Tip**. 包括有用的提示或可能有助于您完成任务的捷径。 |
| ![online](images/online_icon.png) | **Online**. 提供指向特定网站的链接,您可以在其中获取更多信息 | | ![online](images/online_icon.png) | **Online**. 提供指向特定网站的链接,您可以在其中获取更多信息 |
| ![warning](images/warning_icon.png) | **Warning**. 包含**不**能忽略的信息,或者执行某个任务或步骤时,您将面临失败风险 | | ![warning](images/warning_icon.png) | **Warning**. 包含**不**能忽略的信息,或者执行某个任务或步骤时,您将面临失败风险 |
# 概览 # 概览
...@@ -140,7 +140,7 @@ bash docker/scripts/release_into.sh ...@@ -140,7 +140,7 @@ bash docker/scripts/release_into.sh
[Apollo's Coordinate System](https://github.com/ApolloAuto/apollo/blob/master/docs/specs/coordination.pdf) 找到您当地的区号。例如,如果你在北京,中国,你必须设置`+zone=50` [Apollo's Coordinate System](https://github.com/ApolloAuto/apollo/blob/master/docs/specs/coordination.pdf) 找到您当地的区号。例如,如果你在北京,中国,你必须设置`+zone=50`
5. 通过修改以下文件,为GNSS驱动程序设置实时运动(RTK)基站: 5. 通过修改以下文件,为GNSS驱动程序设置实时运动(RTK)基站:
`./ros/share/gnss_driver/conf/gnss_conf_mkz.txt` `./ros/share/gnss_driver/conf/gnss_conf_mkz.txt`
有关典型的RTK设置,请参阅以下示例: 有关典型的RTK设置,请参阅以下示例:
...@@ -191,7 +191,7 @@ bash docker/scripts/release_commit.sh ...@@ -191,7 +191,7 @@ bash docker/scripts/release_commit.sh
- 打开平台车辆 - 打开平台车辆
- 打开工业PC机(IPC). - 打开工业PC机(IPC).
![](images/ipc_power_on.png) ![](images/IPC_powerbutton.png)
- 通过按住电源按钮打开调制解调器电源,直到指示灯亮起 - 通过按住电源按钮打开调制解调器电源,直到指示灯亮起
- 设置IPC的网络配置:静态IP(例如192.168.10.6),子网掩码(例如255.255.255.0)和网关(例如192.168.10.1) - 设置IPC的网络配置:静态IP(例如192.168.10.6),子网掩码(例如255.255.255.0)和网关(例如192.168.10.1)
- 配置您的DNS服务器IP(例如,8.8.8.8)。 - 配置您的DNS服务器IP(例如,8.8.8.8)。
......
## Apollo Sensor Calibration Service
Welcome to the Apollo sensor calibration service. This document describes the process of the extrinsic calibration service between 64-beam Light Detection And Ranging (LiDAR) and Inertial Navigation System (INS).
## Apollo Sensor Calibration Catalog
- Service overview
- Preparation
- Recording calibration data
- Uploading calibration data and creating task
- Obtaining calibration results
- Error description
### Overview
The Apollo vehicle sensor calibration function provides the extrinsic calibration between Velodyne HDL-64ES3 and IMU. The calibration results can be used to transfer the obstacle location detected by LiDAR to the IMU coordinate system, and then to the world coordinate system. The results are provided by `.yaml` format files.
### Preparation
To calibrate the sensors it is important to prepare using the following steps:
1.Install the 64-beam LiDAR and INS supported by Apollo, and then deploy the docker running environment.
2.Start up the 64-beam LiDAR and INS. The INS must be aligned when it is powered on. At this point, the car should be driven straight, then turned left and right in an open area, until the initialization is completed.
3.Confirm that all sensor topics required by this service have output. See: [How to Check the Sensor Output?](https://github.com/ApolloAuto/apollo/blob/master/docs/quickstart/lidar_calibration/FAQ0.md)
The topics required by the calibration service are shown in the following Table 1:
Table 1. Sensor topics.
Sensor | Topic Name | Topic Feq. (Hz)
--- | ------- | --- |
HDL-64ES3 | /apollo/sensor/velodyne64/VelodyneScanUnified | 10
INS | /apollo/sensor/gnss/odometry | 100
INS | /apollo/sensor/gnss/ins_stat | 1
4.Confirm that the INS status is 56 when recording data. See: [How to Check INS Status?](https://github.com/ApolloAuto/apollo/blob/master/docs/quickstart/lidar_calibration/FAQ1.md)
5.Choose an appropriate calibration field.
An ideal calibration field requires no tall buildings around the calibration area. If buildings are near, low-rising building facades are preferred. Finally, the ground should be smooth, not rough, and it should be easy to drive the car following the trajectory that looks like the ∞ symbol as illustrated in Figure 1. An example of a good calibration field is shown in Figure 2.
![](lidar_calibration/images/calibration/trajectory.png)
<p align="center">Figure 1. The trajectory for calibration.</p>
![](lidar_calibration/images/calibration/field.png)
<p align="center">Figure 2. Calibration field.</p>
### Recording Calibration Data
After the preparation steps are completed, drive the vehicle to the calibration field to record the calibration data.
1.The recording script is `apollo/script/lidar_calibration.sh`.
2.Run the following command to record data:
```bash
bash lidar_calibration.sh start_record
```
The recorded bag is under the directory `apollo/data/bag`.
3.Drive the car following a ∞ symbol path, using a controlled speed of 20-40km/h, and make the turning radius as small as possible.
The total time length should be within 3 minutes, but please make sure that your calibration drive contains at least one full ∞ symbol path.
4.After recording, run the following command to stop the data recording.
```bash
bash lidar_calibration.sh stop_record
```
5.Then, the program will detect whether or not the recorded bag contains all the required topics. After passing the test, the bag will be packaged into file `lidar_calib_data.tar.gz`, including the recorded rosbag and the corresponding MD5 checksum file.
### Uploading Calibration Data and Creating a Calibration Service Task
After recording the calibration data, please login to the [calibration service page](https://console.bce.baidu.com/apollo/calibrator/index/list) to complete the calibration.
1.Enter the calibration service page and click the **New Task** button in the **Task Management** list to create a new calibration task.
2.After entering the new task creation page, fill in a simple description of the task. Then click the **Upload and create a task** button and select the calibration file to upload, which starts uploading the calibration data.
3.After the upload starts, the page will jump to the task process view. The process figure is the upload progress page. The task will start to calibrate when the upload progress reaches 100%. Please keep the network connection stable during uploading.
4.When the data is uploaded, the Data Verification Process will begin, as shown in Figure 3. The validation process ensures data integrity and suitability. The validation items are:
* Decompress test
* MD5 checksum
* Data format validation
* ∞ symbol path validation
* INS status validation
If validation fails, the corresponding error message is prompted. See the Error Description section below for details.
![](lidar_calibration/images/calib_valid_en.png)
<p align="center">Figure 3. Calibration data verification.</p>
5.After data validation, the calibration process begins, as shown in Figure 4. A detailed calibration progress page is displayed to users. Depending on the size and quality of the data, the overall calibration time lasts about 10-30 minutes. You can enter the page at any time to see the progress of the current task.
![](lidar_calibration/images/calib_progress_en.png)
<p align="center">Figure 4. Calibration progress page.</p>
6.When calibration succeeds, click the **View detail** button to display a stitched point cloud. You can confirm the quality verification by checking the sharpness of the point cloud. If you are satisfied with the calibration quality, you can click **Confirm** to keep the result and download the calibration results by clicking **Download**. This fulfills the completion of the calibration process.
For additional information, see: [How to Check Point Cloud Quality?](https://github.com/ApolloAuto/apollo/blob/master/docs/quickstart/lidar_calibration/FAQ2.md)
### Obtaining Calibration Results
1.Before obtaining the calibration results, the service requires that you confirm the quality of the calibration results based on visualized point cloud.
2.After confirming the quality of the calibration result, you can click the **Confirm** button to store the calibration result. After that, you can download the result on the task page. The **Download** button will *not* appear on the task page if the result failed to pass quality verification.
3.Extrinsic file format instruction — The extrinsic is returned to you in a `.yaml` format file. Below is an example of an extrinsic file.
The field meanings shown in this example are defined in Table 2 below.
```bash
header:
seq: 0
stamp:
secs: 1504765807
nsecs: 0
frame_id: novatel
child_frame_id: velodyne64
transform:
rotation:
x: 0.02883904659307384
y: -0.03212457531272153
z: 0.697030811535172
w: 0.7157404339725393
translation:
x: 0.000908140840832566
y: 1.596564931858745
z: 1
```
Table 2. Definition of the keys in the yaml file.
Field | Meaning
------ | -----
`header` | Header information, including timestamps.
`child_frame_id` | Source sensor ID in calibration. Will be HDL-64ES3 here.
`frame_id` | Target sensor ID in calibration. Will be Novatel here.
`rotation`| Rotation part of the extrinsic parameters. Represented by a quaternion.
`translation`| Translation part of the extrinsic parameters.
4.How to use extrinsic parameters?
Enter the following command to create the calibration file directory in the apollo directory:
```bash
mkdir -p modules/calibration/data/[CAR_ID]/
```
Here, **CAR\_ID** is the vehicle ID for calibrating vehicles. Then, copy the downloaded extrinsic yaml file to the corresponding **CAR\_ID** folder. Finally, after you start HMI, select the correct **CAR\_ID** to load the corresponding calibration yaml file.
### Error Description
1. Data unpacking error: The uploaded data is not a valid `tar.gz` file
2. MD5 checksum error: If the MD5 checksum of the uploaded data differs from the MD5 checksum computed by the server side, it could be caused by network transmission problems.
3. Data format error: The uploaded data is not a rosbag, or necessary topics are missing or unexpected topics exist. The server-side calibration program failed to read it.
4. No ∞ symbol path error: No ∞ symbol path was found in the uploaded data. Verify that the recorded data contains at least one ∞ symbol path.
5. INS status error: In the uploaded data, the location does not meet the requirement. Ensure that the INS status is 56 during the data recording.
欢迎使用Apollo传感器标定服务。本文档提供64线激光雷达与组合惯导之间的外参标定服务使用流程。
### 文档概览
1. 服务概述
2. 准备工作
3. 标定数据录制
4. 标定数据上传以及任务创建
5. 标定结果获取
6. 错误说明
### 服务概述
本服务作为Apollo整车传感器标定功能中的一部分,提供Velodyne 64线激光雷达HDL-64ES3与IMU之间的外参标定功能。标定结果可用于将激光雷达检测的障碍物转换至IMU坐标系,进而转到世界坐标系下。标定结果以 `.yaml` 文件形式返回。
### 准备工作
为了更好地使用本服务,请按以下顺序进行准备工作:
1.安装Apollo所支持的64线激光雷达和组合惯性导航系统,下载镜像安装docker环境。
2.开机并启动64线激光雷达以及组合惯导系统。Novatel组合惯导初次上电时需要校准。此时应将车在开阔地带进行直行、左右转弯等操作,直至惯导初始化完成。
3.确认本服务所需传感器数据的topic均有输出。[如何查看传感器有数据输出?](https://github.com/ApolloAuto/apollo/blob/master/docs/quickstart/lidar_calibration/FAQ0_cn.md)
本服务所需的topics如下表1所示:
表1. 传感器topic名称
传感器 | Topic名称 | Topic发送频率(Hz)
--- | ------- | --- |
HDL-64ES3 | /apollo/sensor/velodyne64/VelodyneScanUnified | 10
INS | /apollo/sensor/gnss/odometry | 100
INS | /apollo/sensor/gnss/ins_stat | 1
4.确认车辆采集标定数据时的定位状态为56。[如何查看车辆定位状态?](https://github.com/ApolloAuto/apollo/blob/master/docs/quickstart/lidar_calibration/FAQ1_cn.md)
5.选择合适的标定场地。
标定的地点需要选择无高楼遮挡、地面平坦、四周有平整的建筑物并且可以进行如图1所示8字轨迹行驶的地方。一个合适的标定场地如图2所示。
![](lidar_calibration/images/trajectory.png)
<p align="center">图1 标定所需车辆行驶的轨迹。</p>
![](lidar_calibration/images/field.png)
<p align="center">图2 标定场地。</p>
### 标定数据录制
准备工作完成后,将车辆驶入标定场地进行标定数据的录制。
1.录制脚本工具为 `apollo/script/lidar_calibration.sh`
2.运行以下命令,开始数据录制工作:
```bash
bash lidar_calibration.sh start_record
```
所录制的bag在 `apollo/data/bag` 目录下。
3.以8字形轨迹驾驶汽车,将车速控制在20-40km/h,并使转弯半径尽量小。行驶的时长3分钟即可,但要保证标定数据至少包含一个完整的8字。
4.录制完成后,输入以下命令结束数据录制:
```bash
bash lidar_calibration.sh stop_record
```
5.随后,程序会检测所录制的bag中是否含有所需的所有topics。检测通过后,会将bag打包成 `lidar_calib_data.tar.gz` 文件,内容包括录制的rosbag以及对应的MD5校验和文件。
### 标定数据上传以及任务创建
录制好标定数据后,登录至[标定服务页面](https://console.bce.baidu.com/apollo/calibrator/index/list)以完成标定。
1.进入标定服务页面,在**任务管理**列表下点击**新建任务**按钮以新建一个标定任务。
2.进入新建任务页面后,需先填写简单的任务描述,然后点击**上传数据并创建任务**按钮,选择上传标定文件,则可以开始进行数据上传。
3.开始上传数据后,页面将跳转至任务流程视图。流程视图图示为上传进度页面,待其到达100%后则可以开始进行标定。上传期间请保持网络畅通。
4.数据上传完毕后,将开始数据校验流程,如图3所示。校验流程可以保证数据完整以及适合标定,校验项目有:
* 数据包解压校验
* MD5校验
* 数据格式校验
* 8字路径与GPS质量校验
* 初始外参评估合格
若数据校验失败,则会提示相应错误。错误的原因请参照错误说明。
![](lidar_calibration/images/calib_valid_cn.png)
<p align="center">图3 标定数据校验流程。</p>
5.校验通过后将开始标定流程,一个标定进度页面会展示给用户,如图4所示。视数据大小和质量的影响,整体标定时间大约持续10-30分钟,用户可以随时进入该页面查看当前任务的标定进度。
![](lidar_calibration/images/calib_progress_cn.png)
<p align="center">图4 标定进度页面。</p>
6.标定完成后,进入人工质检环节。点击[查看]按钮会弹出用于质检的拼接点云,此时可以开始人工质检。若质检通过,则可以点击**确认入库**按钮以保存标定结果。最后,点击**下载数据**按钮来下载标定结果,至此标定流程完成。[如何进行质检?](https://github.com/ApolloAuto/apollo/blob/master/docs/quickstart/lidar_calibration/FAQ2_cn.md)
### 标定结果获取
1.获取标定结果前,本服务需要用户根据可视化效果确认标定结果的质量。
2.确认该标定结果质量合格后,用户可点击**确认入库**按钮将标定结果入库。之后可以在任务页面进行下载,未通过质检并入库的标定结果在任务页面不会出现下载地址。
3.外参格式解析。外参以yaml文件形式返回给用户,下面是一个外参结果文件的样例。
表1中说明了几个字段的含义。
```bash
header:
seq: 0
stamp:
secs: 1504765807
nsecs: 0
frame_id: novatel
child_frame_id: velodyne64
transform:
rotation:
x: 0.02883904659307384
y: -0.03212457531272153
z: 0.697030811535172
w: 0.7157404339725393
translation:
x: 0.000908140840832566
y: 1.596564931858745
z: 1
```
表2. 外参YAML文件字段含义
字段 | 含义
---- | ----
header | 头信息,主要包含标定时间
child_frame_id | 所标定的源传感器ID,此时为HDL-64ES3
frame_id | 所标定的目标传感器ID,此时为Novatel
rotation | 以四元数表示的外参旋转部分
translation | 外参的平移部分
4.外参使用方式
首先在`/apollo`目录下输入以下命令创建标定文件目录:
```bash
mkdir -p modules/calibration/data/[CAR_ID]/
```
其中,**CAR\_ID**为标定车辆的车辆ID。然后将下载的外参yaml文件拷贝至对应的**CAR\_ID** 文件夹内。最后,在启动hmi后,选择正确的**CAR\_ID**即可载入对应的标定yaml文件。
### 错误说明
1. 数据解包错误:上传的数据不是一个合法的 `tar.gz` 文件。
2. MD5校验和错误:上传数据的MD5校验和与服务器端计算的MD5校验和不同,通常由网络传输问题引发。
3. 数据格式错误:上传的数据不是一个rosbag,或者bag里缺少指定的topic或包含其他非指定的topic,服务器端标定程序读取失败。
4. 无8字路径错误:在上传的数据中没有发现8字路径。需要确认录制的数据中是否包含至少一个8字形路径。
5. 组合惯导定位精度不足:在上传的数据中发现定位状态不符合要求。需要确认在录制过程中的定位状态为56。
# Apollo 1.5 Quick Start Guide
This quick start focuses on Apollo 1.5 new features. For general Apollo concepts, please refer to [Apollo 1.0 Quick Start](https://github.com/ApolloAuto/apollo/blob/master/docs/quickstart/apollo_1_0_quick_start.md).
Before doing the following steps, make sure you have calibrated the extrinsic parameters between the LiDAR and the GNSS/INS. For sensor calibration, please refer to [Apollo 1.5 LiDAR calibration guide](https://github.com/ApolloAuto/apollo/blob/master/docs/quickstart/apollo_1_5_lidar_calibration_guide.md).
## Launch release env Docker Image
Run the following commands:
```bash
cd $APOLLO_HOME
bash docker/scripts/release_start.sh
```
When Docker starts, it creates a port mapping, which maps the Docker internal port 8887 to the host port 8887. You can then visit the HMI web service in your host machine browser:
Open the Chrome browser and start the Apollo HMI by going to **localhost:8887**.
![](images/hmi_setup_profile.png)
You'll be required to setup profile before doing anything else. Click the
dropdown menu to select your HDMap and vehicle in use. The lists are defined in
[HMI config file](https://raw.githubusercontent.com/ApolloAuto/apollo/master/modules/hmi/conf/config.pb.txt).
Then your HMI comes to life!
*Note: It's also possible to change profile on the right panel of HMI, but just
remember to click "Reset All" on the top-right corner to restart the system.*
![](images/start_hmi.png)
## (*New!*) Start Auto
In Apollo 1.5, we released the new feature, auto following the traffic until destination.
1. To make it work, you need to set up the system by clicking the "Setup"
button on the left panel.
![](images/hmi_setup_1.5.png)
2. Make sure all modules are on and hardware is ready, and the vehicle is in a
good state which is safe to enter auto mode to follow the traffic to destination.
Click the "Start Auto" button, then it will drive you there!
![](images/hmi_start_auto_following.png)
docs/quickstart/images/start_hmi.png

107.7 KB | W: | H:

docs/quickstart/images/start_hmi.png

267.8 KB | W: | H:

docs/quickstart/images/start_hmi.png
docs/quickstart/images/start_hmi.png
docs/quickstart/images/start_hmi.png
docs/quickstart/images/start_hmi.png
  • 2-up
  • Swipe
  • Onion skin
### How to Check the Sensor Output?
Use the `rostopic` command. For example, type the following command to check the
output of HDL-64ES3:
```bash
rostopic echo /apollo/sensor/velodyne64/VelodyneScanUnified
```
If the topic data is displayed on the terminal, the LiDAR works normally.
\ No newline at end of file
### 如何查看传感器是否有数据输出?
使用 rostopic 命令。例如,查看 HDL-64ES3 的输出,可以在终端中输入:
```bash
rostopic echo /apollo/sensor/velodyne64/VelodyneScanUnified
```
若该 topic 的数据会显示在终端上,则激光雷达工作正常。
### How to Check INS Status?
Using Novatel INS as an example, type the following command to check the INS status:
```bash
rostopic echo /apollo/sensor/gnss/ins_stat
```
Find the `pos_type` field: If the value is 56, it has entered a good positioning status (RTK_FIXED) and can be used for calibration. If it is not 56, reliable calibration results cannot be obtained.
\ No newline at end of file
### 如何查看车辆的定位状态?
以使用 Novatel 组合惯导为例,在终端中输入:
```bash
rostopic echo /apollo/sensor/gnss/ins_stat
```
找到“pos_type”字段,若该字段的值为 56,则表示进入了良好的定位状态 (RTK_FIXED),可以用于标定。若不为 56,则无法获得可靠的标定结果。
\ No newline at end of file
### How to Complete a Quality Inspection?
At present, you complete the quality verification manually with a visual inspection of the results.
When the calibration is completed, the point cloud stitched during the calibration process is provided. In the point cloud, details of the calibration field can be easily identified. Assess the calibration quality for clarity. Look at objects such as building facades, street lights, poles and road curbs. If the point cloud is blurry and a ghosting effect can be found, the calibration is poor. If the calibration result is good, a sharp and clear stitched point cloud is shown.
Figure 1 shows the comparison between the stitched point clouds with good (a) and insufficient (b) calibration quality.
![](images/good_calib.png)
<p align="center">
(a)
</p>
![](images/poor_calib.png)
<p align="center">
(b)
</p>
<p align="center">
Figure 1. (a) a high quality calibration result (b) an insufficient one.
</p>
\ No newline at end of file
### 如何进行质检?
目前进行质检方法主要通过人工来完成。标定完成后,页面会提供标定过程中拼接得到的点云。若标定结果良好,会得到锐利和清晰的拼接点云,可反映出标定场地的细节。通常质检的参照物有平整的建筑立面、路灯和电线杆以及路沿等。若标定质量较差,则会使拼接点云出现一些模糊、重影的效果。图1是两张不同标定质量的拼接点云对比。
![](images/good_calib.png)
<p align="center">
(a)
</p>
![](images/poor_calib.png)
<p align="center">
(b)
</p>
<p align="center">
图1. (a) 高质量的标定结果 (b) 质量较差的标定结果。
</p>
\ No newline at end of file
3D Obstacle Perception
===================
The following sections describe the perception pipeline of obstacles
that are resolved by Apollo:
- HDMap Region of Interest (ROI) Filter
- Convolutional Neural Networks (CNN) Segmentation
- MinBox Builder
- HM Object Tracker
HDMap Region of Interest (ROI) Filter
-------------------------------------
The Region of Interest (ROI) specifies the drivable area, including
road surfaces and junctions, which is retrieved from the HD
(high-resolution) map. The HDMap ROI filter processes LiDAR points that
are outside the ROI, removing background objects, e.g., buildings and trees
around the road. What remains is the point cloud in the ROI for
subsequent processing.
Given a HD map, the affiliation of each LiDAR point indicates whether it
is inside or outside the ROI. Each LiDAR point can be queried with a
lookup table (LUT) of 2D quantization of the region around the car. The
input and output of the HDMap ROI filter module are summarized in the
table below.
|Input |Output |
|------------------------------------------------------------------------- |---------------------------------------------------------------------------|
|The point cloud: A set of 3D points captured from LiDAR Sensor. |The indices of input points that are inside the ROI defined by HDMap. |
|HDMap: A set of polygons each of which is an ordered set of points. | |
In general, the Apollo HDMap ROI filter consists of three successive
steps:
1. Coordinate transformation
2. ROI LUT construction
3. Point inquiry with ROI LUT
### Coordinate Transformation
For the HDMap ROI filter, the data interface for HD map is defined in
terms of a set of polygons, each of which is actually an ordered set of
points in the world coordinate system. Running an inquiry on the points
with the HDMap ROI requires that the point cloud and polygons are
represented in the same coordinate system. For this purpose, Apollo
transforms the points of input point cloud and the HDMap polygons into a
local coordinate system that originates from the LiDAR sensor’s
location.
### ROI LUT Construction
To determine whether an input point is inside or outside the ROI, Apollo
adopts a grid-wise LUT that quantizes the ROI into a birds-eye-view 2D
grid. As shown in Figure 1, this LUT covers a rectangular region, viewed
from above, bounded by a predefined spatial range around the
boundary of the HDMap. It then represents the affiliation with the ROI for
each cell of the grid (i.e., 1/0 represents it is inside/outside the
ROI). For computational efficiency, Apollo uses a scan line algorithm
and bitmap encoding to construct the ROI LUT.
<div align=center><img src="images/3d_obstacle_perception/roi_lookup_table.png"></div>
<div align=center>Figure 1 Illustration of ROI lookup table (LUT)</div>
The blue lines show the boundary of HDMap ROI, including road surfaces and
junctions. The red solid dot represents the origin of the local coordinate
system corresponding to the LiDAR sensor’s location. The 2D grid is composed
of 8×8 cells that are shown as green squares. The cells inside the ROI are
blue-filled squares while the ones outside the ROI are yellow-filled squares.
### Point Inquiry with ROI LUT
Based on the ROI LUT, the affiliation of each input point is queried
using two-step verification. Then, Apollo conducts data compilation and
output as described below. For the point inquiry process, Apollo:
1. Checks whether the point is inside or outside the rectangle region
of ROI LUT.
2. Queries the corresponding cell of the point in the LUT for its
affiliation with respect to the ROI.
3. Collects all the points that belong to the ROI and output their
indices with respect to the input point cloud.
The user-defined parameters can be set in the configuration file of
modules/perception/model/hdmap_roi_filter.config. Please refer to the
table below on the usage of parameters for HDMap ROI Filter.
|Parameter Name |Usage |Default |
|------------------- |------------------------------------------------------------------------------ |------------|
|rectangle |The range of ROI LUT (the 2D grid) with respect to the origin (LiDAR sensor). |70.0 meters |
|cell_size |The size of cells for quantizing the 2D grid. |0.25 meter |
|extend_dist |The distance of extending the ROI from the polygon boundary. |0.0 meter |
Convolutional Neural Networks (CNN) Segmentation
------------------------------------------------
After the HDMap ROI filter, Apollo obtains the filtered point cloud that
includes *only* the points inside ROI (i.e., the drivable road and
junction areas). Most of the background obstacles, such as buildings and
trees around the road region, have been removed, and the point cloud
inside ROI is fed into the segmentation module. This process detects and
segments out foreground obstacles, e.g., cars, trucks, bicycles, and
pedestrians.
|Input |Output |
|---------------------------------------------------------------------------- |---------------------------------------------------------------|
|The point cloud (a set of 3D points) |A set of objects corresponding to obstacles in the ROI. |
|The point indices indicating points inside the ROI as defined in HDMap | |
Apollo uses a deep CNN for accurate obstacle detection and segmentation.
The Apollo CNN segmentation consists of four successive steps:
- Channel Feature Extraction
- CNN-Based Obstacle Prediction
- Obstacle Clustering
- Post-processing
The following sections describe the deep CNN in detail.
### Channel Feature Extraction
Given a frame of point cloud, Apollo builds a bird's-eye view (i.e.,
projected to the X-Y plane) 2D grid in the local coordinate system. Each
point within a predefined range with respect to the origin (i.e., the
LiDAR sensor) is quantized into one cell of the 2D grid based on its X
and Y coordinates. After quantization, Apollo computes 8 statistical
measurements of the points for each cell of the grid, which will be the
input channel features fed into the CNN in the next step. The
statistical measurements computed are the:
1. Maximum height of points in the cell
2. Intensity of the highest point in the cell
3. Mean height of points in the cell
4. Mean intensity of points in the cell
5. Number of points in the cell
6. Angle of the cell’s center with respect to the origin
7. Distance between the cell’s center and the origin
8. Binary value indicating whether the cell is empty or occupied
### CNN-Based Obstacle Prediction
Based on the channel features described above, Apollo uses a deep fully
convolutional neural network (FCNN) to predict the cell-wise obstacle
attributes including the offset displacement with respect to the
potential object center, called center offset, (see figure 2 below),
objectness, positiveness, and object height. As shown in figure 2, the
input of the network is a *W*×*H*×*C* channel image where:
- *W* represents the column number of the grid.
- *H* represents the row number of the grid.
- *C* represents the number of channel features.
The FCNN is composed of three layers:
- Downstream encoding layers (feature encoder)
- Upstream decoding layers (feature decoder)
- Obstacle attribute prediction layers (predictor)
The feature encoder takes the channel feature image as input and
successively down-samples its spatial resolution with increasing feature
abstraction. Then the feature decoder gradually up-samples the encoded
feature image to the spatial resolution of the input 2D grid, which can
recover the spatial details of feature image to facilitate the cell-wise
obstacle attribute prediction. The down-sampling and up-sampling
operations are implemented in terms of stacked convolution/deconvolution
layers with non-linear activation (i.e., ReLU) layers.
<div align=center><img src="images/3d_obstacle_perception/FCNN.png"></div>
<div align=center>Figure 2 The FCNN for cell-wise obstacle prediction</div>
### Obstacle Clustering
After the CNN-based prediction step, Apollo obtains prediction
information for individual cells. Apollo utilizes four cell object
attribute images that contain the:
- Center offset
- Objectness
- Positiveness
- Object height
To generate obstacle objects, Apollo constructs a directed graph based
on the cell center offset prediction and searches the connected
components as candidate object clusters.
As shown in figure 3, each cell is a node of the graph and the directed
edge is built based on the center offset prediction of the cell, which
points to its parent node corresponding to another cell.
Given this graph, Apollo adopts a compressed Union Find algorithm to
efficiently find the connected components, each of which is a candidate
obstacle object cluster. The objectness is the probability of being a
valid object for one individual cell. So Apollo defines the non-object
cells as the ones with the objectness less than 0.5. Thus Apollo filters
out the empty cells and non-object ones for each candidate object
cluster.
<div align=center><img src="images/3d_obstacle_perception/obstacle_clustering.png"></div>
<div align=center>Figure 3 Illustration of obstacle clustering</div>
(a) The red arrow represents the object center offset prediction for
each cell. The blue mask corresponds to the object cells for which
the objectness probability is no less than 0.5.
(b) The cells within solid red polygon compose a candidate object
cluster.
The red filled five-pointed stars indicate the root nodes (cells) of
sub-graphs that correspond to the connected components. One candidate
object cluster can be composed of multiple neighboring connected
components whose root nodes are adjacent to each other.
### Post-processing
After clustering, Apollo obtains a set of candidate object clusters each
of which includes several cells. In the post-processing step, Apollo
first computes the detection confidence score and object height for each
candidate cluster by averaging the positiveness and object height values
of its involved cells respectively. Then, Apollo removes the points that
are too high with respect to the predicted object height and collects
the points of valid cells for each candidate cluster. Finally, Apollo
removes the candidate clusters that have either a very low confidence
score or small number of points, to output the final obstacle
clusters/segments.
The user-defined parameters can be set in the configuration file of
modules/perception/model/cnn\_segmentation/cnnseg.conf. The table below
explains the parameter usage and default values for CNN Segmentation.
|Parameter Name |Usage |Default |
|-----------------------------------|--------------------------------------------------------------------------------------------|-----------|
|objectness_thresh |The threshold of objectness for filtering out non-object cells in obstacle clustering step. |0.5 |
|use_all_grids_for_clustering |The option of specifying whether or not to use all cells to construct the graph in the obstacle clustering step.If not, only the occupied cells will be considered. |true |
|confidence_thresh |The detection confidence score threshold for filtering out the candidate clusters in the post-processing step. |0.1 |
|height_thresh |If it is non-negative, the points that are higher than the predicted object height by height_thresh will be filtered out in the post-processing step. |0.5 meters |
|min_pts_num |In the post-processing step, the candidate clusters with less than min_pts_num points are removed. |3 |
|use_full_cloud |If it is set by true, all the points of the original point cloud will be used for extracting channel features. Otherwise only the points of input point cloud (i.e., the points after HDMap ROI filter) are used. |true |
|grid_id |The ID of the GPU device used in the CNN-based obstacle prediction step. |0 |
|feature_param {width} |The number of cells in X (column) axis of the 2D grid. |512 |
|feature_param {height} |The number of cells in Y (row) axis of the 2D grid. |512 |
|feature_param {range} |The range of the 2D grid with respect to the origin (the LiDAR sensor). |60 meters |
MinBox Builder
--------------
The object builder component establishes a bounding box for the detected
obstacles. Due to occlusions or distance to the LiDAR sensor, the point
cloud forming an obstacle can be sparse and cover only a portion of
surfaces. Thus, the box builder works to recover the full bounding box
given the polygon point. The main purpose of the bounding box is to
estimate the heading of the obstacle (e.g., vehicle) even if the point
cloud is sparse. Equally, the bounding box is used to visualize the
obstacles.
The idea behind the algorithm is to find all candidate areas given an
edge of the polygon. In the following example, if AB is the edge, Apollo
projects other polygon points onto AB and establishes the pair of
intersections that has the maximum distance. That’s one of the edges
belonging to the bounding box. Then it is straightforward to obtain the
other edge of the bounding box. By iterating all edges in the polygon,
in the following example as shown in figure 4, Apollo determines a
6-edge bounding box. Apollo then selects the solution that has the
minimum area as the final bounding box.
<div align=center><img src="images/3d_obstacle_perception/object_building.png"></div>
<div align=center>Figure 4 Illustration of MinBox Object Builder</div>
HM Object Tracker
-----------------
The HM object tracker is designed to track obstacles detected by the
segmentation step. In general, it forms and updates track lists by
associating current detections with existing track lists, deletes old
track lists if they no longer persist, and spawns new track lists if new detections are identified. The motion state of the updated track
lists will be estimated after association. In HM object tracker, the
Hungarian algorithm is used for detection-to-track association, and a
Robust Kalman Filter is adopted for motion estimation.
### Detection-to-Track Association
When associating detection to existing track lists, Apollo constructs a
bipartite graph and then uses the Hungarian algorithm to find the best
detection-to-track matching with minimum cost (distance).
**Computing Association Distance Matrix**
In the first step, an association distance matrix is established. The
distance between a given detection and one track is calculated according to
a series of association features including motion consistency,
appearance consistency, etc. Some features used in HM tracker’s distance
computing are shown as below:
|Association Feature Name |Description |
|-------------------------|----------------------------------|
|location_distance |Evaluating motion consistency |
|direction_distance |Evaluating motion consistency |
|bbox_size_distance |Evaluating appearance consistency |
|point_num_distance |Evaluating appearance consistency |
|histogram_distance |Evaluating appearance consistency |
Besides, there are some important parameters of distance weights which are
used for combining the above-mentioned association features into a final
distance measurement.
**Bipartite Graph Matching via Hungarian Algorithm**
Given the association distance matrix, as shown in figure 5, Apollo
constructs a bipartite graph and uses Hungarian algorithm to find the
best detection-to-track matching via minimizing the distance cost. It
solves the assignment problem within O(n\^3) time complexity. To boost
its computing performance, the Hungarian algorithm is implemented after
cutting original bipartite graph into subgraphs, by deleting vertices
with distance greater than a reasonable maximum distance threshold.
<div align=center><img src="images/3d_obstacle_perception/bipartite_graph_matching.png"></div>
<div align=center>Figure 5 Illustration of Bipartite Graph Matching</div>
### Track Motion Estimation
After the detection-to-track association, HM object tracker uses a
Robust Kalman Filter to estimate the motion states of current track
lists with a constant velocity motion model. The motion states include
its belief anchor point and belief velocity, which correspond to the 3D
position and its 3D velocity respectively. To overcome possible
distraction caused from imperfect detections, Robust Statistics
techniques are implemented in the tracker’s filtering algorithm.
**Observation Redundancy**
The measurement of velocity, which is the input of filtering algorithm,
is selected among a series of redundant observations, including anchor
point shift, bounding box center shift, bounding box corner point shift,
etc. Redundant observations will bring extra robustness to filtering
measurement, as the probability that all observations fail is much less
than the one that a single observation fails.
**Breakdown**
Gaussian Filter algorithms always assume their noises are generated from
Gaussian distribution. However, this hypothesis may fail in motion
estimation problem, as the noise of its measurement may draw from
fat-tail distributions. To overcome the over-estimation of update gain,
a breakdown threshold is used in the process of filtering.
**Update According to Association Quality**
The original Kalman Filter updates its states without distinguishing the
quality of its measurements. However, the quality of measurement is a
beneficial cue of filtering noise and somehow can be estimated. For
instance, the distance calculated in the association step could be a
reasonable estimate of quality of measurement. Updating the state of
filtering algorithm according to the association quality enhances
robustness and smoothness to the motion estimation problem.
A high-level workflow of HM object tracker is given in figure 6.
<div align=center><img src="images/3d_obstacle_perception/hm_object_tracker.png"></div>
<div align=center>Figure 6 Workflow of HM Object Tracker</div>
1) Construct the tracked objects and transform them into world coordinates.
2) Predict the states of existing track lists and match detections to
them.
3) Update the motion state of updated track lists and collect the
tracking results.
# QP-Spline-Path Optimizer
Quadratic programming + Spline interpolation
## 1. Objective function
### 1.1 Get path length
Path is defined in the station-lateral coordinate system. The **s** range is from the vehicle's current position to the default planning path length.
### 1.2 Get spline segments
Split the path into **n** segments. Each segment trajectory is defined by a polynomial.
### 1.3 Define function for each spline segment
Each segment ***i*** has accumulated distance $d_i$ along reference line. The trajectory for the segment is defined as a polynomial of degree five by default.
$$
l = f_i(s)
= a_{i0} + a_{i1} * s + a_{i2} * s^2 + a_{i3} * s^3 + a_{i4} * s^4 + a_{i5} * s^5 (0 \leq s \leq d_{i})
$$
### 1.4 Define objective function of optimization for each segment
$$
cost = \sum_{i=1}^{n} \Big( w_1 \cdot \int\limits_{0}^{d_i} (f_i')^2(s) ds + w_2 \cdot \int\limits_{0}^{d_i} (f_i'')^2(s) ds + w_3 \cdot \int\limits_{0}^{d_i} (f_i^{\prime\prime\prime})^2(s) ds \Big)
$$
### 1.5 Convert the cost function to QP formulation
QP formulation:
$$
\frac{1}{2} \cdot x^T \cdot H \cdot x + f^T \cdot x
\\
s.t. LB \leq x \leq UB
\\
A_{eq}x = b_{eq}
\\
Ax \leq b
$$
Below is an example of converting the cost function into the QP formulation.
$$
f_i(s) =
\begin{vmatrix} a_{i0} & a_{i1} & a_{i2} & a_{i3} & a_{i4} & a_{i5} \end{vmatrix}
\cdot
\begin{vmatrix} 1 \\ s \\ s^2 \\ s^3 \\ s^4 \\ s^5 \end{vmatrix}
$$
And
$$
f_i'(s) =
\begin{vmatrix} a_{i0} & a_{i1} & a_{i2} & a_{i3} & a_{i4} & a_{i5} \end{vmatrix}
\cdot
\begin{vmatrix} 0 \\ 1 \\ s \\ s^2 \\ s^3 \\ s^4 \end{vmatrix}
$$
And
$$
f_i'(s)^2 =
\begin{vmatrix} a_{i0} & a_{i1} & a_{i2} & a_{i3} & a_{i4} & a_{i5} \end{vmatrix}
\cdot
\begin{vmatrix} 0 \\ 1 \\ s \\ s^2 \\ s^3 \\ s^4 \end{vmatrix}
\cdot
\begin{vmatrix} 0 & 1 & s & s^2 & s^3 & s^4 \end{vmatrix}
\cdot
\begin{vmatrix} a_{i0} \\ a_{i1} \\ a_{i2} \\ a_{i3} \\ a_{i4} \\ a_{i5} \end{vmatrix}
$$
And
$$
\int\limits_{0}^{d_i} f_i'(s)^2 ds =
\int\limits_{0}^{d_i}
\begin{vmatrix} a_{i0} & a_{i1} & a_{i2} & a_{i3} & a_{i4} & a_{i5} \end{vmatrix}
\cdot
\begin{vmatrix} 0 \\ 1 \\ s \\ s^2 \\ s^3 \\ s^4 \end{vmatrix}
\cdot
\begin{vmatrix} 0 & 1 & s & s^2 & s^3 & s^4 \end{vmatrix}
\cdot
\begin{vmatrix} a_{i0} \\ a_{i1} \\ a_{i2} \\ a_{i3} \\ a_{i4} \\ a_{i5} \end{vmatrix} ds
$$
And
$$
\int\limits_{0}^{d_i} f_i'(s)^2 ds =
\begin{vmatrix} a_{i0} & a_{i1} & a_{i2} & a_{i3} & a_{i4} & a_{i5} \end{vmatrix}
\cdot
\int\limits_{0}^{d_i}
\begin{vmatrix} 0 \\ 1 \\ s \\ s^2 \\ s^3 \\ s^4 \end{vmatrix}
\cdot
\begin{vmatrix} 0 & 1 & s & s^2 & s^3 & s^4 \end{vmatrix} ds
\cdot
\begin{vmatrix} a_{i0} \\ a_{i1} \\ a_{i2} \\ a_{i3} \\ a_{i4} \\ a_{i5} \end{vmatrix}
$$
And
$$
\int\limits_{0}^{d_i}
f_i'(s)^2 ds =\begin{vmatrix} a_{i0} & a_{i1} & a_{i2} & a_{i3} & a_{i4} & a_{i5} \end{vmatrix}
\cdot \int\limits_{0}^{d_i}
\begin{vmatrix}
0 & 0 &0&0&0&0\\
0 & 1 & s & s^2 & s^3 & s^4\\
0 & s & s^2 & s^3 & s^4 & s^5\\
0 & s^2 & s^3 & s^4&s^5&s^6 \\
0 & s^3 & s^4 &s^5 &s^6&s^7 \\
0 & s^4 & s^5 & s^6 & s^7 & s^8
\end{vmatrix} ds
\cdot
\begin{vmatrix} a_{i0} \\ a_{i1} \\ a_{i2} \\ a_{i3} \\ a_{i4} \\ a_{i5} \end{vmatrix}
$$
And
$$
\int\limits_{0}^{d_i}
f'_i(s)^2 ds =\begin{vmatrix} a_{i0} & a_{i1} & a_{i2} & a_{i3} & a_{i4} & a_{i5} \end{vmatrix}
\cdot \begin{vmatrix}
0 & 0 & 0 & 0 &0&0\\
0 & d_i & \frac{d_i^2}{2} & \frac{d_i^3}{3} & \frac{d_i^4}{4}&\frac{d_i^5}{5}\\
0& \frac{d_i^2}{2} & \frac{d_i^3}{3} & \frac{d_i^4}{4} & \frac{d_i^5}{5}&\frac{d_i^6}{6}\\
0& \frac{d_i^3}{3} & \frac{d_i^4}{4} & \frac{d_i^5}{5} & \frac{d_i^6}{6}&\frac{d_i^7}{7}\\
0& \frac{d_i^4}{4} & \frac{d_i^5}{5} & \frac{d_i^6}{6} & \frac{d_i^7}{7}&\frac{d_i^8}{8}\\
0& \frac{d_i^5}{5} & \frac{d_i^6}{6} & \frac{d_i^7}{7} & \frac{d_i^8}{8}&\frac{d_i^9}{9}\\
\end{vmatrix}
\cdot
\begin{vmatrix} a_{i0} \\ a_{i1} \\ a_{i2} \\ a_{i3} \\ a_{i4} \\ a_{i5} \end{vmatrix}
$$
## 2 Constraints
### 2.1 The init point constraints
Assume that the first point is $(s_0, l_0)$, and that constraints are imposed on the planned path through $f_i(s_0)$, $f_i'(s_0)$, and $f_i''(s_0)$. Convert those constraints into QP equality constraints, using:
$$
A_{eq}x = b_{eq}
$$
Below are the steps of conversion.
$$
f_i(s_0) =
\begin{vmatrix} 1 & s_0 & s_0^2 & s_0^3 & s_0^4&s_0^5 \end{vmatrix}
\cdot
\begin{vmatrix} a_{i0} \\ a_{i1} \\ a_{i2} \\ a_{i3} \\ a_{i4} \\ a_{i5}\end{vmatrix} = l_0
$$
And
$$
f'_i(s_0) =
\begin{vmatrix} 0& 1 & s_0 & s_0^2 & s_0^3 & s_0^4 \end{vmatrix}
\cdot
\begin{vmatrix} a_{i0} \\ a_{i1} \\ a_{i2} \\ a_{i3} \\ a_{i4} \\ a_{i5} \end{vmatrix} = l_0
$$
And
$$
f''_i(s_0) =
\begin{vmatrix} 0&0& 1 & s_0 & s_0^2 & s_0^3 \end{vmatrix}
\cdot
\begin{vmatrix} a_{i0} \\ a_{i1} \\ a_{i2} \\ a_{i3} \\ a_{i4} \\ a_{i5} \end{vmatrix} = l_0
$$
Here $i$ is the index of the segment that contains $s_0$.
Therefore the equality constraint is:
$$
\begin{vmatrix}
1 & s_0 & s_0^2 & s_0^3 & s_0^4&s_0^5 \\
0&1 & s_0 & s_0^2 & s_0^3 & s_0^4 \\
0& 0&1 & s_0 & s_0^2 & s_0^3
\end{vmatrix}
\cdot
\begin{vmatrix} a_{i0} \\ a_{i1} \\ a_{i2} \\ a_{i3} \\ a_{i4} \\ a_{i5} \end{vmatrix}
=
\begin{vmatrix}
l_0\\
l_0\\
l_0\\
\end{vmatrix}
$$
### 2.2 The end point constraints
Similar to the init point, the end point $(s_e, l_e)$ is known and should produce the same constraint as described in the init point calculations.
Combine the init point and end point, and show the equality constraint as:
$$
\begin{vmatrix}
1 & s_0 & s_0^2 & s_0^3 & s_0^4&s_0^5 \\
0&1 & s_0 & s_0^2 & s_0^3 & s_0^4 \\
0& 0&1 & s_0 & s_0^2 & s_0^3 \\
1 & s_e & s_e^2 & s_e^3 & s_e^4&s_e^5 \\
0&1 & s_e & s_e^2 & s_e^3 & s_e^4 \\
0& 0&1 & s_e & s_e^2 & s_e^3
\end{vmatrix}
\cdot
\begin{vmatrix} a_{i0} \\ a_{i1} \\ a_{i2} \\ a_{i3} \\ a_{i4} \\ a_{i5} \end{vmatrix}
=
\begin{vmatrix}
l_0\\
l_0\\
l_0\\
l_e\\
l_e\\
l_e\\
\end{vmatrix}
$$
### 2.3 Joint smoothness constraints
This constraint is designed to smooth the spline joint. Assume two segments $seg_k$ and $seg_{k+1}$ are connected, and the accumulated **s** of segment $seg_k$ is $s_k$. Calculate the constraint equation as:
$$
f_k(s_k) = f_{k+1} (s_0)
$$
Below are the steps of the calculation.
$$
\begin{vmatrix}
1 & s_k & s_k^2 & s_k^3 & s_k^4&s_k^5 \\
\end{vmatrix}
\cdot
\begin{vmatrix}
a_{k0} \\ a_{k1} \\ a_{k2} \\ a_{k3} \\ a_{k4} \\ a_{k5}
\end{vmatrix}
=
\begin{vmatrix}
1 & s_{0} & s_{0}^2 & s_{0}^3 & s_{0}^4&s_{0}^5 \\
\end{vmatrix}
\cdot
\begin{vmatrix}
a_{k+1,0} \\ a_{k+1,1} \\ a_{k+1,2} \\ a_{k+1,3} \\ a_{k+1,4} \\ a_{k+1,5}
\end{vmatrix}
$$
Then
$$
\begin{vmatrix}
1 & s_k & s_k^2 & s_k^3 & s_k^4&s_k^5 & -1 & -s_{0} & -s_{0}^2 & -s_{0}^3 & -s_{0}^4&-s_{0}^5\\
\end{vmatrix}
\cdot
\begin{vmatrix}
a_{k0} \\ a_{k1} \\ a_{k2} \\ a_{k3} \\ a_{k4} \\ a_{k5} \\ a_{k+1,0} \\ a_{k+1,1} \\ a_{k+1,2} \\ a_{k+1,3} \\ a_{k+1,4} \\ a_{k+1,5}
\end{vmatrix}
= 0
$$
Use $s_0$ = 0 in the equation.
Similarly calculate the equality constraints for:
$$
f'_k(s_k) = f'_{k+1} (s_0)
\\
f''_k(s_k) = f''_{k+1} (s_0)
\\
f'''_k(s_k) = f'''_{k+1} (s_0)
$$
### 2.4 Sampled points for boundary constraint
Evenly sample **m** points along the path, and check the obstacle boundary at those points. Convert the constraint into QP inequality constraints, using:
$$
Ax \leq b
$$
First find the lower boundary $l_{lb,j}$ at those points ($s_j$, $l_j$) and $j\in[0, m]$ based on the road width and surrounding obstacles. Calculate the inequality constraints as:
$$
\begin{vmatrix}
1 & s_0 & s_0^2 & s_0^3 & s_0^4&s_0^5 \\
1 & s_1 & s_1^2 & s_1^3 & s_1^4&s_1^5 \\
...&...&...&...&...&... \\
1 & s_m & s_m^2 & s_m^3 & s_m^4&s_m^5 \\
\end{vmatrix} \cdot \begin{vmatrix}a_{i0} \\ a_{i1} \\ a_{i2} \\ a_{i3} \\ a_{i4} \\ a_{i5} \end{vmatrix}
\leq
\begin{vmatrix}
l_{lb,0}\\
l_{lb,1}\\
...\\
l_{lb,m}\\
\end{vmatrix}
$$
Similarly, for the upper boundary $l_{ub,j}$, calculate the inequality constraints as:
$$
\begin{vmatrix}
1 & s_0 & s_0^2 & s_0^3 & s_0^4&s_0^5 \\
1 & s_1 & s_1^2 & s_1^3 & s_1^4&s_1^5 \\
...&...&...&...&...&... \\
1 & s_m & s_m^2 & s_m^3 & s_m^4&s_m^5 \\
\end{vmatrix}
\cdot
\begin{vmatrix} a_{i0} \\ a_{i1} \\ a_{i2} \\ a_{i3} \\ a_{i4} \\ a_{i5} \end{vmatrix}
\leq
-1 \cdot
\begin{vmatrix}
l_{ub,0}\\
l_{ub,1}\\
...\\
l_{ub,m}\\
\end{vmatrix}
$$
# QP-Spline-ST-Speed Optimizer
## 1 Definition
After finding a path in QP-Spline-Path, Apollo converts all obstacles on the path and the ADV (autonomous driving vehicle) into an ST graph, which represents the station changes over time along the path. The speed optimization task is to find a path on the ST graph that is collision-free and safe.
Apollo uses spline to define the path. To find the best path, Apollo leverages Quadratic programming with a set of conditions. The QP formulation is defined as:
$$
\frac{1}{2} \cdot x^T \cdot H \cdot x + f^T \cdot x
\\
s.t. LB \leq x \leq UB
\\
A_{eq}x = b_{eq}
\\
Ax \leq b
$$
## 2 Objective function
### 2.1 Get spline segments
Split the path into **n** segments. Each segment trajectory is defined by a polynomial.
### 2.2 Define function for each spline segment
Each segment ***i*** has accumulated distance $d_i$ along a reference line. And the trajectory for the segment is defined as a polynomial of degree five by default.
$$
s = f_i(t)
= a_{0i} + a_{1i} \cdot t + a_{2i} \cdot t^2 + a_{3i} \cdot t^3 + a_{4i} \cdot t^4 + a_{5i} \cdot t^5
$$
### 2.3 Define objective function of optimization for each segment
Apollo first defines $cost_1$ to make the trajectory smooth:
$$
cost_1 = \sum_{i=1}^{n} \Big( w_1 \cdot \int\limits_{0}^{d_i} (f_i')^2(s) ds + w_2 \cdot \int\limits_{0}^{d_i} (f_i'')^2(s) ds + w_3 \cdot \int\limits_{0}^{d_i} (f_i^{\prime\prime\prime})^2(s) ds \Big)
$$
Then Apollo defines $cost_2$ as the difference between the final ST trajectory and the cruise ST trajectory (with given speed limits — m points):
$$
cost_2 = \sum_{i=1}^{n}\sum_{j=1}^{m}\Big(f_i(t_j)- s_j\Big)^2
$$
Similarly, Apollo defines $cost_3$ as the difference between the final ST trajectory and the follow ST trajectory (o points):
$$
cost_3 = \sum_{i=1}^{n}\sum_{j=1}^{o}\Big(f_i(t_j)- s_j\Big)^2
$$
Finally, the objective function is defined as:
$$
cost = cost_1 + cost_2 + cost_3
$$
## 3 Constraints
### 3.1 The init point constraints
Given the assumption that the first point is $(t_0, s_0)$, and that constraints are imposed on the planned path through $f_i(t_0)$, $f_i'(t_0)$, and $f_i''(t_0)$ (position, velocity, acceleration), Apollo converts those constraints into QP equality constraints:
$$
A_{eq}x = b_{eq}
$$
### 3.2 Monotone constraint
The path must be monotone, e.g., the vehicle can only drive forward.
Sample **m** points on the path, for each $j$ and $j-1$ point pairs ($j\in[1,...,m]$):
If the two points on the same spline $k$:
$$
\begin{vmatrix} 1 & t_j & t_j^2 & t_j^3 & t_j^4&t_j^5 \\ \end{vmatrix}
\cdot
\begin{vmatrix} a_k \\ b_k \\ c_k \\ d_k \\ e_k \\ f_k \end{vmatrix}
>
\begin{vmatrix} 1 & t_{j-1} & t_{j-1}^2 & t_{j-1}^3 & t_{j-1}^4&t_{j-1}^5 \\ \end{vmatrix}
\cdot
\begin{vmatrix} a_{k} \\ b_{k} \\ c_{k} \\ d_{k} \\ e_{k} \\ f_{k} \end{vmatrix}
$$
If the two points on the different spline $k$ and $l$:
$$
\begin{vmatrix} 1 & t_j & t_j^2 & t_j^3 & t_j^4&t_j^5 \\ \end{vmatrix}
\cdot
\begin{vmatrix} a_k \\ b_k \\ c_k \\ d_k \\ e_k \\ f_k \end{vmatrix}
>
\begin{vmatrix} 1 & t_{j-1} & t_{j-1}^2 & t_{j-1}^3 & t_{j-1}^4&t_{j-1}^5 \\ \end{vmatrix}
\cdot
\begin{vmatrix} a_{l} \\ b_{l} \\ c_{l} \\ d_{l} \\ e_{l} \\ f_{l} \end{vmatrix}
$$
### 3.3 Joint smoothness constraints
This constraint is designed to smooth the spline joint. Given the assumption that two segments, $seg_k$ and $seg_{k+1}$, are connected, and the accumulated **t** of segment $seg_k$ is $t_k$, Apollo calculates the constraint equation as:
$$
f_k(t_k) = f_{k+1} (t_0)
$$
Namely:
$$
\begin{vmatrix}
1 & t_k & t_k^2 & t_k^3 & t_k^4&t_k^5 \\
\end{vmatrix}
\cdot
\begin{vmatrix}
a_{k0} \\ a_{k1} \\ a_{k2} \\ a_{k3} \\ a_{k4} \\ a_{k5}
\end{vmatrix}
=
\begin{vmatrix}
1 & t_{0} & t_{0}^2 & t_{0}^3 & t_{0}^4&t_{0}^5 \\
\end{vmatrix}
\cdot
\begin{vmatrix}
a_{k+1,0} \\ a_{k+1,1} \\ a_{k+1,2} \\ a_{k+1,3} \\ a_{k+1,4} \\ a_{k+1,5}
\end{vmatrix}
$$
Then
$$
\begin{vmatrix}
1 & t_k & t_k^2 & t_k^3 & t_k^4&t_k^5 & -1 & -t_{0} & -t_{0}^2 & -t_{0}^3 & -t_{0}^4&-t_{0}^5\\
\end{vmatrix}
\cdot
\begin{vmatrix}
a_{k0} \\ a_{k1} \\ a_{k2} \\ a_{k3} \\ a_{k4} \\ a_{k5} \\ a_{k+1,0} \\ a_{k+1,1} \\ a_{k+1,2} \\ a_{k+1,3} \\ a_{k+1,4} \\ a_{k+1,5}
\end{vmatrix}
= 0
$$
Use $t_0 = 0$ in the equation.
Similarly calculate the equality constraints for
$$
f'_k(t_k) = f'_{k+1} (t_0)
\\
f''_k(t_k) = f''_{k+1} (t_0)
\\
f'''_k(t_k) = f'''_{k+1} (t_0)
$$
### 3.4 Sampled points for boundary constraint
Evenly sample **m** points along the path, and check the obstacle boundary at those points. Convert the constraint into QP inequality constraints, using:
$$
Ax \leq b
$$
Apollo first finds the lower boundary $l_{lb,j}$ at those points ($s_j$, $l_j$) and $j\in[0, m]$ based on the road width and surrounding obstacles. Then it calculates the inequality constraints as:
$$
\begin{vmatrix}
1 & t_0 & t_0^2 & t_0^3 & t_0^4&t_0^5 \\
1 & t_1 & t_1^2 & t_1^3 & t_1^4&t_1^5 \\
...&...&...&...&...&... \\
1 & t_m & t_m^2 & t_m^3 & t_m^4&t_m^5 \\
\end{vmatrix} \cdot \begin{vmatrix} a_i \\ b_i \\ c_i \\ d_i \\ e_i \\ f_i \end{vmatrix}
\leq
\begin{vmatrix}
l_{lb,0}\\
l_{lb,1}\\
...\\
l_{lb,m}\\
\end{vmatrix}
$$
Similarly, for upper boundary $l_{ub,j}$, Apollo calculates the inequality constraints as:
$$
\begin{vmatrix}
1 & t_0 & t_0^2 & t_0^3 & t_0^4&t_0^5 \\
1 & t_1 & t_1^2 & t_1^3 & t_1^4&t_1^5 \\
...&...&...&...&...&... \\
1 & t_m & t_m^2 & t_m^3 & t_m^4&t_m^5 \\
\end{vmatrix} \cdot \begin{vmatrix} a_i \\ b_i \\ c_i \\ d_i \\ e_i \\ f_i \end{vmatrix}
\leq
-1 \cdot
\begin{vmatrix}
l_{ub,0}\\
l_{ub,1}\\
...\\
l_{ub,m}\\
\end{vmatrix}
$$
### 3.5 Speed Boundary constraint
Apollo establishes a speed limit boundary as well.
Sample **m** points on the ST curve, and get speed limits defined as an upper boundary and a lower boundary for each point $j$, e.g., $v_{ub,j}$ and $v_{lb,j}$. The constraints are defined as:
$$
f'(t_j) \geq v_{lb,j}
$$
Namely
$$
\begin{vmatrix}
0& 1 & t_0 & t_0^2 & t_0^3 & t_0^4 \\
0 & 1 & t_1 & t_1^2 & t_1^3 & t_1^4 \\
...&...&...&...&...&... \\
0& 1 & t_m & t_m^2 & t_m^3 & t_m^4 \\
\end{vmatrix}
\cdot
\begin{vmatrix}
a_i \\ b_i \\ c_i \\ d_i \\ e_i \\ f_i
\end{vmatrix}
\geq
\begin{vmatrix} v_{lb,0}\\ v_{lb,1}\\ ...\\ v_{lb,m}\\ \end{vmatrix}
$$
And
$$
f'(t_j) \leq v_{ub,j}
$$
Namely
$$
\begin{vmatrix}
0& 1 & t_0 & t_0^2 & t_0^3 & t_0^4 \\
0 & 1 & t_1 & t_1^2 & t_1^3 & t_1^4 \\
...&...&...&...&...&... \\
0 &1 & t_m & t_m^2 & t_m^3 & t_m^4 \\
\end{vmatrix} \cdot \begin{vmatrix} a_i \\ b_i \\ c_i \\ d_i \\ e_i \\ f_i \end{vmatrix}
\leq
\begin{vmatrix}
v_{ub,0}\\
v_{ub,1}\\
...\\
v_{ub,m}\\
\end{vmatrix}
$$
# reference line smoother
Quadratic programming + Spline interpolation
## 1. Objective function
### 1.1 Segment routing path
Segment the routing path into **n** segments. Each segment trajectory is defined by two polynomials:
$$
x = f_i(t)
= a_{i0} + a_{i1} * t + a_{i2} * t^2 + a_{i3} * t^3 + a_{i4} * t^4 + a_{i5} * t^5
$$
$$
y = g_i(t) = b_{i0} + b_{i1} * t + b_{i2} * t^2 + b_{i3} * t^3 + b_{i4} * t^4 + b_{i5} * t^5
$$
### 1.2 Define objective function of optimization for each segment
$$
cost =
\sum_{i=1}^{n}
\Big(
\int\limits_{0}^{t_i} (f_i''')^2(t) dt
+ \int\limits_{0}^{t_i} (g_i''')^2(t) dt
\Big)
$$
### 1.3 Convert the cost function to QP formulation
QP formulation:
$$
\frac{1}{2} \cdot x^T \cdot H \cdot x + f^T \cdot x
\\
s.t. LB \leq x \leq UB
\\
A_{eq}x = b_{eq}
\\
Ax \leq b
$$
## 2 Constraints
### 2.1 Joint smoothness constraints
This constraint smoothes the spline joint. Let's assume two segments, $seg_k$ and $seg_{k+1}$, are connected and the accumulated **s** of segment $seg_k$ is $s_k$. Calculate the constraint equation as:
$$
f_k(s_k) = f_{k+1} (s_0)
$$
Similarly the formula works for the equality constraints, such as:
$$
f'_k(s_k) = f'_{k+1} (s_0)
\\
f''_k(s_k) = f''_{k+1} (s_0)
\\
f'''_k(s_k) = f'''_{k+1} (s_0)
\\
g_k(s_k) = g_{k+1} (s_0)
\\
g'_k(s_k) = g'_{k+1} (s_0)
\\
g''_k(s_k) = g''_{k+1} (s_0)
\\
g'''_k(s_k) = g'''_{k+1} (s_0)
$$
### 2.2 Sampled points for boundary constraint
Evenly sample **m** points along the path and check the predefined boundaries at those points.
$$
f_i(t_l) - x_l < boundary
\\
g_i(t_l) - y_l < boundary
$$
header:
seq: 0
stamp:
secs: 1504071530
nsecs: 0
frame_id: novatel
transform:
rotation:
x: 0.02586839453030634
y: -0.03004435225954187
z: 0.6994715770653714
w: 0.7135598614968552
translation:
x: 0.001596842549225523
y: 1.586873016593748
z: 1
child_frame_id: velodyne64
header:
seq: 0
stamp:
secs: 1504071530
nsecs: 0
frame_id: novatel
transform:
rotation:
x: 0.02586839453030634
y: -0.03004435225954187
z: 0.6994715770653714
w: 0.7135598614968552
translation:
x: 0.001596842549225523
y: 1.586873016593748
z: 1
child_frame_id: velodyne64
child_frame_id: velodyne64
transform:
rotation:
x: 0.0178712428342356
y: -0.01105483165178439
z: 0.7086047865380262
w: 0.7052926101074
translation:
x: -0.0008540722033043019
y: 1.410303305307884
z: 1
header:
seq: 0
stamp:
secs: 1502870042
nsecs: 0
frame_id: novatel
# Bazel build rules for the lidar extrinsics checker module.
load("//tools:cpplint.bzl", "cpplint")

package(default_visibility = ["//visibility:public"])

# Core library implementing the lidar extrinsics check logic.
cc_library(
    name = "lidar_ex_checker_lib",
    srcs = ["lidar_ex_checker.cc"],
    hdrs = ["lidar_ex_checker.h"],
    copts = [
        # Third-party (PCL/VTK) headers emit deprecation warnings; silence them.
        "-Wno-deprecated",
    ],
    deps = [
        "//modules/calibration/lidar_ex_checker/common:lidar_ex_checker_common",
        "//modules/common:apollo_app",
        "//modules/common:log",
        "//modules/common/adapters:adapter_manager",
        "//modules/localization/proto:gps_proto",
        "//modules/perception/lib/pcl_util",
        "@vtk//:vtk",
    ],
)

# Command-line entry point; ships the adapter manager config as runtime data.
cc_binary(
    name = "lidar_ex_checker",
    srcs = ["main.cc"],
    data = [
        "//modules/calibration/lidar_ex_checker/conf:lidar_ex_checker_adapter_manager_config",
    ],
    linkstatic = 0,
    deps = [
        ":lidar_ex_checker_lib",
        "//external:gflags",
        "//modules/common:log",
        "@ros//:ros_common",
    ],
)

# Run the C++ linter over the sources of this package.
cpplint()
# calibration visualizer
This node is used to check the lidar-to-GPS calibration file. Run the command:
```bash
$:./lidar_ex_checker --flagfile=/apollo/modules/calibration/lidar_ex_checker/conf/lidar_ex_checker.conf
```
# Bazel build rules for the common utilities of the lidar extrinsics checker.
load("//tools:cpplint.bzl", "cpplint")

package(default_visibility = ["//visibility:public"])

# Shared helpers (e.g. gflags definitions) used by the checker module.
cc_library(
    name = "lidar_ex_checker_common",
    # Build every C++ source and header in this directory into the library.
    srcs = glob([
        "*.cc",
    ]),
    hdrs = glob([
        "*.h",
    ]),
    deps = [
        "//modules/common",
    ],
)

# Run the C++ linter over the sources of this package.
cpplint()
/******************************************************************************
* Copyright 2017 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#include "modules/calibration/lidar_ex_checker/common/lidar_ex_checker_gflags.h"
// Module name the checker registers under.
DEFINE_string(node_name, "lidar_ex_checker",
"The lidar extrinsics checker module name in proto");
// Number of point clouds to capture before checking/visualizing.
DEFINE_int32(capture_cloud_count, 3, "the number of cloud count to capture");
// Required distance between two captured clouds
// (presumably meters -- TODO confirm unit).
DEFINE_double(capture_distance, 15.0, "the distance between two clouds");
// Path to the adapter manager configuration file.
DEFINE_string(adapter_config_filename,
"/apollo/modules/calibration/lidar_ex_checker/conf/adapter.conf",
"The adapter config file");
/******************************************************************************
* Copyright 2017 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#ifndef MODULES_CALIBRATION_LIDAR_EX_CHECKER_COMMON_LIDAR_EX_CHECKER_GFLAGS_H_
#define MODULES_CALIBRATION_LIDAR_EX_CHECKER_COMMON_LIDAR_EX_CHECKER_GFLAGS_H_

#include "gflags/gflags.h"

// Module name the checker registers under.
DECLARE_string(node_name);
// The number of clouds to capture.
DECLARE_int32(capture_cloud_count);
// The distance between two consecutively captured clouds.
DECLARE_double(capture_distance);
// Path to the adapter manager configuration file.
DECLARE_string(adapter_config_filename);

// cpplint (build/header_guard) requires the closing comment on the
// same line as #endif, not as a separate /* */ block below it.
#endif  // MODULES_CALIBRATION_LIDAR_EX_CHECKER_COMMON_LIDAR_EX_CHECKER_GFLAGS_H_
package(default_visibility = ["//visibility:public"])
# Adapter manager configuration consumed by the checker at runtime.
filegroup(
name = "lidar_ex_checker_adapter_manager_config",
srcs = [
"adapter.conf",
],
)
# gflags flag file (the README passes it via --flagfile).
filegroup(
name = "lidar_ex_checker_config",
srcs = [
"lidar_ex_checker.conf",
],
)
# Adapter manager configuration for the lidar extrinsics checker.
# Compensated lidar point clouds; keep the last 10 messages.
config {
type: POINT_CLOUD
mode: RECEIVE_ONLY
message_history_limit: 10
}
# GPS odometry; larger history (100) so a pose can be found for a
# given cloud timestamp (see LidarExChecker's gps_poses_ map).
config {
type: GPS
mode: RECEIVE_ONLY
message_history_limit: 100
}
# INS solution status; history of 1, only the most recent is kept.
config {
type: INS_STAT
mode: RECEIVE_ONLY
message_history_limit: 1
}
# Messages are transported over ROS.
is_ros: true
####################################################################
# The pointcloud topic name.
# type: string
# default: /sensor/velodyne64/compensator/PointCloud2
--pointcloud_topic=/apollo/sensor/velodyne64/compensator/PointCloud2
# The gnss topic name.
# type: string
# default: /sensor/gnss/odometry
--gps_topic=/apollo/sensor/gnss/odometry
# Project work root directory.
# type: string
# default: ""
--work_root=modules/calibration
/******************************************************************************
* Copyright 2017 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
/**
* @file
*/
// Fixed misspelled include guard ("MODEULES") and aligned it with the
// full-path guard convention used by the sibling gflags header.
#ifndef MODULES_CALIBRATION_LIDAR_EX_CHECKER_LIDAR_EX_CHECKER_H_
#define MODULES_CALIBRATION_LIDAR_EX_CHECKER_LIDAR_EX_CHECKER_H_

#include <map>
#include <string>
#include <utility>
#include <vector>

#include "Eigen/Eigen"
#include "sensor_msgs/PointCloud2.h"

#include "modules/common/apollo_app.h"
#include "modules/common/macro.h"
#include "modules/drivers/gnss/proto/ins.pb.h"
#include "modules/localization/proto/gps.pb.h"
#include "modules/perception/lib/pcl_util/pcl_types.h"
#include "ros/include/ros/ros.h"

/**
 * @namespace apollo::calibration
 * @brief apollo::calibration
 */
namespace apollo {
namespace calibration {

using apollo::perception::pcl_util::PointXYZIT;

/**
 * @class LidarExChecker
 * @brief Checks a previously calibrated lidar-to-GPS extrinsic transform.
 * Captures a number of point clouds while receiving GPS poses, then
 * visualizes the clouds so the extrinsics can be verified.
 */
class LidarExChecker : public apollo::common::ApolloApp {
 public:
  std::string Name() const override;
  apollo::common::Status Init() override;
  apollo::common::Status Start() override;
  void Stop() override;

 private:
  // Gets the extrinsics which were calibrated before.
  bool GetExtrinsics();
  // Visualizes the checking result.
  void VisualizeClouds();
  // Callback invoked upon receiving point cloud data.
  void OnPointCloud(const sensor_msgs::PointCloud2& message);
  // Callback invoked upon receiving GPS data.
  void OnGps(const localization::Gps& message);
  // Callback invoked upon receiving INS status data.
  void OnInsStat(const drivers::gnss::InsStat& msg);

  // Whether the next GPS message is the first one seen.
  bool is_first_gps_msg_;
  Eigen::Vector3d last_position_;
  Eigen::Affine3d offset_;
  Eigen::Affine3d extrinsics_;

  // The complete pose data, keyed by timestamp.
  std::map<double, Eigen::Affine3d> gps_poses_;
  // The complete cloud data.
  std::vector<pcl::PointCloud<PointXYZIT>> clouds_;

  // To ensure the pose of a given timestamp can be found,
  // we pad some redundant clouds.
  uint32_t top_redundant_cloud_count_;
  uint32_t bottom_redundant_cloud_count_;

  // Whether the program has captured enough clouds.
  bool enough_data_;
  // The number of clouds to capture.
  uint32_t cloud_count_;
  // The distance between two consecutively captured clouds.
  double capture_distance_;
  // Latest INS status (position type).
  uint32_t position_type_;
};

}  // namespace calibration
}  // namespace apollo

#endif  // MODULES_CALIBRATION_LIDAR_EX_CHECKER_LIDAR_EX_CHECKER_H_
...@@ -18,7 +18,6 @@ ...@@ -18,7 +18,6 @@
#include "modules/common/log.h" #include "modules/common/log.h"
#include "ros/include/ros/ros.h" #include "ros/include/ros/ros.h"
#include "modules/decision/common/decision_gflags.h" #include "modules/calibration/lidar_ex_checker/lidar_ex_checker.h"
#include "modules/decision/decision.h"
APOLLO_MAIN(apollo::decision::Decision); APOLLO_MAIN(apollo::calibration::LidarExChecker);
load("//tools:cpplint.bzl", "cpplint")
package(default_visibility = ["//visibility:public"])
# Library for the calibration message republisher: consumes GPS/INS
# messages and republishes them (as relative odometry, per the proto dep).
cc_library(
name = "republish_msg_lib",
srcs = ["republish_msg.cc"],
hdrs = ["republish_msg.h"],
deps = [
"//modules/calibration/republish_msg/common:republish_msg_common",
"//modules/calibration/republish_msg/proto:relative_odometry_proto",
"//modules/common:apollo_app",
"//modules/common:log",
"//modules/common/adapters:adapter_manager",
"//modules/drivers/gnss/proto:ins_proto",
"//modules/localization/proto:gps_proto",
],
)
# Standalone republisher binary; ships both its adapter manager config
# and its gflags flag file as runtime data.
cc_binary(
name = "republish_msg",
srcs = ["main.cc"],
data = [
"//modules/calibration/republish_msg/conf:republish_msg_adapter_manager_config",
"//modules/calibration/republish_msg/conf:republish_msg_config",
],
linkstatic = 0,
deps = [
":republish_msg_lib",
"//external:gflags",
"//modules/common:log",
"@ros//:ros_common",
],
)
cpplint()
# republish message
This node is used to re-publish messages. Run command:
```bash
$:./republish_msg --flagfile=/apollo/modules/calibration/republish_msg/conf/republish_msg.conf
```
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册