Commit 974a861a authored by Shanqing Cai, committed by Yifei Feng

Let build_server.sh take whl file URL as an input argument. (#5206)

This makes it possible to test the OSS GRPC distributed runtime in
dist_test/remote_test.sh against a release build.

Usage example:
1. Build the server using a release whl file. (Obviously this means that
the Linux CPU PIP release build has to pass first.)
   $ export DOCKER_VERSION_TAG="0.11.0rc1"
   $ tensorflow/tools/dist_test/build_server.sh \
       tensorflow/tf_grpc_test_server:${DOCKER_VERSION_TAG} \
       http://ci.tensorflow.org/view/Release/job/release-matrix-cpu/TF_BUILD_CONTAINER_TYPE=CPU,TF_BUILD_IS_OPT=OPT,TF_BUILD_IS_PIP=PIP,TF_BUILD_PYTHON_VERSION=PYTHON2,label=cpu-slave/lastSuccessfulBuild/artifact/pip_test/whl/tensorflow-${DOCKER_VERSION_TAG}-cp27-none-linux_x86_64.whl \
       --test

2. Run remote_test.sh:
   $ export TF_DIST_DOCKER_NO_CACHE=1
   $ export TF_DIST_SERVER_DOCKER_IMAGE="tensorflow/tf_grpc_test_server:${DOCKER_VERSION_TAG}"
   $ export TF_DIST_GCLOUD_PROJECT="my-project"
   $ export TF_DIST_GCLOUD_COMPUTE_ZONE="my-zone"
   $ export TF_DIST_CONTAINER_CLUSTER="my-cluster"
   $ export TF_DIST_GCLOUD_KEY_FILE="/path/to/my/key.json"
   $ tensorflow/tools/dist_test/remote_test.sh \
       "http://ci.tensorflow.org/view/Release/job/release-matrix-cpu/TF_BUILD_CONTAINER_TYPE=CPU,TF_BUILD_IS_OPT=OPT,TF_BUILD_IS_PIP=PIP,TF_BUILD_PYTHON_VERSION=PYTHON2,label=cpu-slave/lastSuccessfulBuild/artifact/pip_test/whl/tensorflow-${DOCKER_VERSION_TAG}-cp27-none-linux_x86_64.whl"
(cherry picked from commit 7ba17e2c)
Parent 2db79581
--- a/tensorflow/tools/dist_test/build_server.sh
+++ b/tensorflow/tools/dist_test/build_server.sh
@@ -16,7 +16,14 @@
 #
 # Builds the test server for distributed (GRPC) TensorFlow
 #
-# Usage: build_server.sh <docker_image_name> [--test]
+# Usage: build_server.sh <docker_image_name> <whl_url> [--test]
 #
+# Arguments:
+#   docker_image_name: Name of the docker image to build.
+#     E.g.: tensorflow/tf_grpc_test_server:0.11.0rc1
+#
+#   whl_url: URL from which the TensorFlow whl file will be downloaded.
+#     E.g.: https://ci.tensorflow.org/view/Nightly/job/nightly-matrix-cpu/TF_BUILD_IS_OPT=OPT,TF_BUILD_IS_PIP=PIP,TF_BUILD_PYTHON_VERSION=PYTHON2,label=cpu-slave/lastSuccessfulBuild/artifact/pip_test/whl/tensorflow-0.11.0rc1-cp27-none-linux_x86_64.whl
+#
 # The optional flag --test lets the script to use the Dockerfile for the
 # testing GRPC server. Without the flag, the script will build the non-test
@@ -33,22 +40,35 @@ die() {
 }
 
 # Check arguments
-if [[ $# != 1 ]] && [[ $# != 2 ]]; then
-  die "Usage: $0 <docker_image_name> [--test]"
+if [[ $# -lt 2 ]]; then
+  die "Usage: $0 <docker_image_name> <whl_url> [--test]"
 fi
 
 DOCKER_IMG_NAME=$1
-shift
+WHL_URL=$2
+shift 2
 
 # Current script directory
 DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 
-DOCKER_FILE="${DIR}/server/Dockerfile"
+BUILD_DIR=$(mktemp -d)
+echo ""
+echo "Using whl file URL: ${WHL_URL}"
+echo "Building in temporary directory: ${BUILD_DIR}"
+
+cp -r ${DIR}/* "${BUILD_DIR}"/ || \
+    die "Failed to copy files to ${BUILD_DIR}"
+
+DOCKER_FILE="${BUILD_DIR}/server/Dockerfile"
 if [[ $1 == "--test" ]]; then
-  DOCKER_FILE="${DIR}/server/Dockerfile.test"
+  DOCKER_FILE="${BUILD_DIR}/server/Dockerfile.test"
 fi
+echo "Using Docker file: ${DOCKER_FILE}"
+
+# Download whl file into the build context directory.
+wget -P "${BUILD_DIR}" ${WHL_URL} || \
+    die "Failed to download tensorflow whl file from URL: ${WHL_URL}"
 
 if [[ ! -f "${DOCKER_FILE}" ]]; then
   die "ERROR: Unable to find dockerfile: ${DOCKER_FILE}"
 fi
@@ -56,5 +76,8 @@ echo "Dockerfile: ${DOCKER_FILE}"
 
 # Call docker build
 docker build --no-cache -t "${DOCKER_IMG_NAME}" \
-    -f "${DOCKER_FILE}" \
-    "${DIR}"
+    -f "${DOCKER_FILE}" "${BUILD_DIR}" || \
+    die "Failed to build docker image: ${DOCKER_IMG_NAME}"
+
+# Clean up docker build context directory.
+rm -rf "${BUILD_DIR}"
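
For reference, with this change the server image can also be built directly from a nightly whl; the image tag and URL below are illustrative only, taken from the updated header comment above:

   $ tensorflow/tools/dist_test/build_server.sh \
       tensorflow/tf_grpc_test_server:0.11.0rc1 \
       https://ci.tensorflow.org/view/Nightly/job/nightly-matrix-cpu/TF_BUILD_IS_OPT=OPT,TF_BUILD_IS_PIP=PIP,TF_BUILD_PYTHON_VERSION=PYTHON2,label=cpu-slave/lastSuccessfulBuild/artifact/pip_test/whl/tensorflow-0.11.0rc1-cp27-none-linux_x86_64.whl

Appending --test selects server/Dockerfile.test instead of server/Dockerfile.
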
--- a/tensorflow/tools/dist_test/server/Dockerfile
+++ b/tensorflow/tools/dist_test/server/Dockerfile
@@ -34,9 +34,10 @@ RUN curl -O https://bootstrap.pypa.io/get-pip.py && \
     python get-pip.py && \
     rm get-pip.py
 
-# Install TensorFlow CPU version from nightly build
-RUN pip --no-cache-dir install \
-    https://ci.tensorflow.org/view/Nightly/job/nightly-matrix-cpu/TF_BUILD_IS_OPT=OPT,TF_BUILD_IS_PIP=PIP,TF_BUILD_PYTHON_VERSION=PYTHON2,label=cpu-slave/lastSuccessfulBuild/artifact/pip_test/whl/tensorflow-0.11.0rc2-cp27-none-linux_x86_64.whl
+# Install TensorFlow wheel
+COPY tensorflow-*.whl /
+RUN pip install /tensorflow-*.whl && \
+    rm -f /tensorflow-*.whl
 
 # Copy files, including the GRPC server binary at
 # server/grpc_tensorflow_server.py
--- a/tensorflow/tools/dist_test/server/Dockerfile.test
+++ b/tensorflow/tools/dist_test/server/Dockerfile.test
@@ -40,9 +40,10 @@ RUN curl -O https://bootstrap.pypa.io/get-pip.py && \
 # Install python panda for the census wide&deep test
 RUN pip install --upgrade pandas==0.18.1
 
-# Install TensorFlow CPU version.
-RUN pip --no-cache-dir install \
-    https://ci.tensorflow.org/view/Nightly/job/nightly-matrix-cpu/TF_BUILD_IS_OPT=OPT,TF_BUILD_IS_PIP=PIP,TF_BUILD_PYTHON_VERSION=PYTHON2,label=cpu-slave/lastSuccessfulBuild/artifact/pip_test/whl/tensorflow-0.11.0rc2-cp27-none-linux_x86_64.whl
+# Install TensorFlow wheel
+COPY tensorflow-*.whl /
+RUN pip install /tensorflow-*.whl && \
+    rm -f /tensorflow-*.whl
 
 # Copy files, including the GRPC server binary at
 # server/grpc_tensorflow_server.py
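
The wildcard COPY above works because build_server.sh downloads exactly one whl file into the Docker build context. As an illustrative sanity check (assuming python is on the image's PATH and using the example image tag from above), the installed wheel in a freshly built image could be verified with:

   $ docker run --rm --entrypoint python \
       tensorflow/tf_grpc_test_server:0.11.0rc1 \
       -c "import tensorflow as tf; print(tf.__version__)"
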