Commit 8ec8b63b authored by Robert Metzger

[FLINK-18643][Azure] Build a Flink snapshot release with the nightly cron-job.

This closes #13125
Parent commit: 3780929d
......@@ -74,6 +74,11 @@ stages:
dependsOn: [] # depending on an empty array makes the stages run in parallel
condition: or(eq(variables['Build.Reason'], 'Schedule'), eq(variables['MODE'], 'nightly'))
jobs:
- template: build-nightly-dist.yml
parameters:
stage_name: cron_snapshot_deployment
environment: PROFILE=""
container: flink-build-container
- template: jobs-template.yml
parameters:
stage_name: cron_azure
......
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Template: nightly snapshot distribution build.
# Expects parameters: stage_name (job-name prefix), environment, container.
# NOTE(review): only `stage_name` is referenced below; the `environment` and
# `container` parameters passed by the caller are not used — `container` is
# hard-coded to flink-build-container. Confirm whether that is intentional.
jobs:
# Builds the binary (dist) snapshot release and pushes it to S3.
- job: ${{parameters.stage_name}}_binary
  pool:
    vmImage: 'ubuntu-16.04'
  container: flink-build-container
  workspace:
    clean: all  # start from a clean workspace on every run
  steps:
    # Restore/populate the shared Maven local repository cache.
    - task: Cache@2
      displayName: Cache Maven local repo
      inputs:
        key: $(CACHE_KEY)
        restoreKeys: $(CACHE_FALLBACK_KEY)
        path: $(MAVEN_CACHE_FOLDER)
      continueOnError: true
    # Determine the project version via Maven, then create the binary
    # release tarballs with the release tooling (GPG signing skipped).
    - task: CmdLine@2
      displayName: Build snapshot binary release
      inputs:
        script: |
          source ./tools/ci/maven-utils.sh
          setup_maven
          run_mvn -version
          export MVN="run_mvn"
          export RELEASE_VERSION=$(MVN_RUN_VERBOSE=false run_mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
          echo "Determined RELEASE_VERSION as '$RELEASE_VERSION' "
          cd tools
          MVN_RUN_VERBOSE=true SKIP_GPG=true ./releasing/create_binary_release.sh
          echo "Created files:"
          find ./releasing/release
          cd ..
    # Upload the produced release files; credentials come from pipeline
    # secret variables mapped into the step environment.
    - task: CmdLine@2
      displayName: Upload artifacts to S3
      inputs:
        script: |
          source ./tools/ci/deploy_nightly_to_s3.sh
          upload_to_s3 ./tools/releasing/release
      env:
        ARTIFACTS_S3_BUCKET: $(ARTIFACTS_S3_BUCKET)
        ARTIFACTS_AWS_ACCESS_KEY_ID: $(ARTIFACTS_AWS_ACCESS_KEY_ID)
        ARTIFACTS_AWS_SECRET_ACCESS_KEY: $(ARTIFACTS_AWS_SECRET_ACCESS_KEY)
    # Activate this to publish the binary release as a pipeline artifact on Azure
    #- task: PublishPipelineArtifact@1
    #  displayName: Upload snapshot binary release
    #  inputs:
    #    targetPath: ./tools/releasing/release
    #    artifact: nightly-release
# Deploys Maven snapshot artifacts to the Apache snapshot repository.
- job: ${{parameters.stage_name}}_maven
  pool:
    vmImage: 'ubuntu-16.04'
  container: flink-build-container
  timeoutInMinutes: 100 # 40 minutes per scala version + 20 buffer
  workspace:
    clean: all  # start from a clean workspace on every run
  steps:
    # Restore/populate the shared Maven local repository cache.
    - task: Cache@2
      displayName: Cache Maven local repo
      inputs:
        key: $(CACHE_KEY)
        restoreKeys: $(CACHE_FALLBACK_KEY)
        path: $(MAVEN_CACHE_FOLDER)
      continueOnError: true
    # Upload snapshot
    # Writes a temporary settings.xml with the apache.snapshots.https
    # credentials (expanded at runtime from the env block below, since the
    # heredoc delimiter is unquoted), then runs the staging deploy script.
    # NOTE(review): `$(pwd)` inside CUSTOM_OPTIONS relies on Azure not
    # macro-expanding it (no pipeline variable named `pwd`) — confirm.
    - task: CmdLine@2
      displayName: Deploy maven snapshot
      inputs:
        script: |
          source ./tools/ci/maven-utils.sh
          setup_maven
          run_mvn -version
          cd tools
          cat << EOF > deploy-settings.xml
          <settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
            xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
            xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
                                https://maven.apache.org/xsd/settings-1.0.0.xsd">
          <servers>
            <server>
              <id>apache.snapshots.https</id>
              <username>${MAVEN_DEPLOY_USER}</username>
              <password>${MAVEN_DEPLOY_PASS}</password>
            </server>
          </servers>
          <mirrors>
            <mirror>
              <id>google-maven-central</id>
              <name>GCS Maven Central mirror</name>
              <url>https://maven-central-eu.storage-download.googleapis.com/maven2/</url>
              <mirrorOf>central</mirrorOf>
            </mirror>
          </mirrors>
          </settings>
          EOF
          export CUSTOM_OPTIONS="-Dgpg.skip -Drat.skip -Dcheckstyle.skip --settings $(pwd)/deploy-settings.xml"
          export MVN_RUN_VERBOSE=true
          ./releasing/deploy_staging_jars.sh
      env:
        MAVEN_DEPLOY_USER: $(MAVEN_DEPLOY_USER)
        MAVEN_DEPLOY_PASS: $(MAVEN_DEPLOY_PASS)
#!/usr/bin/env bash
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# fail on errors
set -e -x
# Upload the contents of a directory to S3 using the travis-ci "artifacts"
# helper (downloaded on the fly).
#
# Arguments:
#   $1 - directory whose contents are uploaded
#
# Required environment variables:
#   ARTIFACTS_S3_BUCKET             - target S3 bucket
#   ARTIFACTS_AWS_ACCESS_KEY_ID     - AWS access key id
#   ARTIFACTS_AWS_SECRET_ACCESS_KEY - AWS secret access key
function upload_to_s3() {
	# Quote all expansions: bucket names, keys and paths must not be
	# subject to word-splitting or globbing.
	local FILES_DIR="$1"

	echo "Installing artifacts deployment script"
	export ARTIFACTS_DEST="$HOME/bin/artifacts"
	curl -sL https://raw.githubusercontent.com/travis-ci/artifacts/master/install | bash
	PATH="$(dirname "$ARTIFACTS_DEST"):$PATH"

	echo "Uploading contents of $FILES_DIR to S3:"
	artifacts upload \
		--bucket "$ARTIFACTS_S3_BUCKET" \
		--key "$ARTIFACTS_AWS_ACCESS_KEY_ID" \
		--secret "$ARTIFACTS_AWS_SECRET_ACCESS_KEY" \
		--target-paths / "$FILES_DIR"
}
#!/usr/bin/env bash
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# fail on errors
set -e
#
# Deploys snapshot builds to Apache's snapshot repository.
#
# Print the Maven project version of the Flink checkout that contains this
# script. Changes the (sub)shell's working directory to the Flink root as a
# side effect; callers invoke it in a command substitution, so the caller's
# cwd is unaffected.
#
# Exits 1 if the script's own directory cannot be resolved.
function getVersion() {
	local here flink_home
	here="$(dirname "$0")"            # relative
	here="$(cd "$here" && pwd)"       # absolutized and normalized
	if [ -z "$here" ] ; then
		# error; for some reason, the path is not accessible
		# to the script (e.g. permissions re-evaled after suid)
		exit 1 # fail
	fi
	flink_home="$(dirname "$here")"
	cd "$flink_home"
	# Keep only a line that looks like a version number. The dots are
	# escaped so '.' matches a literal dot (the original pattern's bare
	# '.' matched any character, e.g. "1x2-SNAPSHOT" would slip through).
	mvn org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version \
		| grep -E '^([0-9]+\.[0-9]+(\.[0-9]+)?(-[a-zA-Z0-9]+)?)$'
}
# Package the already-built flink-dist binary as a versioned tarball and
# upload it to S3 via the travis-ci "artifacts" helper; cleans up the
# staging directory and tarball afterwards.
#
# Arguments:
#   $1 - Flink version string (used in the directory/tarball name)
#   $2 - Hadoop profile label (e.g. "hadoop2"), part of the tarball name
#
# Required environment variables:
#   ARTIFACTS_S3_BUCKET / ARTIFACTS_AWS_ACCESS_KEY_ID / ARTIFACTS_AWS_SECRET_ACCESS_KEY
function deploy_to_s3() {
	# Quote all expansions derived from arguments/env; the flink-dist glob
	# on the cp source is intentionally left unquoted so it expands.
	local CURRENT_FLINK_VERSION="$1"
	local HD="$2"

	echo "Installing artifacts deployment script"
	export ARTIFACTS_DEST="$HOME/bin/artifacts"
	curl -sL https://raw.githubusercontent.com/travis-ci/artifacts/master/install | bash
	PATH="$(dirname "$ARTIFACTS_DEST"):$PATH"

	echo "Deploying flink version $CURRENT_FLINK_VERSION (hadoop=$HD) to s3:"
	mkdir "flink-$CURRENT_FLINK_VERSION"
	cp -r flink-dist/target/flink-*-bin/flink-*/* "flink-$CURRENT_FLINK_VERSION/"
	tar -czf "flink-$CURRENT_FLINK_VERSION-bin-$HD.tgz" "flink-$CURRENT_FLINK_VERSION"

	artifacts upload \
		--bucket "$ARTIFACTS_S3_BUCKET" \
		--key "$ARTIFACTS_AWS_ACCESS_KEY_ID" \
		--secret "$ARTIFACTS_AWS_SECRET_ACCESS_KEY" \
		--target-paths / \
		"flink-$CURRENT_FLINK_VERSION-bin-$HD.tgz"

	# delete files again
	rm -rf "flink-$CURRENT_FLINK_VERSION"
	rm "flink-$CURRENT_FLINK_VERSION-bin-$HD.tgz"
}
pwd

# Install a dummy "lifecycle-mapping" Maven plugin into the local repo so
# the deploy build does not fail on poms referencing that m2e-only plugin.
echo "install lifecycle mapping fake plugin"
git clone https://github.com/mfriedenhagen/dummy-lifecycle-mapping-plugin.git
cd dummy-lifecycle-mapping-plugin
mvn -B install
cd ..
rm -rf dummy-lifecycle-mapping-plugin

# Resolve the project version (runs in a subshell; our cwd is unchanged).
CURRENT_FLINK_VERSION=`getVersion`
echo "detected current version as: '$CURRENT_FLINK_VERSION'"

#
# This script deploys our project to sonatype SNAPSHOTS.
# It will deploy a hadoop v2 (yarn) artifact
#
# Only -SNAPSHOT versions may be deployed; release versions exit with 1.
if [[ $CURRENT_FLINK_VERSION == *SNAPSHOT* ]] ; then
  # Skip tests/rat/checkstyle; retry flaky remote deployments up to 10x.
  MVN_SNAPSHOT_OPTS="-B -Pdocs-and-source -DskipTests -Drat.skip=true -Drat.ignoreErrors=true -Dcheckstyle.skip=true \
    -DretryFailedDeploymentCount=10 clean deploy"

  # hadoop2 scala 2.11
  echo "deploy standard version (hadoop2) for scala 2.11"
  mvn ${MVN_SNAPSHOT_OPTS}
  deploy_to_s3 $CURRENT_FLINK_VERSION "hadoop2"

  exit 0
else
  exit 1
fi
......@@ -21,6 +21,7 @@
## Variables with defaults (if not overwritten by environment)
##
MVN=${MVN:-mvn}
CUSTOM_OPTIONS=${CUSTOM_OPTIONS:-}
# fail immediately
set -o errexit
......@@ -40,7 +41,7 @@ cd ..
echo "Deploying to repository.apache.org"
COMMON_OPTIONS="-Prelease,docs-and-source -DskipTests -DretryFailedDeploymentCount=10"
COMMON_OPTIONS="-Prelease,docs-and-source -DskipTests -DretryFailedDeploymentCount=10 $CUSTOM_OPTIONS"
echo "Deploying Scala 2.11 version"
$MVN clean deploy $COMMON_OPTIONS -Dscala-2.11
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册