提交 a6039cab 编写于 作者: A Aljoscha Krettek 提交者: Stephan Ewen

[FLINK-7809] Remove support for Scala 2.10

上级 76e9b09b
......@@ -64,27 +64,27 @@ matrix:
- jdk: "openjdk8"
env:
- TEST="core"
- PROFILE="-Dhadoop.version=2.4.1 -Dscala-2.10"
- PROFILE="-Dhadoop.version=2.4.1"
- CACHE_NAME=JDK8_H241_CO
- jdk: "openjdk8"
env:
- TEST="libraries"
- PROFILE="-Dhadoop.version=2.4.1 -Dscala-2.10"
- PROFILE="-Dhadoop.version=2.4.1"
- CACHE_NAME=JDK8_H241_L
- jdk: "openjdk8"
env:
- TEST="connectors"
- PROFILE="-Dhadoop.version=2.4.1 -Dscala-2.10 -Pinclude-kinesis"
- PROFILE="-Dhadoop.version=2.4.1 -Pinclude-kinesis"
- CACHE_NAME=JDK8_H241_CN
- jdk: "openjdk8"
env:
- TEST="tests"
- PROFILE="-Dhadoop.version=2.4.1 -Dscala-2.10"
- PROFILE="-Dhadoop.version=2.4.1"
- CACHE_NAME=JDK8_H241_T
- jdk: "openjdk8"
env:
- TEST="misc"
- PROFILE="-Dhadoop.version=2.4.1 -Dscala-2.10"
- PROFILE="-Dhadoop.version=2.4.1"
- CACHE_NAME=JDK8_H241_M
git:
......
......@@ -115,19 +115,6 @@ The `-Pvendor-repos` activates a Maven [build profile](http://maven.apache.org/g
Flink has APIs, libraries, and runtime modules written in [Scala](http://scala-lang.org). Users of the Scala API and libraries may have to match the Scala version of Flink with the Scala version of their projects (because Scala is not strictly backwards compatible).
**By default, Flink is built with Scala 2.11**. To build Flink with Scala *2.10*, you can change the default Scala *binary version* by using the *scala-2.10* build profile:
~~~bash
# Build with Scala version 2.10
mvn clean install -DskipTests -Pscala-2.10
~~~
To build against custom Scala versions, you need to define a new custom build profile that will override the *scala.version* and *scala.binary.version* values.
Flink is developed against Scala *2.11* and tested additionally against Scala *2.10*. These two versions are known to be compatible. Earlier versions (like Scala *2.9*) are *not* compatible.
Newer versions may be compatible, depending on breaking changes in the language features used by Flink, and the availability of Flink's dependencies in those Scala versions. The dependencies written in Scala include for example *Kafka*, *Akka*, *Scalatest*, and *scopt*.
{% top %}
## Encrypted File Systems
......
......@@ -892,7 +892,7 @@ abstract class TableEnvironment(val config: TableConfig) {
throw new TableException("Field name can not be '*'.")
}
(fieldNames.toArray, fieldIndexes.toArray) // build fails in Scala 2.10 if not converted
(fieldNames, fieldIndexes)
}
protected def generateRowConverterFunction[OUT](
......
......@@ -209,29 +209,4 @@ under the License.
</plugins>
</build>
<profiles>
<profile>
<id>scala-2.10</id>
<activation>
<property>
<name>scala-2.10</name>
</property>
</activation>
<!-- only required for Scala 2.10 -->
<dependencies>
<dependency>
<groupId>org.scalamacros</groupId>
<artifactId>quasiquotes_2.10</artifactId>
<version>${scala.macros.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>jline</artifactId>
<version>2.10.4</version>
</dependency>
</dependencies>
</profile>
</profiles>
</project>
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.scala
import java.io.BufferedReader
import _root_.scala.tools.nsc.interpreter._
// Compatibility shim over the Scala REPL's ILoop used by the Flink shell.
// This variant only customizes the interactive prompt text.
// NOTE(review): appears to be the per-Scala-version (2.10) flavor of this
// class removed by this commit — confirm against the deleted file path.
class ILoopCompat(
in0: Option[BufferedReader],
out0: JPrintWriter)
extends ILoop(in0, out0) {
// Replace the default "scala> " prompt with a Flink-branded one.
override def prompt = "Scala-Flink> "
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.scala
import java.io.BufferedReader
import _root_.scala.tools.nsc.interpreter._
import _root_.scala.io.AnsiColor.{MAGENTA, RESET}
/** Compatibility wrapper around the Scala REPL's [[ILoop]] for the Flink shell.
  *
  * It colors the interactive prompt and provides an `addThunk` hook so callers
  * can run setup code regardless of the underlying interpreter version.
  *
  * @param in0  optional reader supplying REPL input
  * @param out0 writer receiving REPL output
  */
class ILoopCompat(
    in0: Option[BufferedReader],
    out0: JPrintWriter)
  extends ILoop(in0, out0) {

  // Same prompt text as before, assembled by concatenation rather than
  // string interpolation: ANSI magenta, the label, then a color reset.
  override def prompt: String = MAGENTA + "Scala-Flink> " + RESET

  /** Compatibility hook: evaluates the given thunk immediately. */
  protected def addThunk(f: => Unit): Unit = f
}
......@@ -35,7 +35,7 @@ class FlinkILoop(
val externalJars: Option[Array[String]],
in0: Option[BufferedReader],
out0: JPrintWriter)
extends ILoopCompat(in0, out0) {
extends ILoop(in0, out0) {
def this(
host: String,
......@@ -145,15 +145,13 @@ class FlinkILoop(
override def createInterpreter(): Unit = {
super.createInterpreter()
addThunk {
intp.beQuietDuring {
// import dependencies
intp.interpret("import " + packageImports.mkString(", "))
intp.beQuietDuring {
// import dependencies
intp.interpret("import " + packageImports.mkString(", "))
// set execution environment
intp.bind("benv", this.scalaBenv)
intp.bind("senv", this.scalaSenv)
}
// set execution environment
intp.bind("benv", this.scalaBenv)
intp.bind("senv", this.scalaSenv)
}
}
......
......@@ -256,24 +256,4 @@ under the License.
</plugins>
</build>
<profiles>
<profile>
<id>scala-2.10</id>
<activation>
<property>
<name>scala-2.10</name>
</property>
</activation>
<!-- only required for Scala 2.10 -->
<dependencies>
<dependency>
<groupId>org.scalamacros</groupId>
<artifactId>quasiquotes_2.10</artifactId>
<version>${scala.macros.version}</version>
</dependency>
</dependencies>
</profile>
</profiles>
</project>
......@@ -528,20 +528,6 @@ under the License.
</dependencyManagement>
<profiles>
<!-- Profile to switch to Scala Version 2.10 -->
<profile>
<id>scala-2.10</id>
<activation>
<property>
<name>scala-2.10</name>
</property>
</activation>
<properties>
<scala.version>2.10.6</scala.version>
<scala.binary.version>2.10</scala.binary.version>
</properties>
</profile>
<profile>
<id>spotbugs</id>
......
......@@ -241,14 +241,9 @@ deploy_to_maven() {
cd flink
cp ../../deploysettings.xml .
echo "Deploying Scala 2.11 version"
$MVN clean deploy -Prelease,docs-and-source,scala-2.11 --settings deploysettings.xml -DskipTests -Dgpg.executable=$GPG -Dgpg.keyname=$GPG_KEY -Dgpg.passphrase=$GPG_PASSPHRASE -DretryFailedDeploymentCount=10
# It is important to first deploy scala 2.11 and then scala 2.10 so that the quickstarts (which are independent of the scala version)
# are depending on scala 2.10.
echo "Deploying Scala 2.10 version"
$MVN clean deploy -Prelease,docs-and-source,scala-2.10 --settings deploysettings.xml -DskipTests -Dgpg.executable=$GPG -Dgpg.keyname=$GPG_KEY -Dgpg.passphrase=$GPG_PASSPHRASE -DretryFailedDeploymentCount=10
}
copy_data() {
......@@ -273,18 +268,12 @@ make_source_release
# build dist by input parameter of "--scala-version xxx --hadoop-version xxx"
if [ "$SCALA_VERSION" == "none" ] && [ "$HADOOP_VERSION" == "none" ]; then
make_binary_release "hadoop2" "" "2.10"
make_binary_release "hadoop26" "-Dhadoop.version=2.6.5" "2.10"
make_binary_release "hadoop27" "-Dhadoop.version=2.7.3" "2.10"
make_binary_release "hadoop28" "-Dhadoop.version=2.8.0" "2.10"
make_binary_release "hadoop2" "" "2.11"
make_binary_release "hadoop26" "-Dhadoop.version=2.6.5" "2.11"
make_binary_release "hadoop27" "-Dhadoop.version=2.7.3" "2.11"
make_binary_release "hadoop28" "-Dhadoop.version=2.8.0" "2.11"
elif [ "$SCALA_VERSION" == none ] && [ "$HADOOP_VERSION" != "none" ]
then
make_binary_release "hadoop2" "-Dhadoop.version=$HADOOP_VERSION" "2.10"
make_binary_release "hadoop2" "-Dhadoop.version=$HADOOP_VERSION" "2.11"
elif [ "$SCALA_VERSION" != none ] && [ "$HADOOP_VERSION" == "none" ]
then
......
......@@ -87,15 +87,10 @@ if [[ $CURRENT_FLINK_VERSION == *SNAPSHOT* ]] ; then
MVN_SNAPSHOT_OPTS="-B -Pdocs-and-source -DskipTests -Drat.skip=true -Drat.ignoreErrors=true \
-DretryFailedDeploymentCount=10 --settings deploysettings.xml clean deploy"
# hadoop2 scala 2.10
echo "deploy standard version (hadoop2) for scala 2.10"
mvn ${MVN_SNAPSHOT_OPTS} -Pscala-2.10
deploy_to_s3 $CURRENT_FLINK_VERSION "hadoop2"
# hadoop2 scala 2.11
echo "deploy hadoop2 version (standard) for scala 2.11"
echo "deploy standard version (hadoop2) for scala 2.11"
mvn ${MVN_SNAPSHOT_OPTS} -Pscala-2.11
deploy_to_s3 $CURRENT_FLINK_VERSION "hadoop2_2.11"
deploy_to_s3 $CURRENT_FLINK_VERSION "hadoop2"
exit 0
else
......
......@@ -78,18 +78,12 @@ cd ..
if [ "$SCALA_VERSION" == "none" ] && [ "$HADOOP_VERSION" == "none" ]; then
make_binary_release "hadoop2" "" "2.10"
make_binary_release "hadoop26" "-Dhadoop.version=2.6.5" "2.10"
make_binary_release "hadoop27" "-Dhadoop.version=2.7.3" "2.10"
make_binary_release "hadoop28" "-Dhadoop.version=2.8.0" "2.10"
make_binary_release "hadoop2" "" "2.11"
make_binary_release "hadoop26" "-Dhadoop.version=2.6.5" "2.11"
make_binary_release "hadoop27" "-Dhadoop.version=2.7.3" "2.11"
make_binary_release "hadoop28" "-Dhadoop.version=2.8.0" "2.11"
elif [ "$SCALA_VERSION" == none ] && [ "$HADOOP_VERSION" != "none" ]
then
make_binary_release "hadoop2" "-Dhadoop.version=$HADOOP_VERSION" "2.10"
make_binary_release "hadoop2" "-Dhadoop.version=$HADOOP_VERSION" "2.11"
elif [ "$SCALA_VERSION" != none ] && [ "$HADOOP_VERSION" == "none" ]
then
......
......@@ -43,9 +43,3 @@ echo "Deploying to repository.apache.org"
echo "Deploying Scala 2.11 version"
$MVN clean deploy -Prelease,docs-and-source,scala-2.11 -DskipTests -DretryFailedDeploymentCount=10
# It is important to first deploy scala 2.11 and then scala 2.10 so that the quickstarts (which are independent of the scala version)
# are depending on scala 2.10.
echo "Deploying Scala 2.10 version"
$MVN clean deploy -Prelease,docs-and-source,scala-2.10 -DskipTests -DretryFailedDeploymentCount=10
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册