Commit 8d62033c authored by Chiwan Park, committed by Fabian Hueske

[FLINK-2767] [scala shell] Add Scala 2.11 support to Scala shell.

Update Scala 2.11 version and jline dependency.

This closes #1197
Parent a437a2b3
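For orientation, a minimal sketch of a shell session after this change: the patch binds the execution environment as `env` and pre-imports org.apache.flink.api.scala._ and org.apache.flink.api.common.functions._, so a pipeline can be typed directly at the prompt. The word-count pipeline and its input values are illustrative only and are not part of the patch.

// Illustrative Scala shell session; `env` and the API imports are provided
// automatically by the shell (see the intp.bind / intp.interpret calls below).
scala> val counts = env.fromElements("to be", "or not to be").flatMap(_.split(" ")).map(w => (w, 1)).groupBy(0).sum(1)
scala> counts.print()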
......@@ -125,33 +125,16 @@ under the License.
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-scala-shell</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<!-- See main pom.xml for explanation of profiles -->
<profiles>
<profile>
<id>scala-2.10</id>
<activation>
<property>
<!-- this is the default scala profile -->
<name>!scala-2.11</name>
</property>
</activation>
<properties>
<scala.version>2.10.4</scala.version>
<scala.binary.version>2.10</scala.binary.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-scala-shell</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>include-yarn</id>
<activation>
......
......@@ -76,12 +76,6 @@ under the License.
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>jline</artifactId>
<version>2.10.4</version>
</dependency>
<!-- tests -->
<dependency>
<groupId>org.apache.flink</groupId>
......@@ -180,6 +174,7 @@ under the License.
<configuration>
<sources>
<source>src/main/scala</source>
<source>src/main/scala-${scala.binary.version}</source>
</sources>
</configuration>
</execution>
......@@ -274,6 +269,12 @@ under the License.
<artifactId>quasiquotes_${scala.binary.version}</artifactId>
<version>${scala.macros.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>jline</artifactId>
<version>2.10.4</version>
</dependency>
</dependencies>
</profile>
</profiles>
......
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.scala

import java.io.BufferedReader

import _root_.scala.tools.nsc.interpreter._

class ILoopCompat(
    in0: Option[BufferedReader],
    out0: JPrintWriter)
  extends ILoop(in0, out0) {
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.scala

import java.io.BufferedReader

import _root_.scala.tools.nsc.interpreter._

class ILoopCompat(
    in0: Option[BufferedReader],
    out0: JPrintWriter)
  extends ILoop(in0, out0) {

  protected def addThunk(f: => Unit): Unit = f
}
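The two ILoopCompat variants above differ only in whether they define addThunk; presumably each lives in one of the per-binary-version source roots registered in the pom change above (src/main/scala-${scala.binary.version}), so a given build compiles exactly one of them (the scala-2.11 profile is selected by setting the scala-2.11 property, per the activation blocks in the poms). The pattern, reduced to a standalone sketch with hypothetical names:

// Standalone sketch of the per-version compatibility-shim pattern.
// All class names here are hypothetical; only the idea mirrors the patch.
abstract class ReplBase {                        // stands in for the version-specific ILoop
  def echo(msg: String): Unit = println(msg)
}

class ReplCompat extends ReplBase {              // the per-version shim file
  // On one Scala version the hook already exists upstream and this body can stay empty;
  // on the other it is re-introduced here so shared code compiles unchanged.
  protected def addThunk(f: => Unit): Unit = f
}

class SharedLoop extends ReplCompat {            // version-independent shared code
  addThunk { echo("same call site on Scala 2.10 and 2.11") }
}

object CompatSketch {
  def main(args: Array[String]): Unit = new SharedLoop
}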
......@@ -20,11 +20,11 @@ package org.apache.flink.api.scala
import java.io.{BufferedReader, File, FileOutputStream}
import scala.tools.nsc.interpreter._
import org.apache.flink.api.java.{JarHelper, ScalaShellRemoteEnvironment}
import org.apache.flink.util.AbstractID
import scala.tools.nsc.interpreter._
class FlinkILoop(
val host: String,
......@@ -32,10 +32,8 @@ class FlinkILoop(
val externalJars: Option[Array[String]],
in0: Option[BufferedReader],
out0: JPrintWriter)
extends ILoop(in0, out0) {
extends ILoopCompat(in0, out0) {
def this(host:String,
port:Int,
externalJars : Option[Array[String]],
......@@ -51,6 +49,7 @@ class FlinkILoop(
def this(host: String, port: Int, in0: BufferedReader, out: JPrintWriter){
this(host: String, port: Int, None, in0: BufferedReader, out: JPrintWriter)
}
// remote environment
private val remoteEnv: ScalaShellRemoteEnvironment = {
// allow creation of environments
......@@ -71,17 +70,6 @@ class FlinkILoop(
scalaEnv
}
  addThunk {
    intp.beQuietDuring {
      // automatically imports the flink scala api
      intp.addImports("org.apache.flink.api.scala._")
      intp.addImports("org.apache.flink.api.common.functions._")

      // with this we can access this object in the scala shell
      intp.bindValue("env", this.scalaEnv)
    }
  }
/**
* creates a temporary directory to store compiled console files
*/
......@@ -107,6 +95,24 @@ class FlinkILoop(
new File(tmpDirBase, "scala_shell_commands.jar")
}
  private val packageImports = Seq[String](
    "org.apache.flink.api.scala._",
    "org.apache.flink.api.common.functions._"
  )

  override def createInterpreter(): Unit = {
    super.createInterpreter()

    addThunk {
      intp.beQuietDuring {
        // import dependencies
        intp.interpret("import " + packageImports.mkString(", "))

        // set execution environment
        intp.bind("env", this.scalaEnv)
      }
    }
  }
/**
* Packages the compiled classes of the current shell session into a Jar file for execution
......
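With this hunk the imports and the `env` binding run when the interpreter is created, and they go through intp.interpret and intp.bind, which exist on both Scala versions' interpreter API (the addImports / bindValue helpers used before appear to be the 2.10-only path this change removes). A small standalone sketch of the statement that mkString assembles for the interpreter; the object name is hypothetical:

// Hypothetical helper; shows the exact import statement handed to intp.interpret.
object ImportStatementSketch {
  def main(args: Array[String]): Unit = {
    val packageImports = Seq(
      "org.apache.flink.api.scala._",
      "org.apache.flink.api.common.functions._")
    println("import " + packageImports.mkString(", "))
    // prints: import org.apache.flink.api.scala._, org.apache.flink.api.common.functions._
  }
}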
......@@ -18,12 +18,11 @@
package org.apache.flink.api.scala
import scala.tools.nsc.Settings
import org.apache.flink.configuration.Configuration
import org.apache.flink.runtime.minicluster.LocalFlinkMiniCluster
import scala.tools.nsc.Settings
object FlinkShell {
......
......@@ -22,7 +22,7 @@ import java.io._
import java.util.concurrent.TimeUnit
import org.apache.flink.runtime.StreamingMode
import org.apache.flink.test.util.{ForkableFlinkMiniCluster, TestBaseUtils, TestEnvironment}
import org.apache.flink.test.util.{TestEnvironment, ForkableFlinkMiniCluster, TestBaseUtils}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}
......
......@@ -46,6 +46,7 @@ under the License.
<module>flink-ml</module>
<module>flink-language-binding</module>
<module>flink-gelly-scala</module>
<module>flink-scala-shell</module>
</modules>
<!-- See main pom.xml for explanation of profiles -->
......@@ -71,22 +72,5 @@ under the License.
<module>flink-tez</module>
</modules>
</profile>
<profile>
<id>scala-2.10</id>
<activation>
<property>
<!-- this is the default scala profile -->
<name>!scala-2.11</name>
</property>
</activation>
<properties>
<scala.version>2.10.4</scala.version>
<scala.binary.version>2.10</scala.binary.version>
</properties>
<modules>
<module>flink-scala-shell</module>
</modules>
</profile>
</profiles>
</project>
......@@ -370,7 +370,7 @@ under the License.
</property>
</activation>
<properties>
<scala.version>2.11.4</scala.version>
<scala.version>2.11.7</scala.version>
<scala.binary.version>2.11</scala.binary.version>
</properties>
</profile>
......