Commit 3fe6d19a authored by: M mbalassi

[contrib] Added log properties files to contrib & minor clean ups

Parent 12b13f9c
......@@ -64,11 +64,11 @@ import java.util.Map;
*/
public class FlinkClient {
//The jobmanager's host name
/** The jobmanager's host name */
private final String jobManagerHost;
//The jobmanager's rpc port
/** The jobmanager's rpc port */
private final int jobManagerPort;
//The user specified timeout in milliseconds
/** The user specified timeout in milliseconds */
private final String timeout;
// The following methods are derived from "backtype.storm.utils.NimbusClient"
......@@ -77,8 +77,6 @@ public class FlinkClient {
* Instantiates a new {@link FlinkClient} for the given configuration, host name, and port. If values for {@link
* Config#NIMBUS_HOST} and {@link Config#NIMBUS_THRIFT_PORT} of the given configuration are ignored.
*
* @param conf
* A configuration.
* @param host
* The jobmanager's host name.
* @param port
......@@ -92,8 +90,6 @@ public class FlinkClient {
* Instantiates a new {@link FlinkClient} for the given configuration, host name, and port. If values for {@link
* Config#NIMBUS_HOST} and {@link Config#NIMBUS_THRIFT_PORT} of the given configuration are ignored.
*
* @param conf
* A configuration.
* @param host
* The jobmanager's host name.
* @param port
......@@ -139,8 +135,6 @@ public class FlinkClient {
public void close() {/* nothing to do */}
// The following methods are derived from "backtype.storm.generated.Nimubs.Client"
/**
* Parameter {@code uploadedJarLocation} is actually used to point to the local jar, because Flink does not support
* uploading a jar file before hand. Jar files are always uploaded directly when a program is submitted.
......
......@@ -36,7 +36,7 @@ import java.util.List;
*/
final class FlinkOutputFieldsDeclarer implements OutputFieldsDeclarer {
// the declared output schema
/** the declared output schema */
private Fields outputSchema;
@Override
......
......@@ -74,10 +74,6 @@ public class FlinkSubmitter {
* the topology-specific configuration. See {@link Config}.
* @param topology
* the processing to execute.
* @param opts
* to manipulate the starting of the topology
* @param progressListener
* to track the progress of the jar upload process
* @throws AlreadyAliveException
* if a topology with this name is already running
* @throws InvalidTopologyException
......@@ -145,8 +141,6 @@ public class FlinkSubmitter {
* the topology-specific configuration. See {@link Config}.
* @param topology
* the processing to execute.
* @param opts
* to manipulate the starting of the topology
* @throws AlreadyAliveException
* if a topology with this name is already running
* @throws InvalidTopologyException
......@@ -179,12 +173,8 @@ public class FlinkSubmitter {
* returned value is parameter localJar, because this give the best integration of Storm behavior within a Flink
* environment.
*
* @param conf
* the topology-specific configuration. See {@link Config}.
* @param localJar
* file path of the jar file to submit
* @param listener
* progress listener to track the jar file upload
* @return the value of parameter localJar
*/
public static String submitJar(final String localJar) {
......
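To make the submitter contract above concrete, here is a hedged sketch of driving it. Only submitJar(String) is shown in this diff; the submitTopology call shape and the builder usage are assumptions modeled on backtype.storm.StormSubmitter, and imports for the contrib classes are omitted because their package is not shown in this hunk.

// Hedged usage sketch; submitTopology(name, conf, topology) is an assumed signature.
import backtype.storm.Config;
import backtype.storm.generated.AlreadyAliveException;
import backtype.storm.generated.InvalidTopologyException;

public class SubmitterSketch {
	public static void submit(final FlinkTopologyBuilder builder)
			throws AlreadyAliveException, InvalidTopologyException {
		final Config conf = new Config();  // the topology-specific configuration
		FlinkSubmitter.submitTopology("word-count", conf, builder.createTopology());

		// submitJar(String) just returns its argument: Flink uploads the jar itself when
		// the program is submitted, so there is no separate upload step to perform here.
		final String localJar = FlinkSubmitter.submitJar("/path/to/topology.jar");
	}
}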
......@@ -30,9 +30,9 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
*/
class FlinkTopology extends StreamExecutionEnvironment {
// The corresponding {@link StormTopology} that is mimicked by this {@link FlinkTopology}
/** The corresponding {@link StormTopology} that is mimicked by this {@link FlinkTopology} */
private final StormTopology stormTopology;
// The number of declared tasks for the whole program (ie, sum over all dops)
/** The number of declared tasks for the whole program (ie, sum over all dops) */
private int numberOfTasks = 0;
public FlinkTopology(final StormTopology stormTopology) {
......
......@@ -54,11 +54,11 @@ import java.util.Set;
*/
public class FlinkTopologyBuilder {
// A Storm {@link TopologyBuilder} to build a real Storm topology
/** A Storm {@link TopologyBuilder} to build a real Storm topology */
private final TopologyBuilder stormBuilder = new TopologyBuilder();
// All user spouts by their ID
/** All user spouts by their ID */
private final HashMap<String, IRichSpout> spouts = new HashMap<String, IRichSpout>();
// All user bolts by their ID
/** All user bolts by their ID */
private final HashMap<String, IRichBolt> bolts = new HashMap<String, IRichBolt>();
/**
......
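A hedged sketch of how the spout and bolt maps above get populated. The setSpout/setBolt/shuffleGrouping/createTopology calls are assumptions carried over from Storm's TopologyBuilder, which this class wraps; StormBoltCounter appears later in this commit, while StormFileSpout is a hypothetical line source.

// Assumed API, modeled on backtype.storm.topology.TopologyBuilder (wrapped by this class):
final FlinkTopologyBuilder builder = new FlinkTopologyBuilder();
builder.setSpout("source", new StormFileSpout("/path/to/input.txt")); // hypothetical IRichSpout
builder.setBolt("counter", new StormBoltCounter())                    // IRichBolt, see below
		.shuffleGrouping("source");
final FlinkTopology topology = builder.createTopology();              // assumed to yield the FlinkTopology shown above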
......@@ -37,7 +37,9 @@ import org.apache.flink.streaming.runtime.tasks.StreamingRuntimeContext;
public abstract class AbstractStormSpoutWrapper<OUT> extends RichParallelSourceFunction<OUT> {
private static final long serialVersionUID = 4993283609095408765L;
// Number of attributes of the bolt's output tuples.
/**
* Number of attributes of the bolt's output tuples.
*/
private final int numberOfAttributes;
/**
* The wrapped Storm {@link IRichSpout spout}.
......
......@@ -34,7 +34,7 @@ import java.util.List;
*/
class StormBoltCollector<OUT> extends AbstractStormCollector<OUT> implements IOutputCollector {
// The Flink output object
/** The Flink output object */
private final Output<OUT> flinkOutput;
/**
......
......@@ -46,9 +46,9 @@ import org.apache.flink.streaming.runtime.tasks.StreamingRuntimeContext;
public class StormBoltWrapper<IN, OUT> extends AbstractStreamOperator<OUT> implements OneInputStreamOperator<IN, OUT> {
private static final long serialVersionUID = -4788589118464155835L;
// The wrapped Storm {@link IRichBolt bolt}
/** The wrapped Storm {@link IRichBolt bolt} */
private final IRichBolt bolt;
// Number of attributes of the bolt's output tuples
/** Number of attributes of the bolt's output tuples */
private final int numberOfAttributes;
/**
......
......@@ -32,7 +32,7 @@ import org.apache.flink.api.java.tuple.Tuple25;
public class StormFiniteSpoutWrapper<OUT> extends AbstractStormSpoutWrapper<OUT> {
private static final long serialVersionUID = 3883246587044801286L;
// The number of {@link IRichSpout#nextTuple()} calls
/** The number of {@link IRichSpout#nextTuple()} calls */
private int numberOfInvocations;
/**
......
......@@ -27,7 +27,7 @@ import backtype.storm.utils.Utils;
*/
class StormOutputFieldsDeclarer implements OutputFieldsDeclarer {
// The output schema declared by the wrapped bolt.
/** The output schema declared by the wrapped bolt. */
private Fields outputSchema = null;
@Override
......
......@@ -45,8 +45,7 @@ public class StormSpoutWrapper<OUT> extends AbstractStormSpoutWrapper<OUT> {
/**
* Instantiates a new {@link StormSpoutWrapper} that wraps the given Storm {@link IRichSpout spout} such that it
* can
* be used within a Flink streaming program. The output type can be any type if parameter {@code rawOutput} is
* can be used within a Flink streaming program. The output type can be any type if parameter {@code rawOutput} is
* {@code true} and the spout's number of declared output tuples is 1. If {@code rawOutput} is {@code false} the
* output type will be one of {@link Tuple1} to {@link Tuple25} depending on the spout's declared number of
* attributes.
......
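A hedged sketch of the wrapping described in that Javadoc. The constructor shape StormSpoutWrapper(IRichSpout, boolean rawOutput) and the addSource call are assumptions; the Tuple1 output type for a one-attribute, non-raw spout follows from the text above, and SingleLineSpout is a hypothetical spout sketched after the AbstractStormSpout hunk further down.

// Assumption: StormSpoutWrapper(IRichSpout spout, boolean rawOutput). The wrapper is a
// RichParallelSourceFunction (see AbstractStormSpoutWrapper above), so it plugs into addSource.
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
final DataStream<Tuple1<String>> lines =
		env.addSource(new StormSpoutWrapper<Tuple1<String>>(new SingleLineSpout(), false));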
......@@ -37,7 +37,7 @@ import java.util.List;
*/
class StormTuple<IN> implements backtype.storm.tuple.Tuple {
// The storm representation of the original Flink tuple
/** The storm representation of the original Flink tuple */
private final Values stormTuple;
/**
......
......@@ -42,7 +42,7 @@ class StormWrapperSetupHelper {
* {@link StormBoltWrapper}. Returns zero for raw output type or a value within range [1;25] for
* output type {@link org.apache.flink.api.java.tuple.Tuple1 Tuple1} to
* {@link org.apache.flink.api.java.tuple.Tuple25 Tuple25} . In case of a data sink, {@code -1}
* is returned. .
* is returned.
*
* @param spoutOrBolt
* The Storm {@link IRichSpout spout} or {@link IRichBolt bolt} to be used.
......
......@@ -28,6 +28,8 @@ import java.util.LinkedList;
public class FlinkOutputFieldsDeclarerTest extends AbstractTest {
@Test
public void testDeclare() {
for (int i = 0; i < 4; ++i) {
......@@ -58,7 +60,7 @@ public class FlinkOutputFieldsDeclarerTest extends AbstractTest {
}
private void runDeclareTest(final int testCase, final int numberOfAttributes) {
final FlinkOutputFieldsDeclarer declarere = new FlinkOutputFieldsDeclarer();
final FlinkOutputFieldsDeclarer declarer = new FlinkOutputFieldsDeclarer();
final String[] attributes = new String[numberOfAttributes];
for (int i = 0; i < numberOfAttributes; ++i) {
......@@ -67,19 +69,19 @@ public class FlinkOutputFieldsDeclarerTest extends AbstractTest {
switch (testCase) {
case 0:
this.declareSimple(declarere, attributes);
this.declareSimple(declarer, attributes);
break;
case 1:
this.declareNonDirect(declarere, attributes);
this.declareNonDirect(declarer, attributes);
break;
case 2:
this.declareDefaultStream(declarere, attributes);
this.declareDefaultStream(declarer, attributes);
break;
default:
this.declareFull(declarere, attributes);
this.declareFull(declarer, attributes);
}
final TypeInformation<?> type = declarere.getOutputType();
final TypeInformation<?> type = declarer.getOutputType();
if (numberOfAttributes == 0) {
Assert.assertNull(type);
......@@ -93,20 +95,20 @@ public class FlinkOutputFieldsDeclarerTest extends AbstractTest {
}
}
private void declareSimple(final FlinkOutputFieldsDeclarer declarere, final String[] attributes) {
declarere.declare(new Fields(attributes));
private void declareSimple(final FlinkOutputFieldsDeclarer declarer, final String[] attributes) {
declarer.declare(new Fields(attributes));
}
private void declareNonDirect(final FlinkOutputFieldsDeclarer declarere, final String[] attributes) {
declarere.declare(false, new Fields(attributes));
private void declareNonDirect(final FlinkOutputFieldsDeclarer declarer, final String[] attributes) {
declarer.declare(false, new Fields(attributes));
}
private void declareDefaultStream(final FlinkOutputFieldsDeclarer declarere, final String[] attributes) {
declarere.declareStream(Utils.DEFAULT_STREAM_ID, new Fields(attributes));
private void declareDefaultStream(final FlinkOutputFieldsDeclarer declarer, final String[] attributes) {
declarer.declareStream(Utils.DEFAULT_STREAM_ID, new Fields(attributes));
}
private void declareFull(final FlinkOutputFieldsDeclarer declarere, final String[] attributes) {
declarere.declareStream(Utils.DEFAULT_STREAM_ID, false, new Fields(attributes));
private void declareFull(final FlinkOutputFieldsDeclarer declarer, final String[] attributes) {
declarer.declareStream(Utils.DEFAULT_STREAM_ID, false, new Fields(attributes));
}
@Test(expected = UnsupportedOperationException.class)
......@@ -137,8 +139,8 @@ public class FlinkOutputFieldsDeclarerTest extends AbstractTest {
attributes[i] = "a" + i;
}
final FlinkOutputFieldsDeclarer declarere = new FlinkOutputFieldsDeclarer();
declarere.declare(new Fields(attributes));
final FlinkOutputFieldsDeclarer declarer = new FlinkOutputFieldsDeclarer();
declarer.declare(new Fields(attributes));
final int numberOfKeys = 1 + this.r.nextInt(25);
final LinkedList<String> groupingFields = new LinkedList<String>();
......@@ -161,7 +163,7 @@ public class FlinkOutputFieldsDeclarerTest extends AbstractTest {
}
}
final int[] result = declarere.getGroupingFieldIndexes(groupingFields);
final int[] result = declarer.getGroupingFieldIndexes(groupingFields);
Assert.assertEquals(expectedResult.length, result.length);
for (int i = 0; i < expectedResult.length; ++i) {
......
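Condensed from the test above, the declarer pattern being exercised looks like this; only methods that appear in the test are used, and the concrete attribute names are illustrative.

final FlinkOutputFieldsDeclarer declarer = new FlinkOutputFieldsDeclarer();
declarer.declare(new Fields("id", "value"));               // declare a two-attribute output schema

final TypeInformation<?> type = declarer.getOutputType();  // Tuple2 type info; null if no attributes declared

// translate Storm grouping fields into Flink key indexes: "value" -> index 1
final LinkedList<String> groupingFields = new LinkedList<String>();
groupingFields.add("value");
final int[] keyIndexes = declarer.getGroupingFieldIndexes(groupingFields);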
......@@ -83,7 +83,7 @@ public class StormBoltCollectorTest extends AbstractTest {
@Test(expected = UnsupportedOperationException.class)
public void testEmitDirect() {
new StormBoltCollector<Object>(1, mock(Output.class)).emitDirect(0, null,
(Collection) null, null);
null, null);
}
@SuppressWarnings("unchecked")
......
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# Set root logger level to DEBUG and its only appender to A1.
log4j.rootLogger=OFF, A1
# A1 is set to be a ConsoleAppender.
log4j.appender.A1=org.apache.log4j.ConsoleAppender
# A1 uses PatternLayout.
log4j.appender.A1.layout=org.apache.log4j.PatternLayout
log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
\ No newline at end of file
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# This file ensures that tests executed from the IDE show log output
log4j.rootLogger=OFF, console
# Log all infos in the given file
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target = System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n
\ No newline at end of file
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{60} %X{sourceThread} - %msg%n</pattern>
</encoder>
</appender>
<root level="WARN">
<appender-ref ref="STDOUT"/>
</root>
<logger name="org.apache.flink.runtime.client.JobClient" level="OFF"/>
</configuration>
\ No newline at end of file
......@@ -33,7 +33,7 @@ import org.apache.flink.stormcompatibility.util.StormInMemorySpout;
public class ExclamationTopology {
public final static String spoutId = "source";
public final static String firstBoltId = "exlamation1";
public final static String firstBoltId = "exclamation1";
public final static String secondBoltId = "exclamation2";
public final static String sinkId = "sink";
private final static OutputFormatter formatter = new SimpleOutputFormatter();
......
......@@ -27,7 +27,7 @@ import java.util.Map;
/**
* Base class for Storm Spout that reads data line by line from an arbitrary source. The declared output schema has a
* single attribute calle {@code line} and should be of type {@link String}.
* single attribute called {@code line} and should be of type {@link String}.
*/
public abstract class AbstractStormSpout implements IRichSpout {
private static final long serialVersionUID = 8876828403487806771L;
......
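For illustration, a standalone Storm spout with the same declared schema: a single String attribute called line. This is not the contrib's AbstractStormSpout subclass (its abstract methods and collector field are not shown in this hunk); it extends Storm's BaseRichSpout instead.

import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import java.util.Map;

public class SingleLineSpout extends BaseRichSpout {
	private static final long serialVersionUID = 1L;
	private SpoutOutputCollector collector;

	@Override
	public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
		this.collector = collector;
	}

	@Override
	public void nextTuple() {
		this.collector.emit(new Values("hello world")); // emits one line per call
	}

	@Override
	public void declareOutputFields(OutputFieldsDeclarer declarer) {
		declarer.declare(new Fields("line")); // single attribute called "line"
	}
}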
......@@ -78,8 +78,6 @@ public class StormBoltCounter implements IRichBolt {
/**
* A counter helper to emit immutable tuples to the given stormCollector and avoid unnecessary object
* creating/deletion.
*
* @author mjsax
*/
private static final class Count {
public int count;
......
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# Set root logger level to DEBUG and its only appender to A1.
log4j.rootLogger=OFF, A1
# A1 is set to be a ConsoleAppender.
log4j.appender.A1=org.apache.log4j.ConsoleAppender
# A1 uses PatternLayout.
log4j.appender.A1.layout=org.apache.log4j.PatternLayout
log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
\ No newline at end of file
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# This file ensures that tests executed from the IDE show log output
log4j.rootLogger=OFF, console
# Log all infos in the given file
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target = System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n
\ No newline at end of file
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{60} %X{sourceThread} - %msg%n</pattern>
</encoder>
</appender>
<root level="WARN">
<appender-ref ref="STDOUT"/>
</root>
<logger name="org.apache.flink.runtime.client.JobClient" level="OFF"/>
</configuration>
\ No newline at end of file
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# Set root logger level to DEBUG and its only appender to A1.
log4j.rootLogger=OFF, A1
# A1 is set to be a ConsoleAppender.
log4j.appender.A1=org.apache.log4j.ConsoleAppender
# A1 uses PatternLayout.
log4j.appender.A1.layout=org.apache.log4j.PatternLayout
log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
\ No newline at end of file
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# This file ensures that tests executed from the IDE show log output
log4j.rootLogger=OFF, console
# Log all infos in the given file
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target = System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n
\ No newline at end of file
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{60} %X{sourceThread} - %msg%n</pattern>
</encoder>
</appender>
<root level="WARN">
<appender-ref ref="STDOUT"/>
</root>
<logger name="org.apache.flink.runtime.client.JobClient" level="OFF"/>
</configuration>
\ No newline at end of file
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# Set root logger level to DEBUG and its only appender to A1.
log4j.rootLogger=OFF, A1
# A1 is set to be a ConsoleAppender.
log4j.appender.A1=org.apache.log4j.ConsoleAppender
# A1 uses PatternLayout.
log4j.appender.A1.layout=org.apache.log4j.PatternLayout
log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
\ No newline at end of file
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# This file ensures that tests executed from the IDE show log output
log4j.rootLogger=OFF, console
# Log all infos in the given file
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target = System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n
\ No newline at end of file
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{60} %X{sourceThread} - %msg%n</pattern>
</encoder>
</appender>
<root level="WARN">
<appender-ref ref="STDOUT"/>
</root>
<logger name="org.apache.flink.runtime.client.JobClient" level="OFF"/>
</configuration>
\ No newline at end of file