Commit 5a0c268d authored by zentol, committed by Fabian Hueske

[FLINK-3949] [metrics] Add numSplitsProcessed counter metric.

This closes #2119
Parent 18744b2c
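
All three production changes apply the same pattern from Flink's metrics API: look up the operator's MetricGroup via the runtime context, register a Counter named numSplitsProcessed, and call inc() once for every completed input split. A minimal sketch of that pattern in a user-defined source is shown below; the class name and the fetchNextSplit() helper are hypothetical and not part of this commit.

```java
import org.apache.flink.configuration.Configuration;
import org.apache.flink.metrics.Counter;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;

// Illustrative sketch only: the class name and fetchNextSplit() are made up;
// the metric registration and inc()-per-split pattern mirrors the diff below.
public class SplitCountingSource extends RichParallelSourceFunction<String> {

    private transient Counter completedSplitsCounter;
    private volatile boolean running = true;

    @Override
    public void open(Configuration parameters) {
        // Register the counter on the operator's metric group under the same
        // name the commit uses.
        completedSplitsCounter = getRuntimeContext()
            .getMetricGroup()
            .counter("numSplitsProcessed");
    }

    @Override
    public void run(SourceContext<String> ctx) {
        while (running) {
            String split = fetchNextSplit(); // hypothetical helper
            if (split == null) {
                break;
            }
            ctx.collect(split);
            completedSplitsCounter.inc(); // one increment per completed split
        }
    }

    @Override
    public void cancel() {
        running = false;
    }

    private String fetchNextSplit() {
        return null; // placeholder: no real splits in this sketch
    }
}
```
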
@@ -101,7 +101,7 @@ public class DataSourceTask<OT> extends AbstractInvokable {
         LOG.debug(getLogString("Starting data source operator"));
         RuntimeContext ctx = createRuntimeContext();
-        Counter splitCounter = ctx.getMetricGroup().counter("numSplitsProcessed");
+        Counter completedSplitsCounter = ctx.getMetricGroup().counter("numSplitsProcessed");
         Counter numRecordsOut = ctx.getMetricGroup().counter("numRecordsOut");
         if (RichInputFormat.class.isAssignableFrom(this.format.getClass())) {
@@ -172,7 +172,7 @@ public class DataSourceTask<OT> extends AbstractInvokable {
                     // close. We close here such that a regular close throwing an exception marks a task as failed.
                     format.close();
                 }
-                splitCounter.inc();
+                completedSplitsCounter.inc();
             } // end for all input splits
             // close the collector. if it is a chaining task collector, it will close its chained tasks
@@ -26,6 +26,7 @@ import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.core.fs.FileInputSplit;
 import org.apache.flink.core.memory.DataInputViewStreamWrapper;
+import org.apache.flink.metrics.Counter;
 import org.apache.flink.runtime.state.AbstractStateBackend;
 import org.apache.flink.runtime.state.StreamStateHandle;
 import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
@@ -235,6 +236,7 @@ public class ContinuousFileReaderOperator<OUT, S extends Serializable> extends A
         public void run() {
             try {
+                Counter completedSplitsCounter = getMetricGroup().counter("numSplitsProcessed");
                 this.format.openInputFormat();
                 while (this.isRunning) {
@@ -290,6 +292,7 @@ public class ContinuousFileReaderOperator<OUT, S extends Serializable> extends A
                         }
                     }
                 }
+                completedSplitsCounter.inc();
             } finally {
                 // close and prepare for the next iteration
@@ -24,6 +24,7 @@ import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.common.typeutils.TypeSerializer;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.core.io.InputSplit;
+import org.apache.flink.metrics.Counter;
 import org.apache.flink.runtime.jobgraph.tasks.InputSplitProvider;
 import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
@@ -70,6 +71,7 @@ public class InputFormatSourceFunction<OUT> extends RichParallelSourceFunction<O
     public void run(SourceContext<OUT> ctx) throws Exception {
         try {
+            Counter completedSplitsCounter = getRuntimeContext().getMetricGroup().counter("numSplitsProcessed");
             if (isRunning && format instanceof RichInputFormat) {
                 ((RichInputFormat) format).openInputFormat();
             }
@@ -86,6 +88,7 @@ public class InputFormatSourceFunction<OUT> extends RichParallelSourceFunction<O
                     ctx.collect(nextElement);
                 }
                 format.close();
+                completedSplitsCounter.inc();
                 if (isRunning) {
                     isRunning = splitIterator.hasNext();
@@ -26,6 +26,8 @@ import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.core.io.InputSplit;
 import org.apache.flink.core.io.InputSplitAssigner;
+import org.apache.flink.metrics.MetricGroup;
+import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;
 import org.apache.flink.runtime.jobgraph.tasks.InputSplitProvider;
 import org.apache.flink.runtime.memory.MemoryManager;
 import org.apache.flink.runtime.operators.testutils.MockEnvironment;
@@ -252,6 +254,11 @@ public class InputFormatSourceFunctionTest {
             this.format = format;
         }
+        @Override
+        public MetricGroup getMetricGroup() {
+            return new UnregisteredMetricsGroup();
+        }
         @Override
         public InputSplitProvider getInputSplitProvider() {
             try {
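
The test hunk above relies on UnregisteredMetricsGroup, a no-op MetricGroup that is not attached to any reporter but still hands out working metric objects, so the operator code can increment the counter unconditionally. A small stand-alone sketch of that behavior follows; the class is illustrative and not part of the commit.

```java
import org.apache.flink.metrics.Counter;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;

// Illustrative sketch, not part of the commit: a counter obtained from a
// no-op (unregistered) metric group can still be incremented and read back.
public class CompletedSplitsCounterSketch {

    public static void main(String[] args) {
        MetricGroup metrics = new UnregisteredMetricsGroup();
        Counter completedSplitsCounter = metrics.counter("numSplitsProcessed");

        int splits = 3; // pretend three input splits were processed
        for (int i = 0; i < splits; i++) {
            // ... open the split, emit its records, close the split ...
            completedSplitsCounter.inc();
        }

        // prints: numSplitsProcessed = 3
        System.out.println("numSplitsProcessed = " + completedSplitsCounter.getCount());
    }
}
```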