Commit 5fce538e authored by sunhaibotb, committed by Piotr Nowojski


[FLINK-14231][task] Change StreamTask to close operators with StreamOperatorWrapper to make the endInput semantics on the chain strict
Parent 56ce6960
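
For context, a minimal sketch of the close order this commit enforces on an operator chain. The StreamOperatorWrapper internals are not part of this diff, so the helper below (ChainCloseSketch / closeHeadToTail are made-up names) only illustrates the intended semantics: an operator is closed before the input of the next one-input operator in the chain is ended, because a closing operator may still emit elements.

import java.util.List;

import org.apache.flink.streaming.api.operators.BoundedOneInput;
import org.apache.flink.streaming.api.operators.StreamOperator;

// Conceptual helper, not part of this commit: close the chain strictly head to tail,
// ending the next operator's input only after its upstream operator has been closed.
final class ChainCloseSketch {

	static void closeHeadToTail(List<StreamOperator<?>> operatorsHeadToTail) throws Exception {
		for (int i = 0; i < operatorsHeadToTail.size(); i++) {
			// closing may still emit elements to the downstream operator
			operatorsHeadToTail.get(i).close();

			// the downstream (one-input) operator has now seen all of its input
			if (i + 1 < operatorsHeadToTail.size()
					&& operatorsHeadToTail.get(i + 1) instanceof BoundedOneInput) {
				((BoundedOneInput) operatorsHeadToTail.get(i + 1)).endInput();
			}
		}
	}
}
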
......@@ -30,6 +30,7 @@ import org.apache.flink.streaming.api.functions.async.AsyncFunction;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.graph.StreamConfig;
import org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator;
import org.apache.flink.streaming.api.operators.BoundedOneInput;
import org.apache.flink.streaming.api.operators.ChainingStrategy;
import org.apache.flink.streaming.api.operators.MailboxExecutor;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
......@@ -76,7 +77,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
@Internal
public class AsyncWaitOperator<IN, OUT>
extends AbstractUdfStreamOperator<OUT, AsyncFunction<IN, OUT>>
implements OneInputStreamOperator<IN, OUT> {
implements OneInputStreamOperator<IN, OUT>, BoundedOneInput {
private static final long serialVersionUID = 1L;
private static final String STATE_NAME = "_async_wait_operator_state_";
......@@ -234,13 +235,11 @@ public class AsyncWaitOperator<IN, OUT>
}
@Override
public void close() throws Exception {
try {
waitInFlightInputsFinished();
}
finally {
super.close();
}
public void endInput() throws Exception {
// We should wait here for the in-flight data to be finished. The reason is that timers
// which are not yet running will be forbidden to fire after this point, so if the async
// operation got stuck, the timeout timer could never fire and the task would deadlock.
waitInFlightInputsFinished();
}
/**
......
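
The AsyncWaitOperator change above moves the draining of in-flight elements from close() to endInput(). A minimal sketch of the same pattern for a custom operator follows; DrainingOperator and drainPendingWork() are hypothetical names, with drainPendingWork() standing in for waitInFlightInputsFinished().

import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
import org.apache.flink.streaming.api.operators.BoundedOneInput;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;

// Sketch: finish outstanding asynchronous work when the input ends, while timeout
// timers are still allowed to fire, instead of waiting in close().
class DrainingOperator extends AbstractStreamOperator<String>
		implements OneInputStreamOperator<String, String>, BoundedOneInput {

	@Override
	public void processElement(StreamRecord<String> element) throws Exception {
		output.collect(element);
	}

	@Override
	public void endInput() throws Exception {
		drainPendingWork(); // analogous to waitInFlightInputsFinished()
	}

	private void drainPendingWork() {
		// hypothetical placeholder for waiting on outstanding async results
	}
}
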
......@@ -42,8 +42,6 @@ import org.apache.flink.streaming.api.collector.selector.DirectedOutput;
import org.apache.flink.streaming.api.collector.selector.OutputSelector;
import org.apache.flink.streaming.api.graph.StreamConfig;
import org.apache.flink.streaming.api.graph.StreamEdge;
import org.apache.flink.streaming.api.operators.BoundedMultiInput;
import org.apache.flink.streaming.api.operators.BoundedOneInput;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.api.operators.Output;
import org.apache.flink.streaming.api.operators.StreamOperator;
......@@ -278,25 +276,33 @@ public class OperatorChain<OUT, OP extends StreamOperator<OUT>> implements Strea
* @param inputId the input ID; it starts from 1, which indicates the first input.
*/
public void endHeadOperatorInput(int inputId) throws Exception {
endOperatorInput(getHeadOperator(), inputId);
if (headOperatorWrapper != null) {
headOperatorWrapper.endOperatorInput(inputId);
}
}
/**
* Ends all inputs of the non-head operator specified by {@code streamOperator}
* (now there is only one input for each non-head operator).
*
* @param streamOperator non-head operator for ending the only input.
* Executes {@link StreamOperator#initializeState()} followed by {@link StreamOperator#open()}
* of each operator in the chain of this {@link StreamTask}. State initialization and opening
* happens from <b>tail to head</b> operator in the chain, contrary to {@link StreamOperator#close()}
* which happens <b>head to tail</b> (see {@link #closeOperators(StreamTaskActionExecutor)}).
*/
public void endNonHeadOperatorInput(StreamOperator<?> streamOperator) throws Exception {
checkState(streamOperator != getHeadOperator());
endOperatorInput(streamOperator, 1);
protected void initializeStateAndOpenOperators() throws Exception {
for (StreamOperatorWrapper<?, ?> operatorWrapper : getAllOperators(true)) {
StreamOperator<?> operator = operatorWrapper.getStreamOperator();
operator.initializeState();
operator.open();
}
}
private void endOperatorInput(StreamOperator<?> streamOperator, int inputId) throws Exception {
if (streamOperator instanceof BoundedOneInput) {
((BoundedOneInput) streamOperator).endInput();
} else if (streamOperator instanceof BoundedMultiInput) {
((BoundedMultiInput) streamOperator).endInput(inputId);
/**
* Closes all operators in a chained (cascading) way. Closing happens from <b>head to tail</b> operator
* in the chain, contrary to {@link StreamOperator#open()} which happens <b>tail to head</b>
* (see {@link #initializeStateAndOpenOperators()}).
*/
protected void closeOperators(StreamTaskActionExecutor actionExecutor) throws Exception {
if (headOperatorWrapper != null) {
headOperatorWrapper.close(actionExecutor);
}
}
......
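
The per-operator endInput dispatch that used to live in OperatorChain#endOperatorInput is assumed to move into StreamOperatorWrapper#endOperatorInput (the wrapper itself is not shown in this diff). A sketch of that dispatch, mirroring the removed code above:

import org.apache.flink.streaming.api.operators.BoundedMultiInput;
import org.apache.flink.streaming.api.operators.BoundedOneInput;
import org.apache.flink.streaming.api.operators.StreamOperator;

// Sketch of the endInput dispatch (mirrors the removed OperatorChain#endOperatorInput).
final class EndInputDispatchSketch {

	static void endOperatorInput(StreamOperator<?> operator, int inputId) throws Exception {
		if (operator instanceof BoundedOneInput) {
			((BoundedOneInput) operator).endInput();
		} else if (operator instanceof BoundedMultiInput) {
			((BoundedMultiInput) operator).endInput(inputId);
		}
	}
}
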
......@@ -453,8 +453,7 @@ public abstract class StreamTask<OUT, OP extends StreamOperator<OUT>>
// both the following operations are protected by the lock
// so that we avoid race conditions in the case that initializeState()
// registers a timer that fires before open() is called.
initializeStateAndOpen();
operatorChain.initializeStateAndOpenOperators();
});
}
......@@ -494,14 +493,12 @@ public abstract class StreamTask<OUT, OP extends StreamOperator<OUT>>
final CompletableFuture<Void> timersFinishedFuture = new CompletableFuture<>();
// close all operators in a chained (cascading) way
operatorChain.closeOperators(actionExecutor);
// make sure no further checkpoint and notification actions happen.
// we make sure that no other thread is currently in the locked scope before
// we close the operators by trying to acquire the checkpoint scope lock
// we also need to make sure that no triggers fire concurrently with the close logic
// at the same time, this makes sure that during any "regular" exit where still
actionExecutor.runThrowing(() -> {
// this is part of the main logic, so if this fails, the task is considered failed
closeAllOperators();
// make sure no new timers can come
FutureUtils.forward(timerService.quiesce(), timersFinishedFuture);
......@@ -608,32 +605,6 @@ public abstract class StreamTask<OUT, OP extends StreamOperator<OUT>>
return canceled;
}
/**
* Execute {@link StreamOperator#close()} of each operator in the chain of this
* {@link StreamTask}. Closing happens from <b>head to tail</b> operator in the chain,
* contrary to {@link StreamOperator#open()} which happens <b>tail to head</b>
* (see {@link #initializeStateAndOpen()}).
*/
private void closeAllOperators() throws Exception {
// We need to close them first to last, since upstream operators in the chain might emit
// elements in their close methods.
boolean isHeadOperator = true;
for (StreamOperatorWrapper<?, ?> operatorWrapper : operatorChain.getAllOperators()) {
StreamOperator<?> operator = operatorWrapper.getStreamOperator();
// The operators on the chain, except for the head operator, must be one-input operators.
// So after the upstream operator on the chain is closed, the input of its downstream operator
// reaches the end.
if (!isHeadOperator) {
operatorChain.endNonHeadOperatorInput(operator);
} else {
isHeadOperator = false;
}
operator.close();
}
}
private void shutdownAsyncThreads() throws Exception {
if (!asyncOperationsThreadPool.isShutdown()) {
asyncOperationsThreadPool.shutdownNow();
......@@ -961,20 +932,6 @@ public abstract class StreamTask<OUT, OP extends StreamOperator<OUT>>
checkpointingOperation.executeCheckpointing();
}
/**
* Execute {@link StreamOperator#initializeState()} followed by {@link StreamOperator#open()} of each operator in
* the chain of this {@link StreamTask}. State initialization and opening happens from <b>tail to head</b> operator
* in the chain, contrary to {@link StreamOperator#close()} which happens <b>head to tail</b>
* (see {@link #closeAllOperators()}).
*/
private void initializeStateAndOpen() throws Exception {
for (StreamOperatorWrapper<?, ?> operatorWrapper : operatorChain.getAllOperators(true)) {
StreamOperator<?> operator = operatorWrapper.getStreamOperator();
operator.initializeState();
operator.open();
}
}
// ------------------------------------------------------------------------
// Operator Events
// ------------------------------------------------------------------------
......
......@@ -276,6 +276,7 @@ public class AsyncWaitOperatorTest extends TestLogger {
// wait until all async collectors in the buffer have been emitted out.
synchronized (testHarness.getCheckpointLock()) {
testHarness.endInput();
testHarness.close();
}
......@@ -347,6 +348,7 @@ public class AsyncWaitOperatorTest extends TestLogger {
expectedOutput.add(new StreamRecord<>(16, initialTime + 8));
synchronized (testHarness.getCheckpointLock()) {
testHarness.endInput();
testHarness.close();
}
......@@ -665,6 +667,7 @@ public class AsyncWaitOperatorTest extends TestLogger {
// wait until all async collectors in the buffer have been emitted out.
synchronized (testHarness.getCheckpointLock()) {
testHarness.endInput();
testHarness.close();
}
......@@ -698,6 +701,7 @@ public class AsyncWaitOperatorTest extends TestLogger {
}
synchronized (harness.getCheckpointLock()) {
harness.endInput();
harness.close();
}
......@@ -861,6 +865,7 @@ public class AsyncWaitOperatorTest extends TestLogger {
}
synchronized (recoverHarness.getCheckpointLock()) {
recoverHarness.endInput();
recoverHarness.close();
}
......
......@@ -39,7 +39,7 @@ import org.apache.flink.util.function.RunnableWithException;
import org.junit.Test;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.RejectedExecutionException;
import java.util.stream.Collectors;
......@@ -60,32 +60,44 @@ public class MailboxOperatorTest extends TestLogger {
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO);
testHarness.setupOperatorChain(new OperatorID(), new ReplicatingMailOperatorFactory())
.chain(new OperatorID(), new ReplicatingMailOperatorFactory(), IntSerializer.INSTANCE)
final int maxProcessingElements = 3;
testHarness.setupOperatorChain(new OperatorID(), new ReplicatingMailOperatorFactory(maxProcessingElements))
.chain(new OperatorID(), new ReplicatingMailOperatorFactory(maxProcessingElements), IntSerializer.INSTANCE)
.finish();
testHarness.invoke();
testHarness.waitForTaskRunning();
testHarness.processElement(new StreamRecord<>(0));
testHarness.processElement(new StreamRecord<>(0));
testHarness.processElement(new StreamRecord<>(0));
for (int i = 0; i < maxProcessingElements; i++) {
testHarness.processElement(new StreamRecord<>(0));
}
testHarness.endInput();
testHarness.waitForTaskCompletion();
// with each input, two mails should be processed, one from each operator in the chain
List<Integer> expected = new ArrayList<>();
for (int i = 0; i < maxProcessingElements; i++) {
expected.add(i * 2);
}
List<Integer> numMailsProcessed = testHarness.getOutput().stream()
.map(element -> ((StreamRecord<Integer>) element).getValue())
.collect(Collectors.toList());
assertThat(numMailsProcessed, is(Arrays.asList(0, 2, 4)));
assertThat(numMailsProcessed, is(expected));
}
private static class ReplicatingMailOperatorFactory extends AbstractStreamOperatorFactory<Integer>
implements OneInputStreamOperatorFactory<Integer, Integer>, YieldingOperatorFactory<Integer> {
private final int maxProcessingElements;
private MailboxExecutor mailboxExecutor;
ReplicatingMailOperatorFactory(final int maxProcessingElements) {
this.maxProcessingElements = maxProcessingElements;
}
@Override
public void setMailboxExecutor(MailboxExecutor mailboxExecutor) {
this.mailboxExecutor = mailboxExecutor;
......@@ -96,7 +108,7 @@ public class MailboxOperatorTest extends TestLogger {
StreamTask<?, ?> containingTask,
StreamConfig config,
Output<StreamRecord<Integer>> output) {
ReplicatingMailOperator operator = new ReplicatingMailOperator(mailboxExecutor);
ReplicatingMailOperator operator = new ReplicatingMailOperator(maxProcessingElements, mailboxExecutor);
operator.setProcessingTimeService(processingTimeService);
operator.setup(containingTask, config, output);
return (Operator) operator;
......@@ -114,25 +126,42 @@ public class MailboxOperatorTest extends TestLogger {
private static class ReplicatingMailOperator extends AbstractStreamOperator<Integer>
implements OneInputStreamOperator<Integer, Integer> {
private final int maxProcessingElements;
private final ReplicatingMail replicatingMail;
ReplicatingMailOperator(final MailboxExecutor mailboxExecutor) {
replicatingMail = new ReplicatingMail(mailboxExecutor);
private long numProcessedElements = 0;
ReplicatingMailOperator(final int maxProcessingElements, final MailboxExecutor mailboxExecutor) {
this.maxProcessingElements = maxProcessingElements;
this.replicatingMail = new ReplicatingMail(mailboxExecutor);
}
@Override
public void processElement(StreamRecord<Integer> upstreamMailCount) throws Exception {
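// elements beyond the configured maximum are ignored, so the test processes a
// deterministic number of elements before the replicating mail is stopped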
if (numProcessedElements >= maxProcessingElements) {
return;
}
// for the very first element, enqueue one mail that replicates itself
if (!replicatingMail.hasBeenEnqueued()) {
replicatingMail.run();
}
// output how many mails have been processed so far (from upstream and this operator)
output.collect(new StreamRecord<>(replicatingMail.getMailCount() + upstreamMailCount.getValue()));
if (++numProcessedElements == maxProcessingElements) {
replicatingMail.stop();
}
}
}
private static class ReplicatingMail implements RunnableWithException {
private int mailCount = -1;
private boolean stopped = false;
private final MailboxExecutor mailboxExecutor;
ReplicatingMail(final MailboxExecutor mailboxExecutor) {
......@@ -142,7 +171,9 @@ public class MailboxOperatorTest extends TestLogger {
@Override
public void run() {
try {
mailboxExecutor.execute(this, "Blocking mail" + ++mailCount);
if (!stopped) {
mailboxExecutor.execute(this, "Blocking mail" + ++mailCount);
}
} catch (RejectedExecutionException e) {
// during shutdown the executor will reject new mails, which is fine for us.
}
......@@ -155,5 +186,9 @@ public class MailboxOperatorTest extends TestLogger {
int getMailCount() {
return mailCount;
}
void stop() {
stopped = true;
}
}
}
......@@ -84,6 +84,7 @@ import scala.concurrent.duration.Deadline;
import scala.concurrent.duration.FiniteDuration;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
......@@ -602,7 +603,7 @@ public class OneInputStreamTaskTest extends TestLogger {
}
@Test
public void testHandlingEndOfInput() throws Exception {
public void testClosingAllOperatorsOnChainProperly() throws Exception {
final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
OneInputStreamTask::new,
BasicTypeInfo.STRING_TYPE_INFO,
......@@ -616,8 +617,6 @@ public class OneInputStreamTaskTest extends TestLogger {
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
.finish();
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.invoke();
testHarness.waitForTaskRunning();
......@@ -626,15 +625,16 @@ public class OneInputStreamTaskTest extends TestLogger {
testHarness.waitForTaskCompletion();
expectedOutput.add(new StreamRecord<>("Hello"));
expectedOutput.add(new StreamRecord<>("[Operator0]: EndOfInput"));
expectedOutput.add(new StreamRecord<>("[Operator0]: Bye"));
expectedOutput.add(new StreamRecord<>("[Operator1]: EndOfInput"));
expectedOutput.add(new StreamRecord<>("[Operator1]: Bye"));
ArrayList<StreamRecord<String>> expected = new ArrayList<>();
Collections.addAll(expected,
new StreamRecord<>("Hello"),
new StreamRecord<>("[Operator0]: End of input"),
new StreamRecord<>("[Operator0]: Bye"),
new StreamRecord<>("[Operator1]: End of input"),
new StreamRecord<>("[Operator1]: Bye"));
TestHarnessUtil.assertOutputEquals("Output was not correct.",
expectedOutput,
testHarness.getOutput());
final Object[] output = testHarness.getOutput().toArray();
assertArrayEquals("Output was not correct.", expected.toArray(), output);
}
private static class TestOperator
......
......@@ -25,6 +25,7 @@ import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.testutils.MultiShotLatch;
import org.apache.flink.runtime.checkpoint.CheckpointMetaData;
import org.apache.flink.runtime.checkpoint.CheckpointOptions;
import org.apache.flink.runtime.execution.CancelTaskException;
......@@ -51,6 +52,7 @@ import javax.annotation.Nonnull;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
......@@ -66,6 +68,7 @@ import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicLong;
import static org.apache.flink.util.Preconditions.checkState;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
......@@ -170,7 +173,7 @@ public class SourceStreamTaskTest {
}
@Test
public void testMarkingEndOfInput() throws Exception {
public void testClosingAllOperatorsOnChainProperly() throws Exception {
final StreamTaskTestHarness<String> testHarness = new StreamTaskTestHarness<>(
SourceStreamTask::new,
BasicTypeInfo.STRING_TYPE_INFO);
......@@ -189,20 +192,19 @@ public class SourceStreamTaskTest {
StreamConfig streamConfig = testHarness.getStreamConfig();
streamConfig.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.invoke();
testHarness.waitForTaskCompletion();
expectedOutput.add(new StreamRecord<>("Hello"));
expectedOutput.add(new StreamRecord<>("[Source0]: EndOfInput"));
expectedOutput.add(new StreamRecord<>("[Source0]: Bye"));
expectedOutput.add(new StreamRecord<>("[Operator1]: EndOfInput"));
expectedOutput.add(new StreamRecord<>("[Operator1]: Bye"));
ArrayList<StreamRecord<String>> expected = new ArrayList<>();
Collections.addAll(expected,
new StreamRecord<>("Hello"),
new StreamRecord<>("[Source0]: End of input"),
new StreamRecord<>("[Source0]: Bye"),
new StreamRecord<>("[Operator1]: End of input"),
new StreamRecord<>("[Operator1]: Bye"));
TestHarnessUtil.assertOutputEquals("Output was not correct.",
expectedOutput,
testHarness.getOutput());
final Object[] output = testHarness.getOutput().toArray();
assertArrayEquals("Output was not correct.", expected.toArray(), output);
}
@Test
......@@ -228,7 +230,7 @@ public class SourceStreamTaskTest {
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.invoke();
CancelTestSource.getDataProcessing().get();
CancelTestSource.getDataProcessing().await();
testHarness.getTask().cancel();
try {
......@@ -616,9 +618,9 @@ public class SourceStreamTaskTest {
private static class CancelTestSource extends FromElementsFunction<String> {
private static final long serialVersionUID = 8713065281092996067L;
private static CompletableFuture<Void> dataProcessing = new CompletableFuture<>();
private static MultiShotLatch dataProcessing = new MultiShotLatch();
private static CompletableFuture<Void> cancellationWaiting = new CompletableFuture<>();
private static MultiShotLatch cancellationWaiting = new MultiShotLatch();
public CancelTestSource(TypeSerializer<String> serializer, String... elements) throws IOException {
super(serializer, elements);
......@@ -628,18 +630,17 @@ public class SourceStreamTaskTest {
public void run(SourceContext<String> ctx) throws Exception {
super.run(ctx);
dataProcessing.complete(null);
cancellationWaiting.get();
dataProcessing.trigger();
cancellationWaiting.await();
}
@Override
public void cancel() {
super.cancel();
cancellationWaiting.complete(null);
cancellationWaiting.trigger();
}
public static CompletableFuture<Void> getDataProcessing() {
public static MultiShotLatch getDataProcessing() {
return dataProcessing;
}
}
......@@ -696,12 +697,20 @@ public class SourceStreamTaskTest {
@Override
public void endInput() {
output.collect(new StreamRecord<>("[" + name + "]: EndOfInput"));
output("[" + name + "]: End of input");
}
@Override
public void close() {
output.collect(new StreamRecord<>("[" + name + "]: Bye"));
public void close() throws Exception {
ProcessingTimeService timeService = getProcessingTimeService();
timeService.registerTimer(timeService.getCurrentProcessingTime(), t -> output("[" + name + "]: Timer registered in close"));
output("[" + name + "]: Bye");
super.close();
}
private void output(String record) {
output.collect(new StreamRecord<>(record));
}
}
}
......
......@@ -44,12 +44,19 @@ public class TestBoundedOneInputStreamOperator extends AbstractStreamOperator<St
@Override
public void endInput() {
output.collect(new StreamRecord<>("[" + name + "]: EndOfInput"));
output("[" + name + "]: End of input");
}
@Override
public void close() throws Exception {
output.collect(new StreamRecord<>("[" + name + "]: Bye"));
ProcessingTimeService timeService = getProcessingTimeService();
timeService.registerTimer(timeService.getCurrentProcessingTime(), t -> output("[" + name + "]: Timer registered in close"));
output("[" + name + "]: Bye");
super.close();
}
private void output(String record) {
output.collect(new StreamRecord<>(record));
}
}
......@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.flink.streaming.util;
package org.apache.flink.streaming.runtime.tasks;
import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
import org.apache.flink.streaming.api.operators.BoundedMultiInput;
......@@ -49,12 +49,19 @@ public class TestBoundedTwoInputOperator extends AbstractStreamOperator<String>
@Override
public void endInput(int inputId) {
output.collect(new StreamRecord<>("[" + name + "-" + inputId + "]: EndOfInput"));
output("[" + name + "-" + inputId + "]: End of input");
}
@Override
public void close() throws Exception {
ProcessingTimeService timeService = getProcessingTimeService();
timeService.registerTimer(timeService.getCurrentProcessingTime(), t -> output("[" + name + "]: Timer registered in close"));
output.collect(new StreamRecord<>("[" + name + "]: Bye"));
super.close();
}
private void output(String record) {
output.collect(new StreamRecord<>(record));
}
}
......@@ -50,14 +50,15 @@ import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.io.StreamTwoInputProcessor;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.streamstatus.StreamStatus;
import org.apache.flink.streaming.util.TestBoundedTwoInputOperator;
import org.apache.flink.streaming.util.TestHarnessUtil;
import org.hamcrest.collection.IsMapContaining;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
......@@ -65,6 +66,7 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
/**
......@@ -579,7 +581,7 @@ public class TwoInputStreamTaskTest {
}
@Test
public void testHandlingEndOfInput() throws Exception {
public void testClosingAllOperatorsOnChainProperly() throws Exception {
final TwoInputStreamTaskTestHarness<String, String, String> testHarness = new TwoInputStreamTaskTestHarness<>(
TwoInputStreamTask::new,
BasicTypeInfo.STRING_TYPE_INFO,
......@@ -596,8 +598,6 @@ public class TwoInputStreamTaskTest {
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
.finish();
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.invoke();
testHarness.waitForTaskRunning();
......@@ -611,17 +611,18 @@ public class TwoInputStreamTaskTest {
testHarness.waitForTaskCompletion();
expectedOutput.add(new StreamRecord<>("[Operator0-1]: Hello-1"));
expectedOutput.add(new StreamRecord<>("[Operator0-1]: EndOfInput"));
expectedOutput.add(new StreamRecord<>("[Operator0-2]: Hello-2"));
expectedOutput.add(new StreamRecord<>("[Operator0-2]: EndOfInput"));
expectedOutput.add(new StreamRecord<>("[Operator0]: Bye"));
expectedOutput.add(new StreamRecord<>("[Operator1]: EndOfInput"));
expectedOutput.add(new StreamRecord<>("[Operator1]: Bye"));
TestHarnessUtil.assertOutputEquals("Output was not correct.",
expectedOutput,
testHarness.getOutput());
ArrayList<StreamRecord<String>> expected = new ArrayList<>();
Collections.addAll(expected,
new StreamRecord<>("[Operator0-1]: Hello-1"),
new StreamRecord<>("[Operator0-1]: End of input"),
new StreamRecord<>("[Operator0-2]: Hello-2"),
new StreamRecord<>("[Operator0-2]: End of input"),
new StreamRecord<>("[Operator0]: Bye"),
new StreamRecord<>("[Operator1]: End of input"),
new StreamRecord<>("[Operator1]: Bye"));
final Object[] output = testHarness.getOutput().toArray();
assertArrayEquals("Output was not correct.", expected.toArray(), output);
}
/**
......
......@@ -21,6 +21,7 @@ package org.apache.flink.streaming.util;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.operators.testutils.MockEnvironment;
import org.apache.flink.streaming.api.operators.BoundedOneInput;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.api.operators.OneInputStreamOperatorFactory;
import org.apache.flink.streaming.api.watermark.Watermark;
......@@ -170,6 +171,12 @@ public class OneInputStreamOperatorTestHarness<IN, OUT>
getOneInputOperator().processWatermark(mark);
}
public void endInput() throws Exception {
if (operator instanceof BoundedOneInput) {
((BoundedOneInput) operator).endInput();
}
}
public long getCurrentWatermark() {
return currentWatermark;
}
......
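
The harness tests in this commit now signal end of input explicitly before closing. A usage sketch of the new method (HarnessEndInputSketch is a made-up name, and StreamMap serves only as a stand-in operator; endInput() is a no-op for operators that do not implement BoundedOneInput):

import org.apache.flink.streaming.api.operators.StreamMap;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;

// Usage sketch: end the input explicitly, then close the harness.
public class HarnessEndInputSketch {

	public static void main(String[] args) throws Exception {
		OneInputStreamOperatorTestHarness<String, String> harness =
			new OneInputStreamOperatorTestHarness<>(new StreamMap<String, String>(String::toUpperCase));

		harness.open();
		harness.processElement(new StreamRecord<>("hello"));
		harness.endInput(); // calls BoundedOneInput#endInput() if the operator supports it
		harness.close();
	}
}
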
......@@ -234,6 +234,8 @@ public abstract class WindowOperator<K, W extends Window>
public void open() throws Exception {
super.open();
functionsClosed = false;
collector = new TimestampedCollector<>(output);
collector.eraseTimestamp();
......@@ -385,6 +387,10 @@ public abstract class WindowOperator<K, W extends Window>
@Override
public void onProcessingTime(InternalTimer<K, W> timer) throws Exception {
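// the user window function has already been closed (e.g. the task is shutting down),
// so a late processing-time timer must not invoke it again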
if (functionsClosed) {
return;
}
setCurrentKey(timer.getKey());
triggerContext.window = timer.getNamespace();
......@@ -630,7 +636,6 @@ public abstract class WindowOperator<K, W extends Window>
}
}
// ------------------------------------------------------------------------------
// Visible For Testing
// ------------------------------------------------------------------------------
......
......@@ -107,6 +107,7 @@ public class AsyncLookupJoinHarnessTest {
// wait until all async collectors in the buffer have been emitted out.
synchronized (testHarness.getCheckpointLock()) {
testHarness.endInput();
testHarness.close();
}
......@@ -137,6 +138,7 @@ public class AsyncLookupJoinHarnessTest {
// wait until all async collectors in the buffer have been emitted out.
synchronized (testHarness.getCheckpointLock()) {
testHarness.endInput();
testHarness.close();
}
......@@ -166,6 +168,7 @@ public class AsyncLookupJoinHarnessTest {
// wait until all async collectors in the buffer have been emitted out.
synchronized (testHarness.getCheckpointLock()) {
testHarness.endInput();
testHarness.close();
}
......@@ -198,6 +201,7 @@ public class AsyncLookupJoinHarnessTest {
// wait until all async collectors in the buffer have been emitted out.
synchronized (testHarness.getCheckpointLock()) {
testHarness.endInput();
testHarness.close();
}
......@@ -265,7 +269,6 @@ public class AsyncLookupJoinHarnessTest {
// ---------------------------------------------------------------------------------
/**
* The {@link TestingFetcherFunction} only accepts a single integer lookup key and
* returns zero, one, or more BaseRows.
......