Commit 9ed9ea6f authored by zhangminglei, committed by Tzu-Li (Gordon) Tai

[FLINK-6789] [optimizer] Remove duplicated test utility reducer in optimizer

This closes #4216.
Parent 2d275e0c
@@ -34,7 +34,7 @@ import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.optimizer.plan.OptimizedPlan;
 import org.apache.flink.optimizer.plan.SingleInputPlanNode;
 import org.apache.flink.optimizer.plan.SinkPlanNode;
-import org.apache.flink.optimizer.testfunctions.DummyReducer;
+import org.apache.flink.optimizer.testfunctions.SelectOneReducer;
 import org.apache.flink.optimizer.testfunctions.IdentityGroupReducerCombinable;
 import org.apache.flink.optimizer.util.CompilerTestBase;
 import org.apache.flink.runtime.operators.shipping.ShipStrategyType;
@@ -53,7 +53,7 @@ public class GroupingKeySelectorTranslationTest extends CompilerTestBase {
 		data.groupBy(new TestKeySelector<Tuple2<Integer,Integer>>())
 			.withPartitioner(new TestPartitionerInt())
-			.reduce(new DummyReducer<Tuple2<Integer,Integer>>())
+			.reduce(new SelectOneReducer<Tuple2<Integer,Integer>>())
 			.output(new DiscardingOutputFormat<Tuple2<Integer, Integer>>());

 		Plan p = env.createProgramPlan();
...
@@ -30,7 +30,7 @@ import org.apache.flink.api.java.io.DiscardingOutputFormat;
 import org.apache.flink.optimizer.plan.OptimizedPlan;
 import org.apache.flink.optimizer.plan.SingleInputPlanNode;
 import org.apache.flink.optimizer.plan.SinkPlanNode;
-import org.apache.flink.optimizer.testfunctions.DummyReducer;
+import org.apache.flink.optimizer.testfunctions.SelectOneReducer;
 import org.apache.flink.optimizer.testfunctions.IdentityGroupReducerCombinable;
 import org.apache.flink.optimizer.util.CompilerTestBase;
 import org.apache.flink.runtime.operators.shipping.ShipStrategyType;
@@ -48,7 +48,7 @@ public class GroupingPojoTranslationTest extends CompilerTestBase {
 			.rebalance().setParallelism(4);

 		data.groupBy("a").withPartitioner(new TestPartitionerInt())
-			.reduce(new DummyReducer<Pojo2>())
+			.reduce(new SelectOneReducer<Pojo2>())
 			.output(new DiscardingOutputFormat<Pojo2>());

 		Plan p = env.createProgramPlan();
@@ -255,4 +255,4 @@ public class GroupingPojoTranslationTest extends CompilerTestBase {
 			return 0;
 		}
 	}
-}
\ No newline at end of file
+}
@@ -33,7 +33,7 @@ import org.apache.flink.api.java.tuple.Tuple4;
 import org.apache.flink.optimizer.plan.OptimizedPlan;
 import org.apache.flink.optimizer.plan.SingleInputPlanNode;
 import org.apache.flink.optimizer.plan.SinkPlanNode;
-import org.apache.flink.optimizer.testfunctions.DummyReducer;
+import org.apache.flink.optimizer.testfunctions.SelectOneReducer;
 import org.apache.flink.optimizer.testfunctions.IdentityGroupReducerCombinable;
 import org.apache.flink.optimizer.util.CompilerTestBase;
 import org.apache.flink.runtime.operators.shipping.ShipStrategyType;
@@ -80,7 +80,7 @@ public class GroupingTupleTranslationTest extends CompilerTestBase {
 			.rebalance().setParallelism(4);

 		data.groupBy(0).withPartitioner(new TestPartitionerInt())
-			.reduce(new DummyReducer<Tuple2<Integer,Integer>>())
+			.reduce(new SelectOneReducer<Tuple2<Integer,Integer>>())
 			.output(new DiscardingOutputFormat<Tuple2<Integer, Integer>>());

 		Plan p = env.createProgramPlan();
...
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.optimizer.testfunctions;

import org.apache.flink.api.common.functions.RichReduceFunction;

public class DummyReducer<T> extends RichReduceFunction<T> {

	private static final long serialVersionUID = 1L;

	@Override
	public T reduce(T a, T b) {
		return a;
	}
}
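For context, the removed DummyReducer above simply returns the first of its two inputs. The tests now use the existing SelectOneReducer from the same testfunctions package; its implementation is not part of this diff, but a minimal sketch, assuming it keeps the same select-first semantics that DummyReducer had, would look like this:

package org.apache.flink.optimizer.testfunctions;

import org.apache.flink.api.common.functions.ReduceFunction;

// Hypothetical sketch only: the actual SelectOneReducer shipped with Flink's
// optimizer tests is not shown in this commit. This assumes it simply keeps the
// first value and discards the second, matching the removed DummyReducer.
public class SelectOneReducer<T> implements ReduceFunction<T> {

	private static final long serialVersionUID = 1L;

	@Override
	public T reduce(T value1, T value2) throws Exception {
		return value1;
	}
}

With select-first semantics, either reducer is interchangeable in these translation tests, since they only check the optimized plan's shipping strategies and not the reduce result, which is why the duplicate utility could be removed.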