Commit 97b58eb1 authored by nishantmonu51

Review comments:

1) Rename the firehose to IngestSegment
2) Fix segment overlap and interval handling
3) Fix overshadowing
Parent 1de39080
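The "fix overshadowing" item concerns resolving segments by version in the VersionedIntervalTimeline used below, so that a newer segment covering an interval hides ("overshadows") any older segment for the same interval and only the newest data is re-ingested. The following is a minimal, self-contained sketch of that idea only; it is not Druid's actual VersionedIntervalTimeline, and every name in it is illustrative.

// Sketch: keep, per interval, only the highest version seen, so older
// ("overshadowed") versions are ignored on lookup. Illustrative only.
import java.util.HashMap;
import java.util.Map;

public class OvershadowSketch
{
  // interval (kept as a plain string for brevity) -> highest version seen so far
  private final Map<String, String> latestVersionByInterval = new HashMap<>();

  public void add(String interval, String version)
  {
    latestVersionByInterval.merge(
        interval,
        version,
        (existing, candidate) -> candidate.compareTo(existing) > 0 ? candidate : existing
    );
  }

  public String lookup(String interval)
  {
    return latestVersionByInterval.get(interval);
  }

  public static void main(String[] args)
  {
    OvershadowSketch timeline = new OvershadowSketch();
    timeline.add("2014-01-01/2014-01-02", "v1");
    timeline.add("2014-01-01/2014-01-02", "v2"); // v2 overshadows v1
    System.out.println(timeline.lookup("2014-01-01/2014-01-02")); // prints v2
  }
}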
@@ -24,7 +24,7 @@ import com.fasterxml.jackson.databind.jsontype.NamedType;
 import com.fasterxml.jackson.databind.module.SimpleModule;
 import com.google.common.collect.ImmutableList;
 import com.google.inject.Binder;
-import io.druid.indexing.firehose.DruidFirehoseFactory;
+import io.druid.indexing.firehose.IngestSegmentFirehoseFactory;
 import io.druid.initialization.DruidModule;
 import io.druid.segment.realtime.firehose.EventReceiverFirehoseFactory;
@@ -39,7 +39,7 @@ public class IndexingServiceFirehoseModule implements DruidModule
         new SimpleModule("IndexingServiceFirehoseModule")
             .registerSubtypes(
                 new NamedType(EventReceiverFirehoseFactory.class, "receiver"),
-                new NamedType(DruidFirehoseFactory.class, "druid")
+                new NamedType(IngestSegmentFirehoseFactory.class, "ingestSegment")
             )
     );
   }
......
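The hunk above re-registers the factory under the spec type name "ingestSegment" in place of "druid". As a self-contained illustration of how Jackson's NamedType registration maps that "type" string to a concrete class, here is a small sketch; FirehoseSpec and its two implementations are hypothetical stand-ins, not Druid's real FirehoseFactory hierarchy.

import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.fasterxml.jackson.databind.module.SimpleModule;

public class SubtypeRegistrationSketch
{
  // Hypothetical polymorphic base; the "type" JSON field selects the subtype.
  @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
  public interface FirehoseSpec {}

  public static class ReceiverSpec implements FirehoseSpec {}

  public static class IngestSegmentSpec implements FirehoseSpec {}

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    mapper.registerModule(
        new SimpleModule("sketch")
            .registerSubtypes(
                new NamedType(ReceiverSpec.class, "receiver"),
                new NamedType(IngestSegmentSpec.class, "ingestSegment")
            )
    );
    // A spec whose "type" is "ingestSegment" now resolves to IngestSegmentSpec.
    FirehoseSpec spec = mapper.readValue("{\"type\": \"ingestSegment\"}", FirehoseSpec.class);
    System.out.println(spec.getClass().getSimpleName()); // IngestSegmentSpec
  }
}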
@@ -61,6 +61,7 @@ import io.druid.segment.loading.SegmentLoadingException;
 import io.druid.timeline.DataSegment;
 import io.druid.timeline.TimelineObjectHolder;
 import io.druid.timeline.VersionedIntervalTimeline;
+import io.druid.utils.Runnables;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
@@ -72,9 +73,9 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-public class DruidFirehoseFactory implements FirehoseFactory<InputRowParser>
+public class IngestSegmentFirehoseFactory implements FirehoseFactory<InputRowParser>
 {
-  private static final EmittingLogger log = new EmittingLogger(DruidFirehoseFactory.class);
+  private static final EmittingLogger log = new EmittingLogger(IngestSegmentFirehoseFactory.class);
   private final String dataSource;
   private final Interval interval;
   private final DimFilter dimFilter;
@@ -83,7 +84,7 @@ public class DruidFirehoseFactory implements FirehoseFactory<InputRowParser>
   private final Injector injector;
 
   @JsonCreator
-  public DruidFirehoseFactory(
+  public IngestSegmentFirehoseFactory(
       @JsonProperty("dataSource") final String dataSource,
       @JsonProperty("interval") Interval interval,
       @JsonProperty("filter") DimFilter dimFilter,
@@ -155,6 +156,9 @@ public class DruidFirehoseFactory implements FirehoseFactory<InputRowParser>
     VersionedIntervalTimeline<String, DataSegment> timeline = new VersionedIntervalTimeline<String, DataSegment>(
         Ordering.<String>natural().nullsFirst()
     );
+    final List<TimelineObjectHolder<String, DataSegment>> timeLineSegments = timeline.lookup(
+        interval
+    );
     for (DataSegment segment : usedSegments) {
       timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segment));
     }
@@ -163,8 +167,8 @@ public class DruidFirehoseFactory implements FirehoseFactory<InputRowParser>
       dims = dimensions;
     } else {
       Set<String> dimSet = new HashSet<>();
-      for (DataSegment segment : usedSegments) {
-        dimSet.addAll(segment.getDimensions());
+      for (TimelineObjectHolder<String, DataSegment> timelineObjectHolder : timeLineSegments) {
+        dimSet.addAll(timelineObjectHolder.getObject().getChunk(0).getObject().getDimensions());
       }
       dims = Lists.newArrayList(dimSet);
     }
@@ -174,15 +178,15 @@ public class DruidFirehoseFactory implements FirehoseFactory<InputRowParser>
       metricsList = metrics;
     } else {
       Set<String> metricsSet = new HashSet<>();
-      for (DataSegment segment : usedSegments) {
-        metricsSet.addAll(segment.getMetrics());
+      for (TimelineObjectHolder<String, DataSegment> timelineObjectHolder : timeLineSegments) {
+        metricsSet.addAll(timelineObjectHolder.getObject().getChunk(0).getObject().getDimensions());
       }
       metricsList = Lists.newArrayList(metricsSet);
     }
 
     final List<StorageAdapter> adapters = Lists.transform(
-        timeline.lookup(new Interval("1000-01-01/3000-01-01")),
+        timeLineSegments,
         new Function<TimelineObjectHolder<String, DataSegment>, StorageAdapter>()
         {
           @Override
@@ -234,11 +238,11 @@ public class DruidFirehoseFactory implements FirehoseFactory<InputRowParser>
            {
              @Nullable
              @Override
-             public Sequence<InputRow> apply(@Nullable StorageAdapter input)
+             public Sequence<InputRow> apply(@Nullable StorageAdapter adapter)
              {
                return Sequences.concat(
                    Sequences.map(
-                       input.makeCursors(
+                       adapter.makeCursors(
                            Filters.convertDimensionFilters(dimFilter),
                            interval,
                            QueryGranularity.ALL
@@ -246,24 +250,24 @@ public class DruidFirehoseFactory implements FirehoseFactory<InputRowParser>
                        {
                          @Nullable
                          @Override
-                         public Sequence<InputRow> apply(@Nullable Cursor input)
+                         public Sequence<InputRow> apply(@Nullable Cursor cursor)
                          {
-                           TimestampColumnSelector timestampColumnSelector = input.makeTimestampColumnSelector();
+                           TimestampColumnSelector timestampColumnSelector = cursor.makeTimestampColumnSelector();
 
                            Map<String, DimensionSelector> dimSelectors = Maps.newHashMap();
                            for (String dim : dims) {
-                             final DimensionSelector dimSelector = input.makeDimensionSelector(dim);
+                             final DimensionSelector dimSelector = cursor.makeDimensionSelector(dim);
                              dimSelectors.put(dim, dimSelector);
                            }
 
                            Map<String, ObjectColumnSelector> metSelectors = Maps.newHashMap();
                            for (String metric : metrics) {
-                             final ObjectColumnSelector metricSelector = input.makeObjectColumnSelector(metric);
+                             final ObjectColumnSelector metricSelector = cursor.makeObjectColumnSelector(metric);
                              metSelectors.put(metric, metricSelector);
                            }
 
                            List<InputRow> rowList = Lists.newArrayList();
-                           while (!input.isDone()) {
+                           while (!cursor.isDone()) {
                              final Map<String, Object> theEvent = Maps.newLinkedHashMap();
                              final long timestamp = timestampColumnSelector.getTimestamp();
                              theEvent.put(EventHolder.timestampKey, new DateTime(timestamp));
@@ -291,7 +295,7 @@ public class DruidFirehoseFactory implements FirehoseFactory<InputRowParser>
                                theEvent.put(metric, selector.get());
                              }
                              rowList.add(new MapBasedInputRow(timestamp, dims, theEvent));
-                             input.advance();
+                             cursor.advance();
                            }
                            return Sequences.simple(rowList);
                          }
@@ -333,14 +337,7 @@ public class DruidFirehoseFactory implements FirehoseFactory<InputRowParser>
   @Override
   public Runnable commit()
   {
-    return new Runnable()
-    {
-      @Override
-      public void run()
-      {
-        // Nothing to do.
-      }
-    };
+    return Runnables.getNoopRunnable();
   }
 
   @Override
......
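The last hunk replaces the anonymous no-op Runnable returned from commit() with Runnables.getNoopRunnable() from the newly imported io.druid.utils.Runnables. A minimal equivalent of such a helper might look like the sketch below; this is an assumption about the utility's shape, not the actual Druid source.

public final class Runnables
{
  // Shared do-nothing Runnable, equivalent to the anonymous class removed above.
  private static final Runnable NOOP = new Runnable()
  {
    @Override
    public void run()
    {
      // Nothing to do.
    }
  };

  private Runnables() {}

  public static Runnable getNoopRunnable()
  {
    return NOOP;
  }
}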