Commit 92bc3400 authored by F fjy

Merge branch 'master' into more-tests

@@ -28,7 +28,7 @@
     <parent>
         <groupId>io.druid</groupId>
         <artifactId>druid</artifactId>
-        <version>0.6.124-SNAPSHOT</version>
+        <version>0.6.129-SNAPSHOT</version>
     </parent>

     <dependencies>
......
@@ -28,7 +28,7 @@
     <parent>
         <groupId>io.druid</groupId>
         <artifactId>druid</artifactId>
-        <version>0.6.124-SNAPSHOT</version>
+        <version>0.6.129-SNAPSHOT</version>
     </parent>

     <dependencies>
......
@@ -24,6 +24,7 @@ import com.google.common.base.Throwables;
 import com.google.common.collect.Ordering;
 import com.google.common.io.Closeables;
 import com.metamx.common.guava.Accumulator;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.guava.Sequence;
 import com.metamx.common.guava.Yielder;
 import com.metamx.common.guava.Yielders;
@@ -70,7 +71,7 @@ public class OrderedMergeSequence<T> implements Sequence<T>
       return yielder.get();
     }
     finally {
-      Closeables.closeQuietly(yielder);
+      CloseQuietly.close(yielder);
     }
   }
......
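Editorial note: Guava's `Closeables.closeQuietly(Closeable)` was deprecated and later removed in newer Guava releases, which is why this commit migrates callers to `com.metamx.common.guava.CloseQuietly`. A minimal sketch of what such a helper does, assuming it follows the usual close-quietly pattern (the actual metamx implementation may differ):

```java
import java.io.Closeable;
import java.io.IOException;

public class CloseQuietlySketch
{
  // Close a resource from a finally block without letting a close() failure
  // mask an exception that is already propagating.
  public static void close(Closeable closeable)
  {
    if (closeable == null) {
      return;
    }
    try {
      closeable.close();
    }
    catch (IOException e) {
      // deliberately swallowed
    }
  }
}
```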
@@ -46,14 +46,9 @@ public class SerializerUtils
   public void writeString(OutputSupplier<? extends OutputStream> supplier, String name) throws IOException
   {
-    OutputStream out = null;
-    try {
-      out = supplier.getOutput();
+    try (OutputStream out = supplier.getOutput()) {
       writeString(out, name);
     }
-    finally {
-      Closeables.closeQuietly(out);
-    }
   }

   public void writeString(WritableByteChannel out, String name) throws IOException
......
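Editorial note (not part of the commit): besides being shorter, try-with-resources preserves failure information that `closeQuietly` in a `finally` block discards. If the body throws and `close()` also throws, the body's exception propagates and the `close()` failure is attached as a suppressed exception. A self-contained demonstration:

```java
public class SuppressedDemo
{
  public static void main(String[] args)
  {
    try (AutoCloseable resource = () -> { throw new IllegalStateException("close failed"); }) {
      throw new RuntimeException("body failed");
    }
    catch (Exception e) {
      System.out.println(e.getMessage());                    // prints "body failed"
      System.out.println(e.getSuppressed()[0].getMessage()); // prints "close failed"
    }
  }
}
```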
@@ -17,7 +17,7 @@
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  */

-package io.druid.server;
+package io.druid.common.utils;

 import java.lang.reflect.InvocationTargetException;
......
@@ -89,6 +89,9 @@ The interval is the [ISO8601 interval](http://en.wikipedia.org/wiki/ISO_8601#Tim
     "user": "username",
     "password": "passmeup",
     "segmentTable": "segments"
+  },
+  "jobProperties": {
+    "mapreduce.job.queuename": "default"
   }
 }
 ```
@@ -109,6 +112,7 @@ The interval is the [ISO8601 interval](http://en.wikipedia.org/wiki/ISO_8601#Tim
 |leaveIntermediate|leave behind files in the workingPath when job completes or fails (debugging tool).|no|
 |partitionsSpec|a specification of how to partition each time bucket into segments, absence of this property means no partitioning will occur.|no|
 |metadataUpdateSpec|a specification of how to update the metadata for the druid cluster these segments belong to.|yes|
+|jobProperties|a map of properties to add to the Hadoop job configuration.|no|

 ### Path specification
......
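Editorial note on `jobProperties` (an illustrative sketch, not part of the commit): per the new table row, each entry is added to the Hadoop job configuration before the job is submitted, so any standard Hadoop property can be set per ingestion job. The property names below are stock Hadoop 2.x settings; the values are placeholders:

```json
"jobProperties": {
  "mapreduce.job.queuename": "root.druid",
  "mapreduce.map.memory.mb": "2048"
}
```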
@@ -154,6 +154,7 @@ Druid storage nodes maintain information about segments they have already downlo
 |--------|-----------|-------|
 |`druid.segmentCache.locations`|Segments assigned to a Historical node are first stored on the local file system (in a disk cache) and then served by the Historical node. These locations define where that local cache resides.|none (no caching)|
 |`druid.segmentCache.deleteOnRemove`|Delete segment files from cache once a node is no longer serving a segment.|true|
+|`druid.segmentCache.dropSegmentDelayMillis`|How long a node delays before completely dropping a segment.|5 minutes|
 |`druid.segmentCache.infoDir`|Historical nodes keep track of the segments they are serving so that when the process is restarted they can reload the same segments without waiting for the Coordinator to reassign. This path defines where this metadata is kept. Directory will be created if needed.|${first_location}/info_dir|

 ### Jetty Server Module
......
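For reference, the new delay can be overridden in a Historical node's runtime.properties; the documented default of 5 minutes corresponds to 300000 milliseconds:

```bash
druid.segmentCache.dropSegmentDelayMillis=300000
```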
@@ -69,10 +69,9 @@ A sample worker setup spec is shown below:
         "keyName":"keyName"
     },
     "userData":{
-        "classType":"galaxy",
-        "env":"druid",
-        "version":"druid_version",
-        "type":"sample_cluster/worker"
+        "impl":"string",
+        "data":"version=:VERSION:",
+        "versionReplacementString":":VERSION:"
     }
 }
 ```
@@ -81,8 +80,8 @@ Issuing a GET request at the same URL will return the current worker setup spec
 |Property|Description|Default|
 |--------|-----------|-------|
-|`minVersion`|The coordinator only assigns tasks to workers with a version greater than the minVersion. If this is not specified, the minVersion will be the same as the coordinator version.|none|
+|`minVersion`|The coordinator only assigns tasks to workers with a version greater than the minVersion. If this is not specified, the minVersion will be druid.indexer.runner.minWorkerVersion.|none|
 |`minNumWorkers`|The minimum number of workers that can be in the cluster at any given time.|0|
 |`maxNumWorkers`|The maximum number of workers that can be in the cluster at any given time.|0|
-|`nodeData`|A JSON object that contains metadata about new nodes to create.|none|
-|`userData`|A JSON object that contains metadata about how the node should register itself on startup. This data is sent with node creation requests.|none|
+|`nodeData`|A JSON object that describes how to launch new nodes. Currently, only EC2 is supported.|none; required|
+|`userData`|A JSON object that describes how to configure new nodes. Currently, only EC2 is supported. If you have set druid.indexer.autoscale.workerVersion, this must have a versionReplacementString. Otherwise, a versionReplacementString is not necessary.|none; optional|
......
@@ -22,6 +22,7 @@ Additional peon configs include:
 |`druid.indexer.task.baseTaskDir`|Base temporary working directory for tasks.|/tmp/persistent/tasks|
 |`druid.indexer.task.hadoopWorkingPath`|Temporary working directory for Hadoop tasks.|/tmp/druid-indexing|
 |`druid.indexer.task.defaultRowFlushBoundary`|Highest row count before persisting to disk. Used for indexing generating tasks.|50000|
+|`druid.indexer.task.defaultHadoopCoordinates`|Hadoop version to use with HadoopIndexTasks that do not request a particular version.|org.apache.hadoop:hadoop-client:2.3.0|
 |`druid.indexer.task.chathandler.type`|Choices are "noop" and "announce". Certain tasks will use service discovery to announce an HTTP endpoint that events can be posted to.|noop|

 If the peon is running in remote mode, there must be an overlord up and running. Running peons in remote mode requires the following configurations:
......
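As a sketch (syntax assumed from the `druid.extensions.coordinates` examples elsewhere in these docs, where list-valued properties take a JSON array):

```bash
druid.indexer.task.defaultHadoopCoordinates=["org.apache.hadoop:hadoop-client:2.3.0"]
```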
@@ -153,7 +153,7 @@ druid.indexer.logs.s3Bucket=#{LOGS_BUCKET}
 druid.indexer.logs.s3Prefix=prod/logs/v1

 # Dedicate more resources to peons
-druid.indexer.runner.javaOpts=-server -Xmx6g -Xms6g -XX:NewSize=256m -XX:MaxNewSize=256m -XX:+PrintGCDetails -XX:+PrintGCTimeStamps
+druid.indexer.runner.javaOpts=-server -Xmx3g -XX:+UseG1GC -XX:MaxGCPauseMillis=100 -XX:+PrintGCDetails -XX:+PrintGCTimeStamps
 druid.indexer.runner.taskDir=/mnt/persistent/task/
 druid.indexer.task.taskDir=/mnt/persistent/task/
 druid.indexer.task.chathandler.type=announce
......
@@ -69,6 +69,20 @@ You should see a bunch of files:
 * run_example_client.sh
 * LICENSE, config, examples, lib directories

+Setting up Zookeeper
+--------------------
+
+Before we get started, we need to start Apache Zookeeper.
+
+```bash
+curl http://apache.osuosl.org/zookeeper/zookeeper-3.4.5/zookeeper-3.4.5.tar.gz -o zookeeper-3.4.5.tar.gz
+tar xzf zookeeper-3.4.5.tar.gz
+cd zookeeper-3.4.5
+cp conf/zoo_sample.cfg conf/zoo.cfg
+./bin/zkServer.sh start
+cd ..
+```
+
 Running Example Scripts
 -----------------------
......
@@ -85,28 +85,23 @@ config/overlord/runtime.properties
 The configurations for the overlord node are as follows:

 ```bash
--server
--Xmx256m
--Duser.timezone=UTC
--Dfile.encoding=UTF-8
-
--Ddruid.host=localhost
--Ddruid.port=8080
--Ddruid.service=overlord
-
--Ddruid.zk.service.host=localhost
-
--Ddruid.extensions.coordinates=["io.druid.extensions:druid-kafka-seven:0.6.121"]
-
--Ddruid.db.connector.connectURI=jdbc:mysql://localhost:3306/druid
--Ddruid.db.connector.user=druid
--Ddruid.db.connector.password=diurd
-
--Ddruid.selectors.indexing.serviceName=overlord
--Ddruid.indexer.queue.startDelay=PT0M
--Ddruid.indexer.runner.javaOpts="-server -Xmx1g"
--Ddruid.indexer.fork.property.druid.processing.numThreads=1
--Ddruid.indexer.fork.property.druid.computation.buffer.size=100000000
+druid.host=localhost
+druid.port=8087
+druid.service=overlord
+
+druid.zk.service.host=localhost
+
+druid.extensions.coordinates=["io.druid.extensions:druid-kafka-seven:0.6.121"]
+
+druid.db.connector.connectURI=jdbc:mysql://localhost:3306/druid
+druid.db.connector.user=druid
+druid.db.connector.password=diurd
+
+druid.selectors.indexing.serviceName=overlord
+druid.indexer.queue.startDelay=PT0M
+druid.indexer.runner.javaOpts="-server -Xmx256m"
+druid.indexer.fork.property.druid.processing.numThreads=1
+druid.indexer.fork.property.druid.computation.buffer.size=100000000
 ```

 If you are interested in reading more about these configurations, see [here](Indexing-Service.html).
......
@@ -48,7 +48,7 @@ CREATE database druid;
 #### Setting up Zookeeper

 ```bash
-curl http://www.motorlogy.com/apache/zookeeper/zookeeper-3.4.5/zookeeper-3.4.5.tar.gz -o zookeeper-3.4.5.tar.gz
+curl http://apache.osuosl.org/zookeeper/zookeeper-3.4.5/zookeeper-3.4.5.tar.gz -o zookeeper-3.4.5.tar.gz
 tar xzf zookeeper-3.4.5.tar.gz
 cd zookeeper-3.4.5
 cp conf/zoo_sample.cfg conf/zoo.cfg
@@ -120,7 +120,7 @@ druid.db.connector.connectURI=jdbc\:mysql\://localhost\:3306/druid
 druid.db.connector.user=druid
 druid.db.connector.password=diurd

-druid.coordinator.startDelay=PT60s
+druid.coordinator.startDelay=PT70s
 ```

 To start the coordinator node:
......
--server
--Xmx256m
--Duser.timezone=UTC
--Dfile.encoding=UTF-8
-
--Ddruid.host=localhost
--Ddruid.port=8080
--Ddruid.service=overlord
-
--Ddruid.zk.service.host=localhost
-
--Ddruid.extensions.coordinates=["io.druid.extensions:druid-kafka-seven:0.6.121"]
-
--Ddruid.db.connector.connectURI=jdbc:mysql://localhost:3306/druid
--Ddruid.db.connector.user=druid
--Ddruid.db.connector.password=diurd
-
--Ddruid.selectors.indexing.serviceName=overlord
--Ddruid.indexer.queue.startDelay=PT0M
--Ddruid.indexer.runner.javaOpts="-server -Xmx256m"
--Ddruid.indexer.fork.property.druid.processing.numThreads=1
--Ddruid.indexer.fork.property.druid.computation.buffer.size=100000000
+druid.host=localhost
+druid.port=8087
+druid.service=overlord
+
+druid.zk.service.host=localhost
+
+druid.extensions.coordinates=["io.druid.extensions:druid-kafka-seven:0.6.121"]
+
+druid.db.connector.connectURI=jdbc:mysql://localhost:3306/druid
+druid.db.connector.user=druid
+druid.db.connector.password=diurd
+
+druid.selectors.indexing.serviceName=overlord
+druid.indexer.queue.startDelay=PT0M
+druid.indexer.runner.javaOpts="-server -Xmx256m"
+druid.indexer.fork.property.druid.processing.numThreads=1
+druid.indexer.fork.property.druid.computation.buffer.size=100000000
\ No newline at end of file
@@ -28,7 +28,7 @@
     <parent>
         <groupId>io.druid</groupId>
         <artifactId>druid</artifactId>
-        <version>0.6.124-SNAPSHOT</version>
+        <version>0.6.129-SNAPSHOT</version>
     </parent>

     <dependencies>
......
@@ -28,7 +28,7 @@
     <parent>
         <groupId>io.druid</groupId>
         <artifactId>druid</artifactId>
-        <version>0.6.124-SNAPSHOT</version>
+        <version>0.6.129-SNAPSHOT</version>
     </parent>

     <dependencies>
......
@@ -21,6 +21,7 @@ package io.druid.storage.hdfs;
 import com.google.common.io.Closeables;
 import com.google.inject.Inject;
+import com.metamx.common.guava.CloseQuietly;
 import io.druid.segment.loading.DataSegmentPuller;
 import io.druid.segment.loading.SegmentLoadingException;
 import io.druid.timeline.DataSegment;
@@ -52,22 +53,17 @@ public class HdfsDataSegmentPuller implements DataSegmentPuller
     final FileSystem fs = checkPathAndGetFilesystem(path);

-    FSDataInputStream in = null;
-    try {
-      if (path.getName().endsWith(".zip")) {
-        in = fs.open(path);
-        CompressionUtils.unzip(in, dir);
-        in.close();
+    if (path.getName().endsWith(".zip")) {
+      try {
+        try (FSDataInputStream in = fs.open(path)) {
+          CompressionUtils.unzip(in, dir);
+        }
       }
-      else {
-        throw new SegmentLoadingException("Unknown file type[%s]", path);
+      catch (IOException e) {
+        throw new SegmentLoadingException(e, "Some IOException");
       }
-    }
-    catch (IOException e) {
-      throw new SegmentLoadingException(e, "Some IOException");
-    }
-    finally {
-      Closeables.closeQuietly(in);
+    } else {
+      throw new SegmentLoadingException("Unknown file type[%s]", path);
     }
   }
@@ -85,7 +81,8 @@ public class HdfsDataSegmentPuller implements DataSegmentPuller
     }
   }

-  private Path getPath(DataSegment segment) {
+  private Path getPath(DataSegment segment)
+  {
     return new Path(String.valueOf(segment.getLoadSpec().get("path")));
   }
......
@@ -26,6 +26,7 @@ import com.google.common.io.ByteStreams;
 import com.google.common.io.Closeables;
 import com.google.common.io.OutputSupplier;
 import com.google.inject.Inject;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.logger.Logger;
 import io.druid.segment.SegmentUtils;
 import io.druid.segment.loading.DataSegmentPusher;
@@ -78,17 +79,10 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher
     fs.mkdirs(outFile.getParent());
     log.info("Compressing files from[%s] to [%s]", inDir, outFile);

-    FSDataOutputStream out = null;
-    long size;
-    try {
-      out = fs.create(outFile);
-
+    long size;
+    try (FSDataOutputStream out = fs.create(outFile)) {
       size = CompressionUtils.zip(inDir, out);
-
-      out.close();
-    }
-    finally {
-      Closeables.closeQuietly(out);
     }

     return createDescriptorFile(
......
@@ -28,7 +28,7 @@
     <parent>
         <groupId>io.druid</groupId>
         <artifactId>druid</artifactId>
-        <version>0.6.124-SNAPSHOT</version>
+        <version>0.6.129-SNAPSHOT</version>
     </parent>

     <dependencies>
......
@@ -44,7 +44,9 @@ import io.druid.guice.JsonConfigProvider;
 import io.druid.guice.annotations.Self;
 import io.druid.indexer.partitions.PartitionsSpec;
 import io.druid.indexer.path.PathSpec;
+import io.druid.guice.GuiceInjectors;
 import io.druid.initialization.Initialization;
+import io.druid.segment.column.ColumnConfig;
 import io.druid.segment.indexing.granularity.GranularitySpec;
 import io.druid.server.DruidNode;
 import io.druid.timeline.DataSegment;
@@ -81,7 +83,7 @@ public class HadoopDruidIndexerConfig
   static {
     injector = Initialization.makeInjectorWithModules(
-        Initialization.makeStartupInjector(),
+        GuiceInjectors.makeStartupInjector(),
         ImmutableList.<Object>of(
             new Module()
             {
@@ -166,12 +168,14 @@ public class HadoopDruidIndexerConfig
   private volatile HadoopIngestionSpec schema;
   private volatile PathSpec pathSpec;
+  private volatile ColumnConfig columnConfig;

   @JsonCreator
   public HadoopDruidIndexerConfig(
       final @JsonProperty("schema") HadoopIngestionSpec schema
   )
   {
+    this.columnConfig = columnConfig;
     this.schema = schema;
     this.pathSpec = jsonMapper.convertValue(schema.getIOConfig().getPathSpec(), PathSpec.class);
   }
@@ -182,6 +186,11 @@ public class HadoopDruidIndexerConfig
     return schema;
   }

+  public ColumnConfig getColumnConfig()
+  {
+    return columnConfig;
+  }
+
   public String getDataSource()
   {
     return schema.getDataSchema().getDataSource();
......
@@ -31,6 +31,7 @@ import com.google.common.io.Closeables;
 import com.google.common.primitives.Longs;
 import com.metamx.common.IAE;
 import com.metamx.common.ISE;
+import com.metamx.common.guava.CloseQuietly;
 import com.metamx.common.logger.Logger;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.StringInputRowParser;
@@ -425,7 +426,7 @@ public class IndexGeneratorJob implements Jobby
           if (caughtException == null) {
             Closeables.close(out, false);
           } else {
-            Closeables.closeQuietly(out);
+            CloseQuietly.close(out);
             throw Throwables.propagate(caughtException);
           }
         }
@@ -605,7 +606,7 @@ public class IndexGeneratorJob implements Jobby
         }
       }
       finally {
-        Closeables.closeQuietly(in);
+        CloseQuietly.close(in);
       }
       out.closeEntry();
       context.progress();
......
@@ -45,7 +45,6 @@ public class HadoopDruidIndexerConfigTest
     }
   }

-
   @Test
   public void shouldMakeHDFSCompliantSegmentOutputPath()
   {
......
@@ -28,7 +28,7 @@
     <parent>
         <groupId>io.druid</groupId>
         <artifactId>druid</artifactId>
-        <version>0.6.124-SNAPSHOT</version>
+        <version>0.6.129-SNAPSHOT</version>
     </parent>

     <dependencies>
......
@@ -49,7 +49,6 @@ import java.io.IOException;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.concurrent.ExecutorService;

 /**
@@ -187,7 +186,8 @@ public class TaskToolbox
     return retVal;
   }

-  public void pushSegments(Iterable<DataSegment> segments) throws IOException {
+  public void pushSegments(Iterable<DataSegment> segments) throws IOException
+  {
     // Request segment pushes for each set
     final Multimap<Interval, DataSegment> segmentMultimap = Multimaps.index(
         segments,
......
@@ -29,6 +29,7 @@ import io.druid.indexing.common.actions.TaskActionClientFactory;
 import io.druid.indexing.common.config.TaskConfig;
 import io.druid.indexing.common.task.Task;
 import io.druid.query.QueryRunnerFactoryConglomerate;
+import io.druid.segment.column.ColumnConfig;
 import io.druid.segment.loading.DataSegmentArchiver;
 import io.druid.segment.loading.DataSegmentKiller;
 import io.druid.segment.loading.DataSegmentMover;
......
@@ -21,11 +21,17 @@ package io.druid.indexing.common.config;

 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.collect.ImmutableList;

 import java.io.File;
+import java.util.List;

 public class TaskConfig
 {
+  public static List<String> DEFAULT_DEFAULT_HADOOP_COORDINATES = ImmutableList.of(
+      "org.apache.hadoop:hadoop-client:2.3.0"
+  );
+
   @JsonProperty
   private final String baseDir;
@@ -38,40 +44,57 @@ public class TaskConfig
   @JsonProperty
   private final int defaultRowFlushBoundary;

+  @JsonProperty
+  private final List<String> defaultHadoopCoordinates;
+
   @JsonCreator
   public TaskConfig(
       @JsonProperty("baseDir") String baseDir,
       @JsonProperty("baseTaskDir") String baseTaskDir,
       @JsonProperty("hadoopWorkingPath") String hadoopWorkingPath,
-      @JsonProperty("defaultRowFlushBoundary") Integer defaultRowFlushBoundary
+      @JsonProperty("defaultRowFlushBoundary") Integer defaultRowFlushBoundary,
+      @JsonProperty("defaultHadoopCoordinates") List<String> defaultHadoopCoordinates
   )
   {
     this.baseDir = baseDir == null ? "/tmp" : baseDir;
     this.baseTaskDir = new File(defaultDir(baseTaskDir, "persistent/task"));
     this.hadoopWorkingPath = defaultDir(hadoopWorkingPath, "druid-indexing");
     this.defaultRowFlushBoundary = defaultRowFlushBoundary == null ? 500000 : defaultRowFlushBoundary;
+    this.defaultHadoopCoordinates = defaultHadoopCoordinates == null
+                                    ? DEFAULT_DEFAULT_HADOOP_COORDINATES
+                                    : defaultHadoopCoordinates;
   }

+  @JsonProperty
   public String getBaseDir()
   {
     return baseDir;
   }

+  @JsonProperty
   public File getBaseTaskDir()
   {
     return baseTaskDir;
   }

+  @JsonProperty
   public String getHadoopWorkingPath()
   {
     return hadoopWorkingPath;
   }

+  @JsonProperty
   public int getDefaultRowFlushBoundary()
   {
     return defaultRowFlushBoundary;
   }

+  @JsonProperty
+  public List<String> getDefaultHadoopCoordinates()
+  {
+    return defaultHadoopCoordinates;
+  }
+
   private String defaultDir(String configParameter, final String defaultVal)
   {
     if (configParameter == null) {
@@ -80,4 +103,4 @@ public class TaskConfig
     return configParameter;
   }
-}
\ No newline at end of file
+}
......
@@ -27,6 +27,7 @@ import com.google.api.client.util.Lists;
 import com.google.common.base.Joiner;
 import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import com.metamx.common.logger.Logger;
 import io.druid.common.utils.JodaUtils;
@@ -41,8 +42,9 @@ import io.druid.indexing.common.TaskToolbox;
 import io.druid.indexing.common.actions.LockAcquireAction;
 import io.druid.indexing.common.actions.LockTryAcquireAction;
 import io.druid.indexing.common.actions.TaskActionClient;
+import io.druid.guice.GuiceInjectors;
 import io.druid.initialization.Initialization;
-import io.druid.server.initialization.ExtensionsConfig;
+import io.druid.guice.ExtensionsConfig;
 import io.druid.timeline.DataSegment;
 import io.tesla.aether.internal.DefaultTeslaAether;
 import org.joda.time.DateTime;
@@ -62,11 +64,9 @@ public class HadoopIndexTask extends AbstractTask
   private static final ExtensionsConfig extensionsConfig;

   static {
-    extensionsConfig = Initialization.makeStartupInjector().getInstance(ExtensionsConfig.class);
+    extensionsConfig = GuiceInjectors.makeStartupInjector().getInstance(ExtensionsConfig.class);
   }

-  public static String DEFAULT_HADOOP_COORDINATES = "org.apache.hadoop:hadoop-client:2.3.0";
-
   private static String getTheDataSource(HadoopIngestionSpec spec, HadoopIngestionSpec config)
   {
     if (spec != null) {
@@ -115,9 +115,14 @@ public class HadoopIndexTask extends AbstractTask
     Preconditions.checkArgument(this.spec.getTuningConfig().getWorkingPath() == null, "workingPath must be absent");
     Preconditions.checkArgument(this.spec.getIOConfig().getMetadataUpdateSpec() == null, "updaterJobSpec must be absent");

-    this.hadoopDependencyCoordinates = hadoopDependencyCoordinates == null ? Arrays.<String>asList(
-        hadoopCoordinates == null ? DEFAULT_HADOOP_COORDINATES : hadoopCoordinates
-    ) : hadoopDependencyCoordinates;
+    if (hadoopDependencyCoordinates != null) {
+      this.hadoopDependencyCoordinates = hadoopDependencyCoordinates;
+    } else if (hadoopCoordinates != null) {
+      this.hadoopDependencyCoordinates = ImmutableList.of(hadoopCoordinates);
+    } else {
+      // Will be defaulted to something at runtime, based on taskConfig.
+      this.hadoopDependencyCoordinates = null;
+    }
   }

   @Override
@@ -158,6 +163,10 @@ public class HadoopIndexTask extends AbstractTask
   @Override
   public TaskStatus run(TaskToolbox toolbox) throws Exception
   {
+    final List<String> finalHadoopDependencyCoordinates = hadoopDependencyCoordinates != null
+                                                          ? hadoopDependencyCoordinates
+                                                          : toolbox.getConfig().getDefaultHadoopCoordinates();
+
     final DefaultTeslaAether aetherClient = Initialization.getAetherClient(extensionsConfig);

     final List<URL> extensionURLs = Lists.newArrayList();
@@ -174,7 +183,7 @@ public class HadoopIndexTask extends AbstractTask
     final List<URL> driverURLs = Lists.newArrayList();
     driverURLs.addAll(nonHadoopURLs);
     // put hadoop dependencies last to avoid jets3t & apache.httpcore version conflicts
-    for (String hadoopDependencyCoordinate : hadoopDependencyCoordinates) {
+    for (String hadoopDependencyCoordinate : finalHadoopDependencyCoordinates) {
       final ClassLoader hadoopLoader = Initialization.getClassLoaderForCoordinates(
           aetherClient, hadoopDependencyCoordinate
       );
......
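The net effect of the constructor and `run()` changes above is a three-level precedence for Hadoop coordinates, sketched here for clarity:

```java
// Sketch of the precedence implied by the diff above (not literal commit code):
// task-level settings win over the cluster-wide TaskConfig default.
List<String> coordinates;
if (hadoopDependencyCoordinates != null) {
  coordinates = hadoopDependencyCoordinates;               // explicit list on the task
} else if (hadoopCoordinates != null) {
  coordinates = ImmutableList.of(hadoopCoordinates);       // single coordinate on the task
} else {
  coordinates = taskConfig.getDefaultHadoopCoordinates();  // druid.indexer.task.defaultHadoopCoordinates
}
```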
@@ -19,14 +19,15 @@

 package io.druid.indexing.common.task;

+import com.fasterxml.jackson.annotation.JacksonInject;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
-import com.google.common.io.Closeables;
 import com.metamx.common.Granularity;
-import com.metamx.common.exception.FormattedException;
+import com.metamx.common.guava.CloseQuietly;
+import com.metamx.common.parsers.ParseException;
 import com.metamx.emitter.EmittingLogger;
 import io.druid.data.input.Firehose;
 import io.druid.data.input.FirehoseFactory;
@@ -43,9 +44,10 @@ import io.druid.query.QueryRunner;
 import io.druid.query.QueryRunnerFactory;
 import io.druid.query.QueryRunnerFactoryConglomerate;
 import io.druid.query.QueryToolChest;
+import io.druid.segment.column.ColumnConfig;
 import io.druid.segment.indexing.DataSchema;
-import io.druid.segment.indexing.RealtimeTuningConfig;
 import io.druid.segment.indexing.RealtimeIOConfig;
+import io.druid.segment.indexing.RealtimeTuningConfig;
 import io.druid.segment.indexing.granularity.UniformGranularitySpec;
 import io.druid.segment.realtime.FireDepartment;
 import io.druid.segment.realtime.FireDepartmentConfig;
@@ -353,7 +355,7 @@ public class RealtimeIndexTask extends AbstractTask
             nextFlush = new DateTime().plus(intermediatePersistPeriod).getMillis();
           }
         }
-        catch (FormattedException e) {
+        catch (ParseException e) {
           log.warn(e, "unparseable line");
           fireDepartment.getMetrics().incrementUnparseable();
         }
@@ -375,7 +377,7 @@ public class RealtimeIndexTask extends AbstractTask
       log.makeAlert(e, "Failed to finish realtime task").emit();
     }
     finally {
-      Closeables.closeQuietly(firehose);
+      CloseQuietly.close(firehose);
       toolbox.getMonitorScheduler().removeMonitor(metricsMonitor);
     }
   }
......
@@ -24,7 +24,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Sets;
 import com.metamx.common.guava.FunctionalIterable;
 import com.metamx.common.logger.Logger;
......
@@ -585,7 +585,7 @@ public class RemoteTaskRunner implements TaskRunner, TaskLogStreamer
     // Syncing state with Zookeeper - don't assign new tasks until the task we just assigned is actually running
     // on a worker - this avoids overflowing a worker with tasks
-    Stopwatch timeoutStopwatch = new Stopwatch();
+    Stopwatch timeoutStopwatch = Stopwatch.createUnstarted();
     timeoutStopwatch.start();
     synchronized (statusLock) {
       while (!isWorkerRunningTask(theWorker, task.getId())) {
......
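Editorial note: newer Guava releases deprecate and eventually remove the public `Stopwatch` constructor in favor of static factories, hence this change. The create-then-start pair can also be collapsed into one call:

```java
Stopwatch timeoutStopwatch = Stopwatch.createStarted(); // createUnstarted() + start()
```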
@@ -29,19 +29,14 @@ import com.amazonaws.services.ec2.model.Reservation;
 import com.amazonaws.services.ec2.model.RunInstancesRequest;
 import com.amazonaws.services.ec2.model.RunInstancesResult;
 import com.amazonaws.services.ec2.model.TerminateInstancesRequest;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Function;
 import com.google.common.base.Supplier;
 import com.google.common.collect.Lists;
 import com.google.inject.Inject;
 import com.metamx.emitter.EmittingLogger;
-import io.druid.guice.annotations.Json;
 import io.druid.indexing.overlord.setup.EC2NodeData;
-import io.druid.indexing.overlord.setup.GalaxyUserData;
 import io.druid.indexing.overlord.setup.WorkerSetupData;
-import org.apache.commons.codec.binary.Base64;

-import javax.annotation.Nullable;
 import java.util.List;

 /**
@@ -50,20 +45,17 @@ public class EC2AutoScalingStrategy implements AutoScalingStrategy
 {
   private static final EmittingLogger log = new EmittingLogger(EC2AutoScalingStrategy.class);

-  private final ObjectMapper jsonMapper;
   private final AmazonEC2 amazonEC2Client;
   private final SimpleResourceManagementConfig config;
   private final Supplier<WorkerSetupData> workerSetupDataRef;

   @Inject
   public EC2AutoScalingStrategy(
-      @Json ObjectMapper jsonMapper,
       AmazonEC2 amazonEC2Client,
       SimpleResourceManagementConfig config,
       Supplier<WorkerSetupData> workerSetupDataRef
   )
   {
-    this.jsonMapper = jsonMapper;
     this.amazonEC2Client = amazonEC2Client;
     this.config = config;
     this.workerSetupDataRef = workerSetupDataRef;
@@ -73,15 +65,21 @@ public class EC2AutoScalingStrategy implements AutoScalingStrategy
   public AutoScalingData provision()
   {
     try {
-      WorkerSetupData setupData = workerSetupDataRef.get();
-      EC2NodeData workerConfig = setupData.getNodeData();
+      final WorkerSetupData setupData = workerSetupDataRef.get();
+      final EC2NodeData workerConfig = setupData.getNodeData();
+      final String userDataBase64;

-      GalaxyUserData userData = setupData.getUserData();
-      if (config.getWorkerVersion() != null) {
-        userData = userData.withVersion(config.getWorkerVersion());
+      if (setupData.getUserData() == null) {
+        userDataBase64 = null;
+      } else {
+        if (config.getWorkerVersion() == null) {
+          userDataBase64 = setupData.getUserData().getUserDataBase64();
+        } else {
+          userDataBase64 = setupData.getUserData().withVersion(config.getWorkerVersion()).getUserDataBase64();
+        }
       }

-      RunInstancesResult result = amazonEC2Client.runInstances(
+      final RunInstancesResult result = amazonEC2Client.runInstances(
           new RunInstancesRequest(
               workerConfig.getAmiId(),
               workerConfig.getMinInstances(),
@@ -91,16 +89,10 @@ public class EC2AutoScalingStrategy implements AutoScalingStrategy
               .withSecurityGroupIds(workerConfig.getSecurityGroupIds())
               .withPlacement(new Placement(setupData.getAvailabilityZone()))
               .withKeyName(workerConfig.getKeyName())
-              .withUserData(
-                  Base64.encodeBase64String(
-                      jsonMapper.writeValueAsBytes(
-                          userData
-                      )
-                  )
-              )
+              .withUserData(userDataBase64)
       );

-      List<String> instanceIds = Lists.transform(
+      final List<String> instanceIds = Lists.transform(
           result.getReservation().getInstances(),
           new Function<Instance, String>()
           {
......
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.indexing.overlord.setup;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
/**
* Represents any user data that may be needed to launch EC2 instances.
*/
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "impl", defaultImpl = GalaxyEC2UserData.class)
@JsonSubTypes(value = {
@JsonSubTypes.Type(name = "galaxy", value = GalaxyEC2UserData.class),
@JsonSubTypes.Type(name = "string", value = StringEC2UserData.class)
})
public interface EC2UserData<T extends EC2UserData>
{
/**
* Return a copy of this instance with a different worker version. If no changes are needed (possibly because the
* user data does not depend on the worker version) then it is OK to return "this".
*/
public EC2UserData<T> withVersion(String version);
public String getUserDataBase64();
}
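
Editorial note: the `@JsonTypeInfo`/`@JsonSubTypes` annotations above let Jackson choose the concrete class from the `"impl"` field, while `defaultImpl` keeps older worker setup specs, which lack `"impl"`, deserializing as `GalaxyEC2UserData`. For example, the payload below (mirroring the doc sample earlier in this commit) yields a `StringEC2UserData`:

```json
{
  "impl": "string",
  "data": "version=:VERSION:",
  "versionReplacementString": ":VERSION:"
}
```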
@@ -19,24 +19,32 @@

 package io.druid.indexing.overlord.setup;

+import com.fasterxml.jackson.annotation.JacksonInject;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.api.client.repackaged.com.google.common.base.Throwables;
+import io.druid.guice.annotations.Json;
+import org.apache.commons.codec.binary.Base64;

 /**
  */
-public class GalaxyUserData
+public class GalaxyEC2UserData implements EC2UserData<GalaxyEC2UserData>
 {
-  public final String env;
-  public final String version;
-  public final String type;
+  private final ObjectMapper jsonMapper;
+  private final String env;
+  private final String version;
+  private final String type;

   @JsonCreator
-  public GalaxyUserData(
+  public GalaxyEC2UserData(
+      @JacksonInject @Json ObjectMapper jsonMapper,
       @JsonProperty("env") String env,
       @JsonProperty("version") String version,
       @JsonProperty("type") String type
   )
   {
+    this.jsonMapper = jsonMapper;
     this.env = env;
     this.version = version;
     this.type = type;
@@ -60,9 +68,21 @@ public class GalaxyUserData
     return type;
   }

-  public GalaxyUserData withVersion(String ver)
+  @Override
+  public GalaxyEC2UserData withVersion(String ver)
+  {
+    return new GalaxyEC2UserData(jsonMapper, env, ver, type);
+  }
+
+  @Override
+  public String getUserDataBase64()
   {
-    return new GalaxyUserData(env, ver, type);
+    try {
+      return Base64.encodeBase64String(jsonMapper.writeValueAsBytes(this));
+    }
+    catch (Exception e) {
+      throw Throwables.propagate(e);
+    }
   }

   @Override
......
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.indexing.overlord.setup;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.api.client.util.Charsets;
import org.apache.commons.codec.binary.Base64;
public class StringEC2UserData implements EC2UserData<StringEC2UserData>
{
private final String data;
private final String versionReplacementString;
private final String version;
@JsonCreator
public StringEC2UserData(
@JsonProperty("data") String data,
@JsonProperty("versionReplacementString") String versionReplacementString,
@JsonProperty("version") String version
)
{
this.data = data;
this.versionReplacementString = versionReplacementString;
this.version = version;
}
@JsonProperty
public String getData()
{
return data;
}
@JsonProperty
public String getVersionReplacementString()
{
return versionReplacementString;
}
@JsonProperty
public String getVersion()
{
return version;
}
@Override
public StringEC2UserData withVersion(final String _version)
{
return new StringEC2UserData(data, versionReplacementString, _version);
}
@Override
public String getUserDataBase64()
{
final String finalData;
if (versionReplacementString != null && version != null) {
finalData = data.replace(versionReplacementString, version);
} else {
finalData = data;
}
return Base64.encodeBase64String(finalData.getBytes(Charsets.UTF_8));
}
@Override
public String toString()
{
return "StringEC2UserData{" +
"data='" + data + '\'' +
", versionReplacementString='" + versionReplacementString + '\'' +
", version='" + version + '\'' +
'}';
}
}
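
A brief usage sketch (hypothetical values, not part of the commit) showing how `withVersion` and `getUserDataBase64` compose:

```java
StringEC2UserData userData = new StringEC2UserData("version=:VERSION:", ":VERSION:", null);
// Substitute the worker version, then base64-encode for the EC2 RunInstances call:
String b64 = userData.withVersion("0.6.129").getUserDataBase64();
// Decoding b64 yields "version=0.6.129".
```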
@@ -33,7 +33,7 @@ public class WorkerSetupData
   private final int maxNumWorkers;
   private final String availabilityZone;
   private final EC2NodeData nodeData;
-  private final GalaxyUserData userData;
+  private final EC2UserData userData;

   @JsonCreator
   public WorkerSetupData(
@@ -42,7 +42,7 @@ public class WorkerSetupData
       @JsonProperty("maxNumWorkers") int maxNumWorkers,
       @JsonProperty("availabilityZone") String availabilityZone,
       @JsonProperty("nodeData") EC2NodeData nodeData,
-      @JsonProperty("userData") GalaxyUserData userData
+      @JsonProperty("userData") EC2UserData userData
   )
   {
     this.minVersion = minVersion;
@@ -84,7 +84,7 @@ public class WorkerSetupData
   }

   @JsonProperty
-  public GalaxyUserData getUserData()
+  public EC2UserData getUserData()
   {
     return userData;
   }
......
@@ -19,19 +19,51 @@

 package io.druid.indexing.common;

+import com.fasterxml.jackson.databind.BeanProperty;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.InjectableValues;
+import com.fasterxml.jackson.databind.Module;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Stopwatch;
 import com.metamx.common.ISE;
+import io.druid.guice.ServerModule;
+import io.druid.jackson.DefaultObjectMapper;

+import java.util.List;
 import java.util.concurrent.TimeUnit;

 /**
  */
 public class TestUtils
 {
+  public static final ObjectMapper MAPPER = new DefaultObjectMapper();
+
+  static {
+    final List<? extends Module> list = new ServerModule().getJacksonModules();
+    for (Module module : list) {
+      MAPPER.registerModule(module);
+    }
+    MAPPER.setInjectableValues(
+        new InjectableValues()
+        {
+          @Override
+          public Object findInjectableValue(
+              Object valueId, DeserializationContext ctxt, BeanProperty forProperty, Object beanInstance
+          )
+          {
+            if (valueId.equals("com.fasterxml.jackson.databind.ObjectMapper")) {
+              return TestUtils.MAPPER;
+            }
+            throw new ISE("No Injectable value found");
+          }
+        }
+    );
+  }
+
   public static boolean conditionValid(IndexingServiceCondition condition)
   {
     try {
-      Stopwatch stopwatch = new Stopwatch();
+      Stopwatch stopwatch = Stopwatch.createUnstarted();
       stopwatch.start();
       while (!condition.isValid()) {
         Thread.sleep(100);
......
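A sketch of what this wiring enables in tests (assuming Jackson's default inject id, the fully-qualified type name, which matches the `valueId` check above): deserializing a class whose `@JsonCreator` takes a `@JacksonInject ObjectMapper`, such as `GalaxyEC2UserData`, now resolves that parameter to `TestUtils.MAPPER`:

```java
// No "impl" field, so defaultImpl routes this to GalaxyEC2UserData; its
// @JacksonInject ObjectMapper parameter is filled from the InjectableValues.
EC2UserData userData = TestUtils.MAPPER.readValue(
    "{\"env\": \"druid\", \"version\": \"0\", \"type\": \"worker\"}",
    EC2UserData.class
);
```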
...@@ -48,6 +48,8 @@ import java.io.File; ...@@ -48,6 +48,8 @@ import java.io.File;
public class TaskSerdeTest public class TaskSerdeTest
{ {
private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
@Test @Test
public void testIndexTaskSerde() throws Exception public void testIndexTaskSerde() throws Exception
{ {
...@@ -68,7 +70,6 @@ public class TaskSerdeTest ...@@ -68,7 +70,6 @@ public class TaskSerdeTest
-1 -1
); );
final ObjectMapper jsonMapper = new DefaultObjectMapper();
for (final Module jacksonModule : new FirehoseModule().getJacksonModules()) { for (final Module jacksonModule : new FirehoseModule().getJacksonModules()) {
jsonMapper.registerModule(jacksonModule); jsonMapper.registerModule(jacksonModule);
} }
...@@ -102,7 +103,6 @@ public class TaskSerdeTest ...@@ -102,7 +103,6 @@ public class TaskSerdeTest
) )
); );
final ObjectMapper jsonMapper = new DefaultObjectMapper();
final String json = jsonMapper.writeValueAsString(task); final String json = jsonMapper.writeValueAsString(task);
Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change
...@@ -131,7 +131,6 @@ public class TaskSerdeTest ...@@ -131,7 +131,6 @@ public class TaskSerdeTest
new Interval("2010-01-01/P1D") new Interval("2010-01-01/P1D")
); );
final ObjectMapper jsonMapper = new DefaultObjectMapper();
final String json = jsonMapper.writeValueAsString(task); final String json = jsonMapper.writeValueAsString(task);
Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change
...@@ -153,7 +152,6 @@ public class TaskSerdeTest ...@@ -153,7 +152,6 @@ public class TaskSerdeTest
DataSegment.builder().dataSource("foo").interval(new Interval("2010-01-01/P1D")).version("1234").build() DataSegment.builder().dataSource("foo").interval(new Interval("2010-01-01/P1D")).version("1234").build()
); );
final ObjectMapper jsonMapper = new DefaultObjectMapper();
final String json = jsonMapper.writeValueAsString(task); final String json = jsonMapper.writeValueAsString(task);
Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change
...@@ -177,7 +175,6 @@ public class TaskSerdeTest ...@@ -177,7 +175,6 @@ public class TaskSerdeTest
DataSegment.builder().dataSource("foo").interval(new Interval("2010-01-01/P1D")).version("1234").build() DataSegment.builder().dataSource("foo").interval(new Interval("2010-01-01/P1D")).version("1234").build()
); );
final ObjectMapper jsonMapper = new DefaultObjectMapper();
final String json = jsonMapper.writeValueAsString(task); final String json = jsonMapper.writeValueAsString(task);
Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change
...@@ -208,7 +205,6 @@ public class TaskSerdeTest ...@@ -208,7 +205,6 @@ public class TaskSerdeTest
null null
); );
final ObjectMapper jsonMapper = new DefaultObjectMapper();
final String json = jsonMapper.writeValueAsString(task); final String json = jsonMapper.writeValueAsString(task);
Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change
...@@ -251,7 +247,6 @@ public class TaskSerdeTest ...@@ -251,7 +247,6 @@ public class TaskSerdeTest
new Interval("2010-01-01/P1D") new Interval("2010-01-01/P1D")
); );
final ObjectMapper jsonMapper = new DefaultObjectMapper();
final String json = jsonMapper.writeValueAsString(task); final String json = jsonMapper.writeValueAsString(task);
Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change
...@@ -269,7 +264,6 @@ public class TaskSerdeTest ...@@ -269,7 +264,6 @@ public class TaskSerdeTest
@Test @Test
public void testDeleteTaskFromJson() throws Exception public void testDeleteTaskFromJson() throws Exception
{ {
final ObjectMapper jsonMapper = new DefaultObjectMapper();
final DeleteTask task = (DeleteTask) jsonMapper.readValue( final DeleteTask task = (DeleteTask) jsonMapper.readValue(
"{\"type\":\"delete\",\"dataSource\":\"foo\",\"interval\":\"2010-01-01/P1D\"}", "{\"type\":\"delete\",\"dataSource\":\"foo\",\"interval\":\"2010-01-01/P1D\"}",
Task.class Task.class
...@@ -300,7 +294,6 @@ public class TaskSerdeTest ...@@ -300,7 +294,6 @@ public class TaskSerdeTest
) )
); );
final ObjectMapper jsonMapper = new DefaultObjectMapper();
final String json = jsonMapper.writeValueAsString(task); final String json = jsonMapper.writeValueAsString(task);
Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change
...@@ -325,7 +318,6 @@ public class TaskSerdeTest ...@@ -325,7 +318,6 @@ public class TaskSerdeTest
new Interval("2010-01-01/P1D") new Interval("2010-01-01/P1D")
); );
final ObjectMapper jsonMapper = new DefaultObjectMapper();
final String json = jsonMapper.writeValueAsString(task); final String json = jsonMapper.writeValueAsString(task);
Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change
...@@ -350,7 +342,6 @@ public class TaskSerdeTest ...@@ -350,7 +342,6 @@ public class TaskSerdeTest
new Interval("2010-01-01/P1D") new Interval("2010-01-01/P1D")
); );
final ObjectMapper jsonMapper = new DefaultObjectMapper();
final String json = jsonMapper.writeValueAsString(task); final String json = jsonMapper.writeValueAsString(task);
Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change
...@@ -375,7 +366,6 @@ public class TaskSerdeTest ...@@ -375,7 +366,6 @@ public class TaskSerdeTest
ImmutableMap.<String, Object>of("bucket", "hey", "baseKey", "what") ImmutableMap.<String, Object>of("bucket", "hey", "baseKey", "what")
); );
final ObjectMapper jsonMapper = new DefaultObjectMapper();
final String json = jsonMapper.writeValueAsString(task); final String json = jsonMapper.writeValueAsString(task);
Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change
...@@ -434,7 +424,6 @@ public class TaskSerdeTest ...@@ -434,7 +424,6 @@ public class TaskSerdeTest
null null
); );
final ObjectMapper jsonMapper = new DefaultObjectMapper();
final String json = jsonMapper.writeValueAsString(task); final String json = jsonMapper.writeValueAsString(task);
final HadoopIndexTask task2 = (HadoopIndexTask) jsonMapper.readValue(json, Task.class); final HadoopIndexTask task2 = (HadoopIndexTask) jsonMapper.readValue(json, Task.class);
......
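A note on the TaskSerdeTest hunks above: they all make the same change, dropping the per-test `new DefaultObjectMapper()` in favor of the class-level `jsonMapper` that has the firehose Jackson modules registered once. A minimal sketch of the resulting round-trip pattern, with a hypothetical `MyTask` standing in for Druid's task types:

```
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Assert;
import org.junit.Test;

public class SharedMapperSerdeSketch
{
  // one shared, fully configured mapper instead of a fresh one per test
  private static final ObjectMapper jsonMapper = new ObjectMapper();

  // hypothetical stand-in for a Druid Task
  public static class MyTask
  {
    private final String id;

    @JsonCreator
    public MyTask(@JsonProperty("id") String id) { this.id = id; }

    @JsonProperty
    public String getId() { return id; }
  }

  @Test
  public void testRoundTrip() throws Exception
  {
    final MyTask task = new MyTask("foo");
    final String json = jsonMapper.writeValueAsString(task);
    final MyTask task2 = jsonMapper.readValue(json, MyTask.class);
    Assert.assertEquals(task.getId(), task2.getId());
  }
}
```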
...@@ -43,6 +43,7 @@ import io.druid.data.input.InputRow; ...@@ -43,6 +43,7 @@ import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.MapBasedInputRow;
import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.InputRowParser;
import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularity;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.indexing.granularity.UniformGranularitySpec; import io.druid.segment.indexing.granularity.UniformGranularitySpec;
import io.druid.indexing.common.SegmentLoaderFactory; import io.druid.indexing.common.SegmentLoaderFactory;
import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.TaskLock;
...@@ -135,7 +136,7 @@ public class TaskLifecycleTest ...@@ -135,7 +136,7 @@ public class TaskLifecycleTest
mdc = newMockMDC(); mdc = newMockMDC();
tac = new LocalTaskActionClientFactory(ts, new TaskActionToolbox(tl, mdc, newMockEmitter())); tac = new LocalTaskActionClientFactory(ts, new TaskActionToolbox(tl, mdc, newMockEmitter()));
tb = new TaskToolboxFactory( tb = new TaskToolboxFactory(
new TaskConfig(tmp.toString(), null, null, 50000), new TaskConfig(tmp.toString(), null, null, 50000, null),
tac, tac,
newMockEmitter(), newMockEmitter(),
new DataSegmentPusher() new DataSegmentPusher()
......
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.indexing.overlord;
import com.google.common.base.Charsets;
import io.druid.indexing.common.TestUtils;
import io.druid.indexing.overlord.setup.EC2UserData;
import io.druid.indexing.overlord.setup.GalaxyEC2UserData;
import io.druid.indexing.overlord.setup.StringEC2UserData;
import org.apache.commons.codec.binary.Base64;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
public class WorkerSetupDataTest
{
@Test
public void testGalaxyEC2UserDataSerde() throws IOException
{
final String json = "{\"env\":\"druid\",\"version\":null,\"type\":\"typical\"}";
final GalaxyEC2UserData userData = (GalaxyEC2UserData) TestUtils.MAPPER.readValue(json, EC2UserData.class);
Assert.assertEquals("druid", userData.getEnv());
Assert.assertEquals("typical", userData.getType());
Assert.assertNull(userData.getVersion());
Assert.assertEquals("1234", userData.withVersion("1234").getVersion());
}
@Test
public void testStringEC2UserDataSerde() throws IOException
{
final String json = "{\"impl\":\"string\",\"data\":\"hey :ver:\",\"versionReplacementString\":\":ver:\",\"version\":\"1234\"}";
final StringEC2UserData userData = (StringEC2UserData) TestUtils.MAPPER.readValue(json, EC2UserData.class);
Assert.assertEquals("hey :ver:", userData.getData());
Assert.assertEquals("1234", userData.getVersion());
Assert.assertEquals(
Base64.encodeBase64String("hey 1234".getBytes(Charsets.UTF_8)),
userData.getUserDataBase64()
);
Assert.assertEquals(
Base64.encodeBase64String("hey xyz".getBytes(Charsets.UTF_8)),
userData.withVersion("xyz").getUserDataBase64()
);
}
}
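Reading the string test back: the user data is a template ("hey :ver:"), the versionReplacementString is substituted with the version before base64 encoding, and withVersion produces a copy carrying a new version. A sketch of that behavior as the test implies it; this is an illustration, not the actual StringEC2UserData implementation:

```
import com.google.common.base.Charsets;
import org.apache.commons.codec.binary.Base64;

public class VersionedUserDataSketch
{
  private final String data;                      // e.g. "hey :ver:"
  private final String versionReplacementString;  // e.g. ":ver:"
  private final String version;                   // e.g. "1234"

  public VersionedUserDataSketch(String data, String versionReplacementString, String version)
  {
    this.data = data;
    this.versionReplacementString = versionReplacementString;
    this.version = version;
  }

  public VersionedUserDataSketch withVersion(String newVersion)
  {
    return new VersionedUserDataSketch(data, versionReplacementString, newVersion);
  }

  public String getUserDataBase64()
  {
    // substitute the version into the template, then base64-encode
    final String finalData = (versionReplacementString != null && version != null)
                             ? data.replace(versionReplacementString, version)
                             : data;
    return Base64.encodeBase64String(finalData.getBytes(Charsets.UTF_8));
  }
}
```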
...@@ -30,7 +30,7 @@ import com.amazonaws.services.ec2.model.TerminateInstancesRequest; ...@@ -30,7 +30,7 @@ import com.amazonaws.services.ec2.model.TerminateInstancesRequest;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import io.druid.common.guava.DSuppliers; import io.druid.common.guava.DSuppliers;
import io.druid.indexing.overlord.setup.EC2NodeData; import io.druid.indexing.overlord.setup.EC2NodeData;
import io.druid.indexing.overlord.setup.GalaxyUserData; import io.druid.indexing.overlord.setup.GalaxyEC2UserData;
import io.druid.indexing.overlord.setup.WorkerSetupData; import io.druid.indexing.overlord.setup.WorkerSetupData;
import io.druid.jackson.DefaultObjectMapper; import io.druid.jackson.DefaultObjectMapper;
import org.easymock.EasyMock; import org.easymock.EasyMock;
...@@ -75,7 +75,6 @@ public class EC2AutoScalingStrategyTest ...@@ -75,7 +75,6 @@ public class EC2AutoScalingStrategyTest
.withPrivateIpAddress(IP); .withPrivateIpAddress(IP);
strategy = new EC2AutoScalingStrategy( strategy = new EC2AutoScalingStrategy(
new DefaultObjectMapper(),
amazonEC2Client, amazonEC2Client,
new SimpleResourceManagementConfig().setWorkerPort(8080).setWorkerVersion(""), new SimpleResourceManagementConfig().setWorkerPort(8080).setWorkerVersion(""),
DSuppliers.of(workerSetupData) DSuppliers.of(workerSetupData)
...@@ -101,7 +100,7 @@ public class EC2AutoScalingStrategyTest ...@@ -101,7 +100,7 @@ public class EC2AutoScalingStrategyTest
1, 1,
"", "",
new EC2NodeData(AMI_ID, INSTANCE_ID, 1, 1, Lists.<String>newArrayList(), "foo"), new EC2NodeData(AMI_ID, INSTANCE_ID, 1, 1, Lists.<String>newArrayList(), "foo"),
new GalaxyUserData("env", "version", "type") new GalaxyEC2UserData(new DefaultObjectMapper(), "env", "version", "type")
) )
); );
......
...@@ -20,11 +20,17 @@ ...@@ -20,11 +20,17 @@
package io.druid.indexing.worker; package io.druid.indexing.worker;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.introspect.AnnotationIntrospectorPair;
import com.fasterxml.jackson.databind.introspect.GuiceAnnotationIntrospector;
import com.fasterxml.jackson.databind.introspect.GuiceInjectableValues;
import com.fasterxml.jackson.databind.jsontype.NamedType; import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.google.common.base.Joiner; import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.io.Files; import com.google.common.io.Files;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
import io.druid.curator.PotentiallyGzippedCompressionProvider; import io.druid.curator.PotentiallyGzippedCompressionProvider;
import io.druid.indexing.common.IndexingServiceCondition; import io.druid.indexing.common.IndexingServiceCondition;
import io.druid.indexing.common.SegmentLoaderFactory; import io.druid.indexing.common.SegmentLoaderFactory;
...@@ -38,6 +44,7 @@ import io.druid.indexing.overlord.TestRemoteTaskRunnerConfig; ...@@ -38,6 +44,7 @@ import io.druid.indexing.overlord.TestRemoteTaskRunnerConfig;
import io.druid.indexing.overlord.ThreadPoolTaskRunner; import io.druid.indexing.overlord.ThreadPoolTaskRunner;
import io.druid.indexing.worker.config.WorkerConfig; import io.druid.indexing.worker.config.WorkerConfig;
import io.druid.jackson.DefaultObjectMapper; import io.druid.jackson.DefaultObjectMapper;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.DataSegmentPuller;
import io.druid.segment.loading.LocalDataSegmentPuller; import io.druid.segment.loading.LocalDataSegmentPuller;
import io.druid.segment.loading.OmniSegmentLoader; import io.druid.segment.loading.OmniSegmentLoader;
...@@ -61,6 +68,40 @@ import java.util.List; ...@@ -61,6 +68,40 @@ import java.util.List;
public class WorkerTaskMonitorTest public class WorkerTaskMonitorTest
{ {
private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
private static final Injector injector = Guice.createInjector(
new com.google.inject.Module()
{
@Override
public void configure(Binder binder)
{
binder.bind(ColumnConfig.class).toInstance(
new ColumnConfig()
{
@Override
public int columnCacheSizeBytes()
{
return 1024 * 1024;
}
}
);
}
}
);
static {
final GuiceAnnotationIntrospector guiceIntrospector = new GuiceAnnotationIntrospector();
jsonMapper.setInjectableValues(new GuiceInjectableValues(injector));
jsonMapper.setAnnotationIntrospectors(
new AnnotationIntrospectorPair(
guiceIntrospector, jsonMapper.getSerializationConfig().getAnnotationIntrospector()
),
new AnnotationIntrospectorPair(
guiceIntrospector, jsonMapper.getDeserializationConfig().getAnnotationIntrospector()
)
);
}
private static final Joiner joiner = Joiner.on("/"); private static final Joiner joiner = Joiner.on("/");
private static final String basePath = "/test/druid"; private static final String basePath = "/test/druid";
private static final String tasksPath = String.format("%s/indexer/tasks/worker", basePath); private static final String tasksPath = String.format("%s/indexer/tasks/worker", basePath);
...@@ -121,7 +162,7 @@ public class WorkerTaskMonitorTest ...@@ -121,7 +162,7 @@ public class WorkerTaskMonitorTest
workerCuratorCoordinator, workerCuratorCoordinator,
new ThreadPoolTaskRunner( new ThreadPoolTaskRunner(
new TaskToolboxFactory( new TaskToolboxFactory(
new TaskConfig(tmp.toString(), null, null, 0), new TaskConfig(tmp.toString(), null, null, 0, null),
null, null, null, null, null, null, null, null, null, null, null, new SegmentLoaderFactory( null, null, null, null, null, null, null, null, null, null, null, new SegmentLoaderFactory(
new OmniSegmentLoader( new OmniSegmentLoader(
ImmutableMap.<String, DataSegmentPuller>of( ImmutableMap.<String, DataSegmentPuller>of(
......
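The point of wiring Guice into the mapper in the static block above: beans deserialized through it can declare dependencies with @JacksonInject and have them resolved from the injector's bindings (here, the ColumnConfig). A hedged sketch with a hypothetical bean:

```
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.druid.segment.column.ColumnConfig;

public class GuiceInjectedBean
{
  private final String name;
  private final ColumnConfig columnConfig;

  @JsonCreator
  public GuiceInjectedBean(
      @JsonProperty("name") String name,
      @JacksonInject ColumnConfig columnConfig // resolved from the Guice binding above, not from the JSON
  )
  {
    this.name = name;
    this.columnConfig = columnConfig;
  }

  public int cacheSize()
  {
    return columnConfig.columnCacheSizeBytes();
  }
}
```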
...@@ -28,7 +28,7 @@ ...@@ -28,7 +28,7 @@
<parent> <parent>
<groupId>io.druid</groupId> <groupId>io.druid</groupId>
<artifactId>druid</artifactId> <artifactId>druid</artifactId>
<version>0.6.124-SNAPSHOT</version> <version>0.6.129-SNAPSHOT</version>
</parent> </parent>
<dependencies> <dependencies>
......
...@@ -24,7 +24,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; ...@@ -24,7 +24,6 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.metamx.common.exception.FormattedException;
import com.metamx.common.logger.Logger; import com.metamx.common.logger.Logger;
import io.druid.data.input.ByteBufferInputRowParser; import io.druid.data.input.ByteBufferInputRowParser;
import io.druid.data.input.Firehose; import io.druid.data.input.Firehose;
...@@ -115,7 +114,7 @@ public class KafkaEightFirehoseFactory implements FirehoseFactory<ByteBufferInpu ...@@ -115,7 +114,7 @@ public class KafkaEightFirehoseFactory implements FirehoseFactory<ByteBufferInpu
} }
@Override @Override
public InputRow nextRow() throws FormattedException public InputRow nextRow()
{ {
final byte[] message = iter.next().message(); final byte[] message = iter.next().message();
...@@ -123,15 +122,7 @@ public class KafkaEightFirehoseFactory implements FirehoseFactory<ByteBufferInpu ...@@ -123,15 +122,7 @@ public class KafkaEightFirehoseFactory implements FirehoseFactory<ByteBufferInpu
return null; return null;
} }
try { return theParser.parse(ByteBuffer.wrap(message));
return theParser.parse(ByteBuffer.wrap(message));
}
catch (Exception e) {
throw new FormattedException.Builder()
.withErrorCode(FormattedException.ErrorCode.UNPARSABLE_ROW)
.withMessage(String.format("Error parsing[%s], got [%s]", ByteBuffer.wrap(message), e.toString()))
.build();
}
} }
@Override @Override
......
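With FormattedException gone from the signature, a parse failure now surfaces as whatever runtime exception the parser throws, and it becomes the consumer's job to decide whether to skip or abort. A sketch of tolerant caller-side handling under that assumption (the processing hook is left as a comment):

```
import com.metamx.common.logger.Logger;
import io.druid.data.input.Firehose;
import io.druid.data.input.InputRow;

public class TolerantRowConsumer
{
  private static final Logger log = new Logger(TolerantRowConsumer.class);

  public static void consume(Firehose firehose)
  {
    while (firehose.hasMore()) {
      try {
        final InputRow row = firehose.nextRow();
        if (row == null) {
          continue; // the factory above returns null for empty messages
        }
        // hand the row to the indexer here
      }
      catch (Exception e) {
        log.warn(e, "Unparseable row, skipping");
      }
    }
  }
}
```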
...@@ -28,7 +28,7 @@ ...@@ -28,7 +28,7 @@
<parent> <parent>
<groupId>io.druid</groupId> <groupId>io.druid</groupId>
<artifactId>druid</artifactId> <artifactId>druid</artifactId>
<version>0.6.124-SNAPSHOT</version> <version>0.6.129-SNAPSHOT</version>
</parent> </parent>
<dependencies> <dependencies>
......
...@@ -23,7 +23,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; ...@@ -23,7 +23,6 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.metamx.common.exception.FormattedException;
import com.metamx.common.logger.Logger; import com.metamx.common.logger.Logger;
import io.druid.data.input.ByteBufferInputRowParser; import io.druid.data.input.ByteBufferInputRowParser;
import io.druid.data.input.Firehose; import io.druid.data.input.Firehose;
...@@ -123,7 +122,7 @@ public class KafkaSevenFirehoseFactory implements FirehoseFactory<ByteBufferInpu ...@@ -123,7 +122,7 @@ public class KafkaSevenFirehoseFactory implements FirehoseFactory<ByteBufferInpu
} }
@Override @Override
public InputRow nextRow() throws FormattedException public InputRow nextRow()
{ {
final Message message = iter.next().message(); final Message message = iter.next().message();
...@@ -134,17 +133,9 @@ public class KafkaSevenFirehoseFactory implements FirehoseFactory<ByteBufferInpu ...@@ -134,17 +133,9 @@ public class KafkaSevenFirehoseFactory implements FirehoseFactory<ByteBufferInpu
return parseMessage(message); return parseMessage(message);
} }
public InputRow parseMessage(Message message) throws FormattedException public InputRow parseMessage(Message message)
{ {
try { return theParser.parse(message.payload());
return theParser.parse(message.payload());
}
catch (Exception e) {
throw new FormattedException.Builder()
.withErrorCode(FormattedException.ErrorCode.UNPARSABLE_ROW)
.withMessage(String.format("Error parsing[%s], got [%s]", message.payload(), e.toString()))
.build();
}
} }
@Override @Override
......
...@@ -23,14 +23,14 @@ ...@@ -23,14 +23,14 @@
<groupId>io.druid</groupId> <groupId>io.druid</groupId>
<artifactId>druid</artifactId> <artifactId>druid</artifactId>
<packaging>pom</packaging> <packaging>pom</packaging>
<version>0.6.124-SNAPSHOT</version> <version>0.6.129-SNAPSHOT</version>
<name>druid</name> <name>druid</name>
<description>druid</description> <description>druid</description>
<scm> <scm>
<connection>scm:git:ssh://git@github.com/metamx/druid.git</connection> <connection>scm:git:ssh://git@github.com/metamx/druid.git</connection>
<developerConnection>scm:git:ssh://git@github.com/metamx/druid.git</developerConnection> <developerConnection>scm:git:ssh://git@github.com/metamx/druid.git</developerConnection>
<url>http://www.github.com/metamx/druid</url> <url>http://www.github.com/metamx/druid</url>
<tag>druid-0.6.107-SNAPSHOT</tag> <tag>druid-0.6.117-SNAPSHOT</tag>
</scm> </scm>
<prerequisites> <prerequisites>
...@@ -39,9 +39,9 @@ ...@@ -39,9 +39,9 @@
<properties> <properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<metamx.java-util.version>0.25.6</metamx.java-util.version> <metamx.java-util.version>0.26.5</metamx.java-util.version>
<apache.curator.version>2.4.0</apache.curator.version> <apache.curator.version>2.5.0</apache.curator.version>
<druid.api.version>0.2.3</druid.api.version> <druid.api.version>0.2.4</druid.api.version>
</properties> </properties>
<modules> <modules>
...@@ -198,22 +198,22 @@ ...@@ -198,22 +198,22 @@
<dependency> <dependency>
<groupId>com.google.guava</groupId> <groupId>com.google.guava</groupId>
<artifactId>guava</artifactId> <artifactId>guava</artifactId>
<version>14.0.1</version> <version>17.0</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.google.inject</groupId> <groupId>com.google.inject</groupId>
<artifactId>guice</artifactId> <artifactId>guice</artifactId>
<version>4.0-beta4</version> <version>4.0-beta</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-servlet</artifactId> <artifactId>guice-servlet</artifactId>
<version>4.0-beta4</version> <version>4.0-beta</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-multibindings</artifactId> <artifactId>guice-multibindings</artifactId>
<version>4.0-beta4</version> <version>4.0-beta</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.ibm.icu</groupId> <groupId>com.ibm.icu</groupId>
...@@ -561,15 +561,7 @@ ...@@ -561,15 +561,7 @@
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId> <artifactId>maven-release-plugin</artifactId>
<version>2.4.2</version> <version>2.5</version>
<dependencies>
<dependency>
<groupId>org.apache.maven.scm</groupId>
<artifactId>maven-scm-provider-gitexe</artifactId>
<!-- This version is necessary for use with git version 1.8.5 and above -->
<version>1.8.1</version>
</dependency>
</dependencies>
</plugin> </plugin>
</plugins> </plugins>
</pluginManagement> </pluginManagement>
......
...@@ -28,7 +28,7 @@ ...@@ -28,7 +28,7 @@
<parent> <parent>
<groupId>io.druid</groupId> <groupId>io.druid</groupId>
<artifactId>druid</artifactId> <artifactId>druid</artifactId>
<version>0.6.124-SNAPSHOT</version> <version>0.6.129-SNAPSHOT</version>
</parent> </parent>
<dependencies> <dependencies>
......
...@@ -28,10 +28,8 @@ import com.google.protobuf.ByteString; ...@@ -28,10 +28,8 @@ import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors;
import com.google.protobuf.DynamicMessage; import com.google.protobuf.DynamicMessage;
import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.InvalidProtocolBufferException;
import com.metamx.common.exception.FormattedException;
import com.metamx.common.logger.Logger; import com.metamx.common.logger.Logger;
import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.InputRowParser;
import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.JSONParseSpec;
import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.MapInputRowParser;
import io.druid.data.input.impl.ParseSpec; import io.druid.data.input.impl.ParseSpec;
...@@ -94,7 +92,7 @@ public class ProtoBufInputRowParser implements ByteBufferInputRowParser ...@@ -94,7 +92,7 @@ public class ProtoBufInputRowParser implements ByteBufferInputRowParser
} }
@Override @Override
public InputRow parse(ByteBuffer input) throws FormattedException public InputRow parse(ByteBuffer input)
{ {
// We should really create a ProtoBufBasedInputRow that does not need an intermediate map but accesses // We should really create a ProtoBufBasedInputRow that does not need an intermediate map but accesses
// the DynamicMessage directly... // the DynamicMessage directly...
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/ */
package io.druid.server.initialization; package io.druid.guice;
import com.google.inject.Binder; import com.google.inject.Binder;
import com.google.inject.Module; import com.google.inject.Module;
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/ */
package io.druid.server.initialization; package io.druid.guice;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
......
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.guice;
import com.google.common.collect.Lists;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.util.Modules;
import io.druid.guice.ConfigModule;
import io.druid.guice.DruidGuiceExtensions;
import io.druid.guice.DruidSecondaryModule;
import io.druid.guice.ExtensionsConfig;
import io.druid.guice.JsonConfigProvider;
import io.druid.guice.PropertiesModule;
import io.druid.jackson.JacksonModule;
import java.util.List;
/**
*/
public class GuiceInjectors
{
public static Injector makeStartupInjector()
{
return Guice.createInjector(
new DruidGuiceExtensions(),
new JacksonModule(),
new PropertiesModule("runtime.properties"),
new ConfigModule(),
new Module()
{
@Override
public void configure(Binder binder)
{
binder.bind(DruidSecondaryModule.class);
JsonConfigProvider.bind(binder, "druid.extensions", ExtensionsConfig.class);
}
}
);
}
public static Injector makeStartupInjectorWithModules(Iterable<Module> modules)
{
List<Module> theModules = Lists.newArrayList();
theModules.add(new DruidGuiceExtensions());
theModules.add(new JacksonModule());
theModules.add(new PropertiesModule("runtime.properties"));
theModules.add(new ConfigModule());
theModules.add(
new Module()
{
@Override
public void configure(Binder binder)
{
binder.bind(DruidSecondaryModule.class);
JsonConfigProvider.bind(binder, "druid.extensions", ExtensionsConfig.class);
}
}
);
for (Module theModule : modules) {
theModules.add(theModule);
}
return Guice.createInjector(theModules);
}
}
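For context, this is how the new helper is consumed elsewhere in this change set (the IndexIO static initializer further down does exactly this): build the startup injector, layering caller-specific bindings on top of the core properties/Jackson/config modules.

```
import com.google.common.collect.ImmutableList;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Module;

public class StartupInjectorSketch
{
  public static Injector make()
  {
    return GuiceInjectors.makeStartupInjectorWithModules(
        ImmutableList.<Module>of(
            new Module()
            {
              @Override
              public void configure(Binder binder)
              {
                // layer caller-specific bindings on top of the
                // property/Jackson/config modules added by the helper
              }
            }
        )
    );
  }
}
```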
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.guice;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.metamx.common.ISE;
import io.druid.guice.annotations.Json;
import io.druid.guice.annotations.Smile;
import io.druid.initialization.DruidModule;
import java.util.Collections;
import java.util.List;
/**
*/
public class ModuleList
{
private final Injector baseInjector;
private final ObjectMapper jsonMapper;
private final ObjectMapper smileMapper;
private final List<Module> modules;
public ModuleList(Injector baseInjector)
{
this.baseInjector = baseInjector;
this.jsonMapper = baseInjector.getInstance(Key.get(ObjectMapper.class, Json.class));
this.smileMapper = baseInjector.getInstance(Key.get(ObjectMapper.class, Smile.class));
this.modules = Lists.newArrayList();
}
public List<Module> getModules()
{
return Collections.unmodifiableList(modules);
}
public void addModule(Object input)
{
if (input instanceof DruidModule) {
baseInjector.injectMembers(input);
modules.add(registerJacksonModules(((DruidModule) input)));
} else if (input instanceof Module) {
baseInjector.injectMembers(input);
modules.add((Module) input);
} else if (input instanceof Class) {
if (DruidModule.class.isAssignableFrom((Class) input)) {
modules.add(registerJacksonModules(baseInjector.getInstance((Class<? extends DruidModule>) input)));
} else if (Module.class.isAssignableFrom((Class) input)) {
modules.add(baseInjector.getInstance((Class<? extends Module>) input));
return;
} else {
throw new ISE("Class[%s] does not implement %s", input.getClass(), Module.class);
}
} else {
throw new ISE("Unknown module type[%s]", input.getClass());
}
}
public void addModules(Object... object)
{
for (Object o : object) {
addModule(o);
}
}
private DruidModule registerJacksonModules(DruidModule module)
{
for (com.fasterxml.jackson.databind.Module jacksonModule : module.getJacksonModules()) {
jsonMapper.registerModule(jacksonModule);
smileMapper.registerModule(jacksonModule);
}
return module;
}
}
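ModuleList accepts modules either as instances or as classes (and DruidModules additionally get their Jackson modules registered on both mappers). The base injector must carry the @Json and @Smile ObjectMapper bindings, which the startup injector from GuiceInjectors above provides. A minimal sketch:

```
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Module;
import java.util.List;

public class ModuleListSketch
{
  // a do-nothing module standing in for a real extension module
  public static class NoopModule implements Module
  {
    @Override
    public void configure(Binder binder) {}
  }

  public static List<Module> resolve(Injector startupInjector)
  {
    final ModuleList modules = new ModuleList(startupInjector);
    modules.addModules(
        new NoopModule(),  // instance: members injected, then added as-is
        NoopModule.class   // class: instantiated via the base injector
    );
    return modules.getModules();
  }
}
```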
...@@ -17,13 +17,13 @@ ...@@ -17,13 +17,13 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/ */
package io.druid.server.initialization; package io.druid.guice;
import com.google.common.base.Charsets; import com.google.common.base.Charsets;
import com.google.common.base.Throwables; import com.google.common.base.Throwables;
import com.google.common.io.Closeables;
import com.google.inject.Binder; import com.google.inject.Binder;
import com.google.inject.Module; import com.google.inject.Module;
import com.metamx.common.guava.CloseQuietly;
import com.metamx.common.logger.Logger; import com.metamx.common.logger.Logger;
import java.io.BufferedInputStream; import java.io.BufferedInputStream;
...@@ -80,7 +80,7 @@ public class PropertiesModule implements Module ...@@ -80,7 +80,7 @@ public class PropertiesModule implements Module
log.wtf(e, "This can only happen if the .exists() call lied. That's f'd up."); log.wtf(e, "This can only happen if the .exists() call lied. That's f'd up.");
} }
finally { finally {
Closeables.closeQuietly(stream); CloseQuietly.close(stream);
} }
binder.bind(Properties.class).toInstance(props); binder.bind(Properties.class).toInstance(props);
......
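This hunk and the many like it below are the same mechanical migration: Guava's Closeables.closeQuietly(Closeable) was deprecated and then dropped (it no longer exists in the Guava 17 this change set upgrades to), so the code moves to java-util's CloseQuietly.close. The helper amounts to a few lines; a sketch of the idea:

```
import java.io.Closeable;
import java.io.IOException;

public class CloseQuietlySketch
{
  public static void close(Closeable closeable)
  {
    if (closeable == null) {
      return;
    }
    try {
      closeable.close();
    }
    catch (IOException e) {
      // deliberately swallowed: this helper is for best-effort cleanup paths
    }
  }
}
```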
...@@ -17,14 +17,13 @@ ...@@ -17,14 +17,13 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/ */
package io.druid.server; package io.druid.query;
import com.metamx.common.concurrent.ExecutorServiceConfig; import com.metamx.common.concurrent.ExecutorServiceConfig;
import io.druid.segment.column.ColumnConfig;
import org.skife.config.Config; import org.skife.config.Config;
/**
*/
public abstract class DruidProcessingConfig extends ExecutorServiceConfig public abstract class DruidProcessingConfig extends ExecutorServiceConfig implements ColumnConfig
{ {
@Config({"druid.computation.buffer.size", "${base_path}.buffer.sizeBytes"}) @Config({"druid.computation.buffer.size", "${base_path}.buffer.sizeBytes"})
public int intermediateComputeSizeBytes() public int intermediateComputeSizeBytes()
...@@ -39,4 +38,10 @@ public abstract class DruidProcessingConfig extends ExecutorServiceConfig ...@@ -39,4 +38,10 @@ public abstract class DruidProcessingConfig extends ExecutorServiceConfig
final int processors = Runtime.getRuntime().availableProcessors(); final int processors = Runtime.getRuntime().availableProcessors();
return processors > 1 ? processors - 1 : processors; return processors > 1 ? processors - 1 : processors;
} }
@Config(value = "${base_path}.columnCache.sizeBytes")
public int columnCacheSizeBytes()
{
return 1024 * 1024;
}
} }
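DruidProcessingConfig now doubles as the ColumnConfig wired up later in this change set; with base_path bound to "druid.processing" (see the IndexIO hunk below), the new knob surfaces as the druid.processing.columnCache.sizeBytes property, defaulting to 1MB. A hedged sketch of building the config from properties, assuming config-magic's usual factory API:

```
import com.google.common.collect.ImmutableMap;
import org.skife.config.ConfigurationObjectFactory;
import java.util.Properties;

public class ProcessingConfigSketch
{
  public static DruidProcessingConfig make()
  {
    final Properties props = new Properties();
    props.setProperty("druid.processing.columnCache.sizeBytes", "2097152"); // 2MB instead of the 1MB default
    // ${base_path} in the @Config annotations is filled from the replacement
    // map; Druid binds it to "druid.processing" (see the IndexIO hunk below)
    return new ConfigurationObjectFactory(props).buildWithReplacements(
        DruidProcessingConfig.class,
        ImmutableMap.of("base_path", "druid.processing")
    );
  }
}
```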
...@@ -19,7 +19,7 @@ ...@@ -19,7 +19,7 @@
package io.druid.query; package io.druid.query;
import com.google.common.io.Closeables; import com.metamx.common.guava.CloseQuietly;
import com.metamx.common.guava.ResourceClosingSequence; import com.metamx.common.guava.ResourceClosingSequence;
import com.metamx.common.guava.Sequence; import com.metamx.common.guava.Sequence;
import io.druid.segment.ReferenceCountingSegment; import io.druid.segment.ReferenceCountingSegment;
...@@ -52,7 +52,7 @@ public class ReferenceCountingSegmentQueryRunner<T> implements QueryRunner<T> ...@@ -52,7 +52,7 @@ public class ReferenceCountingSegmentQueryRunner<T> implements QueryRunner<T>
return new ResourceClosingSequence<T>(baseSequence, closeable); return new ResourceClosingSequence<T>(baseSequence, closeable);
} }
catch (RuntimeException e) { catch (RuntimeException e) {
Closeables.closeQuietly(closeable); CloseQuietly.close(closeable);
throw e; throw e;
} }
} }
......
...@@ -54,7 +54,7 @@ public class CardinalityAggregator implements Aggregator ...@@ -54,7 +54,7 @@ public class CardinalityAggregator implements Aggregator
// nothing to add to hasher if size == 0, only handle size == 1 and size != 0 cases. // nothing to add to hasher if size == 0, only handle size == 1 and size != 0 cases.
if (size == 1) { if (size == 1) {
final String value = selector.lookupName(row.get(0)); final String value = selector.lookupName(row.get(0));
hasher.putString(value != null ? value : NULL_STRING); hasher.putUnencodedChars(value != null ? value : NULL_STRING);
} else if (size != 0) { } else if (size != 0) {
final String[] values = new String[size]; final String[] values = new String[size];
for (int i = 0; i < size; ++i) { for (int i = 0; i < size; ++i) {
...@@ -67,7 +67,7 @@ public class CardinalityAggregator implements Aggregator ...@@ -67,7 +67,7 @@ public class CardinalityAggregator implements Aggregator
if (i != 0) { if (i != 0) {
hasher.putChar(SEPARATOR); hasher.putChar(SEPARATOR);
} }
hasher.putString(values[i]); hasher.putUnencodedChars(values[i]);
} }
} }
} }
...@@ -79,7 +79,7 @@ public class CardinalityAggregator implements Aggregator ...@@ -79,7 +79,7 @@ public class CardinalityAggregator implements Aggregator
for (final DimensionSelector selector : selectors) { for (final DimensionSelector selector : selectors) {
for (final Integer index : selector.getRow()) { for (final Integer index : selector.getRow()) {
final String value = selector.lookupName(index); final String value = selector.lookupName(index);
collector.add(hashFn.hashString(value == null ? NULL_STRING : value).asBytes()); collector.add(hashFn.hashUnencodedChars(value == null ? NULL_STRING : value).asBytes());
} }
} }
} }
......
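These renames track the Guava bump in the pom above: the charset-less Hasher.putString(CharSequence) and HashFunction.hashString(CharSequence) overloads are gone by Guava 17, and putUnencodedChars/hashUnencodedChars are their renamed equivalents, hashing the UTF-16 code units directly. A small sketch:

```
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;

public class UnencodedCharsSketch
{
  public static byte[] hash(String value)
  {
    final HashFunction fn = Hashing.murmur3_128();
    // hashes the UTF-16 code units directly, matching the behavior of
    // the old charset-less hashString(CharSequence)
    return fn.hashUnencodedChars(value).asBytes();
  }
}
```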
...@@ -128,8 +128,9 @@ public class HyperUniquesSerde extends ComplexMetricSerde ...@@ -128,8 +128,9 @@ public class HyperUniquesSerde extends ComplexMetricSerde
@Override @Override
public HyperLogLogCollector fromByteBuffer(ByteBuffer buffer, int numBytes) public HyperLogLogCollector fromByteBuffer(ByteBuffer buffer, int numBytes)
{ {
buffer.limit(buffer.position() + numBytes); final ByteBuffer readOnlyBuffer = buffer.asReadOnlyBuffer();
return HyperLogLogCollector.makeCollector(buffer); readOnlyBuffer.limit(readOnlyBuffer.position() + numBytes);
return HyperLogLogCollector.makeCollector(readOnlyBuffer);
} }
@Override @Override
......
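The fix above stops fromByteBuffer from mutating the shared buffer's position and limit: asReadOnlyBuffer() shares the underlying bytes but carries its own cursor. A minimal demonstration of the pattern:

```
import java.nio.ByteBuffer;

public class ReadOnlyViewSketch
{
  public static ByteBuffer window(ByteBuffer shared, int numBytes)
  {
    // the view has independent position/limit, so 'shared' is left untouched
    final ByteBuffer view = shared.asReadOnlyBuffer();
    view.limit(view.position() + numBytes);
    return view;
  }
}
```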
...@@ -98,4 +98,12 @@ public class JavascriptDimExtractionFn implements DimExtractionFn ...@@ -98,4 +98,12 @@ public class JavascriptDimExtractionFn implements DimExtractionFn
{ {
return false; return false;
} }
@Override
public String toString()
{
return "JavascriptDimExtractionFn{" +
"function='" + function + '\'' +
'}';
}
} }
...@@ -25,12 +25,12 @@ import com.google.common.collect.ImmutableList; ...@@ -25,12 +25,12 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators; import com.google.common.collect.Iterators;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.google.common.io.Closeables;
import com.google.common.primitives.Ints; import com.google.common.primitives.Ints;
import com.google.inject.Inject; import com.google.inject.Inject;
import com.metamx.common.IAE; import com.metamx.common.IAE;
import com.metamx.common.ISE; import com.metamx.common.ISE;
import com.metamx.common.guava.BaseSequence; import com.metamx.common.guava.BaseSequence;
import com.metamx.common.guava.CloseQuietly;
import com.metamx.common.guava.FunctionalIterator; import com.metamx.common.guava.FunctionalIterator;
import com.metamx.common.guava.Sequence; import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences; import com.metamx.common.guava.Sequences;
...@@ -123,7 +123,7 @@ public class GroupByQueryEngine ...@@ -123,7 +123,7 @@ public class GroupByQueryEngine
@Override @Override
public void cleanup(RowIterator iterFromMake) public void cleanup(RowIterator iterFromMake)
{ {
Closeables.closeQuietly(iterFromMake); CloseQuietly.close(iterFromMake);
} }
} }
); );
...@@ -135,7 +135,7 @@ public class GroupByQueryEngine ...@@ -135,7 +135,7 @@ public class GroupByQueryEngine
@Override @Override
public void close() throws IOException public void close() throws IOException
{ {
Closeables.closeQuietly(bufferHolder); CloseQuietly.close(bufferHolder);
} }
} }
) )
......
...@@ -52,24 +52,6 @@ public class TimeBoundaryQuery extends BaseQuery<Result<TimeBoundaryResultValue> ...@@ -52,24 +52,6 @@ public class TimeBoundaryQuery extends BaseQuery<Result<TimeBoundaryResultValue>
private static final byte CACHE_TYPE_ID = 0x0; private static final byte CACHE_TYPE_ID = 0x0;
public static Iterable<Result<TimeBoundaryResultValue>> buildResult(DateTime timestamp, DateTime min, DateTime max)
{
List<Result<TimeBoundaryResultValue>> results = Lists.newArrayList();
Map<String, Object> result = Maps.newHashMap();
if (min != null) {
result.put(MIN_TIME, min);
}
if (max != null) {
result.put(MAX_TIME, max);
}
if (!result.isEmpty()) {
results.add(new Result<>(timestamp, new TimeBoundaryResultValue(result)));
}
return results;
}
private final String bound; private final String bound;
@JsonCreator @JsonCreator
...@@ -150,6 +132,24 @@ public class TimeBoundaryQuery extends BaseQuery<Result<TimeBoundaryResultValue> ...@@ -150,6 +132,24 @@ public class TimeBoundaryQuery extends BaseQuery<Result<TimeBoundaryResultValue>
.array(); .array();
} }
public Iterable<Result<TimeBoundaryResultValue>> buildResult(DateTime timestamp, DateTime min, DateTime max)
{
List<Result<TimeBoundaryResultValue>> results = Lists.newArrayList();
Map<String, Object> result = Maps.newHashMap();
if (min != null) {
result.put(MIN_TIME, min);
}
if (max != null) {
result.put(MAX_TIME, max);
}
if (!result.isEmpty()) {
results.add(new Result<>(timestamp, new TimeBoundaryResultValue(result)));
}
return results;
}
public Iterable<Result<TimeBoundaryResultValue>> mergeResults(List<Result<TimeBoundaryResultValue>> results) public Iterable<Result<TimeBoundaryResultValue>> mergeResults(List<Result<TimeBoundaryResultValue>> results)
{ {
if (results == null || results.isEmpty()) { if (results == null || results.isEmpty()) {
......
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
package io.druid.query.topn; package io.druid.query.topn;
import com.google.common.io.Closeables;
import com.metamx.common.Pair; import com.metamx.common.Pair;
import com.metamx.common.guava.CloseQuietly;
import io.druid.collections.ResourceHolder; import io.druid.collections.ResourceHolder;
import io.druid.collections.StupidPool; import io.druid.collections.StupidPool;
import io.druid.query.aggregation.BufferAggregator; import io.druid.query.aggregation.BufferAggregator;
...@@ -233,7 +233,7 @@ public class PooledTopNAlgorithm ...@@ -233,7 +233,7 @@ public class PooledTopNAlgorithm
if (resultsBufHolder != null) { if (resultsBufHolder != null) {
resultsBufHolder.get().clear(); resultsBufHolder.get().clear();
} }
Closeables.closeQuietly(resultsBufHolder); CloseQuietly.close(resultsBufHolder);
} }
public static class PooledTopNParams extends TopNParams public static class PooledTopNParams extends TopNParams
......
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
package io.druid.segment; package io.druid.segment;
import com.google.common.io.Closeables;
import com.metamx.collections.spatial.ImmutableRTree; import com.metamx.collections.spatial.ImmutableRTree;
import com.metamx.common.guava.CloseQuietly;
import io.druid.query.filter.BitmapIndexSelector; import io.druid.query.filter.BitmapIndexSelector;
import io.druid.segment.column.Column; import io.druid.segment.column.Column;
import io.druid.segment.column.DictionaryEncodedColumn; import io.druid.segment.column.DictionaryEncodedColumn;
...@@ -95,7 +95,7 @@ public class ColumnSelectorBitmapIndexSelector implements BitmapIndexSelector ...@@ -95,7 +95,7 @@ public class ColumnSelectorBitmapIndexSelector implements BitmapIndexSelector
return column.length(); return column.length();
} }
finally { finally {
Closeables.closeQuietly(column); CloseQuietly.close(column);
} }
} }
......
...@@ -22,6 +22,7 @@ package io.druid.segment; ...@@ -22,6 +22,7 @@ package io.druid.segment;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
...@@ -31,6 +32,9 @@ import com.google.common.io.ByteStreams; ...@@ -31,6 +32,9 @@ import com.google.common.io.ByteStreams;
import com.google.common.io.Closeables; import com.google.common.io.Closeables;
import com.google.common.io.Files; import com.google.common.io.Files;
import com.google.common.primitives.Ints; import com.google.common.primitives.Ints;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.metamx.collections.spatial.ImmutableRTree; import com.metamx.collections.spatial.ImmutableRTree;
import com.metamx.common.IAE; import com.metamx.common.IAE;
import com.metamx.common.ISE; import com.metamx.common.ISE;
...@@ -41,9 +45,13 @@ import com.metamx.common.io.smoosh.SmooshedWriter; ...@@ -41,9 +45,13 @@ import com.metamx.common.io.smoosh.SmooshedWriter;
import com.metamx.common.logger.Logger; import com.metamx.common.logger.Logger;
import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.EmittingLogger;
import io.druid.common.utils.SerializerUtils; import io.druid.common.utils.SerializerUtils;
import io.druid.guice.ConfigProvider;
import io.druid.guice.GuiceInjectors;
import io.druid.jackson.DefaultObjectMapper; import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.DruidProcessingConfig;
import io.druid.segment.column.Column; import io.druid.segment.column.Column;
import io.druid.segment.column.ColumnBuilder; import io.druid.segment.column.ColumnBuilder;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.column.ColumnDescriptor; import io.druid.segment.column.ColumnDescriptor;
import io.druid.segment.column.ValueType; import io.druid.segment.column.ValueType;
import io.druid.segment.data.ArrayIndexed; import io.druid.segment.data.ArrayIndexed;
...@@ -90,6 +98,9 @@ public class IndexIO ...@@ -90,6 +98,9 @@ public class IndexIO
{ {
public static final byte V8_VERSION = 0x8; public static final byte V8_VERSION = 0x8;
public static final byte V9_VERSION = 0x9; public static final byte V9_VERSION = 0x9;
public static final int CURRENT_VERSION_ID = V9_VERSION;
public static final ByteOrder BYTE_ORDER = ByteOrder.nativeOrder();
private static final Map<Integer, IndexLoader> indexLoaders = private static final Map<Integer, IndexLoader> indexLoaders =
ImmutableMap.<Integer, IndexLoader>builder() ImmutableMap.<Integer, IndexLoader>builder()
...@@ -107,13 +118,33 @@ public class IndexIO ...@@ -107,13 +118,33 @@ public class IndexIO
private static final EmittingLogger log = new EmittingLogger(IndexIO.class); private static final EmittingLogger log = new EmittingLogger(IndexIO.class);
private static final SerializerUtils serializerUtils = new SerializerUtils(); private static final SerializerUtils serializerUtils = new SerializerUtils();
public static final ByteOrder BYTE_ORDER = ByteOrder.nativeOrder();
// This should really be provided by DI, should be changed once we switch around to using a DI framework
private static final ObjectMapper mapper = new DefaultObjectMapper(); private static final ObjectMapper mapper;
protected static final ColumnConfig columnConfig;
static {
final Injector injector = GuiceInjectors.makeStartupInjectorWithModules(
ImmutableList.<Module>of(
new Module()
{
@Override
public void configure(Binder binder)
{
ConfigProvider.bind(
binder,
DruidProcessingConfig.class,
ImmutableMap.of("base_path", "druid.processing")
);
binder.bind(ColumnConfig.class).to(DruidProcessingConfig.class);
}
}
)
);
mapper = injector.getInstance(ObjectMapper.class);
columnConfig = injector.getInstance(ColumnConfig.class);
}
private static volatile IndexIOHandler handler = null; private static volatile IndexIOHandler handler = null;
public static final int CURRENT_VERSION_ID = V9_VERSION;
@Deprecated @Deprecated
public static MMappedIndex mapDir(final File inDir) throws IOException public static MMappedIndex mapDir(final File inDir) throws IOException
...@@ -165,15 +196,10 @@ public class IndexIO ...@@ -165,15 +196,10 @@ public class IndexIO
} }
final File indexFile = new File(inDir, "index.drd"); final File indexFile = new File(inDir, "index.drd");
InputStream in = null;
int version; int version;
try { try (InputStream in = new FileInputStream(indexFile)) {
in = new FileInputStream(indexFile);
version = in.read(); version = in.read();
} }
finally {
Closeables.closeQuietly(in);
}
return version; return version;
} }
...@@ -194,8 +220,8 @@ public class IndexIO ...@@ -194,8 +220,8 @@ public class IndexIO
case 2: case 2:
case 3: case 3:
log.makeAlert("Attempt to load segment of version <= 3.") log.makeAlert("Attempt to load segment of version <= 3.")
.addData("version", version) .addData("version", version)
.emit(); .emit();
return false; return false;
case 4: case 4:
case 5: case 5:
...@@ -631,7 +657,10 @@ public class IndexIO ...@@ -631,7 +657,10 @@ public class IndexIO
.setHasMultipleValues(true) .setHasMultipleValues(true)
.setDictionaryEncodedColumn( .setDictionaryEncodedColumn(
new DictionaryEncodedColumnSupplier( new DictionaryEncodedColumnSupplier(
index.getDimValueLookup(dimension), null, (index.getDimColumn(dimension)) index.getDimValueLookup(dimension),
null,
index.getDimColumn(dimension),
columnConfig.columnCacheSizeBytes()
) )
) )
.setBitmapIndex( .setBitmapIndex(
...@@ -743,7 +772,7 @@ public class IndexIO ...@@ -743,7 +772,7 @@ public class IndexIO
ColumnDescriptor serde = mapper.readValue( ColumnDescriptor serde = mapper.readValue(
serializerUtils.readString(byteBuffer), ColumnDescriptor.class serializerUtils.readString(byteBuffer), ColumnDescriptor.class
); );
return serde.read(byteBuffer); return serde.read(byteBuffer, columnConfig);
} }
} }
......
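One detail worth calling out from the IndexIO hunk: the version read switches from a closeQuietly-in-finally block to try-with-resources, so the stream is closed on both the normal and exception paths and close() failures surface instead of being swallowed. The pattern in isolation:

```
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class VersionReaderSketch
{
  public static int readVersion(File indexFile) throws IOException
  {
    try (InputStream in = new FileInputStream(indexFile)) {
      return in.read(); // first byte of index.drd is the segment version
    }
  }
}
```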
...@@ -38,6 +38,7 @@ import com.metamx.collections.spatial.RTree; ...@@ -38,6 +38,7 @@ import com.metamx.collections.spatial.RTree;
import com.metamx.collections.spatial.split.LinearGutmanSplitStrategy; import com.metamx.collections.spatial.split.LinearGutmanSplitStrategy;
import com.metamx.common.IAE; import com.metamx.common.IAE;
import com.metamx.common.ISE; import com.metamx.common.ISE;
import com.metamx.common.guava.CloseQuietly;
import com.metamx.common.guava.FunctionalIterable; import com.metamx.common.guava.FunctionalIterable;
import com.metamx.common.guava.MergeIterable; import com.metamx.common.guava.MergeIterable;
import com.metamx.common.guava.nary.BinaryFn; import com.metamx.common.guava.nary.BinaryFn;
...@@ -50,6 +51,7 @@ import io.druid.common.utils.JodaUtils; ...@@ -50,6 +51,7 @@ import io.druid.common.utils.JodaUtils;
import io.druid.common.utils.SerializerUtils; import io.druid.common.utils.SerializerUtils;
import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.ToLowerCaseAggregatorFactory; import io.druid.query.aggregation.ToLowerCaseAggregatorFactory;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.data.ByteBufferWriter; import io.druid.segment.data.ByteBufferWriter;
import io.druid.segment.data.CompressedLongsSupplierSerializer; import io.druid.segment.data.CompressedLongsSupplierSerializer;
import io.druid.segment.data.ConciseCompressedIndexedInts; import io.druid.segment.data.ConciseCompressedIndexedInts;
...@@ -432,9 +434,9 @@ public class IndexMerger ...@@ -432,9 +434,9 @@ public class IndexMerger
serializerUtils.writeString(channel, String.format("%s/%s", minTime, maxTime)); serializerUtils.writeString(channel, String.format("%s/%s", minTime, maxTime));
} }
finally { finally {
Closeables.closeQuietly(channel); CloseQuietly.close(channel);
channel = null; channel = null;
Closeables.closeQuietly(fileOutputStream); CloseQuietly.close(fileOutputStream);
fileOutputStream = null; fileOutputStream = null;
} }
IndexIO.checkFileSize(indexFile); IndexIO.checkFileSize(indexFile);
...@@ -875,7 +877,7 @@ public class IndexMerger ...@@ -875,7 +877,7 @@ public class IndexMerger
); );
} }
finally { finally {
Closeables.closeQuietly(channel); CloseQuietly.close(channel);
channel = null; channel = null;
} }
IndexIO.checkFileSize(indexFile); IndexIO.checkFileSize(indexFile);
......
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
package io.druid.segment; package io.druid.segment;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.google.common.io.Closeables; import com.metamx.common.guava.CloseQuietly;
import io.druid.segment.data.ConciseCompressedIndexedInts; import io.druid.segment.data.ConciseCompressedIndexedInts;
import io.druid.segment.data.Indexed; import io.druid.segment.data.Indexed;
import io.druid.segment.data.IndexedFloats; import io.druid.segment.data.IndexedFloats;
...@@ -118,9 +118,9 @@ public class MMappedIndexAdapter implements IndexableAdapter ...@@ -118,9 +118,9 @@ public class MMappedIndexAdapter implements IndexableAdapter
{ {
final boolean hasNext = currRow < numRows; final boolean hasNext = currRow < numRows;
if (!hasNext && !done) { if (!hasNext && !done) {
Closeables.closeQuietly(timestamps); CloseQuietly.close(timestamps);
for (IndexedFloats floatMetric : floatMetrics) { for (IndexedFloats floatMetric : floatMetrics) {
Closeables.closeQuietly(floatMetric); CloseQuietly.close(floatMetric);
} }
done = true; done = true;
} }
......
...@@ -20,11 +20,11 @@ ...@@ -20,11 +20,11 @@
package io.druid.segment; package io.druid.segment;
import com.google.common.io.ByteStreams; import com.google.common.io.ByteStreams;
import com.google.common.io.Closeables;
import com.google.common.io.InputSupplier; import com.google.common.io.InputSupplier;
import com.google.common.io.OutputSupplier; import com.google.common.io.OutputSupplier;
import com.metamx.common.IAE; import com.metamx.common.IAE;
import com.metamx.common.ISE; import com.metamx.common.ISE;
import com.metamx.common.guava.CloseQuietly;
import io.druid.common.utils.SerializerUtils; import io.druid.common.utils.SerializerUtils;
import io.druid.segment.data.CompressedFloatsIndexedSupplier; import io.druid.segment.data.CompressedFloatsIndexedSupplier;
import io.druid.segment.data.CompressedFloatsSupplierSerializer; import io.druid.segment.data.CompressedFloatsSupplierSerializer;
...@@ -84,8 +84,8 @@ public class MetricHolder ...@@ -84,8 +84,8 @@ public class MetricHolder
ByteStreams.copy(in, out); ByteStreams.copy(in, out);
} }
finally { finally {
Closeables.closeQuietly(out); CloseQuietly.close(out);
Closeables.closeQuietly(in); CloseQuietly.close(in);
} }
} }
......
...@@ -22,8 +22,8 @@ package io.druid.segment; ...@@ -22,8 +22,8 @@ package io.druid.segment;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.google.common.io.Closeables;
import com.metamx.common.ISE; import com.metamx.common.ISE;
import com.metamx.common.guava.CloseQuietly;
import com.metamx.common.logger.Logger; import com.metamx.common.logger.Logger;
import io.druid.segment.column.BitmapIndex; import io.druid.segment.column.BitmapIndex;
import io.druid.segment.column.Column; import io.druid.segment.column.Column;
...@@ -208,10 +208,10 @@ public class QueryableIndexIndexableAdapter implements IndexableAdapter ...@@ -208,10 +208,10 @@ public class QueryableIndexIndexableAdapter implements IndexableAdapter
{ {
final boolean hasNext = currRow < numRows; final boolean hasNext = currRow < numRows;
if (!hasNext && !done) { if (!hasNext && !done) {
Closeables.closeQuietly(timestamps); CloseQuietly.close(timestamps);
for (Object metric : metrics) { for (Object metric : metrics) {
if (metric instanceof Closeable) { if (metric instanceof Closeable) {
Closeables.closeQuietly((Closeable) metric); CloseQuietly.close((Closeable) metric);
} }
} }
done = true; done = true;
......
...@@ -21,9 +21,10 @@ package io.druid.segment; ...@@ -21,9 +21,10 @@ package io.druid.segment;
import com.google.common.base.Function; import com.google.common.base.Function;
import com.google.common.base.Predicates; import com.google.common.base.Predicates;
import com.google.common.collect.Iterators;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.google.common.io.Closeables; import com.metamx.common.guava.CloseQuietly;
import com.metamx.common.guava.Sequence; import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences; import com.metamx.common.guava.Sequences;
import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularity;
...@@ -38,12 +39,12 @@ import io.druid.segment.column.ValueType; ...@@ -38,12 +39,12 @@ import io.druid.segment.column.ValueType;
import io.druid.segment.data.Indexed; import io.druid.segment.data.Indexed;
import io.druid.segment.data.IndexedInts; import io.druid.segment.data.IndexedInts;
import io.druid.segment.data.Offset; import io.druid.segment.data.Offset;
import io.druid.segment.data.SingleIndexedInts;
import org.joda.time.DateTime; import org.joda.time.DateTime;
import org.joda.time.Interval; import org.joda.time.Interval;
import java.io.Closeable; import java.io.Closeable;
import java.io.IOException; import java.io.IOException;
import java.util.Iterator;
import java.util.Map; import java.util.Map;
/** /**
...@@ -109,7 +110,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter ...@@ -109,7 +110,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
return new DateTime(column.getLongSingleValueRow(0)); return new DateTime(column.getLongSingleValueRow(0));
} }
finally { finally {
Closeables.closeQuietly(column); CloseQuietly.close(column);
} }
} }
...@@ -122,7 +123,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter ...@@ -122,7 +123,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
return new DateTime(column.getLongSingleValueRow(column.length() - 1)); return new DateTime(column.getLongSingleValueRow(column.length() - 1));
} }
finally { finally {
Closeables.closeQuietly(column); CloseQuietly.close(column);
} }
} }
...@@ -186,6 +187,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter ...@@ -186,6 +187,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
{ {
final Offset baseOffset = offset.clone(); final Offset baseOffset = offset.clone();
final Map<String, DictionaryEncodedColumn> dictionaryColumnCache = Maps.newHashMap();
final Map<String, GenericColumn> genericColumnCache = Maps.newHashMap(); final Map<String, GenericColumn> genericColumnCache = Maps.newHashMap();
final Map<String, ComplexColumn> complexColumnCache = Maps.newHashMap(); final Map<String, ComplexColumn> complexColumnCache = Maps.newHashMap();
final Map<String, Object> objectColumnCache = Maps.newHashMap(); final Map<String, Object> objectColumnCache = Maps.newHashMap();
...@@ -270,12 +272,16 @@ public class QueryableIndexStorageAdapter implements StorageAdapter ...@@ -270,12 +272,16 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
public DimensionSelector makeDimensionSelector(String dimension) public DimensionSelector makeDimensionSelector(String dimension)
{ {
final String dimensionName = dimension.toLowerCase(); final String dimensionName = dimension.toLowerCase();
DictionaryEncodedColumn cachedColumn = dictionaryColumnCache.get(dimensionName);
final Column columnDesc = index.getColumn(dimensionName); final Column columnDesc = index.getColumn(dimensionName);
if (columnDesc == null) {
return null; if (cachedColumn == null && columnDesc != null) {
cachedColumn = columnDesc.getDictionaryEncoding();
dictionaryColumnCache.put(dimensionName, cachedColumn);
} }
final DictionaryEncodedColumn column = columnDesc.getDictionaryEncoding(); final DictionaryEncodedColumn column = cachedColumn;
if (column == null) { if (column == null) {
return null; return null;
...@@ -313,7 +319,27 @@ public class QueryableIndexStorageAdapter implements StorageAdapter ...@@ -313,7 +319,27 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
@Override @Override
public IndexedInts getRow() public IndexedInts getRow()
{ {
return new SingleIndexedInts(column.getSingleValueRow(cursorOffset.getOffset())); // using an anonymous class is faster than creating a class that stores a copy of the value
return new IndexedInts()
{
@Override
public int size()
{
return 1;
}
@Override
public int get(int index)
{
return column.getSingleValueRow(cursorOffset.getOffset());
}
@Override
public Iterator<Integer> iterator()
{
return Iterators.singletonIterator(column.getSingleValueRow(cursorOffset.getOffset()));
}
};
} }
@Override @Override
...@@ -535,16 +561,19 @@ public class QueryableIndexStorageAdapter implements StorageAdapter ...@@ -535,16 +561,19 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
@Override @Override
public void close() throws IOException public void close() throws IOException
{ {
Closeables.closeQuietly(timestamps); CloseQuietly.close(timestamps);
for (DictionaryEncodedColumn column : dictionaryColumnCache.values()) {
CloseQuietly.close(column);
}
for (GenericColumn column : genericColumnCache.values()) { for (GenericColumn column : genericColumnCache.values()) {
Closeables.closeQuietly(column); CloseQuietly.close(column);
} }
for (ComplexColumn complexColumn : complexColumnCache.values()) { for (ComplexColumn complexColumn : complexColumnCache.values()) {
Closeables.closeQuietly(complexColumn); CloseQuietly.close(complexColumn);
} }
for (Object column : objectColumnCache.values()) { for (Object column : objectColumnCache.values()) {
if(column instanceof Closeable) { if(column instanceof Closeable) {
Closeables.closeQuietly((Closeable) column); CloseQuietly.close((Closeable) column);
} }
} }
} }
...@@ -620,6 +649,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter ...@@ -620,6 +649,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
*/ */
public Sequence<Cursor> build() public Sequence<Cursor> build()
{ {
final Map<String, DictionaryEncodedColumn> dictionaryColumnCache = Maps.newHashMap();
final Map<String, GenericColumn> genericColumnCache = Maps.newHashMap(); final Map<String, GenericColumn> genericColumnCache = Maps.newHashMap();
final Map<String, ComplexColumn> complexColumnCache = Maps.newHashMap(); final Map<String, ComplexColumn> complexColumnCache = Maps.newHashMap();
final Map<String, Object> objectColumnCache = Maps.newHashMap(); final Map<String, Object> objectColumnCache = Maps.newHashMap();
...@@ -697,41 +727,45 @@ public class QueryableIndexStorageAdapter implements StorageAdapter ...@@ -697,41 +727,45 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
public DimensionSelector makeDimensionSelector(String dimension) public DimensionSelector makeDimensionSelector(String dimension)
{ {
final String dimensionName = dimension.toLowerCase(); final String dimensionName = dimension.toLowerCase();
final Column column = index.getColumn(dimensionName);
if (column == null) { DictionaryEncodedColumn cachedColumn = dictionaryColumnCache.get(dimensionName);
return null; final Column columnDesc = index.getColumn(dimensionName);
if (cachedColumn == null && columnDesc != null) {
cachedColumn = columnDesc.getDictionaryEncoding();
dictionaryColumnCache.put(dimensionName, cachedColumn);
} }
final DictionaryEncodedColumn dict = column.getDictionaryEncoding(); final DictionaryEncodedColumn column = cachedColumn;
if (dict == null) { if (column == null) {
return null; return null;
} else if (column.getCapabilities().hasMultipleValues()) { } else if (columnDesc.getCapabilities().hasMultipleValues()) {
return new DimensionSelector() return new DimensionSelector()
{ {
@Override @Override
public IndexedInts getRow() public IndexedInts getRow()
{ {
return dict.getMultiValueRow(currRow); return column.getMultiValueRow(currRow);
} }
@Override @Override
public int getValueCardinality() public int getValueCardinality()
{ {
return dict.getCardinality(); return column.getCardinality();
} }
@Override @Override
public String lookupName(int id) public String lookupName(int id)
{ {
final String retVal = dict.lookupName(id); final String retVal = column.lookupName(id);
return retVal == null ? "" : retVal; return retVal == null ? "" : retVal;
} }
@Override @Override
public int lookupId(String name) public int lookupId(String name)
{ {
return dict.lookupId(name); return column.lookupId(name);
} }
}; };
} else { } else {
...@@ -740,25 +774,45 @@ public class QueryableIndexStorageAdapter implements StorageAdapter ...@@ -740,25 +774,45 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
@Override @Override
public IndexedInts getRow() public IndexedInts getRow()
{ {
return new SingleIndexedInts(dict.getSingleValueRow(currRow)); // using an anonymous class is faster than creating a class that stores a copy of the value
return new IndexedInts()
{
@Override
public int size()
{
return 1;
}
@Override
public int get(int index)
{
return column.getSingleValueRow(currRow);
}
@Override
public Iterator<Integer> iterator()
{
return Iterators.singletonIterator(column.getSingleValueRow(currRow));
}
};
} }
@Override @Override
public int getValueCardinality() public int getValueCardinality()
{ {
return dict.getCardinality(); return column.getCardinality();
} }
@Override @Override
public String lookupName(int id) public String lookupName(int id)
{ {
return dict.lookupName(id); return column.lookupName(id);
} }
@Override @Override
public int lookupId(String name) public int lookupId(String name)
{ {
return dict.lookupId(name); return column.lookupId(name);
} }
}; };
} }
...@@ -962,16 +1016,19 @@ public class QueryableIndexStorageAdapter implements StorageAdapter ...@@ -962,16 +1016,19 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
@Override @Override
public void close() throws IOException public void close() throws IOException
{ {
Closeables.closeQuietly(timestamps); CloseQuietly.close(timestamps);
for (DictionaryEncodedColumn column : dictionaryColumnCache.values()) {
CloseQuietly.close(column);
}
for (GenericColumn column : genericColumnCache.values()) { for (GenericColumn column : genericColumnCache.values()) {
Closeables.closeQuietly(column); CloseQuietly.close(column);
} }
for (ComplexColumn complexColumn : complexColumnCache.values()) { for (ComplexColumn complexColumn : complexColumnCache.values()) {
Closeables.closeQuietly(complexColumn); CloseQuietly.close(complexColumn);
} }
for (Object column : objectColumnCache.values()) { for (Object column : objectColumnCache.values()) {
if (column instanceof Closeable) { if (column instanceof Closeable) {
Closeables.closeQuietly((Closeable) column); CloseQuietly.close((Closeable) column);
} }
} }
} }
...@@ -979,31 +1036,4 @@ public class QueryableIndexStorageAdapter implements StorageAdapter ...@@ -979,31 +1036,4 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
); );
} }
} }
private static class NullDimensionSelector implements DimensionSelector
{
@Override
public IndexedInts getRow()
{
return new SingleIndexedInts(0);
}
@Override
public int getValueCardinality()
{
return 1;
}
@Override
public String lookupName(int id)
{
return "";
}
@Override
public int lookupId(String name)
{
return 0;
}
}
} }
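Both cursor factories above now keep a `dictionaryColumnCache` alongside the generic, complex, and object column caches, and `close()` releases every cached handle. A minimal sketch of that pattern in isolation (the `HandleCache` and `Opener` names are invented for illustration, not part of this commit):

```java
import java.io.Closeable;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

// Illustrative distillation of the pattern: open each column handle at
// most once per cursor, and close every cached handle with the cursor.
public class HandleCache<T extends Closeable> implements Closeable
{
  public interface Opener<C>
  {
    C open(String name); // may return null if the column does not exist
  }

  private final Map<String, T> cache = new HashMap<String, T>();

  public T get(String name, Opener<T> opener)
  {
    T handle = cache.get(name);
    if (handle == null) {
      handle = opener.open(name);
      if (handle != null) {
        cache.put(name, handle);
      }
    }
    return handle;
  }

  @Override
  public void close() throws IOException
  {
    for (T handle : cache.values()) {
      handle.close();
    }
    cache.clear();
  }
}
```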
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013, 2014 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.segment.column;
public interface ColumnConfig
{
public int columnCacheSizeBytes();
}
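`ColumnConfig` is the new knob threaded through the serde `read()` methods below. A trivial implementation might look like this (`ConstantColumnConfig` is a made-up name; Druid's actual binding comes from runtime properties, as the `IndexIOTest` near the end of this commit suggests):

```java
import io.druid.segment.column.ColumnConfig;

// Hypothetical fixed-size implementation, e.g. for wiring up tests.
public class ConstantColumnConfig implements ColumnConfig
{
  private final int columnCacheSizeBytes;

  public ConstantColumnConfig(int columnCacheSizeBytes)
  {
    this.columnCacheSizeBytes = columnCacheSizeBytes;
  }

  @Override
  public int columnCacheSizeBytes()
  {
    return columnCacheSizeBytes;
  }
}
```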
...@@ -92,14 +92,14 @@ public class ColumnDescriptor ...@@ -92,14 +92,14 @@ public class ColumnDescriptor
} }
} }
public Column read(ByteBuffer buffer) public Column read(ByteBuffer buffer, ColumnConfig columnConfig)
{ {
final ColumnBuilder builder = new ColumnBuilder() final ColumnBuilder builder = new ColumnBuilder()
.setType(valueType) .setType(valueType)
.setHasMultipleValues(hasMultipleValues); .setHasMultipleValues(hasMultipleValues);
for (ColumnPartSerde part : parts) { for (ColumnPartSerde part : parts) {
part.read(buffer, builder); part.read(buffer, builder, columnConfig);
} }
return builder.build(); return builder.build();
......
...@@ -21,9 +21,11 @@ package io.druid.segment.column; ...@@ -21,9 +21,11 @@ package io.druid.segment.column;
import io.druid.segment.data.IndexedInts; import io.druid.segment.data.IndexedInts;
import java.io.Closeable;
/** /**
*/ */
public interface DictionaryEncodedColumn public interface DictionaryEncodedColumn extends Closeable
{ {
public int length(); public int length();
public boolean hasMultipleValues(); public boolean hasMultipleValues();
......
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
package io.druid.segment.column; package io.druid.segment.column;
import com.google.common.base.Supplier; import com.google.common.base.Supplier;
import com.google.common.io.Closeables; import com.metamx.common.guava.CloseQuietly;
/** /**
*/ */
...@@ -68,7 +68,7 @@ class SimpleColumn implements Column ...@@ -68,7 +68,7 @@ class SimpleColumn implements Column
return column.length(); return column.length();
} }
finally { finally {
Closeables.closeQuietly(column); CloseQuietly.close(column);
} }
} }
......
...@@ -24,6 +24,8 @@ import io.druid.segment.data.IndexedInts; ...@@ -24,6 +24,8 @@ import io.druid.segment.data.IndexedInts;
import io.druid.segment.data.VSizeIndexed; import io.druid.segment.data.VSizeIndexed;
import io.druid.segment.data.VSizeIndexedInts; import io.druid.segment.data.VSizeIndexedInts;
import java.io.IOException;
/** /**
*/ */
public class SimpleDictionaryEncodedColumn implements DictionaryEncodedColumn public class SimpleDictionaryEncodedColumn implements DictionaryEncodedColumn
...@@ -84,4 +86,10 @@ public class SimpleDictionaryEncodedColumn implements DictionaryEncodedColumn ...@@ -84,4 +86,10 @@ public class SimpleDictionaryEncodedColumn implements DictionaryEncodedColumn
{ {
return lookups.size(); return lookups.size();
} }
@Override
public void close() throws IOException
{
lookups.close();
}
} }
/* /*
* Druid - a distributed column store. * Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc. * Copyright (C) 2012, 2013, 2014 Metamarkets Group Inc.
* *
* This program is free software; you can redistribute it and/or * This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License * modify it under the terms of the GNU General Public License
...@@ -19,35 +19,8 @@ ...@@ -19,35 +19,8 @@
package io.druid.segment.data; package io.druid.segment.data;
import com.google.common.collect.Iterators;
import java.util.Iterator;
/** /**
*/ * Implementing CacheableObjectStrategy instead of ObjectStrategy indicates

public class SingleIndexedInts implements IndexedInts * that a column scan may cache the results of fromByteBuffer
{ */
private final int value; public interface CacheableObjectStrategy<T> extends ObjectStrategy<T> {}
public SingleIndexedInts(int value) {
this.value = value;
}
@Override
public int size()
{
return 1;
}
@Override
public int get(int index)
{
return value;
}
@Override
public Iterator<Integer> iterator()
{
return Iterators.singletonIterator(value);
}
}
...@@ -25,6 +25,7 @@ import com.google.common.io.Closeables; ...@@ -25,6 +25,7 @@ import com.google.common.io.Closeables;
import com.google.common.primitives.Floats; import com.google.common.primitives.Floats;
import com.google.common.primitives.Ints; import com.google.common.primitives.Ints;
import com.metamx.common.IAE; import com.metamx.common.IAE;
import com.metamx.common.guava.CloseQuietly;
import io.druid.collections.ResourceHolder; import io.druid.collections.ResourceHolder;
import io.druid.collections.StupidResourceHolder; import io.druid.collections.StupidResourceHolder;
...@@ -123,7 +124,7 @@ public class CompressedFloatsIndexedSupplier implements Supplier<IndexedFloats> ...@@ -123,7 +124,7 @@ public class CompressedFloatsIndexedSupplier implements Supplier<IndexedFloats>
private void loadBuffer(int bufferNum) private void loadBuffer(int bufferNum)
{ {
Closeables.closeQuietly(holder); CloseQuietly.close(holder);
holder = baseFloatBuffers.get(bufferNum); holder = baseFloatBuffers.get(bufferNum);
buffer = holder.get(); buffer = holder.get();
currIndex = bufferNum; currIndex = bufferNum;
......
...@@ -106,17 +106,11 @@ public class CompressedFloatsSupplierSerializer ...@@ -106,17 +106,11 @@ public class CompressedFloatsSupplierSerializer
flattener.close(); flattener.close();
OutputStream out = null; try (OutputStream out = consolidatedOut.getOutput()) {
try {
out = consolidatedOut.getOutput();
out.write(CompressedFloatsIndexedSupplier.version); out.write(CompressedFloatsIndexedSupplier.version);
out.write(Ints.toByteArray(numInserted)); out.write(Ints.toByteArray(numInserted));
out.write(Ints.toByteArray(sizePer)); out.write(Ints.toByteArray(sizePer));
ByteStreams.copy(flattener.combineStreams(), out); ByteStreams.copy(flattener.combineStreams(), out);
} }
finally {
Closeables.closeQuietly(out);
}
} }
} }
...@@ -25,6 +25,7 @@ import com.google.common.io.Closeables; ...@@ -25,6 +25,7 @@ import com.google.common.io.Closeables;
import com.google.common.primitives.Ints; import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs; import com.google.common.primitives.Longs;
import com.metamx.common.IAE; import com.metamx.common.IAE;
import com.metamx.common.guava.CloseQuietly;
import io.druid.collections.ResourceHolder; import io.druid.collections.ResourceHolder;
import io.druid.collections.StupidResourceHolder; import io.druid.collections.StupidResourceHolder;
...@@ -122,7 +123,7 @@ public class CompressedLongsIndexedSupplier implements Supplier<IndexedLongs> ...@@ -122,7 +123,7 @@ public class CompressedLongsIndexedSupplier implements Supplier<IndexedLongs>
private void loadBuffer(int bufferNum) private void loadBuffer(int bufferNum)
{ {
Closeables.closeQuietly(holder); CloseQuietly.close(holder);
holder = baseLongBuffers.get(bufferNum); holder = baseLongBuffers.get(bufferNum);
buffer = holder.get(); buffer = holder.get();
currIndex = bufferNum; currIndex = bufferNum;
......
...@@ -100,17 +100,11 @@ public class CompressedLongsSupplierSerializer ...@@ -100,17 +100,11 @@ public class CompressedLongsSupplierSerializer
flattener.close(); flattener.close();
OutputStream out = null; try (OutputStream out = consolidatedOut.getOutput()) {
try {
out = consolidatedOut.getOutput();
out.write(CompressedLongsIndexedSupplier.version); out.write(CompressedLongsIndexedSupplier.version);
out.write(Ints.toByteArray(numInserted)); out.write(Ints.toByteArray(numInserted));
out.write(Ints.toByteArray(sizePer)); out.write(Ints.toByteArray(sizePer));
ByteStreams.copy(flattener.combineStreams(), out); ByteStreams.copy(flattener.combineStreams(), out);
} }
finally {
Closeables.closeQuietly(out);
}
} }
} }
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
package io.druid.segment.data; package io.druid.segment.data;
import com.google.common.base.Throwables; import com.google.common.base.Throwables;
import com.google.common.io.Closeables; import com.metamx.common.guava.CloseQuietly;
import com.ning.compress.lzf.ChunkEncoder; import com.ning.compress.lzf.ChunkEncoder;
import com.ning.compress.lzf.LZFChunk; import com.ning.compress.lzf.LZFChunk;
import com.ning.compress.lzf.LZFDecoder; import com.ning.compress.lzf.LZFDecoder;
...@@ -74,7 +74,7 @@ public class CompressedObjectStrategy<T extends Buffer> implements ObjectStrateg ...@@ -74,7 +74,7 @@ public class CompressedObjectStrategy<T extends Buffer> implements ObjectStrateg
buf.put(outputBytes, 0, numDecompressedBytes); buf.put(outputBytes, 0, numDecompressedBytes);
buf.flip(); buf.flip();
Closeables.closeQuietly(outputBytesHolder); CloseQuietly.close(outputBytesHolder);
return new ResourceHolder<T>() return new ResourceHolder<T>()
{ {
...@@ -105,7 +105,7 @@ public class CompressedObjectStrategy<T extends Buffer> implements ObjectStrateg ...@@ -105,7 +105,7 @@ public class CompressedObjectStrategy<T extends Buffer> implements ObjectStrateg
final ResourceHolder<ChunkEncoder> encoder = CompressedPools.getChunkEncoder(); final ResourceHolder<ChunkEncoder> encoder = CompressedPools.getChunkEncoder();
LZFChunk chunk = encoder.get().encodeChunk(buf.array(), 0, buf.array().length); LZFChunk chunk = encoder.get().encodeChunk(buf.array(), 0, buf.array().length);
Closeables.closeQuietly(encoder); CloseQuietly.close(encoder);
return chunk.getData(); return chunk.getData();
} }
......
...@@ -123,8 +123,9 @@ public class ConciseCompressedIndexedInts implements IndexedInts, Comparable<Con ...@@ -123,8 +123,9 @@ public class ConciseCompressedIndexedInts implements IndexedInts, Comparable<Con
@Override @Override
public ImmutableConciseSet fromByteBuffer(ByteBuffer buffer, int numBytes) public ImmutableConciseSet fromByteBuffer(ByteBuffer buffer, int numBytes)
{ {
buffer.limit(buffer.position() + numBytes); final ByteBuffer readOnlyBuffer = buffer.asReadOnlyBuffer();
return new ImmutableConciseSet(buffer.asReadOnlyBuffer()); readOnlyBuffer.limit(readOnlyBuffer.position() + numBytes);
return new ImmutableConciseSet(readOnlyBuffer);
} }
@Override @Override
......
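The old code called `buffer.limit(...)` directly, clobbering the caller's limit; the new code confines position/limit changes to a read-only duplicate. The same fix recurs in IndexedRTree and IntBufferIndexedInts below. As a standalone sketch (`BufferSlices` is a hypothetical helper, not part of this commit):

```java
import java.nio.ByteBuffer;

// Sketch of the buffer-handling fix applied to these fromByteBuffer
// implementations: set the limit on a private read-only duplicate
// instead of mutating the caller's buffer.
public final class BufferSlices
{
  private BufferSlices() {}

  public static ByteBuffer slice(ByteBuffer buffer, int numBytes)
  {
    final ByteBuffer readOnly = buffer.asReadOnlyBuffer();
    readOnly.limit(readOnly.position() + numBytes);
    return readOnly; // the caller's position, limit and mark are untouched
  }
}
```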
...@@ -20,10 +20,11 @@ ...@@ -20,10 +20,11 @@
package io.druid.segment.data; package io.druid.segment.data;
import com.google.common.base.Charsets; import com.google.common.base.Charsets;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering; import com.google.common.collect.Ordering;
import com.google.common.io.Closeables;
import com.google.common.primitives.Ints; import com.google.common.primitives.Ints;
import com.metamx.common.IAE; import com.metamx.common.IAE;
import com.metamx.common.guava.CloseQuietly;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.Closeable; import java.io.Closeable;
...@@ -32,6 +33,8 @@ import java.nio.ByteBuffer; ...@@ -32,6 +33,8 @@ import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel; import java.nio.channels.WritableByteChannel;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
/** /**
* A generic, flat storage mechanism. Use static methods fromArray() or fromIterable() to construct. If input * A generic, flat storage mechanism. Use static methods fromArray() or fromIterable() to construct. If input
...@@ -46,10 +49,14 @@ import java.util.Iterator; ...@@ -46,10 +49,14 @@ import java.util.Iterator;
* bytes 10-((numElements * 4) + 10): integers representing *end* offsets of byte serialized values * bytes 10-((numElements * 4) + 10): integers representing *end* offsets of byte serialized values
* bytes ((numElements * 4) + 10)-(numBytesUsed + 2): 4-byte integer representing length of value, followed by bytes for value * bytes ((numElements * 4) + 10)-(numBytesUsed + 2): 4-byte integer representing length of value, followed by bytes for value
*/ */
public class GenericIndexed<T> implements Indexed<T> public class GenericIndexed<T> implements Indexed<T>, Closeable
{ {
private static final byte version = 0x1; private static final byte version = 0x1;
public static final int INITIAL_CACHE_CAPACITY = 16384;
private int indexOffset;
public static <T> GenericIndexed<T> fromArray(T[] objects, ObjectStrategy<T> strategy) public static <T> GenericIndexed<T> fromArray(T[] objects, ObjectStrategy<T> strategy)
{ {
return fromIterable(Arrays.asList(objects), strategy); return fromIterable(Arrays.asList(objects), strategy);
...@@ -73,14 +80,14 @@ public class GenericIndexed<T> implements Indexed<T> ...@@ -73,14 +80,14 @@ public class GenericIndexed<T> implements Indexed<T>
allowReverseLookup = false; allowReverseLookup = false;
} }
if (prevVal instanceof Closeable) { if (prevVal instanceof Closeable) {
Closeables.closeQuietly((Closeable) prevVal); CloseQuietly.close((Closeable) prevVal);
} }
prevVal = next; prevVal = next;
++count; ++count;
} }
if (prevVal instanceof Closeable) { if (prevVal instanceof Closeable) {
Closeables.closeQuietly((Closeable) prevVal); CloseQuietly.close((Closeable) prevVal);
} }
ByteArrayOutputStream headerBytes = new ByteArrayOutputStream(4 + (count * 4)); ByteArrayOutputStream headerBytes = new ByteArrayOutputStream(4 + (count * 4));
...@@ -98,7 +105,7 @@ public class GenericIndexed<T> implements Indexed<T> ...@@ -98,7 +105,7 @@ public class GenericIndexed<T> implements Indexed<T>
valueBytes.write(bytes); valueBytes.write(bytes);
if (object instanceof Closeable) { if (object instanceof Closeable) {
Closeables.closeQuietly((Closeable) object); CloseQuietly.close((Closeable) object);
} }
} }
} }
...@@ -114,11 +121,44 @@ public class GenericIndexed<T> implements Indexed<T> ...@@ -114,11 +121,44 @@ public class GenericIndexed<T> implements Indexed<T>
return new GenericIndexed<T>(theBuffer.asReadOnlyBuffer(), strategy, allowReverseLookup); return new GenericIndexed<T>(theBuffer.asReadOnlyBuffer(), strategy, allowReverseLookup);
} }
private static class SizedLRUMap<K, V> extends LinkedHashMap<K, V>
{
final Map<K, Integer> sizes = Maps.newHashMap();
int numBytes = 0;
int maxBytes = 0;
public SizedLRUMap(int initialCapacity, int maxBytes)
{
super(initialCapacity, 0.75f, true);
this.maxBytes = maxBytes;
}
@Override
protected boolean removeEldestEntry(Map.Entry<K, V> eldest)
{
if (numBytes > maxBytes) {
numBytes -= sizes.remove(eldest.getKey());
return true;
}
return false;
}
public V put(K key, V value, int size)
{
numBytes += size;
sizes.put(key, size);
return super.put(key, value);
}
}
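SizedLRUMap leans on two LinkedHashMap features: the `accessOrder` constructor flag, which makes iteration order least-recently-used first, and the `removeEldestEntry` hook, which here evicts the eldest entry when the byte budget is exceeded (at most one eviction per insertion). A self-contained demo of the same rule, with invented keys and sizes:

```java
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

// Standalone illustration of the SizedLRUMap eviction rule above.
public class SizedLruDemo
{
  public static void main(String[] args)
  {
    final int maxBytes = 8;
    final Map<String, Integer> sizes = new HashMap<String, Integer>();
    final int[] numBytes = {0};

    final Map<String, String> cache = new LinkedHashMap<String, String>(16, 0.75f, true)
    {
      @Override
      protected boolean removeEldestEntry(Map.Entry<String, String> eldest)
      {
        if (numBytes[0] > maxBytes) {
          numBytes[0] -= sizes.remove(eldest.getKey());
          return true;
        }
        return false;
      }
    };

    for (String key : new String[]{"a", "b", "c"}) { // each entry "costs" 4 bytes
      numBytes[0] += 4;
      sizes.put(key, 4);
      cache.put(key, key.toUpperCase());
    }
    System.out.println(cache.keySet()); // [b, c] -- "a" was evicted
  }
}
```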
private final ByteBuffer theBuffer; private final ByteBuffer theBuffer;
private final ObjectStrategy<T> strategy; private final ObjectStrategy<T> strategy;
private final boolean allowReverseLookup; private final boolean allowReverseLookup;
private final int size; private final int size;
private final boolean cacheable;
private final ThreadLocal<ByteBuffer> cachedBuffer;
private final ThreadLocal<SizedLRUMap<Integer, T>> cachedValues;
private final int valuesOffset; private final int valuesOffset;
GenericIndexed( GenericIndexed(
...@@ -132,7 +172,45 @@ public class GenericIndexed<T> implements Indexed<T> ...@@ -132,7 +172,45 @@ public class GenericIndexed<T> implements Indexed<T>
this.allowReverseLookup = allowReverseLookup; this.allowReverseLookup = allowReverseLookup;
size = theBuffer.getInt(); size = theBuffer.getInt();
indexOffset = theBuffer.position();
valuesOffset = theBuffer.position() + (size << 2); valuesOffset = theBuffer.position() + (size << 2);
this.cachedBuffer = new ThreadLocal<ByteBuffer>()
{
@Override
protected ByteBuffer initialValue()
{
return theBuffer.asReadOnlyBuffer();
}
};
this.cacheable = false;
this.cachedValues = new ThreadLocal<>();
}
/**
* Creates a copy of the given indexed with the given cache size.
* The resulting copy must be closed to release resources used by the cache
*/
GenericIndexed(GenericIndexed<T> other, final int maxBytes)
{
this.theBuffer = other.theBuffer;
this.strategy = other.strategy;
this.allowReverseLookup = other.allowReverseLookup;
this.size = other.size;
this.indexOffset = other.indexOffset;
this.valuesOffset = other.valuesOffset;
this.cachedBuffer = other.cachedBuffer;
this.cachedValues = new ThreadLocal<SizedLRUMap<Integer, T>>() {
@Override
protected SizedLRUMap<Integer, T> initialValue()
{
return new SizedLRUMap<>(INITIAL_CACHE_CAPACITY, maxBytes);
}
};
this.cacheable = strategy instanceof CacheableObjectStrategy;
} }
@Override @Override
...@@ -157,24 +235,41 @@ public class GenericIndexed<T> implements Indexed<T> ...@@ -157,24 +235,41 @@ public class GenericIndexed<T> implements Indexed<T>
throw new IAE(String.format("Index[%s] >= size[%s]", index, size)); throw new IAE(String.format("Index[%s] >= size[%s]", index, size));
} }
ByteBuffer myBuffer = theBuffer.asReadOnlyBuffer(); if(cacheable) {
int startOffset = 4; final T cached = cachedValues.get().get(index);
int endOffset; if (cached != null) {
return cached;
}
}
// using a cached copy of the buffer instead of making a read-only copy every time get() is called is faster
final ByteBuffer copyBuffer = this.cachedBuffer.get();
final int startOffset;
final int endOffset;
if (index == 0) { if (index == 0) {
endOffset = myBuffer.getInt(); startOffset = 4;
endOffset = copyBuffer.getInt(indexOffset);
} else { } else {
myBuffer.position(myBuffer.position() + ((index - 1) * 4)); copyBuffer.position(indexOffset + ((index - 1) * 4));
startOffset = myBuffer.getInt() + 4; startOffset = copyBuffer.getInt() + 4;
endOffset = myBuffer.getInt(); endOffset = copyBuffer.getInt();
} }
if (startOffset == endOffset) { if (startOffset == endOffset) {
return null; return null;
} }
myBuffer.position(valuesOffset + startOffset); copyBuffer.position(valuesOffset + startOffset);
return strategy.fromByteBuffer(myBuffer, endOffset - startOffset); final int size = endOffset - startOffset;
// fromByteBuffer must not modify the buffer limit
final T value = strategy.fromByteBuffer(copyBuffer, size);
if(cacheable) {
cachedValues.get().put(index, value, size);
}
return value;
} }
@Override @Override
...@@ -220,6 +315,25 @@ public class GenericIndexed<T> implements Indexed<T> ...@@ -220,6 +315,25 @@ public class GenericIndexed<T> implements Indexed<T>
channel.write(theBuffer.asReadOnlyBuffer()); channel.write(theBuffer.asReadOnlyBuffer());
} }
/**
* The returned GenericIndexed must be closed to release the underlying memory
* @param maxBytes maximum number of bytes the per-thread value cache may hold
* @return a copy of this GenericIndexed backed by a value cache
*/
public GenericIndexed<T> withCache(int maxBytes)
{
return new GenericIndexed<>(this, maxBytes);
}
@Override
public void close() throws IOException
{
if(cacheable) {
cachedValues.get().clear();
cachedValues.remove();
}
}
public static <T> GenericIndexed<T> read(ByteBuffer buffer, ObjectStrategy<T> strategy) public static <T> GenericIndexed<T> read(ByteBuffer buffer, ObjectStrategy<T> strategy)
{ {
byte versionFromBuffer = buffer.get(); byte versionFromBuffer = buffer.get();
...@@ -241,7 +355,7 @@ public class GenericIndexed<T> implements Indexed<T> ...@@ -241,7 +355,7 @@ public class GenericIndexed<T> implements Indexed<T>
throw new IAE("Unknown version[%s]", versionFromBuffer); throw new IAE("Unknown version[%s]", versionFromBuffer);
} }
public static ObjectStrategy<String> stringStrategy = new ObjectStrategy<String>() public static ObjectStrategy<String> stringStrategy = new CacheableObjectStrategy<String>()
{ {
@Override @Override
public Class<? extends String> getClazz() public Class<? extends String> getClazz()
...@@ -250,9 +364,9 @@ public class GenericIndexed<T> implements Indexed<T> ...@@ -250,9 +364,9 @@ public class GenericIndexed<T> implements Indexed<T>
} }
@Override @Override
public String fromByteBuffer(ByteBuffer buffer, int numBytes) public String fromByteBuffer(final ByteBuffer buffer, final int numBytes)
{ {
byte[] bytes = new byte[numBytes]; final byte[] bytes = new byte[numBytes];
buffer.get(bytes); buffer.get(bytes);
return new String(bytes, Charsets.UTF_8); return new String(bytes, Charsets.UTF_8);
} }
......
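Putting the new GenericIndexed pieces together, a hedged usage sketch of `withCache()` and the Closeable contract it introduces (the data is made up; `stringStrategy` is cacheable because it now implements CacheableObjectStrategy):

```java
import java.io.IOException;

import io.druid.segment.data.GenericIndexed;

// Illustrative lifecycle of the new per-thread value cache.
public class WithCacheExample
{
  public static void main(String[] args) throws IOException
  {
    final GenericIndexed<String> dictionary = GenericIndexed.fromArray(
        new String[]{"apple", "banana", "cherry"},
        GenericIndexed.stringStrategy
    );

    final GenericIndexed<String> cached = dictionary.withCache(1024 * 1024);
    try {
      cached.get(1); // decodes "banana" and stores it in the thread-local LRU
      cached.get(1); // second lookup is served from the cache
    }
    finally {
      cached.close(); // required: clears and removes the thread-local cache
    }
  }
}
```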
...@@ -22,7 +22,7 @@ package io.druid.segment.data; ...@@ -22,7 +22,7 @@ package io.druid.segment.data;
import com.google.common.base.Function; import com.google.common.base.Function;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.io.Closeables; import com.metamx.common.guava.CloseQuietly;
import io.druid.collections.ResourceHolder; import io.druid.collections.ResourceHolder;
import io.druid.collections.StupidResourceHolder; import io.druid.collections.StupidResourceHolder;
...@@ -153,7 +153,7 @@ public class InMemoryCompressedFloats implements IndexedFloats ...@@ -153,7 +153,7 @@ public class InMemoryCompressedFloats implements IndexedFloats
private void loadBuffer(int bufferNum) private void loadBuffer(int bufferNum)
{ {
loadBuffer = null; loadBuffer = null;
Closeables.closeQuietly(holder); CloseQuietly.close(holder);
final byte[] compressedBytes = compressedBuffers.get(bufferNum); final byte[] compressedBytes = compressedBuffers.get(bufferNum);
holder = strategy.fromByteBuffer(ByteBuffer.wrap(compressedBytes), compressedBytes.length); holder = strategy.fromByteBuffer(ByteBuffer.wrap(compressedBytes), compressedBytes.length);
loadBuffer = holder.get(); loadBuffer = holder.get();
...@@ -191,6 +191,6 @@ public class InMemoryCompressedFloats implements IndexedFloats ...@@ -191,6 +191,6 @@ public class InMemoryCompressedFloats implements IndexedFloats
@Override @Override
public void close() throws IOException public void close() throws IOException
{ {
Closeables.closeQuietly(holder); CloseQuietly.close(holder);
} }
} }
...@@ -23,6 +23,7 @@ import com.google.common.base.Function; ...@@ -23,6 +23,7 @@ import com.google.common.base.Function;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.io.Closeables; import com.google.common.io.Closeables;
import com.metamx.common.guava.CloseQuietly;
import io.druid.collections.ResourceHolder; import io.druid.collections.ResourceHolder;
import io.druid.collections.StupidResourceHolder; import io.druid.collections.StupidResourceHolder;
...@@ -163,7 +164,7 @@ public class InMemoryCompressedLongs implements IndexedLongs ...@@ -163,7 +164,7 @@ public class InMemoryCompressedLongs implements IndexedLongs
private void loadBuffer(int bufferNum) private void loadBuffer(int bufferNum)
{ {
loadBuffer = null; loadBuffer = null;
Closeables.closeQuietly(holder); CloseQuietly.close(holder);
final byte[] compressedBytes = compressedBuffers.get(bufferNum); final byte[] compressedBytes = compressedBuffers.get(bufferNum);
holder = strategy.fromByteBuffer(ByteBuffer.wrap(compressedBytes), compressedBytes.length); holder = strategy.fromByteBuffer(ByteBuffer.wrap(compressedBytes), compressedBytes.length);
loadBuffer = holder.get(); loadBuffer = holder.get();
......
...@@ -81,8 +81,10 @@ public class IndexedRTree implements Comparable<IndexedRTree> ...@@ -81,8 +81,10 @@ public class IndexedRTree implements Comparable<IndexedRTree>
@Override @Override
public ImmutableRTree fromByteBuffer(ByteBuffer buffer, int numBytes) public ImmutableRTree fromByteBuffer(ByteBuffer buffer, int numBytes)
{ {
buffer.limit(buffer.position() + numBytes);
return new ImmutableRTree(buffer.asReadOnlyBuffer()); final ByteBuffer readOnlyBuffer = buffer.asReadOnlyBuffer();
readOnlyBuffer.limit(readOnlyBuffer.position() + numBytes);
return new ImmutableRTree(readOnlyBuffer);
} }
@Override @Override
......
...@@ -101,8 +101,9 @@ public class IntBufferIndexedInts implements IndexedInts, Comparable<IntBufferIn ...@@ -101,8 +101,9 @@ public class IntBufferIndexedInts implements IndexedInts, Comparable<IntBufferIn
@Override @Override
public IntBufferIndexedInts fromByteBuffer(ByteBuffer buffer, int numBytes) public IntBufferIndexedInts fromByteBuffer(ByteBuffer buffer, int numBytes)
{ {
buffer.limit(buffer.position() + numBytes); final ByteBuffer readOnlyBuffer = buffer.asReadOnlyBuffer();
return new IntBufferIndexedInts(buffer); readOnlyBuffer.limit(readOnlyBuffer.position() + numBytes);
return new IntBufferIndexedInts(readOnlyBuffer);
} }
@Override @Override
......
...@@ -22,11 +22,20 @@ package io.druid.segment.data; ...@@ -22,11 +22,20 @@ package io.druid.segment.data;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.Comparator; import java.util.Comparator;
/**
*/
public interface ObjectStrategy<T> extends Comparator<T> public interface ObjectStrategy<T> extends Comparator<T>
{ {
public Class<? extends T> getClazz(); public Class<? extends T> getClazz();
/**
* Convert values from their underlying byte representation.
*
* Implementations of this method must not change the given buffer's mark or limit, but may modify its position.
* Use buffer.asReadOnlyBuffer() or buffer.duplicate() if the mark or limit needs to be set.
*
* @param buffer buffer to read value from
* @param numBytes number of bytes used to store the value, starting at buffer.position()
* @return the value read from the buffer
*/
public T fromByteBuffer(ByteBuffer buffer, int numBytes); public T fromByteBuffer(ByteBuffer buffer, int numBytes);
public byte[] toBytes(T val); public byte[] toBytes(T val);
} }
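A hedged example of the contract spelled out above (`IntObjectStrategy` is invented for illustration): it advances the buffer position but never touches the mark or limit. Implementing the CacheableObjectStrategy marker instead of plain ObjectStrategy would additionally let GenericIndexed cache the decoded values.

```java
import java.nio.ByteBuffer;

import com.google.common.primitives.Ints;

import io.druid.segment.data.ObjectStrategy;

// Hypothetical strategy honoring the documented fromByteBuffer contract.
public class IntObjectStrategy implements ObjectStrategy<Integer>
{
  @Override
  public Class<? extends Integer> getClazz()
  {
    return Integer.class;
  }

  @Override
  public Integer fromByteBuffer(ByteBuffer buffer, int numBytes)
  {
    return buffer.getInt(); // reads 4 bytes at position; mark/limit untouched
  }

  @Override
  public byte[] toBytes(Integer val)
  {
    return Ints.toByteArray(val);
  }

  @Override
  public int compare(Integer o1, Integer o2)
  {
    return o1.compareTo(o2);
  }
}
```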
...@@ -23,12 +23,14 @@ import com.google.common.base.Function; ...@@ -23,12 +23,14 @@ import com.google.common.base.Function;
import com.google.common.base.Joiner; import com.google.common.base.Joiner;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import com.google.common.base.Splitter; import com.google.common.base.Splitter;
import com.google.common.base.Throwables;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.google.common.primitives.Floats; import com.google.common.primitives.Floats;
import com.metamx.common.ISE; import com.metamx.common.ISE;
import com.metamx.common.parsers.ParseException;
import io.druid.data.input.InputRow; import io.druid.data.input.InputRow;
import io.druid.data.input.impl.SpatialDimensionSchema; import io.druid.data.input.impl.SpatialDimensionSchema;
...@@ -134,14 +136,20 @@ public class SpatialDimensionRowFormatter ...@@ -134,14 +136,20 @@ public class SpatialDimensionRowFormatter
} }
@Override @Override
public Object getRaw(String dimension) { public Object getRaw(String dimension)
{
return row.getRaw(dimension); return row.getRaw(dimension);
} }
@Override @Override
public float getFloatMetric(String metric) public float getFloatMetric(String metric)
{ {
return row.getFloatMetric(metric); try {
return row.getFloatMetric(metric);
}
catch (ParseException e) {
throw Throwables.propagate(e);
}
} }
@Override @Override
......
...@@ -22,6 +22,7 @@ package io.druid.segment.serde; ...@@ -22,6 +22,7 @@ package io.druid.segment.serde;
import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.druid.segment.column.ColumnBuilder; import io.druid.segment.column.ColumnBuilder;
import io.druid.segment.column.ColumnConfig;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
...@@ -40,5 +41,5 @@ public interface ColumnPartSerde ...@@ -40,5 +41,5 @@ public interface ColumnPartSerde
{ {
public long numBytes(); public long numBytes();
public void write(WritableByteChannel channel) throws IOException; public void write(WritableByteChannel channel) throws IOException;
public ColumnPartSerde read(ByteBuffer buffer, ColumnBuilder builder); public ColumnPartSerde read(ByteBuffer buffer, ColumnBuilder builder, ColumnConfig columnConfig);
} }
...@@ -22,6 +22,7 @@ package io.druid.segment.serde; ...@@ -22,6 +22,7 @@ package io.druid.segment.serde;
import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import io.druid.segment.column.ColumnBuilder; import io.druid.segment.column.ColumnBuilder;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.data.GenericIndexed; import io.druid.segment.data.GenericIndexed;
import java.io.IOException; import java.io.IOException;
...@@ -71,7 +72,7 @@ public class ComplexColumnPartSerde implements ColumnPartSerde ...@@ -71,7 +72,7 @@ public class ComplexColumnPartSerde implements ColumnPartSerde
} }
@Override @Override
public ColumnPartSerde read(ByteBuffer buffer, ColumnBuilder builder) public ColumnPartSerde read(ByteBuffer buffer, ColumnBuilder builder, ColumnConfig columnConfig)
{ {
return serde == null ? this : serde.deserializeColumn(buffer, builder); return serde == null ? this : serde.deserializeColumn(buffer, builder);
} }
......
...@@ -25,6 +25,7 @@ import com.google.common.primitives.Ints; ...@@ -25,6 +25,7 @@ import com.google.common.primitives.Ints;
import com.metamx.collections.spatial.ImmutableRTree; import com.metamx.collections.spatial.ImmutableRTree;
import com.metamx.common.IAE; import com.metamx.common.IAE;
import io.druid.segment.column.ColumnBuilder; import io.druid.segment.column.ColumnBuilder;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.column.ValueType; import io.druid.segment.column.ValueType;
import io.druid.segment.data.ByteBufferSerializer; import io.druid.segment.data.ByteBufferSerializer;
import io.druid.segment.data.ConciseCompressedIndexedInts; import io.druid.segment.data.ConciseCompressedIndexedInts;
...@@ -43,7 +44,9 @@ import java.nio.channels.WritableByteChannel; ...@@ -43,7 +44,9 @@ import java.nio.channels.WritableByteChannel;
public class DictionaryEncodedColumnPartSerde implements ColumnPartSerde public class DictionaryEncodedColumnPartSerde implements ColumnPartSerde
{ {
@JsonCreator @JsonCreator
public static DictionaryEncodedColumnPartSerde createDeserializer(boolean singleValued) public static DictionaryEncodedColumnPartSerde createDeserializer(
boolean singleValued
)
{ {
return new DictionaryEncodedColumnPartSerde(); return new DictionaryEncodedColumnPartSerde();
} }
...@@ -125,7 +128,7 @@ public class DictionaryEncodedColumnPartSerde implements ColumnPartSerde ...@@ -125,7 +128,7 @@ public class DictionaryEncodedColumnPartSerde implements ColumnPartSerde
} }
@Override @Override
public ColumnPartSerde read(ByteBuffer buffer, ColumnBuilder builder) public ColumnPartSerde read(ByteBuffer buffer, ColumnBuilder builder, ColumnConfig columnConfig)
{ {
final boolean isSingleValued = buffer.get() == 0x0; final boolean isSingleValued = buffer.get() == 0x0;
final GenericIndexed<String> dictionary = GenericIndexed.read(buffer, GenericIndexed.stringStrategy); final GenericIndexed<String> dictionary = GenericIndexed.read(buffer, GenericIndexed.stringStrategy);
...@@ -138,12 +141,12 @@ public class DictionaryEncodedColumnPartSerde implements ColumnPartSerde ...@@ -138,12 +141,12 @@ public class DictionaryEncodedColumnPartSerde implements ColumnPartSerde
singleValuedColumn = VSizeIndexedInts.readFromByteBuffer(buffer); singleValuedColumn = VSizeIndexedInts.readFromByteBuffer(buffer);
multiValuedColumn = null; multiValuedColumn = null;
builder.setHasMultipleValues(false) builder.setHasMultipleValues(false)
.setDictionaryEncodedColumn(new DictionaryEncodedColumnSupplier(dictionary, singleValuedColumn, null)); .setDictionaryEncodedColumn(new DictionaryEncodedColumnSupplier(dictionary, singleValuedColumn, null, columnConfig.columnCacheSizeBytes()));
} else { } else {
singleValuedColumn = null; singleValuedColumn = null;
multiValuedColumn = VSizeIndexed.readFromByteBuffer(buffer); multiValuedColumn = VSizeIndexed.readFromByteBuffer(buffer);
builder.setHasMultipleValues(true) builder.setHasMultipleValues(true)
.setDictionaryEncodedColumn(new DictionaryEncodedColumnSupplier(dictionary, null, multiValuedColumn)); .setDictionaryEncodedColumn(new DictionaryEncodedColumnSupplier(dictionary, null, multiValuedColumn, columnConfig.columnCacheSizeBytes()));
} }
GenericIndexed<ImmutableConciseSet> bitmaps = GenericIndexed.read( GenericIndexed<ImmutableConciseSet> bitmaps = GenericIndexed.read(
......
...@@ -33,21 +33,28 @@ public class DictionaryEncodedColumnSupplier implements Supplier<DictionaryEncod ...@@ -33,21 +33,28 @@ public class DictionaryEncodedColumnSupplier implements Supplier<DictionaryEncod
private final GenericIndexed<String> dictionary; private final GenericIndexed<String> dictionary;
private final VSizeIndexedInts singleValuedColumn; private final VSizeIndexedInts singleValuedColumn;
private final VSizeIndexed multiValuedColumn; private final VSizeIndexed multiValuedColumn;
private final int lookupCacheSize;
public DictionaryEncodedColumnSupplier( public DictionaryEncodedColumnSupplier(
GenericIndexed<String> dictionary, GenericIndexed<String> dictionary,
VSizeIndexedInts singleValuedColumn, VSizeIndexedInts singleValuedColumn,
VSizeIndexed multiValuedColumn VSizeIndexed multiValuedColumn,
int lookupCacheSize
) )
{ {
this.dictionary = dictionary; this.dictionary = dictionary;
this.singleValuedColumn = singleValuedColumn; this.singleValuedColumn = singleValuedColumn;
this.multiValuedColumn = multiValuedColumn; this.multiValuedColumn = multiValuedColumn;
this.lookupCacheSize = lookupCacheSize;
} }
@Override @Override
public DictionaryEncodedColumn get() public DictionaryEncodedColumn get()
{ {
return new SimpleDictionaryEncodedColumn(singleValuedColumn, multiValuedColumn, dictionary); return new SimpleDictionaryEncodedColumn(
singleValuedColumn,
multiValuedColumn,
lookupCacheSize > 0 ? dictionary.withCache(lookupCacheSize) : dictionary
);
} }
} }
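The supplier now decides per instance whether lookups go through a cached dictionary. A usage sketch under the signatures shown in this hunk (`SupplierExample` is illustrative, not part of the commit):

```java
import io.druid.segment.column.DictionaryEncodedColumn;
import io.druid.segment.data.GenericIndexed;
import io.druid.segment.data.VSizeIndexedInts;
import io.druid.segment.serde.DictionaryEncodedColumnSupplier;

// Hypothetical wiring: a single-valued dictionary column with a 1MB
// per-thread lookup cache; passing 0 keeps the uncached dictionary.
public class SupplierExample
{
  public static DictionaryEncodedColumn open(
      GenericIndexed<String> dictionary,
      VSizeIndexedInts singleValuedColumn
  )
  {
    return new DictionaryEncodedColumnSupplier(
        dictionary,
        singleValuedColumn,
        null,             // no multi-valued column
        1024 * 1024       // lookupCacheSize in bytes
    ).get();              // the returned column should be closed after use
  }
}
```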
...@@ -22,6 +22,7 @@ package io.druid.segment.serde; ...@@ -22,6 +22,7 @@ package io.druid.segment.serde;
import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import io.druid.segment.column.ColumnBuilder; import io.druid.segment.column.ColumnBuilder;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.column.ValueType; import io.druid.segment.column.ValueType;
import io.druid.segment.data.CompressedFloatsIndexedSupplier; import io.druid.segment.data.CompressedFloatsIndexedSupplier;
...@@ -70,7 +71,7 @@ public class FloatGenericColumnPartSerde implements ColumnPartSerde ...@@ -70,7 +71,7 @@ public class FloatGenericColumnPartSerde implements ColumnPartSerde
} }
@Override @Override
public ColumnPartSerde read(ByteBuffer buffer, ColumnBuilder builder) public ColumnPartSerde read(ByteBuffer buffer, ColumnBuilder builder, ColumnConfig columnConfig)
{ {
final CompressedFloatsIndexedSupplier column = CompressedFloatsIndexedSupplier.fromByteBuffer(buffer, byteOrder); final CompressedFloatsIndexedSupplier column = CompressedFloatsIndexedSupplier.fromByteBuffer(buffer, byteOrder);
......
...@@ -22,6 +22,7 @@ package io.druid.segment.serde; ...@@ -22,6 +22,7 @@ package io.druid.segment.serde;
import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import io.druid.segment.column.ColumnBuilder; import io.druid.segment.column.ColumnBuilder;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.column.ValueType; import io.druid.segment.column.ValueType;
import io.druid.segment.data.CompressedLongsIndexedSupplier; import io.druid.segment.data.CompressedLongsIndexedSupplier;
...@@ -70,7 +71,7 @@ public class LongGenericColumnPartSerde implements ColumnPartSerde ...@@ -70,7 +71,7 @@ public class LongGenericColumnPartSerde implements ColumnPartSerde
} }
@Override @Override
public ColumnPartSerde read(ByteBuffer buffer, ColumnBuilder builder) public ColumnPartSerde read(ByteBuffer buffer, ColumnBuilder builder, ColumnConfig columnConfig)
{ {
final CompressedLongsIndexedSupplier column = CompressedLongsIndexedSupplier.fromByteBuffer(buffer, byteOrder); final CompressedLongsIndexedSupplier column = CompressedLongsIndexedSupplier.fromByteBuffer(buffer, byteOrder);
......
...@@ -65,6 +65,7 @@ public class ChainedExecutionQueryRunnerTest ...@@ -65,6 +65,7 @@ public class ChainedExecutionQueryRunnerTest
); );
final CountDownLatch queriesStarted = new CountDownLatch(2); final CountDownLatch queriesStarted = new CountDownLatch(2);
final CountDownLatch queriesInterrupted = new CountDownLatch(2);
final CountDownLatch queryIsRegistered = new CountDownLatch(1); final CountDownLatch queryIsRegistered = new CountDownLatch(1);
Capture<ListenableFuture> capturedFuture = new Capture<>(); Capture<ListenableFuture> capturedFuture = new Capture<>();
...@@ -86,9 +87,9 @@ public class ChainedExecutionQueryRunnerTest ...@@ -86,9 +87,9 @@ public class ChainedExecutionQueryRunnerTest
EasyMock.replay(watcher); EasyMock.replay(watcher);
DyingQueryRunner runner1 = new DyingQueryRunner(queriesStarted); DyingQueryRunner runner1 = new DyingQueryRunner(queriesStarted, queriesInterrupted);
DyingQueryRunner runner2 = new DyingQueryRunner(queriesStarted); DyingQueryRunner runner2 = new DyingQueryRunner(queriesStarted, queriesInterrupted);
DyingQueryRunner runner3 = new DyingQueryRunner(queriesStarted); DyingQueryRunner runner3 = new DyingQueryRunner(queriesStarted, queriesInterrupted);
ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>( ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>(
exec, exec,
Ordering.<Integer>natural(), Ordering.<Integer>natural(),
...@@ -135,6 +136,7 @@ public class ChainedExecutionQueryRunnerTest ...@@ -135,6 +136,7 @@ public class ChainedExecutionQueryRunnerTest
Assert.assertTrue(e.getCause() instanceof QueryInterruptedException); Assert.assertTrue(e.getCause() instanceof QueryInterruptedException);
cause = (QueryInterruptedException)e.getCause(); cause = (QueryInterruptedException)e.getCause();
} }
Assert.assertTrue(queriesInterrupted.await(500, TimeUnit.MILLISECONDS));
Assert.assertNotNull(cause); Assert.assertNotNull(cause);
Assert.assertTrue(future.isCancelled()); Assert.assertTrue(future.isCancelled());
Assert.assertTrue(runner1.hasStarted); Assert.assertTrue(runner1.hasStarted);
...@@ -170,6 +172,7 @@ public class ChainedExecutionQueryRunnerTest ...@@ -170,6 +172,7 @@ public class ChainedExecutionQueryRunnerTest
); );
final CountDownLatch queriesStarted = new CountDownLatch(2); final CountDownLatch queriesStarted = new CountDownLatch(2);
final CountDownLatch queriesInterrupted = new CountDownLatch(2);
final CountDownLatch queryIsRegistered = new CountDownLatch(1); final CountDownLatch queryIsRegistered = new CountDownLatch(1);
Capture<ListenableFuture> capturedFuture = new Capture<>(); Capture<ListenableFuture> capturedFuture = new Capture<>();
...@@ -191,9 +194,9 @@ public class ChainedExecutionQueryRunnerTest ...@@ -191,9 +194,9 @@ public class ChainedExecutionQueryRunnerTest
EasyMock.replay(watcher); EasyMock.replay(watcher);
DyingQueryRunner runner1 = new DyingQueryRunner(queriesStarted); DyingQueryRunner runner1 = new DyingQueryRunner(queriesStarted, queriesInterrupted);
DyingQueryRunner runner2 = new DyingQueryRunner(queriesStarted); DyingQueryRunner runner2 = new DyingQueryRunner(queriesStarted, queriesInterrupted);
DyingQueryRunner runner3 = new DyingQueryRunner(queriesStarted); DyingQueryRunner runner3 = new DyingQueryRunner(queriesStarted, queriesInterrupted);
ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>( ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>(
exec, exec,
Ordering.<Integer>natural(), Ordering.<Integer>natural(),
...@@ -210,7 +213,7 @@ public class ChainedExecutionQueryRunnerTest ...@@ -210,7 +213,7 @@ public class ChainedExecutionQueryRunnerTest
.dataSource("test") .dataSource("test")
.intervals("2014/2015") .intervals("2014/2015")
.aggregators(Lists.<AggregatorFactory>newArrayList(new CountAggregatorFactory("count"))) .aggregators(Lists.<AggregatorFactory>newArrayList(new CountAggregatorFactory("count")))
.context(ImmutableMap.<String, Object>of("timeout", (100), "queryId", "test")) .context(ImmutableMap.<String, Object>of("timeout", 100, "queryId", "test"))
.build() .build()
); );
...@@ -229,10 +232,10 @@ public class ChainedExecutionQueryRunnerTest ...@@ -229,10 +232,10 @@ public class ChainedExecutionQueryRunnerTest
Assert.assertTrue(queryIsRegistered.await(1, TimeUnit.SECONDS)); Assert.assertTrue(queryIsRegistered.await(1, TimeUnit.SECONDS));
Assert.assertTrue(queriesStarted.await(1, TimeUnit.SECONDS)); Assert.assertTrue(queriesStarted.await(1, TimeUnit.SECONDS));
// cancel the query
Assert.assertTrue(capturedFuture.hasCaptured()); Assert.assertTrue(capturedFuture.hasCaptured());
ListenableFuture future = capturedFuture.getValue(); ListenableFuture future = capturedFuture.getValue();
// wait for query to time out
QueryInterruptedException cause = null; QueryInterruptedException cause = null;
try { try {
resultFuture.get(); resultFuture.get();
...@@ -241,6 +244,7 @@ public class ChainedExecutionQueryRunnerTest ...@@ -241,6 +244,7 @@ public class ChainedExecutionQueryRunnerTest
Assert.assertEquals("Query timeout", e.getCause().getMessage()); Assert.assertEquals("Query timeout", e.getCause().getMessage());
cause = (QueryInterruptedException)e.getCause(); cause = (QueryInterruptedException)e.getCause();
} }
Assert.assertTrue(queriesInterrupted.await(500, TimeUnit.MILLISECONDS));
Assert.assertNotNull(cause); Assert.assertNotNull(cause);
Assert.assertTrue(future.isCancelled()); Assert.assertTrue(future.isCancelled());
Assert.assertTrue(runner1.hasStarted); Assert.assertTrue(runner1.hasStarted);
...@@ -257,23 +261,27 @@ public class ChainedExecutionQueryRunnerTest ...@@ -257,23 +261,27 @@ public class ChainedExecutionQueryRunnerTest
private static class DyingQueryRunner implements QueryRunner<Integer> private static class DyingQueryRunner implements QueryRunner<Integer>
{ {
private final CountDownLatch latch; private final CountDownLatch start;
private final CountDownLatch stop;
private boolean hasStarted = false; private boolean hasStarted = false;
private boolean hasCompleted = false; private boolean hasCompleted = false;
private boolean interrupted = false; private boolean interrupted = false;
public DyingQueryRunner(CountDownLatch latch) public DyingQueryRunner(CountDownLatch start, CountDownLatch stop)
{ {
this.latch = latch; this.start = start;
this.stop = stop;
} }
@Override @Override
public Sequence<Integer> run(Query<Integer> query) public Sequence<Integer> run(Query<Integer> query)
{ {
hasStarted = true; hasStarted = true;
latch.countDown(); start.countDown();
if (Thread.interrupted()) { if (Thread.interrupted()) {
interrupted = true; interrupted = true;
stop.countDown();
throw new QueryInterruptedException("I got killed"); throw new QueryInterruptedException("I got killed");
} }
...@@ -283,10 +291,12 @@ public class ChainedExecutionQueryRunnerTest ...@@ -283,10 +291,12 @@ public class ChainedExecutionQueryRunnerTest
} }
catch (InterruptedException e) { catch (InterruptedException e) {
interrupted = true; interrupted = true;
stop.countDown();
throw new QueryInterruptedException("I got killed"); throw new QueryInterruptedException("I got killed");
} }
hasCompleted = true; hasCompleted = true;
stop.countDown();
return Sequences.simple(Lists.newArrayList(123)); return Sequences.simple(Lists.newArrayList(123));
} }
} }
......
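The test changes above replace a single latch with a start/stop pair, so the test can assert that all runners were actually interrupted instead of sleeping and hoping. A minimal, self-contained demo of why that makes the cancellation assertion deterministic (`LatchPatternDemo` is invented for illustration):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

// Workers count down one latch when they begin and another when they
// finish or are interrupted; the driver waits on both.
public class LatchPatternDemo
{
  public static void main(String[] args) throws InterruptedException
  {
    final CountDownLatch started = new CountDownLatch(2);
    final CountDownLatch stopped = new CountDownLatch(2);

    final List<Thread> workers = new ArrayList<Thread>();
    for (int i = 0; i < 2; i++) {
      final Thread worker = new Thread(new Runnable()
      {
        @Override
        public void run()
        {
          started.countDown();
          try {
            Thread.sleep(60000); // stand-in for a long-running query
          }
          catch (InterruptedException e) {
            // interruption is the expected cancellation path
          }
          stopped.countDown();
        }
      });
      worker.start();
      workers.add(worker);
    }

    started.await(); // both workers are running
    for (Thread worker : workers) {
      worker.interrupt(); // simulate query cancellation
    }
    System.out.println(stopped.await(500, TimeUnit.MILLISECONDS)); // true
  }
}
```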
...@@ -23,6 +23,7 @@ import com.google.common.collect.Iterables; ...@@ -23,6 +23,7 @@ import com.google.common.collect.Iterables;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularity;
import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactory;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.IncrementalIndexAdapter; import io.druid.segment.incremental.IncrementalIndexAdapter;
import org.joda.time.Interval; import org.joda.time.Interval;
......
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013, 2014 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.segment;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
public class IndexIOTest
{
@Test @Ignore // this test depends on static fields, so it has to be tested independently
public void testInjector() throws Exception
{
System.setProperty("druid.processing.columnCache.sizeBytes", "1234");
Assert.assertEquals(1234, IndexIO.columnConfig.columnCacheSizeBytes());
}
}
...@@ -26,6 +26,7 @@ import com.google.common.io.Files; ...@@ -26,6 +26,7 @@ import com.google.common.io.Files;
import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.MapBasedInputRow;
import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularity;
import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactory;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.data.IncrementalIndexTest; import io.druid.segment.data.IncrementalIndexTest;
import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndex;
import junit.framework.Assert; import junit.framework.Assert;
......
...@@ -35,6 +35,7 @@ import io.druid.query.aggregation.AggregatorFactory; ...@@ -35,6 +35,7 @@ import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.serde.ComplexMetrics; import io.druid.segment.serde.ComplexMetrics;
import org.joda.time.DateTime; import org.joda.time.DateTime;
...@@ -180,6 +181,7 @@ public class TestIndex ...@@ -180,6 +181,7 @@ public class TestIndex
new TimestampSpec("ts", "iso"), new TimestampSpec("ts", "iso"),
new DimensionsSpec(Arrays.asList(DIMENSIONS), null, null), new DimensionsSpec(Arrays.asList(DIMENSIONS), null, null),
"\t", "\t",
"\u0001",
Arrays.asList(COLUMNS) Arrays.asList(COLUMNS)
), ),
null, null, null, null null, null, null, null
......
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
package io.druid.segment.data; package io.druid.segment.data;
import com.google.common.io.Closeables;
import com.google.common.primitives.Longs; import com.google.common.primitives.Longs;
import com.metamx.common.guava.CloseQuietly;
import org.junit.After; import org.junit.After;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
...@@ -47,7 +47,7 @@ public class CompressedLongsIndexedSupplierTest ...@@ -47,7 +47,7 @@ public class CompressedLongsIndexedSupplierTest
@Before @Before
public void setUp() throws Exception public void setUp() throws Exception
{ {
Closeables.closeQuietly(indexed); CloseQuietly.close(indexed);
indexed = null; indexed = null;
supplier = null; supplier = null;
vals = null; vals = null;
...@@ -56,7 +56,7 @@ public class CompressedLongsIndexedSupplierTest ...@@ -56,7 +56,7 @@ public class CompressedLongsIndexedSupplierTest
@After @After
public void tearDown() throws Exception public void tearDown() throws Exception
{ {
Closeables.closeQuietly(indexed); CloseQuietly.close(indexed);
} }
private void setupSimple() private void setupSimple()
...@@ -247,7 +247,7 @@ public class CompressedLongsIndexedSupplierTest ...@@ -247,7 +247,7 @@ public class CompressedLongsIndexedSupplierTest
stopLatch.await(); stopLatch.await();
} }
finally { finally {
Closeables.closeQuietly(indexed2); CloseQuietly.close(indexed2);
} }
if (failureHappened.get()) { if (failureHappened.get()) {
......