Skip to content

Commit

Permalink
One granularity (apache#3850)
Browse files Browse the repository at this point in the history
* Refactor Segment Granularity

* Beginning of one granularity

* Copy the fix for custom periods in segment-granularity over here.

* Remove the custom serialization for now.

* Compilation cleanup

* Reformat code

* Fixing unit tests

* Unify to use a single iterable

* Backward compatibility for rolling upgrade

* Minor check style. Cosmetic changes.

* Rename length and millis to duration

* CR feedback

* Minor changes.
  • Loading branch information
praveev authored and leventov committed Feb 25, 2017
1 parent 58b704c commit c3bf401
Show file tree
Hide file tree
Showing 214 changed files with 2,597 additions and 3,294 deletions.
6 changes: 3 additions & 3 deletions api/src/main/java/io/druid/timeline/DataSegment.java
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@

import io.druid.jackson.CommaListJoinDeserializer;
import io.druid.jackson.CommaListJoinSerializer;
import io.druid.java.util.common.Granularity;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.query.SegmentDescriptor;
import io.druid.timeline.partition.NoneShardSpec;
import io.druid.timeline.partition.ShardSpec;
Expand Down Expand Up @@ -288,8 +288,8 @@ public int compare(DataSegment lhs, DataSegment rhs)
{
int retVal;

DateTime lhsMonth = Granularity.MONTH.truncate(lhs.getInterval().getStart());
DateTime rhsMonth = Granularity.MONTH.truncate(rhs.getInterval().getStart());
DateTime lhsMonth = Granularity.MONTH.bucketStart(lhs.getInterval().getStart());
DateTime rhsMonth = Granularity.MONTH.bucketStart(rhs.getInterval().getStart());

retVal = lhsMonth.compareTo(rhsMonth);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,9 @@
import io.druid.benchmark.datagen.BenchmarkSchemas;
import io.druid.data.input.InputRow;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.granularity.QueryGranularities;
import io.druid.hll.HyperLogLogHash;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import io.druid.java.util.common.logger.Logger;
Expand Down Expand Up @@ -230,7 +230,7 @@ private IncrementalIndex makeIncIndex()
{
return new OnheapIncrementalIndex(
new IncrementalIndexSchema.Builder()
.withQueryGranularity(QueryGranularities.NONE)
.withQueryGranularity(Granularity.NONE)
.withMetrics(schemaInfo.getAggsArray())
.withDimensionsSpec(new DimensionsSpec(null, null, null))
.build(),
Expand Down Expand Up @@ -508,7 +508,7 @@ public void readComplexOrFilterCNF(Blackhole blackhole) throws Exception

private Sequence<Cursor> makeCursors(StorageAdapter sa, Filter filter)
{
return sa.makeCursors(filter, schemaInfo.getDataInterval(), VirtualColumns.EMPTY, QueryGranularities.ALL, false);
return sa.makeCursors(filter, schemaInfo.getDataInterval(), VirtualColumns.EMPTY, Granularity.ALL, false);
}

private Sequence<List<String>> readCursors(Sequence<Cursor> cursors, final Blackhole blackhole)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,9 @@
import io.druid.benchmark.query.QueryBenchmarkUtil;
import io.druid.data.input.InputRow;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.granularity.QueryGranularities;
import io.druid.hll.HyperLogLogHash;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import io.druid.java.util.common.logger.Logger;
Expand Down Expand Up @@ -214,7 +214,7 @@ public void setup() throws IOException

query = Druids.newTimeseriesQueryBuilder()
.dataSource("blah")
.granularity(QueryGranularities.ALL)
.granularity(Granularity.ALL)
.intervals(intervalSpec)
.aggregators(queryAggs)
.descending(false)
Expand All @@ -231,7 +231,7 @@ private IncrementalIndex makeIncIndex(AggregatorFactory[] metrics)
{
return new OnheapIncrementalIndex(
new IncrementalIndexSchema.Builder()
.withQueryGranularity(QueryGranularities.NONE)
.withQueryGranularity(Granularity.NONE)
.withMetrics(metrics)
.withDimensionsSpec(new DimensionsSpec(null, null, null))
.build(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,10 +37,9 @@
import io.druid.data.input.InputRow;
import io.druid.data.input.Row;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.granularity.QueryGranularities;
import io.druid.granularity.QueryGranularity;
import io.druid.hll.HyperLogLogHash;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import io.druid.java.util.common.logger.Logger;
Expand Down Expand Up @@ -190,7 +189,7 @@ private void setupQueries()
.setAggregatorSpecs(
queryAggs
)
.setGranularity(QueryGranularity.fromString(queryGranularity))
.setGranularity(Granularity.fromString(queryGranularity))
.build();

GroupByQuery queryLongFloat = GroupByQuery
Expand All @@ -204,7 +203,7 @@ private void setupQueries()
.setAggregatorSpecs(
queryAggs
)
.setGranularity(QueryGranularity.fromString(queryGranularity))
.setGranularity(Granularity.fromString(queryGranularity))
.build();

GroupByQuery queryLong = GroupByQuery
Expand All @@ -217,7 +216,7 @@ private void setupQueries()
.setAggregatorSpecs(
queryAggs
)
.setGranularity(QueryGranularity.fromString(queryGranularity))
.setGranularity(Granularity.fromString(queryGranularity))
.build();

GroupByQuery queryFloat = GroupByQuery
Expand All @@ -230,7 +229,7 @@ private void setupQueries()
.setAggregatorSpecs(
queryAggs
)
.setGranularity(QueryGranularity.fromString(queryGranularity))
.setGranularity(Granularity.fromString(queryGranularity))
.build();

basicQueries.put("string", queryString);
Expand Down Expand Up @@ -258,7 +257,7 @@ private void setupQueries()
.setAggregatorSpecs(
queryAggs
)
.setGranularity(QueryGranularities.DAY)
.setGranularity(Granularity.DAY)
.build();

GroupByQuery queryA = GroupByQuery
Expand All @@ -271,7 +270,7 @@ private void setupQueries()
.setAggregatorSpecs(
queryAggs
)
.setGranularity(QueryGranularities.WEEK)
.setGranularity(Granularity.WEEK)
.build();

basicQueries.put("nested", queryA);
Expand Down Expand Up @@ -435,7 +434,7 @@ private IncrementalIndex makeIncIndex()
{
return new OnheapIncrementalIndex(
new IncrementalIndexSchema.Builder()
.withQueryGranularity(QueryGranularities.NONE)
.withQueryGranularity(Granularity.NONE)
.withMetrics(schemaInfo.getAggsArray())
.withDimensionsSpec(new DimensionsSpec(null, null, null))
.build(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,7 @@
import com.google.common.collect.ImmutableMap;
import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;
import io.druid.granularity.QueryGranularities;

import io.druid.java.util.common.granularity.Granularity;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
Expand Down Expand Up @@ -123,7 +122,7 @@ private IncrementalIndex makeIncIndex()
{
return new OnheapIncrementalIndex(
0,
QueryGranularities.NONE,
Granularity.NONE,
aggs,
false,
false,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,9 @@
import io.druid.concurrent.Execs;
import io.druid.data.input.InputRow;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.granularity.QueryGranularities;
import io.druid.hll.HyperLogLogHash;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import io.druid.java.util.common.logger.Logger;
Expand Down Expand Up @@ -171,7 +171,7 @@ private void setupQueries()
// Use an IdentityExtractionFn to force usage of DimExtractionTopNAlgorithm
TopNQueryBuilder queryBuilderString = new TopNQueryBuilder()
.dataSource("blah")
.granularity(QueryGranularities.ALL)
.granularity(Granularity.ALL)
.dimension(new ExtractionDimensionSpec("dimSequential", "dimSequential", IdentityExtractionFn.getInstance()))
.metric("sumFloatNormal")
.intervals(intervalSpec)
Expand All @@ -180,15 +180,15 @@ private void setupQueries()
// DimExtractionTopNAlgorithm is always used for numeric columns
TopNQueryBuilder queryBuilderLong = new TopNQueryBuilder()
.dataSource("blah")
.granularity(QueryGranularities.ALL)
.granularity(Granularity.ALL)
.dimension("metLongUniform")
.metric("sumFloatNormal")
.intervals(intervalSpec)
.aggregators(queryAggs);

TopNQueryBuilder queryBuilderFloat = new TopNQueryBuilder()
.dataSource("blah")
.granularity(QueryGranularities.ALL)
.granularity(Granularity.ALL)
.dimension("metFloatNormal")
.metric("sumFloatNormal")
.intervals(intervalSpec)
Expand All @@ -206,7 +206,7 @@ private void setupQueries()

TopNQueryBuilder queryBuilderA = new TopNQueryBuilder()
.dataSource("blah")
.granularity(QueryGranularities.ALL)
.granularity(Granularity.ALL)
.dimension("dimUniform")
.metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC))
.intervals(intervalSpec)
Expand All @@ -222,7 +222,7 @@ private void setupQueries()

TopNQueryBuilder queryBuilderA = new TopNQueryBuilder()
.dataSource("blah")
.granularity(QueryGranularities.ALL)
.granularity(Granularity.ALL)
.dimension("dimUniform")
.metric(new DimensionTopNMetricSpec(null, StringComparators.ALPHANUMERIC))
.intervals(intervalSpec)
Expand Down Expand Up @@ -316,7 +316,7 @@ private IncrementalIndex makeIncIndex()
{
return new OnheapIncrementalIndex(
new IncrementalIndexSchema.Builder()
.withQueryGranularity(QueryGranularities.NONE)
.withQueryGranularity(Granularity.NONE)
.withMetrics(schemaInfo.getAggsArray())
.withDimensionsSpec(new DimensionsSpec(null, null, null))
.build(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@
import io.druid.benchmark.datagen.BenchmarkSchemas;
import io.druid.data.input.InputRow;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.granularity.QueryGranularities;
import io.druid.hll.HyperLogLogHash;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import io.druid.java.util.common.logger.Logger;
Expand Down Expand Up @@ -125,7 +125,7 @@ private IncrementalIndex makeIncIndex()
{
return new OnheapIncrementalIndex(
new IncrementalIndexSchema.Builder()
.withQueryGranularity(QueryGranularities.NONE)
.withQueryGranularity(Granularity.NONE)
.withMetrics(schemaInfo.getAggsArray())
.withDimensionsSpec(new DimensionsSpec(null, null, null))
.withRollup(rollup)
Expand Down Expand Up @@ -203,7 +203,7 @@ private Sequence<Cursor> makeCursors(IncrementalIndexStorageAdapter sa, DimFilte
filter.toFilter(),
schemaInfo.getDataInterval(),
VirtualColumns.EMPTY,
QueryGranularities.ALL,
Granularity.ALL,
false
);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
import io.druid.benchmark.datagen.BenchmarkSchemas;
import io.druid.data.input.InputRow;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.granularity.QueryGranularities;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.hll.HyperLogLogHash;
import io.druid.java.util.common.logger.Logger;
import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde;
Expand Down Expand Up @@ -106,7 +106,7 @@ private IncrementalIndex makeIncIndex()
{
return new OnheapIncrementalIndex(
new IncrementalIndexSchema.Builder()
.withQueryGranularity(QueryGranularities.NONE)
.withQueryGranularity(Granularity.NONE)
.withMetrics(schemaInfo.getAggsArray())
.withDimensionsSpec(new DimensionsSpec(null, null, null))
.withRollup(rollup)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,9 @@
import io.druid.benchmark.datagen.BenchmarkSchemas;
import io.druid.data.input.InputRow;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.granularity.QueryGranularities;
import io.druid.hll.HyperLogLogHash;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.java.util.common.logger.Logger;
import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde;
import io.druid.segment.IndexIO;
Expand Down Expand Up @@ -163,7 +163,7 @@ private IncrementalIndex makeIncIndex()
{
return new OnheapIncrementalIndex(
new IncrementalIndexSchema.Builder()
.withQueryGranularity(QueryGranularities.NONE)
.withQueryGranularity(Granularity.NONE)
.withMetrics(schemaInfo.getAggsArray())
.withDimensionsSpec(new DimensionsSpec(null, null, null))
.withRollup(rollup)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,9 @@
import io.druid.benchmark.datagen.BenchmarkSchemas;
import io.druid.data.input.InputRow;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.granularity.QueryGranularities;
import io.druid.hll.HyperLogLogHash;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.java.util.common.logger.Logger;
import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde;
import io.druid.segment.IndexIO;
Expand Down Expand Up @@ -157,7 +157,7 @@ private IncrementalIndex makeIncIndex()
{
return new OnheapIncrementalIndex(
new IncrementalIndexSchema.Builder()
.withQueryGranularity(QueryGranularities.NONE)
.withQueryGranularity(Granularity.NONE)
.withMetrics(schemaInfo.getAggsArray())
.withDimensionsSpec(new DimensionsSpec(null, null, null))
.withRollup(rollup)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,10 +37,9 @@
import io.druid.data.input.InputRow;
import io.druid.data.input.Row;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.granularity.QueryGranularities;
import io.druid.granularity.QueryGranularity;
import io.druid.hll.HyperLogLogHash;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import io.druid.java.util.common.logger.Logger;
Expand Down Expand Up @@ -195,7 +194,7 @@ private void setupQueries()
.setAggregatorSpecs(
queryAggs
)
.setGranularity(QueryGranularity.fromString(queryGranularity))
.setGranularity(Granularity.fromString(queryGranularity))
.build();

basicQueries.put("A", queryA);
Expand All @@ -220,7 +219,7 @@ private void setupQueries()
.setAggregatorSpecs(
queryAggs
)
.setGranularity(QueryGranularities.DAY)
.setGranularity(Granularity.DAY)
.build();

GroupByQuery queryA = GroupByQuery
Expand All @@ -233,7 +232,7 @@ private void setupQueries()
.setAggregatorSpecs(
queryAggs
)
.setGranularity(QueryGranularities.WEEK)
.setGranularity(Granularity.WEEK)
.build();

basicQueries.put("nested", queryA);
Expand Down Expand Up @@ -262,7 +261,7 @@ private void setupQueries()
.setAggregatorSpecs(
queryAggs
)
.setGranularity(QueryGranularity.fromString(queryGranularity))
.setGranularity(Granularity.fromString(queryGranularity))
.build();

simpleQueries.put("A", queryA);
Expand All @@ -289,7 +288,7 @@ private void setupQueries()
.setAggregatorSpecs(
queryAggs
)
.setGranularity(QueryGranularity.fromString(queryGranularity))
.setGranularity(Granularity.fromString(queryGranularity))
.build();

simpleLongQueries.put("A", queryA);
Expand All @@ -316,7 +315,7 @@ private void setupQueries()
.setAggregatorSpecs(
queryAggs
)
.setGranularity(QueryGranularity.fromString(queryGranularity))
.setGranularity(Granularity.fromString(queryGranularity))
.build();

simpleFloatQueries.put("A", queryA);
Expand Down Expand Up @@ -478,7 +477,7 @@ private IncrementalIndex makeIncIndex(boolean withRollup)
{
return new OnheapIncrementalIndex(
new IncrementalIndexSchema.Builder()
.withQueryGranularity(QueryGranularities.NONE)
.withQueryGranularity(Granularity.NONE)
.withMetrics(schemaInfo.getAggsArray())
.withDimensionsSpec(new DimensionsSpec(null, null, null))
.withRollup(withRollup)
Expand Down
Loading

0 comments on commit c3bf401

Please sign in to comment.