diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/MSQWindowFunctionsBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/MSQWindowFunctionsBenchmark.java index 51380dc9192a..df9cac28c647 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/MSQWindowFunctionsBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/MSQWindowFunctionsBenchmark.java @@ -20,7 +20,6 @@ package org.apache.druid.benchmark.query; import com.google.common.collect.ImmutableMap; -import com.google.inject.Injector; import org.apache.druid.java.util.common.io.Closer; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.msq.sql.MSQTaskSqlEngine; @@ -28,8 +27,6 @@ import org.apache.druid.msq.test.MSQTestOverlordServiceClient; import org.apache.druid.msq.test.StandardMSQComponentSupplier; import org.apache.druid.msq.util.MultiStageQueryContext; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; import org.apache.druid.sql.calcite.BaseCalciteQueryTest; import org.apache.druid.sql.calcite.QueryTestBuilder; @@ -202,18 +199,11 @@ public MSQComponentSupplier(TempDirProducer tempFolderProducer) } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - QueryRunnerFactoryConglomerate conglomerate, - JoinableFactoryWrapper joinableFactory, - Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { - final SpecificSegmentsQuerySegmentWalker retVal = super.createQuerySegmentWalker( - conglomerate, - joinableFactory, - injector); - TestDataBuilder.attachIndexesForBenchmarkDatasource(retVal); - return retVal; + walker = super.addSegmentsToWalker(walker); + TestDataBuilder.attachIndexesForBenchmarkDatasource(walker); + return walker; } } } diff --git 
a/extensions-contrib/compressed-bigdecimal/src/test/java/org/apache/druid/compressedbigdecimal/CompressedBigDecimalSqlAggregatorTestBase.java b/extensions-contrib/compressed-bigdecimal/src/test/java/org/apache/druid/compressedbigdecimal/CompressedBigDecimalSqlAggregatorTestBase.java index 1167c4b882df..7296f001f335 100644 --- a/extensions-contrib/compressed-bigdecimal/src/test/java/org/apache/druid/compressedbigdecimal/CompressedBigDecimalSqlAggregatorTestBase.java +++ b/extensions-contrib/compressed-bigdecimal/src/test/java/org/apache/druid/compressedbigdecimal/CompressedBigDecimalSqlAggregatorTestBase.java @@ -20,7 +20,6 @@ package org.apache.druid.compressedbigdecimal; import com.google.common.collect.ImmutableList; -import com.google.inject.Injector; import org.apache.druid.compressedbigdecimal.CompressedBigDecimalSqlAggregatorTestBase.CompressedBigDecimalComponentSupplier; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.InputRowSchema; @@ -30,14 +29,12 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.query.Druids; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; import org.apache.druid.query.spec.MultipleIntervalSegmentSpec; import org.apache.druid.segment.IndexBuilder; import org.apache.druid.segment.QueryableIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; import org.apache.druid.sql.calcite.BaseCalciteQueryTest; @@ -84,11 +81,7 @@ public DruidModule getCoreModule() } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( 
- final QueryRunnerFactoryConglomerate conglomerate, - final JoinableFactoryWrapper joinableFactory, - final Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { QueryableIndex index = IndexBuilder.create() @@ -106,7 +99,7 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( .rows(ROWS1) .buildMMappedIndex(); - return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add( + return walker.add( DataSegment.builder() .dataSource(CalciteTests.DATASOURCE1) .interval(index.getDataInterval()) diff --git a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java b/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java index 535af99bd5aa..c53edfc5ca28 100644 --- a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java +++ b/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java @@ -20,13 +20,11 @@ package org.apache.druid.query.aggregation.tdigestsketch.sql; import com.google.common.collect.ImmutableList; -import com.google.inject.Injector; import org.apache.druid.initialization.DruidModule; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.Druids; import org.apache.druid.query.QueryDataSource; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; import org.apache.druid.query.aggregation.FilteredAggregatorFactory; @@ -42,7 +40,6 @@ import org.apache.druid.segment.QueryableIndex; import 
org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; @@ -77,11 +74,7 @@ public DruidModule getCoreModule() } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - final QueryRunnerFactoryConglomerate conglomerate, - final JoinableFactoryWrapper joinableFactory, - final Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { TDigestSketchModule.registerSerde(); @@ -106,7 +99,7 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( .rows(TestDataBuilder.ROWS1) .buildMMappedIndex(); - return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add( + return walker.add( DataSegment.builder() .dataSource(CalciteTests.DATASOURCE1) .interval(index.getDataInterval()) diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java index aa6a05469fe7..2565cd040aa6 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java @@ -22,7 +22,6 @@ import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.inject.Injector; import org.apache.druid.error.DruidException; import 
org.apache.druid.initialization.DruidModule; import org.apache.druid.java.util.common.Intervals; @@ -34,7 +33,6 @@ import org.apache.druid.query.BaseQuery; import org.apache.druid.query.Druids; import org.apache.druid.query.QueryDataSource; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; @@ -71,7 +69,6 @@ import org.apache.druid.segment.VirtualColumns; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; @@ -261,13 +258,8 @@ public DruidModule getCoreModule() return DruidModuleCollection.of(super.getCoreModule(), new HllSketchModule()); } - @SuppressWarnings("resource") @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - final QueryRunnerFactoryConglomerate conglomerate, - final JoinableFactoryWrapper joinableFactory, - final Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { HllSketchModule.registerSerde(); final QueryableIndex index = IndexBuilder @@ -292,7 +284,7 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( .rows(TestDataBuilder.ROWS1_WITH_NUMERIC_DIMS) .buildMMappedIndex(); - return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add( + return walker.add( DataSegment.builder() .dataSource(CalciteTests.DATASOURCE1) .interval(index.getDataInterval()) diff --git 
a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java index 26fffde8deb7..4c303b9f1462 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java @@ -21,7 +21,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.inject.Injector; import org.apache.druid.initialization.DruidModule; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.granularity.Granularities; @@ -30,7 +29,6 @@ import org.apache.druid.query.JoinDataSource; import org.apache.druid.query.QueryContexts; import org.apache.druid.query.QueryDataSource; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; import org.apache.druid.query.aggregation.FilteredAggregatorFactory; @@ -59,7 +57,6 @@ import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.join.JoinType; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; @@ -97,11 +94,7 @@ public DruidModule getCoreModule() } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - final 
QueryRunnerFactoryConglomerate conglomerate, - final JoinableFactoryWrapper joinableFactory, - final Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { DoublesSketchModule.registerSerde(); @@ -126,7 +119,7 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( .rows(TestDataBuilder.ROWS1) .buildMMappedIndex(); - return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add( + return walker.add( DataSegment.builder() .dataSource(CalciteTests.DATASOURCE1) .interval(index.getDataInterval()) diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java index 70a9d8c762c7..b28143e0faf6 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java @@ -21,7 +21,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.inject.Injector; import org.apache.druid.error.DruidException; import org.apache.druid.initialization.DruidModule; import org.apache.druid.java.util.common.Intervals; @@ -31,7 +30,6 @@ import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.Druids; import org.apache.druid.query.QueryDataSource; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; import org.apache.druid.query.aggregation.FilteredAggregatorFactory; @@ -58,7 +56,6 @@ import 
org.apache.druid.segment.QueryableIndex; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; @@ -120,11 +117,7 @@ public DruidModule getCoreModule() } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - final QueryRunnerFactoryConglomerate conglomerate, - final JoinableFactoryWrapper joinableFactory, - final Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { SketchModule.registerSerde(); @@ -151,7 +144,7 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( .rows(TestDataBuilder.ROWS1) .buildMMappedIndex(); - return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add( + return walker.add( DataSegment.builder() .dataSource(DATA_SOURCE) .interval(index.getDataInterval()) diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/tuple/sql/ArrayOfDoublesSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/tuple/sql/ArrayOfDoublesSketchSqlAggregatorTest.java index 64c8be46bf1e..d71ad2a6af75 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/tuple/sql/ArrayOfDoublesSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/tuple/sql/ArrayOfDoublesSketchSqlAggregatorTest.java @@ -21,13 +21,11 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.inject.Injector; import 
org.apache.druid.data.input.InputRow; import org.apache.druid.initialization.DruidModule; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.query.Druids; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.LongSumAggregatorFactory; import org.apache.druid.query.aggregation.datasketches.tuple.ArrayOfDoublesSketchAggregatorFactory; @@ -43,7 +41,6 @@ import org.apache.druid.segment.QueryableIndex; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; @@ -118,11 +115,7 @@ public DruidModule getCoreModule() } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - final QueryRunnerFactoryConglomerate conglomerate, - final JoinableFactoryWrapper joinableFactory, - final Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { ArrayOfDoublesSketchModule.registerSerde(); @@ -150,7 +143,7 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( .rows(ROWS) .buildMMappedIndex(); - return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add( + return walker.add( DataSegment.builder() .dataSource(DATA_SOURCE) .interval(index.getDataInterval()) diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java 
b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java index 35abc25de464..cb9e04de583f 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java @@ -21,13 +21,11 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; -import com.google.inject.Injector; import org.apache.druid.data.input.InputRow; import org.apache.druid.guice.BloomFilterExtensionModule; import org.apache.druid.initialization.DruidModule; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.query.Druids; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; import org.apache.druid.query.aggregation.FilteredAggregatorFactory; @@ -44,7 +42,6 @@ import org.apache.druid.segment.QueryableIndex; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; @@ -83,11 +80,7 @@ public DruidModule getCoreModule() } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - final QueryRunnerFactoryConglomerate conglomerate, - final JoinableFactoryWrapper joinableFactory, - final Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { final QueryableIndex index = 
IndexBuilder.create() @@ -106,7 +99,7 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( .rows(TestDataBuilder.ROWS1_WITH_NUMERIC_DIMS) .buildMMappedIndex(); - return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add( + return walker.add( DataSegment.builder() .dataSource(DATA_SOURCE) .interval(index.getDataInterval()) diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java index da9b229f5c8a..9187fedfa32d 100644 --- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java +++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java @@ -21,13 +21,11 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.inject.Injector; import org.apache.druid.initialization.DruidModule; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.query.Druids; import org.apache.druid.query.QueryContexts; import org.apache.druid.query.QueryDataSource; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; import org.apache.druid.query.aggregation.FilteredAggregatorFactory; @@ -46,7 +44,6 @@ import org.apache.druid.segment.QueryableIndex; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import 
org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; @@ -81,11 +78,7 @@ public DruidModule getCoreModule() } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - final QueryRunnerFactoryConglomerate conglomerate, - final JoinableFactoryWrapper joinableFactory, - final Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { ApproximateHistogramDruidModule.registerSerde(); @@ -113,7 +106,7 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( .rows(TestDataBuilder.ROWS1) .buildMMappedIndex(); - return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add( + return walker.add( DataSegment.builder() .dataSource(CalciteTests.DATASOURCE1) .interval(index.getDataInterval()) diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java index 6202d4f7957c..083de994e633 100644 --- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java +++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java @@ -20,13 +20,11 @@ package org.apache.druid.query.aggregation.histogram.sql; import com.google.common.collect.ImmutableList; -import com.google.inject.Injector; import org.apache.druid.initialization.DruidModule; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.Druids; import org.apache.druid.query.QueryDataSource; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import 
org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; import org.apache.druid.query.aggregation.FilteredAggregatorFactory; @@ -45,7 +43,6 @@ import org.apache.druid.segment.QueryableIndex; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; @@ -80,11 +77,7 @@ public DruidModule getCoreModule() } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - final QueryRunnerFactoryConglomerate conglomerate, - final JoinableFactoryWrapper joinableFactory, - final Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { ApproximateHistogramDruidModule.registerSerde(); @@ -112,7 +105,7 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( .rows(TestDataBuilder.ROWS1) .buildMMappedIndex(); - return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add( + return walker.add( DataSegment.builder() .dataSource(CalciteTests.DATASOURCE1) .interval(index.getDataInterval()) diff --git a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java index 9ccd61547e33..a7cefae2394f 100644 --- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java +++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java @@ -20,7 +20,6 @@ package 
org.apache.druid.query.aggregation.variance.sql; import com.google.common.collect.ImmutableList; -import com.google.inject.Injector; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.impl.DimensionSchema; import org.apache.druid.data.input.impl.DimensionsSpec; @@ -32,7 +31,6 @@ import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.Druids; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; import org.apache.druid.query.aggregation.FilteredAggregatorFactory; @@ -52,7 +50,6 @@ import org.apache.druid.segment.QueryableIndex; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -92,11 +89,7 @@ public DruidModule getCoreModule() } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - final QueryRunnerFactoryConglomerate conglomerate, - final JoinableFactoryWrapper joinableFactory, - final Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { ComplexMetrics.registerSerde(VarianceSerde.TYPE_NAME, new VarianceSerde()); @@ -127,7 +120,7 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( .rows(TestDataBuilder.ROWS1_WITH_NUMERIC_DIMS) .buildMMappedIndex(); - return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add( + return walker.add( DataSegment.builder() .dataSource(CalciteTests.DATASOURCE3) .interval(index.getDataInterval()) diff 
--git a/processing/src/main/java/org/apache/druid/data/input/impl/LocalInputSource.java b/processing/src/main/java/org/apache/druid/data/input/impl/LocalInputSource.java index 18eb7c7472c2..365a59afecfd 100644 --- a/processing/src/main/java/org/apache/druid/data/input/impl/LocalInputSource.java +++ b/processing/src/main/java/org/apache/druid/data/input/impl/LocalInputSource.java @@ -145,6 +145,11 @@ public Set getConfiguredSystemFields() return systemFields.getFields(); } + public SystemFields getSystemFields() + { + return systemFields; + } + public List getFiles() { return files; diff --git a/processing/src/test/java/org/apache/druid/data/input/impl/LocalInputSourceTest.java b/processing/src/test/java/org/apache/druid/data/input/impl/LocalInputSourceTest.java index ff742ac057a3..d6210d2e4f77 100644 --- a/processing/src/test/java/org/apache/druid/data/input/impl/LocalInputSourceTest.java +++ b/processing/src/test/java/org/apache/druid/data/input/impl/LocalInputSourceTest.java @@ -249,6 +249,21 @@ public void testFileIteratorWithEmptyFilesIteratingNonEmptyFilesOnly() Assert.assertTrue(iteratedFiles.stream().allMatch(file -> file.length() > 0)); } + @Test + public void testRebuildSame() + { + final LocalInputSource src = new LocalInputSource( + new File("myFile"), + "myFilter", + ImmutableList.of(new File("someFile")), + new SystemFields(EnumSet.of(SystemField.URI, SystemField.PATH)) + ); + LocalInputSource copy = new LocalInputSource( + src.getBaseDir(), src.getFilter(), src.getFiles(), src.getSystemFields() + ); + Assert.assertEquals(src, copy); + } + private static List mockFiles(int numFiles, long fileSize) { final List files = new ArrayList<>(); diff --git a/processing/src/test/java/org/apache/druid/segment/IndexBuilder.java b/processing/src/test/java/org/apache/druid/segment/IndexBuilder.java index 986266015d30..f081ed46fa55 100644 --- a/processing/src/test/java/org/apache/druid/segment/IndexBuilder.java +++ 
b/processing/src/test/java/org/apache/druid/segment/IndexBuilder.java @@ -55,6 +55,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.Iterator; import java.util.List; import java.util.UUID; import java.util.concurrent.ThreadLocalRandom; @@ -223,16 +224,21 @@ public IndexBuilder mapSchema(Function it = reader.read()) { + return buildIncrementalIndexWithRows(schema, maxRows, it); + } + catch (IOException e) { + throw new RuntimeException(e); + } } - return buildIncrementalIndexWithRows(schema, maxRows, rows); + return buildIncrementalIndexWithRows(schema, maxRows, rows.iterator()); } public File buildMMappedIndexFile() @@ -437,7 +443,7 @@ public FrameSegment buildFrameSegment(FrameType frameType) private static IncrementalIndex buildIncrementalIndexWithRows( IncrementalIndexSchema schema, int maxRows, - Iterable rows + Iterator rows ) { Preconditions.checkNotNull(schema, "schema"); @@ -446,8 +452,9 @@ private static IncrementalIndex buildIncrementalIndexWithRows( .setMaxRowCount(maxRows) .build(); - for (InputRow row : rows) { + while (rows.hasNext()) { try { + InputRow row = rows.next(); incrementalIndex.add(row); } catch (IndexSizeExceededException e) { @@ -457,40 +464,24 @@ private static IncrementalIndex buildIncrementalIndexWithRows( return incrementalIndex; } - private static IncrementalIndex buildIncrementalIndexWithInputSource( + public static InputSourceReader buildIncrementalIndexWithInputSource( IncrementalIndexSchema schema, InputSource inputSource, InputFormat inputFormat, @Nullable TransformSpec transformSpec, - File inputSourceTmpDir, - int maxRows - ) + File inputSourceTmpDir) { Preconditions.checkNotNull(schema, "schema"); Preconditions.checkNotNull(inputSource, "inputSource"); Preconditions.checkNotNull(inputFormat, "inputFormat"); Preconditions.checkNotNull(inputSourceTmpDir, "inputSourceTmpDir"); - - final IncrementalIndex incrementalIndex = new OnheapIncrementalIndex.Builder() - 
.setIndexSchema(schema) - .setMaxRowCount(maxRows) - .build(); TransformSpec tranformer = transformSpec != null ? transformSpec : TransformSpec.NONE; InputRowSchema rowSchema = new InputRowSchema(schema.getTimestampSpec(), schema.getDimensionsSpec(), null); InputSourceReader reader = inputSource.reader(rowSchema, inputFormat, inputSourceTmpDir); InputSourceReader transformingReader = tranformer.decorate(reader); - try (CloseableIterator rowIterator = transformingReader.read()) { - while (rowIterator.hasNext()) { - incrementalIndex.add(rowIterator.next()); - } - } - catch (IOException e) { - throw new RuntimeException(e); - } - return incrementalIndex; + return transformingReader; } - @FunctionalInterface interface IteratorSupplier { diff --git a/quidem-ut/src/main/java/org/apache/druid/quidem/KttmNestedComponentSupplier.java b/quidem-ut/src/main/java/org/apache/druid/quidem/KttmNestedComponentSupplier.java index ab2683f7027e..8c17c22d2bc8 100644 --- a/quidem-ut/src/main/java/org/apache/druid/quidem/KttmNestedComponentSupplier.java +++ b/quidem-ut/src/main/java/org/apache/druid/quidem/KttmNestedComponentSupplier.java @@ -19,7 +19,6 @@ package org.apache.druid.quidem; -import com.google.inject.Injector; import org.apache.druid.data.input.InputSource; import org.apache.druid.data.input.ResourceInputSource; import org.apache.druid.data.input.impl.DimensionSchema; @@ -30,7 +29,6 @@ import org.apache.druid.java.util.common.FileUtils; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.segment.AutoTypeColumnSchema; import org.apache.druid.segment.IndexBuilder; import org.apache.druid.segment.IndexSpec; @@ -38,7 +36,6 @@ import org.apache.druid.segment.TestIndex; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import 
org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; import org.apache.druid.sql.calcite.TempDirProducer; @@ -60,10 +57,9 @@ public KttmNestedComponentSupplier(TempDirProducer tempDirProducer) } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(QueryRunnerFactoryConglomerate conglomerate, - JoinableFactoryWrapper joinableFactory, Injector injector) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { - SpecificSegmentsQuerySegmentWalker walker = super.createQuerySegmentWalker(conglomerate, joinableFactory, injector); + walker = super.addSegmentsToWalker(walker); QueryableIndex idx = makeKttmIndex(tempDirProducer.newTempFolder()); walker.add( diff --git a/server/src/test/java/org/apache/druid/server/ClientQuerySegmentWalkerTest.java b/server/src/test/java/org/apache/druid/server/ClientQuerySegmentWalkerTest.java index 0fbc725235d8..b6abac801679 100644 --- a/server/src/test/java/org/apache/druid/server/ClientQuerySegmentWalkerTest.java +++ b/server/src/test/java/org/apache/druid/server/ClientQuerySegmentWalkerTest.java @@ -1705,7 +1705,6 @@ public QueryRunner getQueryRunnerForSegments(Query query, Iterable> timelines, QueryRunnerFactoryConglomerate conglomerate, @Nullable QueryScheduler scheduler, - GroupByQueryConfig groupByQueryConfig, Injector injector ) { - return new TestClusterQuerySegmentWalker(timelines, conglomerate, scheduler, groupByQueryConfig, injector.getInstance(EtagProvider.KEY)); + return new TestClusterQuerySegmentWalker(timelines, conglomerate, scheduler, injector.getInstance(EtagProvider.KEY)); } public static LocalQuerySegmentWalker createLocalQuerySegmentWalker( diff --git a/server/src/test/java/org/apache/druid/server/SpecificSegmentsQuerySegmentWalker.java 
b/server/src/test/java/org/apache/druid/server/SpecificSegmentsQuerySegmentWalker.java index 746102cc6f57..ff385e99f641 100644 --- a/server/src/test/java/org/apache/druid/server/SpecificSegmentsQuerySegmentWalker.java +++ b/server/src/test/java/org/apache/druid/server/SpecificSegmentsQuerySegmentWalker.java @@ -32,7 +32,6 @@ import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.QuerySegmentWalker; import org.apache.druid.query.SegmentDescriptor; -import org.apache.druid.query.groupby.GroupByQueryConfig; import org.apache.druid.query.lookup.LookupExtractorFactoryContainer; import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider; import org.apache.druid.segment.FrameBasedInlineSegmentWrangler; @@ -50,11 +49,14 @@ import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.server.initialization.ServerConfig; import org.apache.druid.server.metrics.NoopServiceEmitter; +import org.apache.druid.sql.calcite.util.datasets.TestDataSet; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.VersionedIntervalTimeline; +import org.apache.druid.timeline.partition.LinearShardSpec; import org.joda.time.Interval; import java.io.Closeable; +import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -114,11 +116,13 @@ public static SpecificSegmentsQuerySegmentWalker createWalker( final QueryRunnerFactoryConglomerate conglomerate, final SegmentWrangler segmentWrangler, final JoinableFactoryWrapper joinableFactoryWrapper, - final QueryScheduler scheduler, - final GroupByQueryConfig groupByQueryConfig + final QueryScheduler scheduler ) { Map> timelines = new HashMap<>(); + NoopServiceEmitter emitter = new NoopServiceEmitter(); + ServerConfig serverConfig = new ServerConfig(); + return new SpecificSegmentsQuerySegmentWalker( timelines, QueryStackTests.createClientQuerySegmentWalker( @@ -127,7 +131,6 @@ public static 
SpecificSegmentsQuerySegmentWalker createWalker( timelines, conglomerate, scheduler, - groupByQueryConfig, injector ), QueryStackTests.createLocalQuerySegmentWalker( @@ -135,12 +138,12 @@ public static SpecificSegmentsQuerySegmentWalker createWalker( segmentWrangler, joinableFactoryWrapper, scheduler, - new NoopServiceEmitter() + emitter ), conglomerate, joinableFactoryWrapper.getJoinableFactory(), - new ServerConfig(), - new NoopServiceEmitter() + serverConfig, + emitter ) ); } @@ -168,8 +171,7 @@ public static SpecificSegmentsQuerySegmentWalker createWalker( .build() ), new JoinableFactoryWrapper(QueryStackTests.makeJoinableFactoryForLookup(LOOKUP_EXTRACTOR_FACTORY_CONTAINER_PROVIDER)), - QueryStackTests.DEFAULT_NOOP_SCHEDULER, - new GroupByQueryConfig() + QueryStackTests.DEFAULT_NOOP_SCHEDULER ); } @@ -236,4 +238,19 @@ public void close() throws IOException Closeables.close(closeable, true); } } + + public SpecificSegmentsQuerySegmentWalker add(TestDataSet dataset, File tmpDir) + { + QueryableIndex indexNumericDims = dataset.makeIndex(tmpDir); + return add( + DataSegment.builder() + .dataSource(dataset.getName()) + .interval(indexNumericDims.getDataInterval()) + .version("1") + .shardSpec(new LinearShardSpec(0)) + .size(0) + .build(), + indexNumericDims + ); + } } diff --git a/server/src/test/java/org/apache/druid/server/TestClusterQuerySegmentWalker.java b/server/src/test/java/org/apache/druid/server/TestClusterQuerySegmentWalker.java index 21178fe140a7..464092d7ccb0 100644 --- a/server/src/test/java/org/apache/druid/server/TestClusterQuerySegmentWalker.java +++ b/server/src/test/java/org/apache/druid/server/TestClusterQuerySegmentWalker.java @@ -22,6 +22,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; +import com.google.inject.Inject; import org.apache.druid.client.SegmentServerSelector; import org.apache.druid.java.util.common.ISE; import 
org.apache.druid.java.util.common.concurrent.Execs; @@ -43,7 +44,6 @@ import org.apache.druid.query.SegmentDescriptor; import org.apache.druid.query.TableDataSource; import org.apache.druid.query.context.ResponseContext.Keys; -import org.apache.druid.query.groupby.GroupByQueryConfig; import org.apache.druid.query.groupby.GroupByQueryRunnerTestHelper; import org.apache.druid.query.planning.DataSourceAnalysis; import org.apache.druid.query.spec.SpecificSegmentQueryRunner; @@ -58,6 +58,7 @@ import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -78,25 +79,35 @@ public class TestClusterQuerySegmentWalker implements QuerySegmentWalker private final QueryRunnerFactoryConglomerate conglomerate; @Nullable private final QueryScheduler scheduler; - private final GroupByQueryConfig groupByQueryConfig; private final EtagProvider etagProvider; + public static class TestSegmentsBroker + { + public final Map> timelines = new HashMap<>(); + } + + @Inject + TestClusterQuerySegmentWalker( + TestSegmentsBroker testSegmentsBroker, + QueryRunnerFactoryConglomerate conglomerate, + @Nullable QueryScheduler scheduler, + EtagProvider etagProvider) + { + this(testSegmentsBroker.timelines, conglomerate, scheduler, etagProvider); + } + TestClusterQuerySegmentWalker( Map> timelines, QueryRunnerFactoryConglomerate conglomerate, @Nullable QueryScheduler scheduler, - GroupByQueryConfig groupByQueryConfig, - EtagProvider etagProvider - ) + EtagProvider etagProvider) { this.timelines = timelines; this.conglomerate = conglomerate; this.scheduler = scheduler; - this.groupByQueryConfig = groupByQueryConfig; this.etagProvider = etagProvider; } - @Override public QueryRunner getQueryRunnerForIntervals(final Query query, final Iterable intervals) { diff --git a/server/src/test/java/org/apache/druid/sql/calcite/util/datasets/InputSourceBasedTestDataset.java 
b/server/src/test/java/org/apache/druid/sql/calcite/util/datasets/InputSourceBasedTestDataset.java new file mode 100644 index 000000000000..39cd46d52ac3 --- /dev/null +++ b/server/src/test/java/org/apache/druid/sql/calcite/util/datasets/InputSourceBasedTestDataset.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.sql.calcite.util.datasets; + +import org.apache.druid.data.input.InputFormat; +import org.apache.druid.data.input.InputSource; +import org.apache.druid.data.input.impl.DimensionSchema; +import org.apache.druid.segment.IndexBuilder; +import org.apache.druid.segment.QueryableIndex; +import org.apache.druid.segment.column.RowSignature; +import org.apache.druid.segment.column.RowSignature.Builder; +import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.indexing.DataSchema; +import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; +import org.apache.druid.timeline.DataSegment; +import org.apache.druid.timeline.partition.LinearShardSpec; + +import java.io.File; + +public class InputSourceBasedTestDataset implements TestDataSet +{ + protected final InputSource inputSource; + protected final InputFormat inputFormat; + protected final DataSchema dataSchema; + + public InputSourceBasedTestDataset(DataSchema dataSchema, InputFormat inputFormat, InputSource inputSource) + { + this.inputSource = inputSource; + this.inputFormat = inputFormat; + this.dataSchema = dataSchema; + } + + @Override + public String getName() + { + return getDataSchema().getDataSource(); + } + + @Override + public final DataSegment makeSegment(final QueryableIndex index) + { + DataSegment segment = DataSegment.builder() + .dataSource(getName()) + .interval(index.getDataInterval()) + .version("1") + .shardSpec(new LinearShardSpec(0)) + .size(0) + .build(); + return segment; + } + + @Override + public final QueryableIndex makeIndex(File tmpDir) + { + return IndexBuilder + .create() + .inputTmpDir(tmpDir) + .tmpDir(tmpDir) + .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()) + .schema(getIndexSchema()) + .inputSource(getInputSource()) + .inputFormat(getInputFormat()) + .buildMMappedIndex(); + } + + public IncrementalIndexSchema getIndexSchema() + { + return new 
IncrementalIndexSchema.Builder() + .withTimestampSpec(getDataSchema().getTimestampSpec()) + .withMetrics(getDataSchema().getAggregators()) + .withDimensionsSpec(getDataSchema().getDimensionsSpec()) + .withRollup(false) + .build(); + } + + public RowSignature getInputRowSignature() + { + Builder rsBuilder = RowSignature.builder(); + for (DimensionSchema dimensionSchema : getDataSchema().getDimensionsSpec().getDimensions()) { + rsBuilder.add(dimensionSchema.getName(), dimensionSchema.getColumnType()); + } + return rsBuilder.build(); + } + + protected DataSchema getDataSchema() + { + return dataSchema; + } + + protected InputSource getInputSource() + { + return inputSource; + } + + protected InputFormat getInputFormat() + { + return inputFormat; + } +} diff --git a/server/src/test/java/org/apache/druid/sql/calcite/util/datasets/MapBasedTestDataset.java b/server/src/test/java/org/apache/druid/sql/calcite/util/datasets/MapBasedTestDataset.java new file mode 100644 index 000000000000..6db8bd6e10be --- /dev/null +++ b/server/src/test/java/org/apache/druid/sql/calcite/util/datasets/MapBasedTestDataset.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.sql.calcite.util.datasets; + +import org.apache.druid.data.input.InputRow; +import org.apache.druid.data.input.InputRowSchema; +import org.apache.druid.data.input.impl.DimensionSchema; +import org.apache.druid.data.input.impl.MapInputRowParser; +import org.apache.druid.query.aggregation.AggregatorFactory; +import org.apache.druid.segment.IndexBuilder; +import org.apache.druid.segment.QueryableIndex; +import org.apache.druid.segment.column.RowSignature; +import org.apache.druid.segment.column.RowSignature.Builder; +import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; +import org.apache.druid.timeline.DataSegment; +import org.apache.druid.timeline.partition.LinearShardSpec; + +import java.io.File; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public abstract class MapBasedTestDataset implements TestDataSet +{ + protected final String name; + + protected MapBasedTestDataset(String name) + { + this.name = name; + } + + @Override + public String getName() + { + return name; + } + + @Override + public final DataSegment makeSegment(final QueryableIndex index) + { + DataSegment segment = DataSegment.builder() + .dataSource(name) + .interval(index.getDataInterval()) + .version("1") + .shardSpec(new LinearShardSpec(0)) + .size(0) + .build(); + return segment; + } + + @Override + public final QueryableIndex makeIndex(File tmpDir) + { + return IndexBuilder + .create() + .tmpDir(new File(tmpDir, "idx")) + .inputTmpDir(new File(tmpDir, "input")) + .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()) + .schema(getIndexSchema()) + .rows(getRows()) + .buildMMappedIndex(); + } + + public IncrementalIndexSchema getIndexSchema() + { + return new IncrementalIndexSchema.Builder() + .withMetrics(getMetrics().toArray(new AggregatorFactory[0])) + 
.withDimensionsSpec(getInputRowSchema().getDimensionsSpec()) + .withRollup(false) + .build(); + } + + public final Iterable getRows() + { + return getRawRows() + .stream() + .map(raw -> createRow(raw, getInputRowSchema())) + .collect(Collectors.toList()); + } + + public static InputRow createRow(final Map map, InputRowSchema inputRowSchema) + { + return MapInputRowParser.parse(inputRowSchema, (Map) map); + } + + public RowSignature getInputRowSignature() + { + Builder rsBuilder = RowSignature.builder(); + for (DimensionSchema dimensionSchema : getInputRowSchema().getDimensionsSpec().getDimensions()) { + rsBuilder.add(dimensionSchema.getName(), dimensionSchema.getColumnType()); + } + return rsBuilder.build(); + } + + public abstract InputRowSchema getInputRowSchema(); + + public abstract List> getRawRows(); + + public abstract List getMetrics(); +} diff --git a/server/src/test/java/org/apache/druid/sql/calcite/util/datasets/NumFoo.java b/server/src/test/java/org/apache/druid/sql/calcite/util/datasets/NumFoo.java new file mode 100644 index 000000000000..0d445ac9ba86 --- /dev/null +++ b/server/src/test/java/org/apache/druid/sql/calcite/util/datasets/NumFoo.java @@ -0,0 +1,179 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.sql.calcite.util.datasets; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import org.apache.druid.data.input.InputRowSchema; +import org.apache.druid.data.input.impl.DimensionSchema; +import org.apache.druid.data.input.impl.DimensionsSpec; +import org.apache.druid.data.input.impl.DoubleDimensionSchema; +import org.apache.druid.data.input.impl.FloatDimensionSchema; +import org.apache.druid.data.input.impl.LongDimensionSchema; +import org.apache.druid.data.input.impl.TimestampSpec; +import org.apache.druid.query.aggregation.AggregatorFactory; +import org.apache.druid.query.aggregation.CountAggregatorFactory; +import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; +import org.apache.druid.query.aggregation.FloatSumAggregatorFactory; +import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; + +import java.util.List; +import java.util.Map; + +public class NumFoo extends MapBasedTestDataset +{ + protected NumFoo() + { + this("numfoo"); + } + + public NumFoo(String name) + { + super(name); + } + + @Override + public final InputRowSchema getInputRowSchema() + { + return new InputRowSchema( + new TimestampSpec(TIMESTAMP_COLUMN, "iso", null), + new DimensionsSpec( + ImmutableList.builder() + .addAll( + DimensionsSpec.getDefaultSchemas( + ImmutableList.of( + "dim1", + "dim2", + "dim3", + "dim4", + "dim5", + "dim6" + ) + ) + ) + .add(new DoubleDimensionSchema("dbl1")) + .add(new DoubleDimensionSchema("dbl2")) + .add(new FloatDimensionSchema("f1")) + .add(new FloatDimensionSchema("f2")) + .add(new LongDimensionSchema("l1")) + .add(new LongDimensionSchema("l2")) + .build() + ), + null + ); + } + + @Override + public List getMetrics() + { + return ImmutableList.of( + new CountAggregatorFactory("cnt"), + new FloatSumAggregatorFactory("m1", "m1"), + 
new DoubleSumAggregatorFactory("m2", "m2"), + new HyperUniquesAggregatorFactory("unique_dim1", "dim1") + ); + } + + @Override + public List> getRawRows() + { + return ImmutableList.of( + ImmutableMap.builder() + .put("t", "2000-01-01") + .put("m1", "1.0") + .put("m2", "1.0") + .put("dbl1", 1.0) + .put("f1", 1.0f) + .put("l1", 7L) + .put("dim1", "") + .put("dim2", ImmutableList.of("a")) + .put("dim3", ImmutableList.of("a", "b")) + .put("dim4", "a") + .put("dim5", "aa") + .put("dim6", "1") + .build(), + ImmutableMap.builder() + .put("t", "2000-01-02") + .put("m1", "2.0") + .put("m2", "2.0") + .put("dbl1", 1.7) + .put("dbl2", 1.7) + .put("f1", 0.1f) + .put("f2", 0.1f) + .put("l1", 325323L) + .put("l2", 325323L) + .put("dim1", "10.1") + .put("dim2", ImmutableList.of()) + .put("dim3", ImmutableList.of("b", "c")) + .put("dim4", "a") + .put("dim5", "ab") + .put("dim6", "2") + .build(), + ImmutableMap.builder() + .put("t", "2000-01-03") + .put("m1", "3.0") + .put("m2", "3.0") + .put("dbl1", 0.0) + .put("dbl2", 0.0) + .put("f1", 0.0) + .put("f2", 0.0) + .put("l1", 0) + .put("l2", 0) + .put("dim1", "2") + .put("dim2", ImmutableList.of("")) + .put("dim3", ImmutableList.of("d")) + .put("dim4", "a") + .put("dim5", "ba") + .put("dim6", "3") + .build(), + ImmutableMap.builder() + .put("t", "2001-01-01") + .put("m1", "4.0") + .put("m2", "4.0") + .put("dim1", "1") + .put("dim2", ImmutableList.of("a")) + .put("dim3", ImmutableList.of("")) + .put("dim4", "b") + .put("dim5", "ad") + .put("dim6", "4") + .build(), + ImmutableMap.builder() + .put("t", "2001-01-02") + .put("m1", "5.0") + .put("m2", "5.0") + .put("dim1", "def") + .put("dim2", ImmutableList.of("abc")) + .put("dim3", ImmutableList.of()) + .put("dim4", "b") + .put("dim5", "aa") + .put("dim6", "5") + .build(), + ImmutableMap.builder() + .put("t", "2001-01-03") + .put("m1", "6.0") + .put("m2", "6.0") + .put("dim1", "abc") + .put("dim4", "b") + .put("dim5", "ab") + .put("dim6", "6") + .build() + ); + } +} diff --git 
a/server/src/test/java/org/apache/druid/sql/calcite/util/datasets/TestDataSet.java b/server/src/test/java/org/apache/druid/sql/calcite/util/datasets/TestDataSet.java new file mode 100644 index 000000000000..983dd81243b5 --- /dev/null +++ b/server/src/test/java/org/apache/druid/sql/calcite/util/datasets/TestDataSet.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.sql.calcite.util.datasets; + +import org.apache.druid.segment.QueryableIndex; +import org.apache.druid.timeline.DataSegment; + +import java.io.File; + +public interface TestDataSet +{ + public static final String TIMESTAMP_COLUMN = "t"; + + public static final MapBasedTestDataset NUMFOO = new NumFoo(); + public static final MapBasedTestDataset BROADCAST = new NumFoo("broadcast"); + + String getName(); + + QueryableIndex makeIndex(File tmpDir); + + DataSegment makeSegment(QueryableIndex index); +} diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidQuidemTestBase.java b/sql/src/test/java/org/apache/druid/quidem/DruidQuidemTestBase.java index 290bc5184506..8366d1dce9ef 100644 --- a/sql/src/test/java/org/apache/druid/quidem/DruidQuidemTestBase.java +++ b/sql/src/test/java/org/apache/druid/quidem/DruidQuidemTestBase.java @@ -100,13 +100,13 @@ public DruidQuidemTestBase() } filter = new WildcardFileFilter(filterStr); } - druidQuidemRunner = new DruidQuidemRunner(); + druidQuidemRunner = new DruidQuidemRunner(createCommandHandler()); } /** Creates a command handler. 
*/ protected CommandHandler createCommandHandler() { - return Quidem.EMPTY_COMMAND_HANDLER; + return new DruidQuidemCommandHandler(); } @ParameterizedTest @@ -121,8 +121,11 @@ public void test(String testFileName) throws Exception public static class DruidQuidemRunner { - public DruidQuidemRunner() + private CommandHandler commandHandler; + + public DruidQuidemRunner(CommandHandler commandHandler) { + this.commandHandler = commandHandler; } public void run(File inFile) throws Exception @@ -142,7 +145,7 @@ public void run(File inFile, final File outFile) throws Exception ConfigBuilder configBuilder = Quidem.configBuilder() .withConnectionFactory(connectionFactory) .withPropertyHandler(connectionFactory) - .withCommandHandler(new DruidQuidemCommandHandler()); + .withCommandHandler(commandHandler); Config config = configBuilder .withReader(reader) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteNestedDataQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteNestedDataQueryTest.java index 6b2bd9bc5871..c892c4732691 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteNestedDataQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteNestedDataQueryTest.java @@ -22,7 +22,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.inject.Injector; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.InputRowSchema; import org.apache.druid.data.input.ResourceInputSource; @@ -39,7 +38,6 @@ import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.Druids; import org.apache.druid.query.NestedDataTestUtils; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.TableDataSource; import org.apache.druid.query.UnnestDataSource; import org.apache.druid.query.aggregation.CountAggregatorFactory; @@ -66,7 +64,6 @@ import 
org.apache.druid.segment.column.RowSignature; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.nested.NestedPathField; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import org.apache.druid.segment.virtual.NestedFieldVirtualColumn; @@ -191,13 +188,8 @@ public NestedComponentSupplier(TempDirProducer tempFolderProducer) super(tempFolderProducer); } - @SuppressWarnings("resource") @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - final QueryRunnerFactoryConglomerate conglomerate, - final JoinableFactoryWrapper joinableFactory, - final Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { BuiltInTypesModule.registerHandlersAndSerde(); final QueryableIndex index = @@ -353,8 +345,6 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( .inputTmpDir(tempDirProducer.newTempFolder()) .buildIncrementalIndex(); - - SpecificSegmentsQuerySegmentWalker walker = SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate); walker.add( DataSegment.builder() .dataSource(DATA_SOURCE) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSubqueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSubqueryTest.java index 71ef8964fc59..794423f764a6 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSubqueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSubqueryTest.java @@ -21,7 +21,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.inject.Injector; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.InputRowSchema; import org.apache.druid.data.input.impl.DimensionsSpec; @@ -40,7 +39,6 @@ import 
org.apache.druid.query.Order; import org.apache.druid.query.QueryContexts; import org.apache.druid.query.QueryDataSource; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.ResourceLimitExceededException; import org.apache.druid.query.TableDataSource; import org.apache.druid.query.aggregation.CountAggregatorFactory; @@ -75,7 +73,6 @@ import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.join.JoinType; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.writeout.OnHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; import org.apache.druid.sql.calcite.expression.DruidExpression; @@ -1605,14 +1602,8 @@ public SubqueryComponentSupplier(TempDirProducer tempDirProducer) } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - QueryRunnerFactoryConglomerate conglomerate, - JoinableFactoryWrapper joinableFactory, - Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { - SpecificSegmentsQuerySegmentWalker walker = - super.createQuerySegmentWalker(conglomerate, joinableFactory, injector); final String datasource1 = "dsMissingCol"; final File tmpFolder = tempDirProducer.newTempFolder(); @@ -1697,6 +1688,7 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( .rows(rows2) .buildMMappedIndex(); + super.addSegmentsToWalker(walker); walker.add( DataSegment.builder() .dataSource(datasource1) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java index 41bc0ef8d1ac..977b4bb66157 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java +++ 
b/sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java @@ -22,7 +22,6 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableMap; import com.google.common.io.ByteStreams; -import com.google.inject.Injector; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql2rel.SqlToRelConverter; import org.apache.commons.io.FileUtils; @@ -30,11 +29,9 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.parsers.TimestampParser; import org.apache.druid.query.QueryContexts; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.RowSignature; import org.apache.druid.segment.column.ValueType; -import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; import org.apache.druid.sql.calcite.DrillWindowQueryTest.DrillComponentSupplier; import org.apache.druid.sql.calcite.NotYetSupported.Modes; @@ -225,20 +222,12 @@ public DrillComponentSupplier(TempDirProducer tempFolderProducer) } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - QueryRunnerFactoryConglomerate conglomerate, - JoinableFactoryWrapper joinableFactory, - Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { - final SpecificSegmentsQuerySegmentWalker retVal = super.createQuerySegmentWalker( - conglomerate, - joinableFactory, - injector); - + super.addSegmentsToWalker(walker); final File tmpFolder = tempDirProducer.newTempFolder(); - TestDataBuilder.attachIndexesForDrillTestDatasources(retVal, tmpFolder); - return retVal; + TestDataBuilder.attachIndexesForDrillTestDatasources(walker, tmpFolder); + return walker; } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/QTestCase.java 
b/sql/src/test/java/org/apache/druid/sql/calcite/QTestCase.java index 67f5bf3c2eb7..01714525b664 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/QTestCase.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/QTestCase.java @@ -25,6 +25,7 @@ import org.apache.druid.java.util.common.FileUtils; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.quidem.DruidQTestInfo; +import org.apache.druid.quidem.DruidQuidemCommandHandler; import org.apache.druid.quidem.DruidQuidemTestBase; import org.apache.druid.quidem.DruidQuidemTestBase.DruidQuidemRunner; import org.apache.druid.sql.calcite.QueryTestRunner.QueryRunStep; @@ -69,7 +70,7 @@ public void run() isValidTestCaseFile(testInfo.getIQFile()); } - DruidQuidemRunner runner = new DruidQuidemTestBase.DruidQuidemRunner(); + DruidQuidemRunner runner = new DruidQuidemTestBase.DruidQuidemRunner(new DruidQuidemCommandHandler()); runner.run(testInfo.getIQFile()); } catch (Exception e) { diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java index ded918d64d8c..8758871c3841 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java @@ -33,6 +33,7 @@ import org.apache.druid.quidem.DruidAvaticaTestDriver; import org.apache.druid.server.QueryStackTests; import org.apache.druid.sql.calcite.util.CacheTestHelperModule.ResultCacheMode; +import org.apache.druid.sql.calcite.util.FakeIndexTaskUtil; import org.apache.druid.sql.calcite.util.SqlTestFramework; import org.apache.druid.sql.calcite.util.SqlTestFramework.QueryComponentSupplier; import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier; @@ -50,6 +51,7 @@ import org.reflections.util.FilterBuilder; import javax.annotation.Nonnull; + import java.io.Closeable; import java.lang.annotation.Annotation; import 
java.lang.annotation.ElementType; @@ -166,17 +168,44 @@ public Class fromString(String name) throws Ex Class value(); } + /** + * Declares which tables to ingest into this {@link QueryComponentSupplier}. + * + * May point to a directory containing json ingestion files. + * All files will be made available via {@link FakeIndexTaskUtil}. + * It may not support all ingestion features. + */ + @Retention(RetentionPolicy.RUNTIME) + @Target({ElementType.METHOD, ElementType.TYPE}) + @Datasets("") + public @interface Datasets + { + ConfigOptionProcessor PROCESSOR = new ConfigOptionProcessor<>(Datasets.class) + { + @Override + public String fromString(String name) + { + return name; + } + }; + + String value(); + } + private static final Set KNOWN_CONFIG_KEYS = ImmutableSet.builder() .add(NumMergeBuffers.PROCESSOR.getConfigName()) .add(MinTopNThreshold.PROCESSOR.getConfigName()) .add(ResultCache.PROCESSOR.getConfigName()) .add(ComponentSupplier.PROCESSOR.getConfigName()) + .add(Datasets.PROCESSOR.getConfigName()) .build(); public final int numMergeBuffers; public final int minTopNThreshold; public final ResultCacheMode resultCache; public final Class componentSupplier; + public final String datasets; + public SqlTestFrameworkConfig(List annotations) { @@ -185,6 +214,7 @@ public SqlTestFrameworkConfig(List annotations) minTopNThreshold = MinTopNThreshold.PROCESSOR.fromAnnotations(annotations); resultCache = ResultCache.PROCESSOR.fromAnnotations(annotations); componentSupplier = ComponentSupplier.PROCESSOR.fromAnnotations(annotations); + datasets = Datasets.PROCESSOR.fromAnnotations(annotations); } catch (Exception e) { throw new RuntimeException(e); @@ -199,6 +229,7 @@ public SqlTestFrameworkConfig(Map queryParams) minTopNThreshold = MinTopNThreshold.PROCESSOR.fromMap(queryParams); resultCache = ResultCache.PROCESSOR.fromMap(queryParams); componentSupplier = ComponentSupplier.PROCESSOR.fromMap(queryParams); + datasets = Datasets.PROCESSOR.fromMap(queryParams); } catch 
(Exception e) { throw new RuntimeException(e); @@ -217,7 +248,7 @@ private void validateConfigKeys(Set keySet) @Override public int hashCode() { - return Objects.hash(minTopNThreshold, numMergeBuffers, resultCache, componentSupplier); + return Objects.hash(minTopNThreshold, numMergeBuffers, resultCache, componentSupplier, datasets); } @Override @@ -230,7 +261,8 @@ public boolean equals(Object obj) return minTopNThreshold == other.minTopNThreshold && numMergeBuffers == other.numMergeBuffers && resultCache == other.resultCache - && componentSupplier == other.componentSupplier; + && componentSupplier == other.componentSupplier + && Objects.equals(datasets, other.datasets); } public static class SqlTestFrameworkConfigStore implements Closeable diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfigTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfigTest.java index 844251e8ac3d..9b727fd9eaf5 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfigTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfigTest.java @@ -136,7 +136,7 @@ public void testInvalidConfigKeySpecified() ); assertEquals( - "Invalid configuration key(s) specified [[nonExistent]]; valid options are [[numMergeBuffers, minTopNThreshold, resultCache, componentSupplier]]", + "Invalid configuration key(s) specified [[nonExistent]]; valid options are [[numMergeBuffers, minTopNThreshold, resultCache, componentSupplier, datasets]]", e.getMessage() ); } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/FakeIndexTaskUtil.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/FakeIndexTaskUtil.java new file mode 100644 index 000000000000..6af4aa2ea34a --- /dev/null +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/FakeIndexTaskUtil.java @@ -0,0 +1,111 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.sql.calcite.util; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.jsontype.NamedType; +import org.apache.druid.data.input.InputFormat; +import org.apache.druid.data.input.InputSource; +import org.apache.druid.data.input.impl.LocalInputSource; +import org.apache.druid.quidem.ProjectPathUtils; +import org.apache.druid.segment.indexing.DataSchema; +import org.apache.druid.segment.indexing.IOConfig; +import org.apache.druid.segment.indexing.IngestionSpec; +import org.apache.druid.segment.indexing.TuningConfig; +import org.apache.druid.sql.calcite.util.datasets.InputSourceBasedTestDataset; +import org.apache.druid.sql.calcite.util.datasets.TestDataSet; + +import java.io.File; +import java.io.IOException; + +/** + * Utility class to create {@link TestDataSet} from fake indexing tasks. + * + * Goal is to let the users utilize the ingestion api to create test data. 
+ */ +public class FakeIndexTaskUtil +{ + public static TestDataSet makeDS(ObjectMapper objectMapper, File src) + { + try { + ObjectMapper om = objectMapper.copy(); + om.registerSubtypes(new NamedType(MyIOConfigType.class, "index_parallel")); + FakeIndexTask indexTask = om.readValue(src, FakeIndexTask.class); + FakeIngestionSpec spec = indexTask.spec; + InputSource inputSource = relativizeLocalInputSource( + spec.getIOConfig().inputSource, ProjectPathUtils.PROJECT_ROOT + ); + TestDataSet dataset = new InputSourceBasedTestDataset( + spec.getDataSchema(), + spec.getIOConfig().inputFormat, + inputSource + ); + return dataset; + } + catch (IOException e) { + throw new RuntimeException(e); + } + } + + private static InputSource relativizeLocalInputSource(InputSource inputSource, File projectRoot) + { + if (!(inputSource instanceof LocalInputSource)) { + return inputSource; + } + LocalInputSource localInputSource = (LocalInputSource) inputSource; + if (localInputSource.getBaseDir().isAbsolute()) { + return inputSource; + } + File newBaseDir = localInputSource.getBaseDir().toPath().resolve(projectRoot.toPath()).toFile(); + return new LocalInputSource( + newBaseDir, + localInputSource.getFilter(), + localInputSource.getFiles(), + localInputSource.getSystemFields() + ); + } + + static class FakeIndexTask + { + @JsonProperty + public FakeIngestionSpec spec; + } + + static class FakeIngestionSpec extends IngestionSpec + { + @JsonCreator + public FakeIngestionSpec( + @JsonProperty("dataSchema") DataSchema dataSchema, + @JsonProperty("ioConfig") MyIOConfigType ioConfig) + { + super(dataSchema, ioConfig, null); + } + } + + static class MyIOConfigType implements IOConfig + { + @JsonProperty + public InputSource inputSource; + @JsonProperty + public InputFormat inputFormat; + } +} diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java index dda77b02aee9..90cc53d1b9a0 100644 
--- a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java @@ -29,6 +29,8 @@ import com.google.inject.Module; import com.google.inject.Provides; import com.google.inject.TypeLiteral; +import org.apache.druid.client.cache.Cache; +import org.apache.druid.client.cache.CacheConfig; import org.apache.druid.collections.NonBlockingPool; import org.apache.druid.guice.BuiltInTypesModule; import org.apache.druid.guice.DruidInjectorBuilder; @@ -57,6 +59,7 @@ import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.QuerySegmentWalker; import org.apache.druid.query.QueryWatcher; +import org.apache.druid.query.RetryQueryRunnerConfig; import org.apache.druid.query.TestBufferPool; import org.apache.druid.query.groupby.DefaultGroupByQueryMetricsFactory; import org.apache.druid.query.groupby.GroupByQueryConfig; @@ -67,16 +70,28 @@ import org.apache.druid.query.groupby.TestGroupByBuffers; import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider; import org.apache.druid.query.topn.TopNQueryConfig; +import org.apache.druid.quidem.ProjectPathUtils; import org.apache.druid.quidem.TestSqlModule; import org.apache.druid.segment.DefaultColumnFormatConfig; +import org.apache.druid.segment.column.ColumnConfig; import org.apache.druid.segment.join.JoinableFactoryWrapper; +import org.apache.druid.segment.realtime.ChatHandlerProvider; +import org.apache.druid.segment.realtime.NoopChatHandlerProvider; +import org.apache.druid.server.ClientQuerySegmentWalker; +import org.apache.druid.server.LocalQuerySegmentWalker; import org.apache.druid.server.QueryLifecycle; import org.apache.druid.server.QueryLifecycleFactory; +import org.apache.druid.server.QueryScheduler; import org.apache.druid.server.QueryStackTests; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; +import org.apache.druid.server.SubqueryGuardrailHelper; +import 
org.apache.druid.server.TestClusterQuerySegmentWalker; +import org.apache.druid.server.TestClusterQuerySegmentWalker.TestSegmentsBroker; +import org.apache.druid.server.initialization.ServerConfig; import org.apache.druid.server.log.RequestLogger; import org.apache.druid.server.log.TestRequestLogger; import org.apache.druid.server.metrics.NoopServiceEmitter; +import org.apache.druid.server.metrics.SubqueryCountStatsProvider; import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthTestUtils; import org.apache.druid.server.security.AuthorizerMapper; @@ -97,20 +112,24 @@ import org.apache.druid.sql.calcite.schema.LookupSchema; import org.apache.druid.sql.calcite.schema.NoopDruidSchemaManager; import org.apache.druid.sql.calcite.schema.SystemSchema; +import org.apache.druid.sql.calcite.util.datasets.TestDataSet; import org.apache.druid.sql.calcite.view.DruidViewMacroFactory; import org.apache.druid.sql.calcite.view.InProcessViewManager; import org.apache.druid.sql.calcite.view.ViewManager; import org.apache.druid.sql.guice.SqlModule; import org.apache.druid.sql.hook.DruidHookDispatcher; import org.apache.druid.timeline.DataSegment; +import org.apache.druid.utils.JvmUtils; import javax.inject.Named; import java.io.Closeable; +import java.io.File; import java.io.IOException; import java.net.URI; import java.nio.ByteBuffer; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Properties; import java.util.Set; @@ -176,6 +195,8 @@ public interface QueryComponentSupplier extends Closeable */ void gatherProperties(Properties properties); + SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker); + /** * Should return a module which provides the core Druid components. 
*/ @@ -186,12 +207,6 @@ public interface QueryComponentSupplier extends Closeable */ DruidModule getOverrideModule(); - SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - QueryRunnerFactoryConglomerate conglomerate, - JoinableFactoryWrapper joinableFactory, - Injector injector - ); - SqlEngine createEngine( QueryLifecycleFactory qlf, ObjectMapper objectMapper, @@ -266,12 +281,9 @@ public void configureGuice(DruidInjectorBuilder builder, List overrideMo } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - QueryRunnerFactoryConglomerate conglomerate, - JoinableFactoryWrapper joinableFactory, - Injector injector) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { - return delegate.createQuerySegmentWalker(conglomerate, joinableFactory, injector); + return delegate.addSegmentsToWalker(walker); } @Override @@ -436,19 +448,9 @@ public void configureGuice(DruidInjectorBuilder builder, List overrideMo } @Override - public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( - final QueryRunnerFactoryConglomerate conglomerate, - final JoinableFactoryWrapper joinableFactory, - final Injector injector - ) + public SpecificSegmentsQuerySegmentWalker addSegmentsToWalker(SpecificSegmentsQuerySegmentWalker walker) { - return TestDataBuilder.createMockWalker( - injector, - conglomerate, - tempDirProducer.newTempFolder("segments"), - QueryStackTests.DEFAULT_NOOP_SCHEDULER, - joinableFactory - ); + return TestDataBuilder.addDataSetsToWalker(tempDirProducer.newTempFolder("segments"), walker); } @Override @@ -746,6 +748,12 @@ public QuerySegmentWalker getQuerySegmentWalker(SpecificSegmentsQuerySegmentWalk return walker; } + @Provides + ChatHandlerProvider getChatHandlerProvider() + { + return new NoopChatHandlerProvider(); + } + @Provides GenericQueryMetricsFactory getGenericQueryMetricsFactory() { @@ -935,6 +943,13 @@ private SystemSchema makeSystemSchema(QuerySegmentWalker 
walker, AuthorizerMappe .createMockSystemSchema(druidSchema, (SpecificSegmentsQuerySegmentWalker) walker, authorizerMapper); } + @Provides + @LazySingleton + private ColumnConfig getColumnConfig() + { + return ColumnConfig.DEFAULT; + } + @Provides @LazySingleton private LookupSchema makeLookupSchema(final Injector injector) @@ -962,17 +977,116 @@ private DruidSchemaCatalog makeCatalog(final PlannerConfig plannerConfig, final @Provides @LazySingleton - public SpecificSegmentsQuerySegmentWalker specificSegmentsQuerySegmentWalker(final Injector injector, Builder builder) + public SpecificSegmentsQuerySegmentWalker specificSegmentsQuerySegmentWalker( + @Named("empty") SpecificSegmentsQuerySegmentWalker walker, Builder builder, + List testDataSets) { - SpecificSegmentsQuerySegmentWalker walker = builder.componentSupplier.createQuerySegmentWalker( - injector.getInstance(QueryRunnerFactoryConglomerate.class), - injector.getInstance(JoinableFactoryWrapper.class), - injector - ); builder.resourceCloser.register(walker); + if (testDataSets.isEmpty()) { + builder.componentSupplier.addSegmentsToWalker(walker); + } else { + for (TestDataSet testDataSet : testDataSets) { + walker.add(testDataSet, builder.componentSupplier.getTempDirProducer().newTempFolder()); + } + } + return walker; } + @Provides + @LazySingleton + public List buildCustomTables(ObjectMapper objectMapper, TempDirProducer tdp, + SqlTestFrameworkConfig cfg) + { + String datasets = cfg.datasets; + if (datasets.isEmpty()) { + return Collections.emptyList(); + } + final File[] inputFiles = getTableIngestFiles(datasets); + List ret = new ArrayList(); + for (File src : inputFiles) { + ret.add(FakeIndexTaskUtil.makeDS(objectMapper, src)); + } + return ret; + } + + private File[] getTableIngestFiles(String datasets) + { + File datasetsFile = ProjectPathUtils.getPathFromProjectRoot(datasets); + if (!datasetsFile.exists()) { + throw new RE("Table config file does not exist: %s", datasetsFile); + } + if 
(!datasetsFile.isDirectory()) { + throw new RE("The option datasets [%s] must point to a directory relative to the project root!", datasetsFile); + } + final File[] inputFiles = datasetsFile.listFiles(this::jsonFiles); + if (inputFiles.length == 0) { + throw new RE("There are no json files found in datasets directory [%s]!", datasetsFile); + } + + return inputFiles; + } + + boolean jsonFiles(File f) + { + return !f.isDirectory() && f.getName().endsWith(".json"); + } + + @Provides + @LazySingleton + public TestSegmentsBroker makeTimelines() + { + return new TestSegmentsBroker(); + } + + @Provides + @Named("empty") + @LazySingleton + public SpecificSegmentsQuerySegmentWalker createEmptyWalker( + TestSegmentsBroker testSegmentsBroker, + ClientQuerySegmentWalker clientQuerySegmentWalker) + { + return new SpecificSegmentsQuerySegmentWalker( + testSegmentsBroker.timelines, + clientQuerySegmentWalker + ); + } + + @Provides + @LazySingleton + private ClientQuerySegmentWalker makeClientQuerySegmentWalker(QueryRunnerFactoryConglomerate conglomerate, + JoinableFactoryWrapper joinableFactory, Injector injector, ServiceEmitter emitter, + TestClusterQuerySegmentWalker testClusterQuerySegmentWalker, + LocalQuerySegmentWalker testLocalQuerySegmentWalker, ServerConfig serverConfig) + { + return new ClientQuerySegmentWalker( + emitter, + testClusterQuerySegmentWalker, + testLocalQuerySegmentWalker, + conglomerate, + joinableFactory.getJoinableFactory(), + new RetryQueryRunnerConfig(), + injector.getInstance(ObjectMapper.class), + serverConfig, + injector.getInstance(Cache.class), + injector.getInstance(CacheConfig.class), + new SubqueryGuardrailHelper(null, JvmUtils.getRuntimeInfo().getMaxHeapSizeBytes(), 1), + new SubqueryCountStatsProvider() + ); + } + + @Provides + @LazySingleton + public SubqueryGuardrailHelper makeSubqueryGuardrailHelper() + { + return new SubqueryGuardrailHelper(null, JvmUtils.getRuntimeInfo().getMaxHeapSizeBytes(), 1); + } + + @Provides + public 
QueryScheduler makeQueryScheduler() + { + return QueryStackTests.DEFAULT_NOOP_SCHEDULER; + } @Override public void configure(Binder binder) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/TestDataBuilder.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/TestDataBuilder.java index 8093e36dfc38..4fd90a95c32b 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/TestDataBuilder.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/TestDataBuilder.java @@ -33,7 +33,6 @@ import org.apache.druid.data.input.impl.DimensionSchema; import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.DoubleDimensionSchema; -import org.apache.druid.data.input.impl.FloatDimensionSchema; import org.apache.druid.data.input.impl.JsonInputFormat; import org.apache.druid.data.input.impl.LongDimensionSchema; import org.apache.druid.data.input.impl.MapInputRowParser; @@ -64,7 +63,6 @@ import org.apache.druid.query.aggregation.firstlast.last.LongLastAggregatorFactory; import org.apache.druid.query.aggregation.firstlast.last.StringLastAggregatorFactory; import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; -import org.apache.druid.query.groupby.GroupByQueryConfig; import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider; import org.apache.druid.segment.AutoTypeColumnSchema; import org.apache.druid.segment.IndexBuilder; @@ -91,6 +89,7 @@ import org.apache.druid.server.QueryScheduler; import org.apache.druid.server.QueryStackTests; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; +import org.apache.druid.sql.calcite.util.datasets.TestDataSet; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.LinearShardSpec; import org.apache.druid.timeline.partition.NumberedShardSpec; @@ -159,29 +158,6 @@ public Optional build( null ); - private static final InputRowSchema NUMFOO_SCHEMA = new InputRowSchema( - new 
TimestampSpec(TIMESTAMP_COLUMN, "iso", null), - new DimensionsSpec( - ImmutableList.builder() - .addAll(DimensionsSpec.getDefaultSchemas(ImmutableList.of( - "dim1", - "dim2", - "dim3", - "dim4", - "dim5", - "dim6" - ))) - .add(new DoubleDimensionSchema("dbl1")) - .add(new DoubleDimensionSchema("dbl2")) - .add(new FloatDimensionSchema("f1")) - .add(new FloatDimensionSchema("f2")) - .add(new LongDimensionSchema("l1")) - .add(new LongDimensionSchema("l2")) - .build() - ), - null - ); - private static final InputRowSchema LOTS_OF_COLUMNS_SCHEMA = new InputRowSchema( new TimestampSpec("timestamp", "millis", null), new DimensionsSpec( @@ -244,16 +220,7 @@ public Optional build( .withRollup(false) .build(); - public static final IncrementalIndexSchema INDEX_SCHEMA_NUMERIC_DIMS = new IncrementalIndexSchema.Builder() - .withMetrics( - new CountAggregatorFactory("cnt"), - new FloatSumAggregatorFactory("m1", "m1"), - new DoubleSumAggregatorFactory("m2", "m2"), - new HyperUniquesAggregatorFactory("unique_dim1", "dim1") - ) - .withDimensionsSpec(NUMFOO_SCHEMA.getDimensionsSpec()) - .withRollup(false) - .build(); + public static final IncrementalIndexSchema INDEX_SCHEMA_NUMERIC_DIMS = TestDataSet.NUMFOO.getIndexSchema(); public static final IncrementalIndexSchema INDEX_SCHEMA_LOTS_O_COLUMNS = new IncrementalIndexSchema.Builder() .withMetrics( @@ -264,13 +231,6 @@ public Optional build( .build(); private static final List USER_VISIT_DIMS = ImmutableList.of("user", "country", "city"); - private static final IncrementalIndexSchema INDEX_SCHEMA_USER_VISIT = new IncrementalIndexSchema.Builder() - .withMetrics( - new CountAggregatorFactory("cnt") - ) - .withRollup(false) - .withMinTimestamp(DateTimes.of("2020-12-31").getMillis()) - .build(); public static final List> RAW_ROWS1 = ImmutableList.of( ImmutableMap.builder() @@ -466,8 +426,7 @@ public Optional build( .put("dim6", "6") .build() ); - public static final List ROWS1_WITH_NUMERIC_DIMS = - 
RAW_ROWS1_WITH_NUMERIC_DIMS.stream().map(raw -> createRow(raw, NUMFOO_SCHEMA)).collect(Collectors.toList()); + public static final List ROWS1_WITH_NUMERIC_DIMS = ImmutableList.copyOf(TestDataSet.NUMFOO.getRows()); public static final List> RAW_ROWS2 = ImmutableList.of( ImmutableMap.builder() @@ -753,7 +712,22 @@ public static SpecificSegmentsQuerySegmentWalker createMockWalker( final QueryRunnerFactoryConglomerate conglomerate, final File tmpDir, final QueryScheduler scheduler, - final JoinableFactoryWrapper joinableFactoryWrapper + final JoinableFactoryWrapper joinableFactoryWrapper) + { + SpecificSegmentsQuerySegmentWalker walker = SpecificSegmentsQuerySegmentWalker.createWalker( + injector, + conglomerate, + injector.getInstance(SegmentWrangler.class), + joinableFactoryWrapper, + scheduler + ); + return addDataSetsToWalker(tmpDir, walker); + } + + @SuppressWarnings("resource") + public static SpecificSegmentsQuerySegmentWalker addDataSetsToWalker( + final File tmpDir, + SpecificSegmentsQuerySegmentWalker walker ) { final QueryableIndex index1 = IndexBuilder @@ -780,14 +754,6 @@ public static SpecificSegmentsQuerySegmentWalker createMockWalker( .rows(FORBIDDEN_ROWS) .buildMMappedIndex(); - final QueryableIndex indexNumericDims = IndexBuilder - .create() - .tmpDir(new File(tmpDir, "3")) - .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()) - .schema(INDEX_SCHEMA_NUMERIC_DIMS) - .rows(ROWS1_WITH_NUMERIC_DIMS) - .buildMMappedIndex(); - final QueryableIndex index4 = IndexBuilder .create() .tmpDir(new File(tmpDir, "4")) @@ -852,14 +818,7 @@ public static SpecificSegmentsQuerySegmentWalker createMockWalker( .inputTmpDir(new File(tmpDir, "9-input")) .buildMMappedIndex(); - return SpecificSegmentsQuerySegmentWalker.createWalker( - injector, - conglomerate, - injector.getInstance(SegmentWrangler.class), - joinableFactoryWrapper, - scheduler, - injector.getInstance(GroupByQueryConfig.class) - ).add( + return walker.add( DataSegment.builder() 
.dataSource(CalciteTests.DATASOURCE1) .interval(index1.getDataInterval()) @@ -896,14 +855,8 @@ public static SpecificSegmentsQuerySegmentWalker createMockWalker( .build(), forbiddenIndex ).add( - DataSegment.builder() - .dataSource(CalciteTests.DATASOURCE3) - .interval(indexNumericDims.getDataInterval()) - .version("1") - .shardSpec(new LinearShardSpec(0)) - .size(0) - .build(), - indexNumericDims + TestDataSet.NUMFOO, + new File(tmpDir, "3") ).add( DataSegment.builder() .dataSource(CalciteTests.DATASOURCE4) @@ -941,14 +894,8 @@ public static SpecificSegmentsQuerySegmentWalker createMockWalker( .build(), someXDatasourceIndex ).add( - DataSegment.builder() - .dataSource(CalciteTests.BROADCAST_DATASOURCE) - .interval(indexNumericDims.getDataInterval()) - .version("1") - .shardSpec(new LinearShardSpec(0)) - .size(0) - .build(), - indexNumericDims + TestDataSet.BROADCAST, + new File(tmpDir, "3a") ).add( DataSegment.builder() .dataSource(CalciteTests.USERVISITDATASOURCE) diff --git a/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/customdataset.iq b/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/customdataset.iq new file mode 100644 index 000000000000..dd3b0d43bcf4 --- /dev/null +++ b/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/customdataset.iq @@ -0,0 +1,13 @@ +!use druidtest://?componentSupplier=StandardComponentSupplier&datasets=sql/src/test/quidem/sampledataset +!set outputformat mysql + + +select count(1) from "rollup-tutorial"; ++--------+ +| EXPR$0 | ++--------+ +| 9 | ++--------+ +(1 row) + +!ok diff --git a/sql/src/test/quidem/sampledataset/rollup-index.json b/sql/src/test/quidem/sampledataset/rollup-index.json new file mode 100644 index 000000000000..a978c2a76d27 --- /dev/null +++ b/sql/src/test/quidem/sampledataset/rollup-index.json @@ -0,0 +1,47 @@ +{ + "type" : "index_parallel", + "spec" : { + "dataSchema" : { + "dataSource" : "rollup-tutorial", + "timestampSpec": { + "column": "timestamp", + "format": "iso" + }, + 
"dimensionsSpec" : { + "dimensions" : [ + "srcIP", + "dstIP" + ] + }, + "metricsSpec" : [ + { "type" : "count", "name" : "count" }, + { "type" : "longSum", "name" : "packets", "fieldName" : "packets" }, + { "type" : "longSum", "name" : "bytes", "fieldName" : "bytes" } + ], + "granularitySpec" : { + "type" : "uniform", + "segmentGranularity" : "day", + "queryGranularity" : "minute", + "intervals" : ["2018-01-01/2018-01-03"], + "rollup" : true + } + }, + "ioConfig" : { + "type" : "index_parallel", + "inputSource" : { + "type" : "local", + "baseDir" : "quickstart/tutorial", + "filter" : "rollup-data.json" + }, + "inputFormat" : { + "type" : "json" + }, + "appendToExisting" : false + }, + "tuningConfig" : { + "type" : "index_parallel", + "maxRowsPerSegment" : 5000000, + "maxRowsInMemory" : 25000 + } + } +}