Enable quidem shadowing for decoupled testcases #16431

Merged: 11 commits, May 23, 2024
fix import
style fixes

cleanup
kgyrtkirk committed May 13, 2024
commit e36c46a85aff2e33be81d99b451903099d513612
@@ -25,7 +25,7 @@ public interface DruidConnectionExtras
 {
   ObjectMapper getObjectMapper();
 
-  public class DruidConnectionExtrasImpl implements DruidConnectionExtras
+  class DruidConnectionExtrasImpl implements DruidConnectionExtras
   {
     private final ObjectMapper objectMapper;
 
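Worth noting on this hunk: every member of a Java interface, including a nested class, is implicitly public (and static), so the dropped modifier changes nothing semantically. A minimal sketch (hypothetical names, not from this PR):

// Inside an interface, nested types are implicitly public and static.
interface Extras
{
  class Impl implements Extras  // same visibility as "public class Impl implements Extras"
  {
  }
}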
@@ -94,7 +94,6 @@
 import org.joda.time.DateTimeZone;
 import org.joda.time.Period;
 import org.junit.Assert;
-import org.junit.jupiter.api.Assumptions;
 import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
@@ -107,8 +106,8 @@
 import java.util.Map;
 
 import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assume.assumeFalse;
 import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assumptions.assumeFalse;
 
 public class CalciteJoinQueryTest extends BaseCalciteQueryTest
 {
@@ -3588,10 +3587,11 @@ public void testLeftJoinSubqueryWithNullKeyFilter(Map<String, Object> queryContext
   {
     // JoinFilterAnalyzer bug causes incorrect results on this test in replace-with-default mode.
     // This test case was originally added in https://github.com/apache/druid/pull/11434 with a note about this.
-    Assumptions.assumeFalse(NullHandling.replaceWithDefault() && QueryContext.of(queryContext).getEnableJoinFilterRewrite());
+    assumeFalse(NullHandling.replaceWithDefault() && QueryContext.of(queryContext).getEnableJoinFilterRewrite());
 
     assumeFalse(
-        "join condition not support in decoupled mode", testBuilder().isDecoupledMode() && NullHandling.replaceWithDefault()
+        testBuilder().isDecoupledMode() && NullHandling.replaceWithDefault(),
+        "join condition not support in decoupled mode"
     );
 
     // Cannot vectorize due to 'concat' expression.
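The argument swap above tracks the JUnit 4-to-JUnit 5 migration that the import change enables: org.junit.Assume.assumeFalse takes (message, condition), while org.junit.jupiter.api.Assumptions.assumeFalse takes (condition, message). A minimal sketch (hypothetical method names, not from this PR):

import org.junit.Assume;
import org.junit.jupiter.api.Assumptions;

class AssumeOrderSketch
{
  void junit4(boolean shouldSkip)
  {
    Assume.assumeFalse("reason for skipping", shouldSkip);       // JUnit 4: message first
  }

  void junit5(boolean shouldSkip)
  {
    Assumptions.assumeFalse(shouldSkip, "reason for skipping");  // JUnit 5: condition first
  }
}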
@@ -4702,7 +4702,7 @@ public void testJoinWithNonEquiCondition(Map<String, Object> queryContext)
     cannotVectorize();
 
     // We don't handle non-equi join conditions for non-sql compatible mode.
-    Assumptions.assumeFalse(NullHandling.replaceWithDefault());
+    assumeFalse(NullHandling.replaceWithDefault());
 
     testQuery(
         "SELECT x.m1, y.m1 FROM foo x INNER JOIN foo y ON x.m1 > y.m1",
@@ -4765,7 +4765,7 @@ public void testJoinWithEquiAndNonEquiCondition(Map<String, Object> queryContext
     cannotVectorize();
 
     // We don't handle non-equi join conditions for non-sql compatible mode.
-    Assumptions.assumeFalse(NullHandling.replaceWithDefault());
+    assumeFalse(NullHandling.replaceWithDefault());
 
     testQuery(
         "SELECT x.m1, y.m1 FROM foo x INNER JOIN foo y ON x.m1 = y.m1 AND x.m1 + y.m1 = 6.0",
@@ -5653,7 +5653,7 @@ public void testPlanWithInFilterMoreThanInSubQueryThreshold()
   @ParameterizedTest(name = "{0}")
   public void testRegressionFilteredAggregatorsSubqueryJoins(Map<String, Object> queryContext)
   {
-    assumeFalse("not support in decoupled mode", testBuilder().isDecoupledMode() && NullHandling.replaceWithDefault());
+    assumeFalse(testBuilder().isDecoupledMode() && NullHandling.replaceWithDefault(), "not support in decoupled mode");
 
     cannotVectorize();
     testQuery(
@@ -45,7 +45,7 @@ public DecoupledExtension(BaseCalciteQueryTest baseTest)
   private File qCaseDir;
 
   @Override
-  public void beforeEach(ExtensionContext context) throws Exception
+  public void beforeEach(ExtensionContext context)
   {
     Class<?> testClass = context.getTestClass().get();
     qCaseDir = ProjectPathUtils.getPathFromProjectRoot("sql/src/test/quidem/" + testClass.getName());
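Dropping throws Exception is safe here: an overriding method may declare fewer checked exceptions than the method it overrides. A minimal sketch, assuming the extension implements JUnit 5's BeforeEachCallback (whose beforeEach declares throws Exception):

import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;

class NarrowedThrowsSketch implements BeforeEachCallback
{
  @Override
  public void beforeEach(ExtensionContext context)  // narrower than the interface's "throws Exception"
  {
    // nothing here throws a checked exception, so no throws clause is needed
  }
}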
@@ -43,7 +43,8 @@ public QTestCase(DruidQTestInfo testInfo)
   {
     this.testInfo = testInfo;
     sb = new StringBuffer();
-    sb.append("# " + testInfo.comment);
+    sb.append("# ");
+    sb.append(testInfo.comment);
     sb.append("\n");
   }
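A note on the StringBuffer change: splitting the call avoids first concatenating "# " + testInfo.comment into a temporary String before copying it into the buffer. A minimal sketch (hypothetical names, not from this PR):

class AppendSketch
{
  static String header(String comment)
  {
    StringBuffer sb = new StringBuffer();
    sb.append("# ");       // append the literal directly,
    sb.append(comment);    // then the comment, with no intermediate String
    sb.append("\n");
    return sb.toString();
  }
}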

@@ -25,7 +25,7 @@
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import com.google.common.collect.ImmutableSet;
-import org.apache.curator.shaded.com.google.common.collect.Sets;
+import com.google.common.collect.Sets;
 import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.query.topn.TopNQueryConfig;
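This is the "fix import" from the commit message: org.apache.curator.shaded.com.google.common.collect.Sets is Curator's relocated (shaded) copy of Guava's Sets, usually picked up by accident via IDE auto-import; the class should come from Guava itself. A minimal sketch of the intended usage (hypothetical names, not from this PR):

import com.google.common.collect.Sets;  // Guava proper, not the curator-shaded copy

import java.util.Set;

class SetsImportSketch
{
  static Set<String> example()
  {
    return Sets.newHashSet("a", "b");   // identical API; only the import coordinates differ
  }
}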
@@ -0,0 +1,116 @@
# testExactTopNOnInnerJoinWithLimit@NullHandling=sql case-crc:c3f897f8
# quidem testcase reason: EQUIV_PLAN
!set sqlQueryId dummy
!set defaultTimeout 300000
!set debug true
!set maxScatterGatherBytes 9223372036854775807
!set useApproximateTopN false
!set sqlCurrentTimestamp 2000-01-01T00:00:00Z
!set plannerStrategy DECOUPLED
!set outputformat mysql
!use druidtest:///?MinTopNThreshold=1
select f1."dim4", sum("m1") from numfoo f1 inner join (
select "dim4" from numfoo where dim4 <> 'a' group by 1
) f2 on f1."dim4" = f2."dim4" group by 1 limit 1;
+------+--------+
| dim4 | EXPR$1 |
+------+--------+
| b | 15.0 |
+------+--------+
(1 row)

!ok
LogicalSort(fetch=[1])
  LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)])
    LogicalJoin(condition=[=($0, $2)], joinType=[inner])
      LogicalProject(dim4=[$4], m1=[$14])
        LogicalTableScan(table=[[druid, numfoo]])
      LogicalAggregate(group=[{4}])
        LogicalFilter(condition=[<>($4, 'a')])
          LogicalTableScan(table=[[druid, numfoo]])

!logicalPlan
DruidSort(fetch=[1], druid=[logical])
  DruidAggregate(group=[{0}], EXPR$1=[SUM($1)], druid=[logical])
    DruidJoin(condition=[=($0, $2)], joinType=[inner])
      DruidProject(dim4=[$4], m1=[$14], druid=[logical])
        DruidTableScan(table=[[druid, numfoo]], druid=[logical])
      DruidAggregate(group=[{4}], druid=[logical])
        DruidFilter(condition=[<>($4, 'a')])
          DruidTableScan(table=[[druid, numfoo]], druid=[logical])

!druidPlan
{
  "queryType" : "topN",
  "dataSource" : {
    "type" : "join",
    "left" : {
      "type" : "table",
      "name" : "numfoo"
    },
    "right" : {
      "type" : "query",
      "query" : {
        "queryType" : "groupBy",
        "dataSource" : {
          "type" : "table",
          "name" : "numfoo"
        },
        "intervals" : {
          "type" : "intervals",
          "intervals" : [ "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z" ]
        },
        "filter" : {
          "type" : "not",
          "field" : {
            "type" : "equals",
            "column" : "dim4",
            "matchValueType" : "STRING",
            "matchValue" : "a"
          }
        },
        "granularity" : {
          "type" : "all"
        },
        "dimensions" : [ {
          "type" : "default",
          "dimension" : "dim4",
          "outputName" : "_d0",
          "outputType" : "STRING"
        } ],
        "limitSpec" : {
          "type" : "NoopLimitSpec"
        }
      }
    },
    "rightPrefix" : "j0.",
    "condition" : "(\"dim4\" == \"j0._d0\")",
    "joinType" : "INNER"
  },
  "dimension" : {
    "type" : "default",
    "dimension" : "dim4",
    "outputName" : "d0",
    "outputType" : "STRING"
  },
  "metric" : {
    "type" : "dimension",
    "ordering" : {
      "type" : "lexicographic"
    }
  },
  "threshold" : 1,
  "intervals" : {
    "type" : "intervals",
    "intervals" : [ "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z" ]
  },
  "granularity" : {
    "type" : "all"
  },
  "aggregations" : [ {
    "type" : "doubleSum",
    "name" : "a0",
    "fieldName" : "m1"
  } ]
}
!nativePlan
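For orientation: in the quidem file above, each !-prefixed command asserts on the statement preceding it. !ok verifies the result rows, while !logicalPlan, !druidPlan, and !nativePlan verify the captured plans; the !use druidtest:///?MinTopNThreshold=1 connect string presumably lowers the TopN threshold (note the TopNQueryConfig import touched above) so that this LIMIT 1 aggregation plans as a native topN query. A minimal hypothetical record, assuming the same test driver, might look like:

!set outputformat mysql
!use druidtest:///
select 1;
+--------+
| EXPR$0 |
+--------+
|      1 |
+--------+
(1 row)

!ok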
@@ -0,0 +1,165 @@
# testGroupByOverGroupByOverInnerJoinOnTwoInlineDataSources@all_disabled@NullHandling=sql case-crc:7916e17e
# quidem testcase reason: EQUIV_PLAN
!set sqlQueryId dummy
!set defaultTimeout 300000
!set debug true
!set maxScatterGatherBytes 9223372036854775807
!set enableJoinFilterRewriteValueColumnFilters false
!set enableRewriteJoinToFilter false
!set sqlCurrentTimestamp 2000-01-01T00:00:00Z
!set plannerStrategy DECOUPLED
!set enableJoinFilterRewrite false
!set outputformat mysql
!use druidtest:///
with abc as
(
SELECT dim1, "__time", m1 from foo WHERE "dim1" = '10.1'
)
SELECT dim1 from (SELECT dim1,__time FROM (SELECT t1.dim1, t1."__time" from abc as t1 INNER JOIN abc as t2 on t1.dim1 = t2.dim1) GROUP BY 1,2) GROUP BY dim1
;
+------+
| dim1 |
+------+
| 10.1 |
+------+
(1 row)

!ok
LogicalAggregate(group=[{0}])
  LogicalProject(dim1=[CAST('10.1':VARCHAR):VARCHAR])
    LogicalAggregate(group=[{1}])
      LogicalJoin(condition=[=($0, $2)], joinType=[inner])
        LogicalProject(dim1=[CAST('10.1':VARCHAR):VARCHAR], __time=[$0])
          LogicalFilter(condition=[=($1, '10.1')])
            LogicalTableScan(table=[[druid, foo]])
        LogicalProject(dim1=[$1])
          LogicalFilter(condition=[=($1, '10.1')])
            LogicalTableScan(table=[[druid, foo]])

!logicalPlan
DruidAggregate(group=[{0}], druid=[logical])
  DruidProject(dim1=[CAST('10.1':VARCHAR):VARCHAR], druid=[logical])
    DruidAggregate(group=[{1}], druid=[logical])
      DruidJoin(condition=[=($0, $2)], joinType=[inner])
        DruidProject(dim1=[CAST('10.1':VARCHAR):VARCHAR], __time=[$0], druid=[logical])
          DruidFilter(condition=[=($1, '10.1')])
            DruidTableScan(table=[[druid, foo]], druid=[logical])
        DruidProject(dim1=[$1], druid=[logical])
          DruidFilter(condition=[=($1, '10.1')])
            DruidTableScan(table=[[druid, foo]], druid=[logical])

!druidPlan
{
  "queryType" : "groupBy",
  "dataSource" : {
    "type" : "query",
    "query" : {
      "queryType" : "groupBy",
      "dataSource" : {
        "type" : "join",
        "left" : {
          "type" : "query",
          "query" : {
            "queryType" : "scan",
            "dataSource" : {
              "type" : "table",
              "name" : "foo"
            },
            "intervals" : {
              "type" : "intervals",
              "intervals" : [ "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z" ]
            },
            "virtualColumns" : [ {
              "type" : "expression",
              "name" : "v0",
              "expression" : "'10.1'",
              "outputType" : "STRING"
            } ],
            "resultFormat" : "compactedList",
            "filter" : {
              "type" : "equals",
              "column" : "dim1",
              "matchValueType" : "STRING",
              "matchValue" : "10.1"
            },
            "columns" : [ "__time", "v0" ],
            "legacy" : false,
            "columnTypes" : [ "LONG", "STRING" ],
            "granularity" : {
              "type" : "all"
            }
          }
        },
        "right" : {
          "type" : "query",
          "query" : {
            "queryType" : "scan",
            "dataSource" : {
              "type" : "table",
              "name" : "foo"
            },
            "intervals" : {
              "type" : "intervals",
              "intervals" : [ "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z" ]
            },
            "resultFormat" : "compactedList",
            "filter" : {
              "type" : "equals",
              "column" : "dim1",
              "matchValueType" : "STRING",
              "matchValue" : "10.1"
            },
            "columns" : [ "dim1" ],
            "legacy" : false,
            "columnTypes" : [ "STRING" ],
            "granularity" : {
              "type" : "all"
            }
          }
        },
        "rightPrefix" : "j0.",
        "condition" : "(\"v0\" == \"j0.dim1\")",
        "joinType" : "INNER"
      },
      "intervals" : {
        "type" : "intervals",
        "intervals" : [ "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z" ]
      },
      "granularity" : {
        "type" : "all"
      },
      "dimensions" : [ {
        "type" : "default",
        "dimension" : "__time",
        "outputName" : "d0",
        "outputType" : "LONG"
      } ],
      "limitSpec" : {
        "type" : "NoopLimitSpec"
      }
    }
  },
  "intervals" : {
    "type" : "intervals",
    "intervals" : [ "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z" ]
  },
  "virtualColumns" : [ {
    "type" : "expression",
    "name" : "v0",
    "expression" : "'10.1'",
    "outputType" : "STRING"
  } ],
  "granularity" : {
    "type" : "all"
  },
  "dimensions" : [ {
    "type" : "default",
    "dimension" : "v0",
    "outputName" : "_d0",
    "outputType" : "STRING"
  } ],
  "limitSpec" : {
    "type" : "NoopLimitSpec"
  }
}
!nativePlan