LakshSingla commented on code in PR #14969:
URL: https://github.com/apache/druid/pull/14969#discussion_r1323987372
##
sql/src/test/java/org/apache/druid/sql/calcite/CalciteArraysQueryTest.java:
##
@@ -4807,4 +4808,187 @@ public void
testUnnestWithGroupByHavingWithWhereOnUnnestCol()
)
);
}
+
+ @Test
+ public void testUnnestWithCommaButHigherPrecedenceJoinLaterAlongWithAlias()
+ {
+// CROSS JOIN has a higher precedence over COMMA JOIN
+// CALCITE would interpret this as
+// (foo t1) COMMA JOIN (UNNEST(t1.dim3) CROSS JOIN foo t2)
+// while validating the right, parser does not understand t1.dim3
+// will throw a table not found validation error
+expectedException.expect(DruidException.class);
+expectedException.expectMessage("Table 't1' not found (line [2], column
[34])");
+testQuery(
+"select c1 from \n"
++ "druid.foo t1, unnest(mv_to_array(t1.dim3)) as u1(c1) CROSS JOIN
druid.foo t2 \n",
+QUERY_CONTEXT_UNNEST,
+ImmutableList.of(),
+ImmutableList.of()
+);
+ }
+
+ @Test
+ public void testUnnestWithCrossJoinEarlierToEnforcePrecedence()
+ {
+testQuery(
+"select c1 from \n"
++ "druid.foo CROSS JOIN unnest(mv_to_array(dim3)) as u1(c1) CROSS JOIN
druid.foo t2 \n",
+QUERY_CONTEXT_UNNEST,
+ImmutableList.of(
+Druids.newScanQueryBuilder()
+ .dataSource(
+ join(
+ new QueryDataSource(
+ Druids.newScanQueryBuilder()
+.dataSource(
+UnnestDataSource.create(
+new
TableDataSource(CalciteTests.DATASOURCE1),
+
expressionVirtualColumn("j0.unnest", "\"dim3\"", ColumnType.STRING),
+null
+)
+)
+
.intervals(querySegmentSpec(Filtration.eternity()))
+
.resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
+.legacy(false)
+.columns(
+"__time",
+"cnt",
+"dim1",
+"dim2",
+"dim3",
+"j0.unnest",
+"m1",
+"m2",
+"unique_dim1"
+)
+.context(QUERY_CONTEXT_UNNEST)
+.build()
+ ),
+ new QueryDataSource(
+ Druids.newScanQueryBuilder()
+.dataSource(new
TableDataSource(CalciteTests.DATASOURCE1))
+
.intervals(querySegmentSpec(Filtration.eternity()))
+
.resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
+.legacy(false)
+.context(QUERY_CONTEXT_UNNEST)
+.columns("__time", "cnt", "dim1", "dim2",
"dim3", "m1", "m2", "unique_dim1")
+.build()
+ ),
+ "_j0.",
+ "1",
+ JoinType.INNER,
+ null
+ )
+ )
+ .intervals(querySegmentSpec(Filtration.eternity()))
+
.resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
+ .legacy(false)
+ .columns("j0.unnest")
+ .context(QUERY_CONTEXT_UNNEST)
+ .build()
+),
+NullHandling.sqlCompatible() ?
+ImmutableList.of(
+new Object[]{"a"},
+new Object[]{"a"},
+new Object[]{"a"},
+new Object[]{"a"},
+new Object[]{"a"},
+new Object[]{"a"},
Review Comment:
Refactoring comment — we should use a loop, or a method that supports adding
multiple elements at once.
One concise approach could be something like:
```java
ImmutableList.Builder<Object[]> resultsBuilder = ImmutableList.builder();
resultsBuilder.addAll(Collections.nCopies(6, new Object[]{"a"}));
resultsBuilder.addAll(Collections.nCopies(12, new Object[]{"b"}));
resultsBuilder.addAll(Collections.nCopies(12, new Object[]{NullHandling.defaultStringValue()}));
```