rohangarg commented on code in PR #15897:
URL: https://github.com/apache/druid/pull/15897#discussion_r1503690679


##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean 
throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      return SqlOperandCountRanges.from(1);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.<RelDataType>builder().add(t, t).build();
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+    }
+  }
+
+  @Override
+  public List<String> getParamNames()
+  {
+    return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+  }
+
+  @Override
+  public TranslatableTable getTable(SqlOperatorBinding operatorBinding)
+  {
+    SqlCallBinding callBinding = (SqlCallBinding) operatorBinding;
+    SqlValidator validator = callBinding.getValidator();
+
+    List<TableOperand> tables = getTables(callBinding, 
validator.getCatalogReader());
+    AppendDatasourceMetadata metadata = buildUnionDataSource(tables);
+    return new DatasourceTable(
+        metadata.values,
+        metadata,
+        EffectiveMetadata.of(metadata.values)
+    );
+  }
+
+  static class TableOperand
+  {
+    private final SqlNode sqlOperand;
+    private final SqlValidatorTable table;
+
+    public TableOperand(SqlNode sqlOperand, SqlValidatorTable table)
+    {
+      this.sqlOperand = sqlOperand;
+      this.table = table;
+    }
+
+    public RelOptTable getRelOptTable()
+    {
+      return table.unwrapOrThrow(RelOptTable.class);
+    }
+
+    public DatasourceTable getDataSourceTable()
+    {
+      return table.unwrapOrThrow(DatasourceTable.class);
+    }
+
+    public RowSignature getRowSignature()
+    {
+      return getDataSourceTable().getRowSignature();
+    }
+
+    public DataSource getDataSource()
+    {
+      return getDataSourceTable().getDataSource();
+    }
+  }
+
+  private List<TableOperand> getTables(SqlCallBinding callBinding, 
SqlValidatorCatalogReader catalogReader)
+  {
+    List<TableOperand> tables = new ArrayList<>();
+    for (int i = 0; i < callBinding.getOperandCount(); i++) {
+      if (!callBinding.isOperandLiteral(i, false)) {

Review Comment:
   Can this happen? Isn't this already checked by the operand checker (`OperandMetadata.checkOperandTypes`)?



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean 
throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      return SqlOperandCountRanges.from(1);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.<RelDataType>builder().add(t, t).build();
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+    }
+  }
+
+  @Override
+  public List<String> getParamNames()
+  {
+    return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+  }
+
+  @Override
+  public TranslatableTable getTable(SqlOperatorBinding operatorBinding)
+  {
+    SqlCallBinding callBinding = (SqlCallBinding) operatorBinding;
+    SqlValidator validator = callBinding.getValidator();
+
+    List<TableOperand> tables = getTables(callBinding, 
validator.getCatalogReader());
+    AppendDatasourceMetadata metadata = buildUnionDataSource(tables);
+    return new DatasourceTable(
+        metadata.values,
+        metadata,
+        EffectiveMetadata.of(metadata.values)
+    );
+  }
+
+  static class TableOperand
+  {
+    private final SqlNode sqlOperand;
+    private final SqlValidatorTable table;
+
+    public TableOperand(SqlNode sqlOperand, SqlValidatorTable table)
+    {
+      this.sqlOperand = sqlOperand;
+      this.table = table;
+    }
+
+    public RelOptTable getRelOptTable()
+    {
+      return table.unwrapOrThrow(RelOptTable.class);
+    }
+
+    public DatasourceTable getDataSourceTable()
+    {
+      return table.unwrapOrThrow(DatasourceTable.class);
+    }
+
+    public RowSignature getRowSignature()
+    {
+      return getDataSourceTable().getRowSignature();
+    }
+
+    public DataSource getDataSource()
+    {
+      return getDataSourceTable().getDataSource();
+    }
+  }
+
+  private List<TableOperand> getTables(SqlCallBinding callBinding, 
SqlValidatorCatalogReader catalogReader)
+  {
+    List<TableOperand> tables = new ArrayList<>();
+    for (int i = 0; i < callBinding.getOperandCount(); i++) {
+      if (!callBinding.isOperandLiteral(i, false)) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            "All arguments of call to macro "
+                + "APPEND should be literal. Actual argument #"
+                + i + " is not literal",
+                callBinding.operand(i)
+        );
+      }
+      @Nullable
+      String tableName = callBinding.getOperandLiteralValue(i, String.class);
+      ImmutableList<String> tableNameList = 
ImmutableList.<String>builder().add(tableName).build();
+      SqlValidatorTable table = catalogReader.getTable(tableNameList);

Review Comment:
   Does this also take care of tables created via CTEs (common table expressions)?



##########
sql/src/test/java/org/apache/druid/sql/calcite/CalciteTableAppendTest.java:
##########
@@ -0,0 +1,214 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.druid.error.DruidException;
+import org.apache.druid.sql.calcite.NotYetSupported.NotYetSupportedProcessor;
+import org.hamcrest.MatcherAssert;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+
+public class CalciteTableAppendTest extends BaseCalciteQueryTest

Review Comment:
   Is there a specific reason for leaving the native query plans out of the 
test queries?



##########
sql/src/test/java/org/apache/druid/sql/calcite/CalciteTableAppendTest.java:
##########
@@ -0,0 +1,214 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.druid.error.DruidException;
+import org.apache.druid.sql.calcite.NotYetSupported.NotYetSupportedProcessor;
+import org.hamcrest.MatcherAssert;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+
+public class CalciteTableAppendTest extends BaseCalciteQueryTest
+{
+  @Rule(order = 0)
+  public NotYetSupportedProcessor negativeTestProcessor = new 
NotYetSupportedProcessor();
+
+  @Test
+  public void testUnion()
+  {
+    testBuilder()
+        .sql("select dim1,null as dim4 from foo union all select dim1,dim4 
from numfoo")
+        .expectedResults(
+            ImmutableList.of(
+                new Object[] {"", null},
+                new Object[] {"10.1", null},
+                new Object[] {"2", null},
+                new Object[] {"1", null},
+                new Object[] {"def", null},
+                new Object[] {"abc", null},
+                new Object[] {"", "a"},
+                new Object[] {"10.1", "a"},
+                new Object[] {"2", "a"},
+                new Object[] {"1", "b"},
+                new Object[] {"def", "b"},
+                new Object[] {"abc", "b"}
+            )
+        )
+        .run();
+  }
+
+  @Test
+  public void testAppend2()
+  {
+    testBuilder()
+        .sql("select dim1,dim4,d1,f1 from TABLE(APPEND('foo','numfoo')) u")
+        .expectedResults(
+            ResultMatchMode.RELAX_NULLS,
+            ImmutableList.of(
+                new Object[] {"", null, null, null},
+                new Object[] {"10.1", null, null, null},
+                new Object[] {"2", null, null, null},
+                new Object[] {"1", null, null, null},
+                new Object[] {"def", null, null, null},
+                new Object[] {"abc", null, null, null},
+                new Object[] {"", "a", 1.0D, 1.0F},
+                new Object[] {"10.1", "a", 1.7D, 0.1F},
+                new Object[] {"2", "a", 0.0D, 0.0F},
+                new Object[] {"1", "b", null, null},
+                new Object[] {"def", "b", null, null},
+                new Object[] {"abc", "b", null, null}
+            )
+        )
+        .expectedLogicalPlan(
+            ""
+                + "LogicalProject(exprs=[[$1, $8, $11, $13]])\n"
+                + "  LogicalTableScan(table=[[APPEND]])\n"
+        )
+        .run();
+  }
+
+  @Test
+  public void testAppendSameTableMultipleTimes()
+  {
+    testBuilder()
+        .sql("select dim1,dim4,d1,f1 from TABLE(APPEND('foo','numfoo','foo')) 
u where dim1='2'")
+        .expectedResults(
+            ResultMatchMode.RELAX_NULLS,
+            ImmutableList.of(
+                new Object[] {"2", null, null, null},
+                new Object[] {"2", null, null, null},
+                new Object[] {"2", "a", 0.0D, 0.0F}
+            )
+        )
+        .run();
+  }
+
+  @Test
+  public void testAppendtSingleTableIsValid()
+  {
+    testBuilder()
+        .sql("select dim1 from TABLE(APPEND('foo')) u")
+        .expectedResults(
+            ImmutableList.of(
+                new Object[] {""},
+                new Object[] {"10.1"},
+                new Object[] {"2"},
+                new Object[] {"1"},
+                new Object[] {"def"},
+                new Object[] {"abc"}
+            )
+        )
+        .run();
+  }
+
+  @Test
+  public void testAppendtNoTableIsInvalid()
+  {
+    try {
+      testBuilder()
+          .sql("select dim1 from TABLE(APPEND()) u")
+          .run();
+      Assert.fail("query execution should fail");
+    }
+    catch (DruidException e) {
+      MatcherAssert.assertThat(
+          e,
+          invalidSqlIs("No match found for function signature APPEND() (line 
[1], column [24])")
+      );
+    }
+  }
+
+  @Test
+  public void testAppendNoTableIsInvalid()
+  {
+    try {
+      testBuilder()
+          .sql("select dim1 from TABLE(APPEND()) u")
+          .run();
+      Assert.fail("query execution should fail");
+    }
+    catch (DruidException e) {
+      MatcherAssert.assertThat(
+          e,
+          invalidSqlIs("No match found for function signature APPEND() (line 
[1], column [24])")
+      );
+    }
+  }
+
+  @Test
+  public void testAppendtSingleTableIsInvalidArg()
+  {
+    try {
+      testBuilder()
+          .sql("select dim1 from TABLE(APPEND('foo',111)) u")
+          .run();
+      Assert.fail("query execution should fail");
+    }
+    catch (DruidException e) {
+      MatcherAssert.assertThat(
+          e,
+          invalidSqlIs(
+              "All arguments to APPEND should be literal strings. Argument #1 
is not string (line [1], column [37])"

Review Comment:
   Do you think we should make the argument index 1-based instead of 
0-based when printing it? 
   Also, if it is possible to print the offending argument itself, 
that would be great too.



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean 
throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      return SqlOperandCountRanges.from(1);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.<RelDataType>builder().add(t, t).build();
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+    }
+  }
+
+  @Override
+  public List<String> getParamNames()
+  {
+    return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+  }
+
+  @Override
+  public TranslatableTable getTable(SqlOperatorBinding operatorBinding)
+  {
+    SqlCallBinding callBinding = (SqlCallBinding) operatorBinding;
+    SqlValidator validator = callBinding.getValidator();
+
+    List<TableOperand> tables = getTables(callBinding, 
validator.getCatalogReader());
+    AppendDatasourceMetadata metadata = buildUnionDataSource(tables);
+    return new DatasourceTable(
+        metadata.values,
+        metadata,
+        EffectiveMetadata.of(metadata.values)
+    );
+  }
+
+  static class TableOperand
+  {
+    private final SqlNode sqlOperand;
+    private final SqlValidatorTable table;
+
+    public TableOperand(SqlNode sqlOperand, SqlValidatorTable table)
+    {
+      this.sqlOperand = sqlOperand;
+      this.table = table;
+    }
+
+    public RelOptTable getRelOptTable()
+    {
+      return table.unwrapOrThrow(RelOptTable.class);
+    }
+
+    public DatasourceTable getDataSourceTable()
+    {
+      return table.unwrapOrThrow(DatasourceTable.class);
+    }
+
+    public RowSignature getRowSignature()
+    {
+      return getDataSourceTable().getRowSignature();
+    }
+
+    public DataSource getDataSource()
+    {
+      return getDataSourceTable().getDataSource();
+    }
+  }
+
+  private List<TableOperand> getTables(SqlCallBinding callBinding, 
SqlValidatorCatalogReader catalogReader)
+  {
+    List<TableOperand> tables = new ArrayList<>();
+    for (int i = 0; i < callBinding.getOperandCount(); i++) {
+      if (!callBinding.isOperandLiteral(i, false)) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            "All arguments of call to macro "
+                + "APPEND should be literal. Actual argument #"
+                + i + " is not literal",
+                callBinding.operand(i)
+        );
+      }
+      @Nullable

Review Comment:
   Is there a valid case with a `null` table name?



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  private static class OperandMetadata implements SqlOperandMetadata

Review Comment:
   the `SqlOperandMetadata` javadoc seems to say that it is only to be used for a fixed 
number of parameters. do you think it is ok to use it for variable params 
as well in this case?



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean 
throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      return SqlOperandCountRanges.from(1);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.<RelDataType>builder().add(t, t).build();
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+    }
+  }
+
+  @Override
+  public List<String> getParamNames()
+  {
+    return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+  }
+
+  @Override
+  public TranslatableTable getTable(SqlOperatorBinding operatorBinding)
+  {
+    SqlCallBinding callBinding = (SqlCallBinding) operatorBinding;
+    SqlValidator validator = callBinding.getValidator();
+
+    List<TableOperand> tables = getTables(callBinding, 
validator.getCatalogReader());
+    AppendDatasourceMetadata metadata = buildUnionDataSource(tables);
+    return new DatasourceTable(
+        metadata.values,
+        metadata,
+        EffectiveMetadata.of(metadata.values)
+    );
+  }
+
+  static class TableOperand
+  {
+    private final SqlNode sqlOperand;
+    private final SqlValidatorTable table;
+
+    public TableOperand(SqlNode sqlOperand, SqlValidatorTable table)
+    {
+      this.sqlOperand = sqlOperand;
+      this.table = table;
+    }
+
+    public RelOptTable getRelOptTable()
+    {
+      return table.unwrapOrThrow(RelOptTable.class);
+    }
+
+    public DatasourceTable getDataSourceTable()
+    {
+      return table.unwrapOrThrow(DatasourceTable.class);
+    }
+
+    public RowSignature getRowSignature()
+    {
+      return getDataSourceTable().getRowSignature();
+    }
+
+    public DataSource getDataSource()
+    {
+      return getDataSourceTable().getDataSource();
+    }
+  }
+
+  private List<TableOperand> getTables(SqlCallBinding callBinding, 
SqlValidatorCatalogReader catalogReader)
+  {
+    List<TableOperand> tables = new ArrayList<>();
+    for (int i = 0; i < callBinding.getOperandCount(); i++) {
+      if (!callBinding.isOperandLiteral(i, false)) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            "All arguments of call to macro "
+                + "APPEND should be literal. Actual argument #"
+                + i + " is not literal",
+                callBinding.operand(i)
+        );
+      }
+      @Nullable
+      String tableName = callBinding.getOperandLiteralValue(i, String.class);
+      ImmutableList<String> tableNameList = 
ImmutableList.<String>builder().add(tableName).build();
+      SqlValidatorTable table = catalogReader.getTable(tableNameList);
+      if (table == null) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            StringUtils.format("Table [%s] not found", tableName),
+            callBinding.operand(i)
+        );
+      }
+      tables.add(new TableOperand(callBinding.operand(i), table));
+    }
+    return tables;
+  }
+
+  static class AppendDatasourceMetadata implements DatasourceMetadata
+  {
+    private final RowSignature values;
+    private final DataSource dataSource;
+
+    public AppendDatasourceMetadata(RowSignature values, List<DataSource> 
dataSources)
+    {
+      this.values = values;
+      this.dataSource = new UnionDataSource(dataSources);
+    }
+
+    @Override
+    public boolean isJoinable()
+    {
+      return false;
+    }
+
+    @Override
+    public boolean isBroadcast()
+    {
+      return false;
+    }
+
+    @Override
+    public DataSource dataSource()
+    {
+      return dataSource;
+    }
+  }
+
+  private AppendDatasourceMetadata buildUnionDataSource(List<TableOperand> 
tables)
+  {
+    List<DataSource> dataSources = new ArrayList<>();
+    Map<String, ColumnType> fields = new LinkedHashMap<>();
+    Builder rowSignatureBuilder = RowSignature.builder();
+    for (TableOperand table : tables) {
+      RowSignature rowSignature = table.getRowSignature();
+      for (String columnName : rowSignature.getColumnNames()) {
+        ColumnType currentType = rowSignature.getColumnType(columnName).get();
+        ColumnType existingType = fields.get(columnName);
+
+        if (existingType == null || existingType.equals(currentType)) {
+          fields.put(columnName, currentType);
+        } else {
+          try {
+            ColumnType commonType = 
ColumnType.leastRestrictiveType(currentType, existingType);
+            fields.put(columnName, commonType);
+          }
+          catch (Exception e) {

Review Comment:
   I think we should only catch `Types.IncompatibleTypeException` - is there a 
reason for a catch all?



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  /**
+   * {@link SqlOperatorConversion} registration for APPEND.
+   *
+   * <p>The macro is expanded during validation via
+   * {@link TableAppendMacro#getTable}, so the operator must never reach
+   * expression translation.
+   */
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, RowSignature rowSignature, RexNode rexNode)
+    {
+      // Carry a message so an unexpected invocation is diagnosable; the
+      // original threw a bare IllegalStateException with no context.
+      throw new IllegalStateException("APPEND is a table macro and cannot be translated to a Druid expression");
+    }
+  }
+
+  /**
+   * Private: the only instance is {@link #APPEND_TABLE_MACRO}.
+   */
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null, // no operand type inference; operands are validated by OperandMetadata below
+        new OperandMetadata(),
+        null // no TableMacro delegate; getTable(SqlOperatorBinding) is overridden instead
+    );
+  }
+
+  /**
+   * Operand checker for APPEND: accepts one or more literal string table
+   * names.
+   *
+   * <p>NOTE(review): {@code SqlOperandMetadata} is documented for fixed-arity
+   * operators; this variadic use relies on {@link #getOperandCountRange()}
+   * during validation, while {@link #paramTypes} / {@link #paramNames} only
+   * supply representative metadata — confirm this is acceptable.
+   */
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean throwOnFailure)
+    {
+      // Every operand must be a literal of a character type; failures are
+      // reported at the offending operand's position.
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          }
+          return false;
+        }
+
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          }
+          return false;
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      // At least one table; no upper bound.
+      return SqlOperandCountRanges.from(1);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType varchar = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.of(varchar, varchar);
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      // Distinct names: the original returned "tableName" twice, which is
+      // ambiguous for named-argument invocation.
+      return ImmutableList.of("tableName1", "tableName2");
+    }
+  }
+
+  /**
+   * Representative parameter names for the macro. Names are distinct so that
+   * named-argument resolution is unambiguous (the original duplicated
+   * "tableName").
+   */
+  @Override
+  public List<String> getParamNames()
+  {
+    return ImmutableList.of("tableName1", "tableName2");
+  }
+
+  /**
+   * Builds the union view over the named tables.
+   *
+   * <p>Resolves every literal operand through the validator's catalog reader,
+   * merges the operand tables into a union datasource and exposes the result
+   * as a {@link DatasourceTable}.
+   */
+  @Override
+  public TranslatableTable getTable(SqlOperatorBinding operatorBinding)
+  {
+    SqlCallBinding callBinding = (SqlCallBinding) operatorBinding;
+    SqlValidatorCatalogReader catalogReader = callBinding.getValidator().getCatalogReader();
+    AppendDatasourceMetadata unionMetadata = buildUnionDataSource(getTables(callBinding, catalogReader));
+    return new DatasourceTable(
+        unionMetadata.values,
+        unionMetadata,
+        EffectiveMetadata.of(unionMetadata.values)
+    );
+  }
+
+  /**
+   * Pairs a resolved validator table with the SQL operand it was named by, so
+   * that later errors can be reported at the operand's source position.
+   */
+  static class TableOperand
+  {
+    private final SqlNode sqlOperand;
+    private final SqlValidatorTable table;
+
+    public TableOperand(SqlNode sqlOperand, SqlValidatorTable table)
+    {
+      this.sqlOperand = sqlOperand;
+      this.table = table;
+    }
+
+    public RelOptTable getRelOptTable()
+    {
+      return table.unwrapOrThrow(RelOptTable.class);
+    }
+
+    public DatasourceTable getDataSourceTable()
+    {
+      return table.unwrapOrThrow(DatasourceTable.class);
+    }
+
+    public RowSignature getRowSignature()
+    {
+      DatasourceTable dataSourceTable = getDataSourceTable();
+      return dataSourceTable.getRowSignature();
+    }
+
+    public DataSource getDataSource()
+    {
+      DatasourceTable dataSourceTable = getDataSourceTable();
+      return dataSourceTable.getDataSource();
+    }
+  }
+
+  /**
+   * Resolves each operand of the call into a {@link TableOperand}.
+   *
+   * @throws org.apache.calcite.runtime.CalciteContextException if an operand
+   *         is not a literal, is a NULL literal, or does not name a known
+   *         table — reported at the offending operand's position
+   */
+  private List<TableOperand> getTables(SqlCallBinding callBinding, SqlValidatorCatalogReader catalogReader)
+  {
+    List<TableOperand> tables = new ArrayList<>();
+    for (int i = 0; i < callBinding.getOperandCount(); i++) {
+      SqlNode operand = callBinding.operand(i);
+      if (!callBinding.isOperandLiteral(i, false)) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            "All arguments of call to macro "
+                + "APPEND should be literal. Actual argument #"
+                + i + " is not literal",
+            operand
+        );
+      }
+      @Nullable
+      String tableName = callBinding.getOperandLiteralValue(i, String.class);
+      if (tableName == null) {
+        // A NULL literal would otherwise surface as an obscure NPE from
+        // ImmutableList; report it against the offending operand instead.
+        throw DruidSqlValidator.buildCalciteContextException(
+            "Argument #" + i + " of call to macro APPEND must not be NULL",
+            operand
+        );
+      }
+      SqlValidatorTable table = catalogReader.getTable(ImmutableList.of(tableName));
+      if (table == null) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            StringUtils.format("Table [%s] not found", tableName),
+            operand
+        );
+      }
+      tables.add(new TableOperand(operand, table));
+    }
+    return tables;
+  }
+
+  /**
+   * Metadata describing the appended (union) view of the operand tables.
+   *
+   * <p>Wraps the operand datasources in a {@link UnionDataSource}; the view
+   * reports itself as neither joinable nor broadcast.
+   */
+  static class AppendDatasourceMetadata implements DatasourceMetadata
+  {
+    // Merged row signature of the union view (read directly by getTable).
+    private final RowSignature values;
+    // Union over the operand tables' datasources.
+    private final DataSource dataSource;
+
+    public AppendDatasourceMetadata(RowSignature values, List<DataSource> 
dataSources)
+    {
+      this.values = values;
+      this.dataSource = new UnionDataSource(dataSources);
+    }
+
+    @Override
+    public boolean isJoinable()
+    {
+      return false;
+    }
+
+    @Override
+    public boolean isBroadcast()
+    {
+      return false;
+    }
+
+    @Override
+    public DataSource dataSource()
+    {
+      return dataSource;
+    }
+  }
+
+  private AppendDatasourceMetadata buildUnionDataSource(List<TableOperand> 
tables)
+  {
+    List<DataSource> dataSources = new ArrayList<>();
+    Map<String, ColumnType> fields = new LinkedHashMap<>();
+    Builder rowSignatureBuilder = RowSignature.builder();

Review Comment:
   can move this to the actual place it's being used



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean 
throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      return SqlOperandCountRanges.from(1);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.<RelDataType>builder().add(t, t).build();
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      return ImmutableList.<String>builder().add("tableName", 
"tableName").build();

Review Comment:
   should this generate a list of all the parameters with an independent name 
for each of them?



##########
sql/src/test/java/org/apache/druid/sql/calcite/CalciteTableAppendTest.java:
##########
@@ -0,0 +1,214 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.druid.error.DruidException;
+import org.apache.druid.sql.calcite.NotYetSupported.NotYetSupportedProcessor;
+import org.hamcrest.MatcherAssert;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+
+public class CalciteTableAppendTest extends BaseCalciteQueryTest
+{
  // Processes NotYetSupported annotations on the test methods of this class.
  // NOTE(review): order = 0 presumably makes this rule apply ahead of any
  // other rules on the class — confirm against the JUnit @Rule ordering docs.
  @Rule(order = 0)
  public NotYetSupportedProcessor negativeTestProcessor = new NotYetSupportedProcessor();
+
  /**
   * Baseline check: a plain UNION ALL of foo and numfoo, padding foo's
   * missing dim4 column with NULL, for comparison with the APPEND macro
   * tests in this class.
   */
  @Test
  public void testUnion()
  {
    testBuilder()
        .sql("select dim1,null as dim4 from foo union all select dim1,dim4 from numfoo")
        .expectedResults(
            ImmutableList.of(
                new Object[] {"", null},
                new Object[] {"10.1", null},
                new Object[] {"2", null},
                new Object[] {"1", null},
                new Object[] {"def", null},
                new Object[] {"abc", null},
                new Object[] {"", "a"},
                new Object[] {"10.1", "a"},
                new Object[] {"2", "a"},
                new Object[] {"1", "b"},
                new Object[] {"def", "b"},
                new Object[] {"abc", "b"}
            )
        )
        .run();
  }
+
  /**
   * Queries TABLE(APPEND('foo','numfoo')): per the expected rows, columns
   * that exist in only one of the tables (dim4, d1, f1) come back NULL for
   * rows originating from the table that lacks them. Also pins the logical
   * plan to a single scan of the APPEND table.
   */
  @Test
  public void testAppend2()
  {
    testBuilder()
        .sql("select dim1,dim4,d1,f1 from TABLE(APPEND('foo','numfoo')) u")
        .expectedResults(
            ResultMatchMode.RELAX_NULLS,
            ImmutableList.of(
                new Object[] {"", null, null, null},
                new Object[] {"10.1", null, null, null},
                new Object[] {"2", null, null, null},
                new Object[] {"1", null, null, null},
                new Object[] {"def", null, null, null},
                new Object[] {"abc", null, null, null},
                new Object[] {"", "a", 1.0D, 1.0F},
                new Object[] {"10.1", "a", 1.7D, 0.1F},
                new Object[] {"2", "a", 0.0D, 0.0F},
                new Object[] {"1", "b", null, null},
                new Object[] {"def", "b", null, null},
                new Object[] {"abc", "b", null, null}
            )
        )
        .expectedLogicalPlan(
            ""
                + "LogicalProject(exprs=[[$1, $8, $11, $13]])\n"
                + "  LogicalTableScan(table=[[APPEND]])\n"
        )
        .run();
  }
+
  /**
   * The same table may be listed more than once: foo appears twice in the
   * APPEND call, so the dim1='2' filter matches two foo rows (dim4/d1/f1
   * NULL-filled) plus one numfoo row.
   */
  @Test
  public void testAppendSameTableMultipleTimes()
  {
    testBuilder()
        .sql("select dim1,dim4,d1,f1 from TABLE(APPEND('foo','numfoo','foo')) u where dim1='2'")
        .expectedResults(
            ResultMatchMode.RELAX_NULLS,
            ImmutableList.of(
                new Object[] {"2", null, null, null},
                new Object[] {"2", null, null, null},
                new Object[] {"2", "a", 0.0D, 0.0F}
            )
        )
        .run();
  }
+
  /**
   * APPEND accepts a single table argument (the operand count range starts
   * at one), behaving like a plain scan of that table.
   *
   * NOTE(review): "Appendt" in the method name looks like a typo for "Append".
   */
  @Test
  public void testAppendtSingleTableIsValid()
  {
    testBuilder()
        .sql("select dim1 from TABLE(APPEND('foo')) u")
        .expectedResults(
            ImmutableList.of(
                new Object[] {""},
                new Object[] {"10.1"},
                new Object[] {"2"},
                new Object[] {"1"},
                new Object[] {"def"},
                new Object[] {"abc"}
            )
        )
        .run();
  }
+
+  @Test
+  public void testAppendtNoTableIsInvalid()

Review Comment:
   This seems like a redundant test, since the operand count range already requires at least one argument.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to