kgyrtkirk commented on code in PR #15897:
URL: https://github.com/apache/druid/pull/15897#discussion_r1491016435


##########
sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidSqlValidator.java:
##########
@@ -177,7 +177,7 @@ public void validateCall(SqlCall call, SqlValidatorScope 
scope)
     super.validateCall(call, scope);
   }
 
-  private CalciteContextException buildCalciteContextException(String message, 
SqlNode call)
+  public static CalciteContextException buildCalciteContextException(String 
message, SqlNode call)

Review Comment:
   better place?



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean 
throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      return SqlOperandCountRanges.from(1);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.<RelDataType>builder().add(t, t).build();
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      return ImmutableList.<String>builder().add("tableName", 
"tableName").build();

Review Comment:
   
   
   changed it to `"tableName1", "tableName2"`
   



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,309 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.curator.shaded.com.google.common.collect.ImmutableList;
+import org.apache.druid.error.InvalidInput;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean 
throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      return SqlOperandCountRanges.from(2);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>, <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.<RelDataType>builder().add(t, t).build();
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+    }
+  }
+
+  @Override
+  public List<String> getParamNames()
+  {
+    return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+  }
+
+  @Override
+  public TranslatableTable getTable(SqlOperatorBinding callBinding)
+  {
+    SqlValidator validator = ((SqlCallBinding) callBinding).getValidator();
+    List<RelOptTable> tables = getTables(callBinding, 
validator.getCatalogReader());
+    AppendDatasourceMetadata metadata = buildUnionDataSource(tables);
+    return new DatasourceTable(
+        metadata.values,
+        metadata,
+        EffectiveMetadata.of(metadata.values)
+    );
+  }
+
+  private List<RelOptTable> getTables(SqlOperatorBinding callBinding, 
SqlValidatorCatalogReader catalogReader)
+  {
+    List<RelOptTable> tables = new ArrayList<>();
+    for (int i = 0; i < callBinding.getOperandCount(); i++) {
+      if (!callBinding.isOperandLiteral(i, false)) {
+        throw new IllegalArgumentException(
+            "All arguments of call to macro "
+                + "APPEND should be literal. Actual argument #"
+                + i + " is not literal"
+        );

Review Comment:
   didn't notice this remained an `IllegalArgumentException`; fixed it!



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,309 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.curator.shaded.com.google.common.collect.ImmutableList;
+import org.apache.druid.error.InvalidInput;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean 
throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      return SqlOperandCountRanges.from(2);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>, <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.<RelDataType>builder().add(t, t).build();
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+    }
+  }
+
+  @Override
+  public List<String> getParamNames()
+  {
+    return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+  }
+
+  @Override
+  public TranslatableTable getTable(SqlOperatorBinding callBinding)
+  {
+    SqlValidator validator = ((SqlCallBinding) callBinding).getValidator();
+    List<RelOptTable> tables = getTables(callBinding, 
validator.getCatalogReader());
+    AppendDatasourceMetadata metadata = buildUnionDataSource(tables);
+    return new DatasourceTable(
+        metadata.values,
+        metadata,
+        EffectiveMetadata.of(metadata.values)
+    );
+  }
+
+  private List<RelOptTable> getTables(SqlOperatorBinding callBinding, 
SqlValidatorCatalogReader catalogReader)
+  {
+    List<RelOptTable> tables = new ArrayList<>();
+    for (int i = 0; i < callBinding.getOperandCount(); i++) {
+      if (!callBinding.isOperandLiteral(i, false)) {
+        throw new IllegalArgumentException(
+            "All arguments of call to macro "
+                + "APPEND should be literal. Actual argument #"
+                + i + " is not literal"
+        );
+      }
+      @Nullable
+      String tableName = callBinding.getOperandLiteralValue(i, String.class);
+      ImmutableList<String> tableNameList = 
ImmutableList.<String>builder().add(tableName).build();
+      SqlValidatorTable table = catalogReader.getTable(tableNameList);
+      if (table == null) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            StringUtils.format("Table [%s] not found", tableName),
+            ((SqlCallBinding) callBinding).operand(i)
+        );
+      }
+      tables.add(table.unwrapOrThrow(RelOptTable.class));
+    }
+    return tables;
+  }
+
+  static class AppendDatasourceMetadata implements DatasourceMetadata
+  {
+    private final RowSignature values;
+    private final DataSource dataSource;
+
+    public AppendDatasourceMetadata(RowSignature values, List<DataSource> 
dataSources)
+    {
+      this.values = values;
+      this.dataSource = new UnionDataSource(dataSources);
+    }
+
+    @Override
+    public boolean isJoinable()
+    {
+      return false;
+    }
+
+    @Override
+    public boolean isBroadcast()
+    {
+      return false;
+    }
+
+    @Override
+    public DataSource dataSource()
+    {
+      return dataSource;
+    }
+  }
+
+  private AppendDatasourceMetadata buildUnionDataSource(List<RelOptTable> 
tables)
+  {
+    List<DataSource> dataSources = new ArrayList<>();
+    Map<String, ColumnType> fields = new LinkedHashMap<>();
+    Builder rowSignatureBuilder = RowSignature.builder();
+    for (RelOptTable relOptTable : tables) {
+      DatasourceTable table = relOptTable.unwrapOrThrow(DatasourceTable.class);
+      RowSignature rowSignature = table.getRowSignature();
+      for (String columnName : rowSignature.getColumnNames()) {
+        ColumnType currentType = rowSignature.getColumnType(columnName).get();
+        ColumnType existingType = fields.get(columnName);
+
+        if (existingType == null || existingType.equals(currentType)) {
+          fields.put(columnName, currentType);
+        } else {
+          try {
+            ColumnType commonType = 
ColumnType.leastRestrictiveType(currentType, existingType);
+            fields.put(columnName, commonType);
+          }
+          catch (Exception e) {
+            throw InvalidInput.exception(

Review Comment:
   It doesn't seem like `ColumnType.leastRestrictiveType` will throw an exception 
right now - but that may change in the future...better to handle it correctly
   
   changed it to use `buildCalciteContextException` 
   



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  private static class OperandMetadata implements SqlOperandMetadata

Review Comment:
   I had to do some back-and-forth before implementing these things; mostly 
because (as you've noticed) originally the table macro was supposed to have a 
fixed number of arguments.
   
   I don't really understand why it needs to have a fixed number of arguments...
   
   however, I wanted to use [this 
logic](https://github.com/apache/calcite/blob/2558c13cdb8b6a8e1608112a902b1bf9d97b5386/core/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java#L2874)
 - but for that I had to retain the `SqlUserDefinedTableMacro` type, which 
wants `SqlOperandMetadata`, which has the idea of a fixed number of arguments.
   
   implementing `SqlOperandMetadata` made it work correctly - but `paramNames` 
and `paramTypes` remained odd...
   



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean 
throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      return SqlOperandCountRanges.from(1);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.<RelDataType>builder().add(t, t).build();
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+    }
+  }
+
+  @Override
+  public List<String> getParamNames()
+  {
+    return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+  }
+
+  @Override
+  public TranslatableTable getTable(SqlOperatorBinding operatorBinding)
+  {
+    SqlCallBinding callBinding = (SqlCallBinding) operatorBinding;
+    SqlValidator validator = callBinding.getValidator();
+
+    List<TableOperand> tables = getTables(callBinding, 
validator.getCatalogReader());
+    AppendDatasourceMetadata metadata = buildUnionDataSource(tables);
+    return new DatasourceTable(
+        metadata.values,
+        metadata,
+        EffectiveMetadata.of(metadata.values)
+    );
+  }
+
+  static class TableOperand
+  {
+    private final SqlNode sqlOperand;
+    private final SqlValidatorTable table;
+
+    public TableOperand(SqlNode sqlOperand, SqlValidatorTable table)
+    {
+      this.sqlOperand = sqlOperand;
+      this.table = table;
+    }
+
+    public RelOptTable getRelOptTable()
+    {
+      return table.unwrapOrThrow(RelOptTable.class);
+    }
+
+    public DatasourceTable getDataSourceTable()
+    {
+      return table.unwrapOrThrow(DatasourceTable.class);
+    }
+
+    public RowSignature getRowSignature()
+    {
+      return getDataSourceTable().getRowSignature();
+    }
+
+    public DataSource getDataSource()
+    {
+      return getDataSourceTable().getDataSource();
+    }
+  }
+
+  private List<TableOperand> getTables(SqlCallBinding callBinding, 
SqlValidatorCatalogReader catalogReader)
+  {
+    List<TableOperand> tables = new ArrayList<>();
+    for (int i = 0; i < callBinding.getOperandCount(); i++) {
+      if (!callBinding.isOperandLiteral(i, false)) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            "All arguments of call to macro "
+                + "APPEND should be literal. Actual argument #"
+                + i + " is not literal",
+                callBinding.operand(i)
+        );
+      }
+      @Nullable

Review Comment:
   no; `null` is not a string literal either, so it can't happen - but I added 
a testcase in case that changes



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean 
throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      return SqlOperandCountRanges.from(1);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.<RelDataType>builder().add(t, t).build();
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+    }
+  }
+
+  @Override
+  public List<String> getParamNames()
+  {
+    return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+  }
+
+  @Override
+  public TranslatableTable getTable(SqlOperatorBinding operatorBinding)
+  {
+    SqlCallBinding callBinding = (SqlCallBinding) operatorBinding;
+    SqlValidator validator = callBinding.getValidator();
+
+    List<TableOperand> tables = getTables(callBinding, 
validator.getCatalogReader());
+    AppendDatasourceMetadata metadata = buildUnionDataSource(tables);
+    return new DatasourceTable(
+        metadata.values,
+        metadata,
+        EffectiveMetadata.of(metadata.values)
+    );
+  }
+
+  static class TableOperand
+  {
+    private final SqlNode sqlOperand;
+    private final SqlValidatorTable table;
+
+    public TableOperand(SqlNode sqlOperand, SqlValidatorTable table)
+    {
+      this.sqlOperand = sqlOperand;
+      this.table = table;
+    }
+
+    public RelOptTable getRelOptTable()
+    {
+      return table.unwrapOrThrow(RelOptTable.class);
+    }
+
+    public DatasourceTable getDataSourceTable()
+    {
+      return table.unwrapOrThrow(DatasourceTable.class);
+    }
+
+    public RowSignature getRowSignature()
+    {
+      return getDataSourceTable().getRowSignature();
+    }
+
+    public DataSource getDataSource()
+    {
+      return getDataSourceTable().getDataSource();
+    }
+  }
+
+  private List<TableOperand> getTables(SqlCallBinding callBinding, 
SqlValidatorCatalogReader catalogReader)
+  {
+    List<TableOperand> tables = new ArrayList<>();
+    for (int i = 0; i < callBinding.getOperandCount(); i++) {
+      if (!callBinding.isOperandLiteral(i, false)) {

Review Comment:
   right - this is not necessary; removed it!



##########
sql/src/test/java/org/apache/druid/sql/calcite/CalciteTableAppendTest.java:
##########
@@ -0,0 +1,214 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.druid.error.DruidException;
+import org.apache.druid.sql.calcite.NotYetSupported.NotYetSupportedProcessor;
+import org.hamcrest.MatcherAssert;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+
+public class CalciteTableAppendTest extends BaseCalciteQueryTest

Review Comment:
   added native queries



##########
sql/src/main/java/org/apache/druid/sql/calcite/table/DatasourceMetadata.java:
##########
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.table;
+
+import org.apache.druid.query.DataSource;
+
+public interface DatasourceMetadata
+{
+
+  DataSource dataSource();

Review Comment:
   Could you add Javadoc (apidoc) describing what this method returns?



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/AppendTableMacro.java:
##########
@@ -0,0 +1,301 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.curator.shaded.com.google.common.collect.ImmutableList;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class AppendTableMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new AppendTableMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
+  private AppendTableMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean 
throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+        RelDataType type = callBinding.getOperandType(i);
+
+        SqlTypeName typeName = type.getSqlTypeName();
+
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      return SqlOperandCountRanges.from(2);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>, <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.<RelDataType>builder().add(t, t).build();
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+    }
+  }
+
+  @Override
+  public List<String> getParamNames()
+  {
+    return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+  }
+
+  @Override
+  public TranslatableTable getTable(SqlOperatorBinding callBinding)
+  {
+    SqlCallBinding ss = (SqlCallBinding) callBinding;
+    SqlValidator validator = ss.getValidator();
+    SqlValidatorCatalogReader catalogReader = validator.getCatalogReader();
+    List<String> tableNames = getTableNames(callBinding);
+    List<RelOptTable> tables = getTables(catalogReader, tableNames);
+
+    AppendDatasourceMetadata metadata = buildUnionDataSource(tables);
+    return new DatasourceTable(
+        metadata.values,
+        metadata,
+        EffectiveMetadata.of(metadata.values)
+    );
+  }
+
+  static class AppendDatasourceMetadata implements DatasourceMetadata
+  {
+    private final RowSignature values;
+    private final DataSource dataSource;
+
+    public AppendDatasourceMetadata(RowSignature values, List<DataSource> 
dataSources)
+    {
+      this.values = values;
+      this.dataSource = new UnionDataSource(dataSources);
+    }
+
+    @Override
+    public boolean isJoinable()
+    {
+      return false;
+    }
+
+    @Override
+    public boolean isBroadcast()
+    {
+      return false;
+    }
+
+    @Override
+    public DataSource dataSource()
+    {
+      return dataSource;
+    }
+  }
+
+  private AppendDatasourceMetadata buildUnionDataSource(List<RelOptTable> 
tables)
+  {
+    List<DataSource> dataSources = new ArrayList<>();
+    Map<String, ColumnType> fields = new LinkedHashMap<>();
+    Builder rowSignatureBuilder = RowSignature.builder();
+    for (RelOptTable relOptTable : tables) {
+
+      DatasourceTable a = relOptTable.unwrapOrThrow(DatasourceTable.class);

Review Comment:
   rename - `a` is not a descriptive variable name (e.g. use `datasourceTable`)



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables use of TABLE(APPEND('t1','t2')), which provides a union view of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  /**
+   * Registers the APPEND table macro as a SQL operator.
+   *
+   * APPEND is a table macro expanded during validation, so it is never
+   * translated to a row-level Druid expression.
+   */
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, RowSignature rowSignature, RexNode rexNode)
+    {
+      // Reaching this point indicates a planner bug: table macros have no
+      // expression form. Fail with a message instead of a bare exception.
+      throw new IllegalStateException("APPEND is a table macro and cannot be converted to a Druid expression");
+    }
+  }
+
+  // Private: the singleton APPEND_TABLE_MACRO is the only instance.
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        // Operand checking: one or more literal string table names.
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  /**
+   * Operand checker for APPEND: accepts one or more operands, each of which
+   * must be a literal string naming a table.
+   */
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        // Table names must be known at validation time, so only literals work.
+        if (!callBinding.isOperandLiteral(i, false)) {
+          return fail(throwOnFailure, "Argument #" + i + " is not literal", operand);
+        }
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          return fail(throwOnFailure, "Argument #" + i + " is not string", operand);
+        }
+      }
+      return true;
+    }
+
+    /**
+     * Reports an operand failure: throws a positioned validation error when
+     * {@code throwOnFailure} is set, otherwise returns {@code false}.
+     */
+    private static boolean fail(boolean throwOnFailure, String detail, SqlNode operand)
+    {
+      if (throwOnFailure) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            "All arguments to APPEND should be literal strings. " + detail,
+            operand
+        );
+      }
+      return false;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      // At least one table; no upper bound.
+      return SqlOperandCountRanges.from(1);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.of(t, t);
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      // NOTE(review): "tableName" is repeated — presumably a placeholder for
+      // the variadic signature; confirm named-argument calls behave sanely.
+      return ImmutableList.of("tableName", "tableName");
+    }
+  }
+
+  /**
+   * Parameter names reported to the Calcite validator; every operand of
+   * APPEND is a table name.
+   */
+  @Override
+  public List<String> getParamNames()
+  {
+    return ImmutableList.of("tableName", "tableName");
+  }
+
+  /**
+   * Expands the APPEND macro into a synthetic union table: resolves each
+   * operand to a datasource table and merges them into one DatasourceTable.
+   */
+  @Override
+  public TranslatableTable getTable(SqlOperatorBinding operatorBinding)
+  {
+    // For a table macro invoked from SQL this is always a SqlCallBinding,
+    // which carries the validator (and through it the catalog reader).
+    final SqlCallBinding binding = (SqlCallBinding) operatorBinding;
+    final SqlValidatorCatalogReader catalogReader = binding.getValidator().getCatalogReader();
+
+    final List<TableOperand> operandTables = getTables(binding, catalogReader);
+    final AppendDatasourceMetadata unionMetadata = buildUnionDataSource(operandTables);
+    return new DatasourceTable(
+        unionMetadata.values,
+        unionMetadata,
+        EffectiveMetadata.of(unionMetadata.values)
+    );
+  }
+
+  /**
+   * Pairs a resolved validator table with the SqlNode operand it came from,
+   * so later errors can be positioned at the original argument.
+   */
+  static class TableOperand
+  {
+    // NOTE(review): sqlOperand is stored but not read in the visible code —
+    // presumably kept for error positioning; confirm it is used elsewhere.
+    private final SqlNode sqlOperand;
+    private final SqlValidatorTable table;
+
+    public TableOperand(SqlNode sqlOperand, SqlValidatorTable table)
+    {
+      this.sqlOperand = sqlOperand;
+      this.table = table;
+    }
+
+    public RelOptTable getRelOptTable()
+    {
+      return table.unwrapOrThrow(RelOptTable.class);
+    }
+
+    // Throws if the resolved table is not a Druid datasource table.
+    public DatasourceTable getDataSourceTable()
+    {
+      return table.unwrapOrThrow(DatasourceTable.class);
+    }
+
+    public RowSignature getRowSignature()
+    {
+      return getDataSourceTable().getRowSignature();
+    }
+
+    public DataSource getDataSource()
+    {
+      return getDataSourceTable().getDataSource();
+    }
+  }
+
+  private List<TableOperand> getTables(SqlCallBinding callBinding, 
SqlValidatorCatalogReader catalogReader)
+  {
+    List<TableOperand> tables = new ArrayList<>();
+    for (int i = 0; i < callBinding.getOperandCount(); i++) {
+      if (!callBinding.isOperandLiteral(i, false)) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            "All arguments of call to macro "
+                + "APPEND should be literal. Actual argument #"
+                + i + " is not literal",
+                callBinding.operand(i)
+        );
+      }
+      @Nullable
+      String tableName = callBinding.getOperandLiteralValue(i, String.class);
+      ImmutableList<String> tableNameList = 
ImmutableList.<String>builder().add(tableName).build();
+      SqlValidatorTable table = catalogReader.getTable(tableNameList);
+      if (table == null) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            StringUtils.format("Table [%s] not found", tableName),
+            callBinding.operand(i)
+        );
+      }
+      tables.add(new TableOperand(callBinding.operand(i), table));
+    }
+    return tables;
+  }
+
+  /**
+   * Metadata for the synthetic table produced by APPEND: a merged row
+   * signature plus a {@link UnionDataSource} over the operand datasources.
+   */
+  static class AppendDatasourceMetadata implements DatasourceMetadata
+  {
+    // Merged row signature of all appended tables.
+    private final RowSignature values;
+    // Union of the operand tables' datasources.
+    private final DataSource dataSource;
+
+    public AppendDatasourceMetadata(RowSignature values, List<DataSource> 
dataSources)
+    {
+      this.values = values;
+      this.dataSource = new UnionDataSource(dataSources);
+    }
+
+    // A union view is never joinable.
+    @Override
+    public boolean isJoinable()
+    {
+      return false;
+    }
+
+    // A union view is never a broadcast datasource.
+    @Override
+    public boolean isBroadcast()
+    {
+      return false;
+    }
+
+    @Override
+    public DataSource dataSource()
+    {
+      return dataSource;
+    }
+  }
+
+  private AppendDatasourceMetadata buildUnionDataSource(List<TableOperand> 
tables)
+  {
+    List<DataSource> dataSources = new ArrayList<>();
+    Map<String, ColumnType> fields = new LinkedHashMap<>();
+    Builder rowSignatureBuilder = RowSignature.builder();
+    for (TableOperand table : tables) {
+      RowSignature rowSignature = table.getRowSignature();
+      for (String columnName : rowSignature.getColumnNames()) {
+        ColumnType currentType = rowSignature.getColumnType(columnName).get();
+        ColumnType existingType = fields.get(columnName);
+
+        if (existingType == null || existingType.equals(currentType)) {
+          fields.put(columnName, currentType);
+        } else {
+          try {
+            ColumnType commonType = 
ColumnType.leastRestrictiveType(currentType, existingType);
+            fields.put(columnName, commonType);
+          }
+          catch (Exception e) {

Review Comment:
   don't think anything else might be coming out; changed it



##########
sql/src/test/java/org/apache/druid/sql/calcite/CalciteTableAppendTest.java:
##########
@@ -0,0 +1,214 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.druid.error.DruidException;
+import org.apache.druid.sql.calcite.NotYetSupported.NotYetSupportedProcessor;
+import org.hamcrest.MatcherAssert;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+
+public class CalciteTableAppendTest extends BaseCalciteQueryTest
+{
+  @Rule(order = 0)
+  public NotYetSupportedProcessor negativeTestProcessor = new 
NotYetSupportedProcessor();
+
+  // Baseline: plain UNION ALL between foo and numfoo, for comparison with the
+  // TABLE(APPEND(...)) tests below.
+  @Test
+  public void testUnion()
+  {
+    testBuilder()
+        .sql("select dim1,null as dim4 from foo union all select dim1,dim4 
from numfoo")
+        .expectedResults(
+            ImmutableList.of(
+                new Object[] {"", null},
+                new Object[] {"10.1", null},
+                new Object[] {"2", null},
+                new Object[] {"1", null},
+                new Object[] {"def", null},
+                new Object[] {"abc", null},
+                new Object[] {"", "a"},
+                new Object[] {"10.1", "a"},
+                new Object[] {"2", "a"},
+                new Object[] {"1", "b"},
+                new Object[] {"def", "b"},
+                new Object[] {"abc", "b"}
+            )
+        )
+        .run();
+  }
+
+  // APPEND over two tables: columns missing from one side come back as null
+  // (RELAX_NULLS), and the logical plan scans a single synthetic APPEND table.
+  @Test
+  public void testAppend2()
+  {
+    testBuilder()
+        .sql("select dim1,dim4,d1,f1 from TABLE(APPEND('foo','numfoo')) u")
+        .expectedResults(
+            ResultMatchMode.RELAX_NULLS,
+            ImmutableList.of(
+                new Object[] {"", null, null, null},
+                new Object[] {"10.1", null, null, null},
+                new Object[] {"2", null, null, null},
+                new Object[] {"1", null, null, null},
+                new Object[] {"def", null, null, null},
+                new Object[] {"abc", null, null, null},
+                new Object[] {"", "a", 1.0D, 1.0F},
+                new Object[] {"10.1", "a", 1.7D, 0.1F},
+                new Object[] {"2", "a", 0.0D, 0.0F},
+                new Object[] {"1", "b", null, null},
+                new Object[] {"def", "b", null, null},
+                new Object[] {"abc", "b", null, null}
+            )
+        )
+        .expectedLogicalPlan(
+            ""
+                + "LogicalProject(exprs=[[$1, $8, $11, $13]])\n"
+                + "  LogicalTableScan(table=[[APPEND]])\n"
+        )
+        .run();
+  }
+
+  // The same table may appear more than once in APPEND; foo contributes the
+  // matching row twice and numfoo once.
+  @Test
+  public void testAppendSameTableMultipleTimes()
+  {
+    testBuilder()
+        .sql("select dim1,dim4,d1,f1 from TABLE(APPEND('foo','numfoo','foo')) 
u where dim1='2'")
+        .expectedResults(
+            ResultMatchMode.RELAX_NULLS,
+            ImmutableList.of(
+                new Object[] {"2", null, null, null},
+                new Object[] {"2", null, null, null},
+                new Object[] {"2", "a", 0.0D, 0.0F}
+            )
+        )
+        .run();
+  }
+
+  // APPEND with a single table is valid and behaves like a plain scan.
+  // NOTE(review): method name has a typo ("Appendt") — consider renaming to
+  // testAppendSingleTableIsValid.
+  @Test
+  public void testAppendtSingleTableIsValid()
+  {
+    testBuilder()
+        .sql("select dim1 from TABLE(APPEND('foo')) u")
+        .expectedResults(
+            ImmutableList.of(
+                new Object[] {""},
+                new Object[] {"10.1"},
+                new Object[] {"2"},
+                new Object[] {"1"},
+                new Object[] {"def"},
+                new Object[] {"abc"}
+            )
+        )
+        .run();
+  }
+
+  @Test
+  public void testAppendtNoTableIsInvalid()
+  {
+    try {
+      testBuilder()
+          .sql("select dim1 from TABLE(APPEND()) u")
+          .run();
+      Assert.fail("query execution should fail");
+    }
+    catch (DruidException e) {
+      MatcherAssert.assertThat(
+          e,
+          invalidSqlIs("No match found for function signature APPEND() (line 
[1], column [24])")
+      );
+    }
+  }
+
+  // APPEND requires at least one table name; a zero-argument call must be
+  // rejected during validation.
+  // NOTE(review): exact duplicate of testAppendtNoTableIsInvalid — keep only
+  // one of the two.
+  @Test
+  public void testAppendNoTableIsInvalid()
+  {
+    try {
+      testBuilder()
+          .sql("select dim1 from TABLE(APPEND()) u")
+          .run();
+      Assert.fail("query execution should fail");
+    }
+    catch (DruidException e) {
+      MatcherAssert.assertThat(
+          e,
+          invalidSqlIs("No match found for function signature APPEND() (line 
[1], column [24])")
+      );
+    }
+  }
+
+  @Test
+  public void testAppendtSingleTableIsInvalidArg()
+  {
+    try {
+      testBuilder()
+          .sql("select dim1 from TABLE(APPEND('foo',111)) u")
+          .run();
+      Assert.fail("query execution should fail");
+    }
+    catch (DruidException e) {
+      MatcherAssert.assertThat(
+          e,
+          invalidSqlIs(
+              "All arguments to APPEND should be literal strings. Argument #1 
is not string (line [1], column [37])"

Review Comment:
   totally agree - changed it!



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables use of TABLE(APPEND('t1','t2')), which provides a union view of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  /**
+   * Registers the APPEND table macro as a SQL operator. A table macro is
+   * expanded during validation, so it has no row-level expression form.
+   */
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      // Reaching this point indicates a planner bug.
+      throw new IllegalStateException();
+    }
+  }
+
+  // Private: the singleton APPEND_TABLE_MACRO is the only instance.
+  private TableAppendMacro()
+  {
+    super(
+        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
+        SqlKind.OTHER_FUNCTION,
+        ReturnTypes.CURSOR,
+        null,
+        // Operand checking: one or more literal string table names.
+        new OperandMetadata(),
+        null
+    );
+  }
+
+  /**
+   * Operand checker for APPEND: accepts one or more operands, each of which
+   * must be a literal string naming a table.
+   */
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean 
throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        // Table names must be known at validation time, so only literals work.
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      // At least one table; no upper bound.
+      return SqlOperandCountRanges.from(1);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.<RelDataType>builder().add(t, t).build();
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      // NOTE(review): "tableName" is repeated — presumably a placeholder for
+      // the variadic signature; confirm named-argument calls behave sanely.
+      return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+    }
+  }
+
+  // Parameter names reported to the Calcite validator; every operand of
+  // APPEND is a table name.
+  @Override
+  public List<String> getParamNames()
+  {
+    return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+  }
+
+  // Expands the APPEND macro: resolves each literal operand to a datasource
+  // table and merges them into one union-view DatasourceTable.
+  @Override
+  public TranslatableTable getTable(SqlOperatorBinding operatorBinding)
+  {
+    // For a table macro invoked from SQL this is always a SqlCallBinding,
+    // which carries the validator (and through it the catalog reader).
+    SqlCallBinding callBinding = (SqlCallBinding) operatorBinding;
+    SqlValidator validator = callBinding.getValidator();
+
+    List<TableOperand> tables = getTables(callBinding, 
validator.getCatalogReader());
+    AppendDatasourceMetadata metadata = buildUnionDataSource(tables);
+    return new DatasourceTable(
+        metadata.values,
+        metadata,
+        EffectiveMetadata.of(metadata.values)
+    );
+  }
+
+  /**
+   * Pairs a resolved validator table with the SqlNode operand it came from,
+   * so later errors can be positioned at the original argument.
+   */
+  static class TableOperand
+  {
+    // NOTE(review): sqlOperand is stored but not read in the visible code —
+    // presumably kept for error positioning; confirm it is used elsewhere.
+    private final SqlNode sqlOperand;
+    private final SqlValidatorTable table;
+
+    public TableOperand(SqlNode sqlOperand, SqlValidatorTable table)
+    {
+      this.sqlOperand = sqlOperand;
+      this.table = table;
+    }
+
+    public RelOptTable getRelOptTable()
+    {
+      return table.unwrapOrThrow(RelOptTable.class);
+    }
+
+    // Throws if the resolved table is not a Druid datasource table.
+    public DatasourceTable getDataSourceTable()
+    {
+      return table.unwrapOrThrow(DatasourceTable.class);
+    }
+
+    public RowSignature getRowSignature()
+    {
+      return getDataSourceTable().getRowSignature();
+    }
+
+    public DataSource getDataSource()
+    {
+      return getDataSourceTable().getDataSource();
+    }
+  }
+
+  private List<TableOperand> getTables(SqlCallBinding callBinding, 
SqlValidatorCatalogReader catalogReader)
+  {
+    List<TableOperand> tables = new ArrayList<>();
+    for (int i = 0; i < callBinding.getOperandCount(); i++) {
+      if (!callBinding.isOperandLiteral(i, false)) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            "All arguments of call to macro "
+                + "APPEND should be literal. Actual argument #"
+                + i + " is not literal",
+                callBinding.operand(i)
+        );
+      }
+      @Nullable
+      String tableName = callBinding.getOperandLiteralValue(i, String.class);
+      ImmutableList<String> tableNameList = 
ImmutableList.<String>builder().add(tableName).build();
+      SqlValidatorTable table = catalogReader.getTable(tableNameList);

Review Comment:
   no - or not yet; `ProcedureNamespace` is deriving the type for the 
arguments; so can't use plain identifiers => 
   
   with string arguments I was not able to make it work
   
   I don't know if it's possible to do that right now — but it would be nice to 
have; most likely the direct identifier approach will be the best.
   Anyway — if it's a CTE, then there will be real subquery inputs, which 
wouldn't be easy to handle right now... or I just don't see a way to make that 
work



##########
sql/src/main/java/org/apache/druid/sql/calcite/external/TableAppendMacro.java:
##########
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.external;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperandCountRange;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlOperatorBinding;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.ReturnTypes;
+import org.apache.calcite.sql.type.SqlOperandCountRanges;
+import org.apache.calcite.sql.type.SqlOperandMetadata;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
+import org.apache.calcite.sql.validate.SqlValidatorTable;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.DataSource;
+import org.apache.druid.query.UnionDataSource;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.segment.column.RowSignature.Builder;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.sql.calcite.expression.AuthorizableOperator;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.DruidSqlValidator;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.table.DatasourceMetadata;
+import org.apache.druid.sql.calcite.table.DatasourceTable;
+import org.apache.druid.sql.calcite.table.DatasourceTable.EffectiveMetadata;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Dynamic table append.
+ *
+ * Enables to use: TABLE(APPEND('t1','t2')); which will provide a union view 
of the operand tables.
+ */
+public class TableAppendMacro extends SqlUserDefinedTableMacro implements 
AuthorizableOperator
+{
+
+  public static final OperatorConversion OPERATOR_CONVERSION = new 
OperatorConversion();
+  public static final SqlOperator APPEND_TABLE_MACRO = new TableAppendMacro();
+
+  private static class OperatorConversion implements SqlOperatorConversion
+  {
+    public static final String FUNCTION_NAME = "APPEND";
+
+    public OperatorConversion()
+    {
+    }
+
+    @Override
+    public SqlOperator calciteOperator()
+    {
+      return APPEND_TABLE_MACRO;
+    }
+
+    @Override
+    public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+    {
+      throw new IllegalStateException();
+    }
+  }
+
  /**
   * Private: the single instance is exposed as {@link #APPEND_TABLE_MACRO}.
   *
   * Declares a CURSOR-returning function named APPEND whose operands are
   * validated by {@link OperandMetadata}.
   */
  private TableAppendMacro()
  {
    super(
        new SqlIdentifier(OperatorConversion.FUNCTION_NAME, SqlParserPos.ZERO),
        SqlKind.OTHER_FUNCTION,
        ReturnTypes.CURSOR,
        null, // no operand type inference — presumably unnecessary for literal-only operands; confirm
        new OperandMetadata(),
        null // no TableMacro delegate; getTable() is overridden instead
    );
  }
+
+  private static class OperandMetadata implements SqlOperandMetadata
+  {
+    @Override
+    public boolean checkOperandTypes(SqlCallBinding callBinding, boolean 
throwOnFailure)
+    {
+      for (int i = 0; i < callBinding.getOperandCount(); i++) {
+        SqlNode operand = callBinding.operand(i);
+        if (!callBinding.isOperandLiteral(i, false)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not literal",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+
+        SqlTypeName typeName = callBinding.getOperandType(i).getSqlTypeName();
+        if (!SqlTypeFamily.CHARACTER.getTypeNames().contains(typeName)) {
+          if (throwOnFailure) {
+            throw DruidSqlValidator.buildCalciteContextException(
+                "All arguments to APPEND should be literal strings. "
+                    + "Argument #" + i + " is not string",
+                operand
+            );
+          } else {
+            return false;
+          }
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public SqlOperandCountRange getOperandCountRange()
+    {
+      return SqlOperandCountRanges.from(1);
+    }
+
+    @Override
+    public String getAllowedSignatures(SqlOperator op, String opName)
+    {
+      return "APPEND( <TABLE_NAME>[, <TABLE_NAME> ...] )";
+    }
+
+    @Override
+    public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory)
+    {
+      RelDataType t = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+      return ImmutableList.<RelDataType>builder().add(t, t).build();
+    }
+
+    @Override
+    public List<String> paramNames()
+    {
+      return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+    }
+  }
+
+  @Override
+  public List<String> getParamNames()
+  {
+    return ImmutableList.<String>builder().add("tableName", 
"tableName").build();
+  }
+
+  @Override
+  public TranslatableTable getTable(SqlOperatorBinding operatorBinding)
+  {
+    SqlCallBinding callBinding = (SqlCallBinding) operatorBinding;
+    SqlValidator validator = callBinding.getValidator();
+
+    List<TableOperand> tables = getTables(callBinding, 
validator.getCatalogReader());
+    AppendDatasourceMetadata metadata = buildUnionDataSource(tables);
+    return new DatasourceTable(
+        metadata.values,
+        metadata,
+        EffectiveMetadata.of(metadata.values)
+    );
+  }
+
  /**
   * Pairs one APPEND argument with the validator table it resolved to, and
   * gives typed access to the Druid-level views of that table.
   *
   * NOTE(review): {@code sqlOperand} is never read inside this class;
   * presumably the enclosing class reads it directly for error positions —
   * confirm, or drop the field.
   */
  static class TableOperand
  {
    private final SqlNode sqlOperand;
    private final SqlValidatorTable table;

    public TableOperand(SqlNode sqlOperand, SqlValidatorTable table)
    {
      this.sqlOperand = sqlOperand;
      this.table = table;
    }

    /** Planner-level table; throws if the wrapper is not available. */
    public RelOptTable getRelOptTable()
    {
      return table.unwrapOrThrow(RelOptTable.class);
    }

    /** Druid datasource table; throws if the operand is not a datasource. */
    public DatasourceTable getDataSourceTable()
    {
      return table.unwrapOrThrow(DatasourceTable.class);
    }

    /** Row signature of the underlying datasource. */
    public RowSignature getRowSignature()
    {
      return getDataSourceTable().getRowSignature();
    }

    /** Native {@link DataSource} of the underlying datasource table. */
    public DataSource getDataSource()
    {
      return getDataSourceTable().getDataSource();
    }
  }
+
+  private List<TableOperand> getTables(SqlCallBinding callBinding, 
SqlValidatorCatalogReader catalogReader)
+  {
+    List<TableOperand> tables = new ArrayList<>();
+    for (int i = 0; i < callBinding.getOperandCount(); i++) {
+      if (!callBinding.isOperandLiteral(i, false)) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            "All arguments of call to macro "
+                + "APPEND should be literal. Actual argument #"
+                + i + " is not literal",
+                callBinding.operand(i)
+        );
+      }
+      @Nullable
+      String tableName = callBinding.getOperandLiteralValue(i, String.class);
+      ImmutableList<String> tableNameList = 
ImmutableList.<String>builder().add(tableName).build();
+      SqlValidatorTable table = catalogReader.getTable(tableNameList);
+      if (table == null) {
+        throw DruidSqlValidator.buildCalciteContextException(
+            StringUtils.format("Table [%s] not found", tableName),
+            callBinding.operand(i)
+        );
+      }
+      tables.add(new TableOperand(callBinding.operand(i), table));
+    }
+    return tables;
+  }
+
+  static class AppendDatasourceMetadata implements DatasourceMetadata
+  {
+    private final RowSignature values;
+    private final DataSource dataSource;
+
+    public AppendDatasourceMetadata(RowSignature values, List<DataSource> 
dataSources)
+    {
+      this.values = values;
+      this.dataSource = new UnionDataSource(dataSources);
+    }
+
+    @Override
+    public boolean isJoinable()
+    {
+      return false;
+    }
+
+    @Override
+    public boolean isBroadcast()
+    {
+      return false;
+    }
+
+    @Override
+    public DataSource dataSource()
+    {
+      return dataSource;
+    }
+  }
+
+  private AppendDatasourceMetadata buildUnionDataSource(List<TableOperand> 
tables)
+  {
+    List<DataSource> dataSources = new ArrayList<>();
+    Map<String, ColumnType> fields = new LinkedHashMap<>();
+    Builder rowSignatureBuilder = RowSignature.builder();

Review Comment:
   oh - right! I've extracted it into a method as well!



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]


Reply via email to