HIVE-14671 : merge master into hive-14535 (Sergey Shelukhin)

Conflicts:
        ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ccea0d6f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ccea0d6f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ccea0d6f

Branch: refs/heads/hive-14535
Commit: ccea0d6ff7aaeaac494d7c7c680a3efad7805e3d
Parents: 8e6719d 2a8d1bf
Author: Sergey Shelukhin <[email protected]>
Authored: Wed Mar 15 16:45:48 2017 -0700
Committer: Sergey Shelukhin <[email protected]>
Committed: Wed Mar 15 16:45:48 2017 -0700

----------------------------------------------------------------------
 .../apache/hive/beeline/util/QFileClient.java   |  382 ---
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   12 +-
 .../hadoop/hive/druid/io/DruidOutputFormat.java |    6 +-
 .../e2e/templeton/drivers/TestDriverCurl.pm     |    2 +-
 .../test/resources/testconfiguration.properties |    4 +-
 .../hive/cli/control/CoreBeeLineDriver.java     |  145 +-
 .../hadoop/hive/ql/QTestProcessExecResult.java  |   70 +
 .../org/apache/hadoop/hive/ql/QTestUtil.java    |    6 +-
 .../org/apache/hive/beeline/qfile/QFile.java    |  273 ++
 .../hive/beeline/qfile/QFileBeeLineClient.java  |  149 +
 .../apache/hive/beeline/qfile/package-info.java |   22 +
 llap-server/pom.xml                             |    6 +
 .../llap/daemon/impl/ContainerRunnerImpl.java   |    2 +-
 .../hive/llap/daemon/impl/LlapDaemon.java       |   14 +
 .../daemon/impl/StatsRecordingThreadPool.java   |    8 +-
 .../llap/daemon/impl/TaskExecutorService.java   |  179 +-
 .../llap/daemon/impl/TaskRunnerCallable.java    |    2 +-
 .../llap/io/encoded/OrcEncodedDataReader.java   |    4 +-
 .../llap/io/encoded/SerDeEncodedDataReader.java |    3 +-
 .../hive/llap/io/encoded/TezCounterSource.java  |   24 +
 .../daemon/impl/TaskExecutorTestHelpers.java    |   39 +-
 .../daemon/impl/TestTaskExecutorService.java    |   98 +-
 .../llap/tezplugins/LlapTaskCommunicator.java   |   13 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |   27 +-
 .../calcite/reloperators/HiveFilter.java        |    6 +-
 .../calcite/rules/HiveSubQueryRemoveRule.java   |   36 +-
 .../hive/ql/parse/BaseSemanticAnalyzer.java     |   38 +-
 .../hadoop/hive/ql/parse/CalcitePlanner.java    |  200 +-
 .../hadoop/hive/ql/parse/FromClauseParser.g     |    4 +-
 .../apache/hadoop/hive/ql/parse/HiveParser.g    |    2 +-
 .../hadoop/hive/ql/parse/SubQueryUtils.java     |   47 +
 .../hadoop/hive/ql/plan/ExprNodeDescUtils.java  |    6 +-
 .../apache/hadoop/hive/ql/TestTxnCommands.java  |    7 +
 .../TestHostAffinitySplitLocationProvider.java  |    2 +-
 .../hive/ql/parse/TestMergeStatement.java       |    9 +-
 .../clientnegative/avro_add_column_extschema.q  |   18 +
 .../queries/clientnegative/subquery_in_select.q |    6 -
 .../clientnegative/subquery_select_aggregate.q  |    2 +
 .../subquery_select_complex_expr.q              |    3 +
 .../clientnegative/subquery_select_distinct.q   |    2 +
 .../clientnegative/subquery_select_distinct2.q  |    2 +
 .../clientnegative/subquery_select_udf.q        |    2 +
 .../clientpositive/avro_add_column_extschema.q  |   48 +
 .../clientpositive/default_file_format.q        |   35 +
 .../queries/clientpositive/subquery_select.q    |  133 +
 .../avro_add_column_extschema.q.out             |   43 +
 .../clientnegative/subquery_in_select.q.out     |    2 +-
 .../subquery_select_aggregate.q.out             |    1 +
 .../subquery_select_complex_expr.q.out          |    1 +
 .../subquery_select_distinct.q.out              |    1 +
 .../subquery_select_distinct2.q.out             |    1 +
 .../clientnegative/subquery_select_udf.q.out    |    1 +
 .../avro_add_column_extschema.q.out             |  161 +
 .../beeline/drop_with_concurrency.q.out         |   67 +
 .../clientpositive/default_file_format.q.out    |  347 +++
 .../clientpositive/llap/subquery_select.q.out   | 2942 ++++++++++++++++++
 .../hadoop/hive/serde2/avro/AvroSerdeUtils.java |   22 +
 57 files changed, 4979 insertions(+), 708 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/ccea0d6f/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/ccea0d6f/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/ccea0d6f/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --cc ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 39f2c53,f137819..a1a0862
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@@ -3659,8 -3645,12 +3660,13 @@@ public class DDLTask extends Task<DDLWo
      return false;
    }
  
++
 +   private static StorageDescriptor retrieveStorageDescriptor(Table tbl, Partition part) {
 +     return (part == null ? tbl.getTTable().getSd() : part.getTPartition().getSd());
 +   }
+ 
  -  private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Partition part)
  -      throws HiveException {
  +  private List<Task<?>> alterTableOrSinglePartition(
  +      AlterTableDesc alterTbl, Table tbl, Partition part) throws HiveException {
      EnvironmentContext environmentContext = alterTbl.getEnvironmentContext();
      if (environmentContext == null) {
        environmentContext = new EnvironmentContext();
@@@ -3808,14 -3801,34 +3817,14 @@@
        }
        sd.setCols(alterTbl.getNewCols());
      } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDPROPS) {
 -      if (StatsSetupConst.USER.equals(environmentContext.getProperties()
 -              .get(StatsSetupConst.STATS_GENERATED))) {
  -        environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS);
 -      }
 -      if (part != null) {
 -        part.getTPartition().getParameters().putAll(alterTbl.getProps());
 -      } else {
 -        tbl.getTTable().getParameters().putAll(alterTbl.getProps());
 -      }
 +      return alterTableAddProps(alterTbl, tbl, part, environmentContext);
      } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.DROPPROPS) {
 -      Iterator<String> keyItr = alterTbl.getProps().keySet().iterator();
 -      if (StatsSetupConst.USER.equals(environmentContext.getProperties()
 -          .get(StatsSetupConst.STATS_GENERATED))) {
  -        // drop a stats parameter, which triggers recompute stats update automatically
  -        environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS);
 -      }
 -      while (keyItr.hasNext()) {
 -        if (part != null) {
 -          part.getTPartition().getParameters().remove(keyItr.next());
 -        } else {
 -          tbl.getTTable().getParameters().remove(keyItr.next());
 -        }
 -      }
 +      return alterTableDropProps(alterTbl, tbl, part, environmentContext);
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDSERDEPROPS) {
-       StorageDescriptor sd = (part == null ? tbl.getTTable().getSd() : part.getTPartition().getSd());
+       StorageDescriptor sd = retrieveStorageDescriptor(tbl, part);
        sd.getSerdeInfo().getParameters().putAll(alterTbl.getProps());
      } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDSERDE) {
-       StorageDescriptor sd = (part == null ? tbl.getTTable().getSd() : part.getTPartition().getSd());
+       StorageDescriptor sd = retrieveStorageDescriptor(tbl, part);
        String serdeName = alterTbl.getSerdeName();
        String oldSerdeName = sd.getSerdeInfo().getSerializationLib();
       // if orc table, restrict changing the serde as it can break schema evolution

http://git-wip-us.apache.org/repos/asf/hive/blob/ccea0d6f/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
----------------------------------------------------------------------

Reply via email to