http://git-wip-us.apache.org/repos/asf/hive/blob/42335b46/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --cc ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 9754039,a5b69ec..df5c6aa
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@@ -6873,22 -6889,16 +6851,24 @@@ public class SemanticAnalyzer extends B
            acidOp = getAcidType(table_desc.getOutputFileFormatClass(), dest);
            checkAcidConstraints(qb, table_desc, dest_tab);
          }
 -        Long currentTransactionId = acidOp == Operation.NOT_ACID ? null :
 +        if (AcidUtils.isInsertOnlyTable(table_desc.getProperties())) {
 +          acidOp = getAcidType(table_desc.getOutputFileFormatClass(), dest);
 +        }
 +        if (isMmTable) {
 +          txnId = SessionState.get().getTxnMgr().getCurrentTxnId();
 +        } else {
 +          txnId = acidOp == Operation.NOT_ACID ? null :
              SessionState.get().getTxnMgr().getCurrentTxnId();
 -        ltd = new LoadTableDesc(queryTmpdir, table_desc, dpCtx, acidOp,
 -            currentTransactionId);
 +        }
 +        boolean isReplace = !qb.getParseInfo().isInsertIntoTable(
 +            dest_tab.getDbName(), dest_tab.getTableName());
 +        ltd = new LoadTableDesc(queryTmpdir, table_desc, dpCtx, acidOp, isReplace, txnId);
          // For Acid table, Insert Overwrite shouldn't replace the table content. We keep the old
          // deltas and base and leave them up to the cleaner to clean up
-         ltd.setReplace(!qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),
-             dest_tab.getTableName()) && !destTableIsAcid);
+         LoadFileType loadType = (!qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),
+                 dest_tab.getTableName()) && !destTableIsAcid)
+                 ? LoadFileType.REPLACE_ALL : LoadFileType.KEEP_EXISTING;
+         ltd.setLoadFileType(loadType);
          ltd.setLbCtx(lbCtx);
          loadTableWork.add(ltd);
        } else {
@@@ -6947,20 -7009,16 +6927,22 @@@
          acidOp = getAcidType(table_desc.getOutputFileFormatClass(), dest);
          checkAcidConstraints(qb, table_desc, dest_tab);
        }
 -      Long currentTransactionId = (acidOp == Operation.NOT_ACID) ? null :
 +      if (AcidUtils.isInsertOnlyTable(dest_part.getTable().getParameters())) {
 +        acidOp = getAcidType(table_desc.getOutputFileFormatClass(), dest);
 +      }
 +      if (isMmTable) {
 +        txnId = SessionState.get().getTxnMgr().getCurrentTxnId();
 +      } else {
 +        txnId = (acidOp == Operation.NOT_ACID) ? null :
            SessionState.get().getTxnMgr().getCurrentTxnId();
 -      ltd = new LoadTableDesc(queryTmpdir, table_desc, dest_part.getSpec(), acidOp,
 -          currentTransactionId);
 +      }
 +      ltd = new LoadTableDesc(queryTmpdir, table_desc, dest_part.getSpec(), acidOp, txnId);
        // For Acid table, Insert Overwrite shouldn't replace the table content. We keep the old
        // deltas and base and leave them up to the cleaner to clean up
-       ltd.setReplace(!qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),
-           dest_tab.getTableName()) && !destTableIsAcid);
+       LoadFileType loadType = (!qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),
+               dest_tab.getTableName()) && !destTableIsAcid)
+               ? LoadFileType.REPLACE_ALL : LoadFileType.KEEP_EXISTING;
+       ltd.setLoadFileType(loadType);
        ltd.setLbCtx(lbCtx);
  
        loadTableWork.add(ltd);
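
For readers skimming the merge, a minimal, self-contained sketch (plain Java;
the helper names are invented for illustration, while the enum constants and
the conditions come from the two hunks above) of the logic both
SemanticAnalyzer changes introduce: micromanaged (insert-only) tables always
record the current transaction id, and only an INSERT OVERWRITE into a
non-ACID destination replaces existing files.

    enum LoadFileType { REPLACE_ALL, KEEP_EXISTING, OVERWRITE_EXISTING }

    class LoadTypeSelection {
      // Mirrors the txnId assignment above: MM (insert-only) tables always
      // carry the current transaction id; otherwise only ACID writes do.
      static Long chooseTxnId(boolean isMmTable, boolean isAcidOp, long currentTxnId) {
        if (isMmTable) {
          return currentTxnId;
        }
        return isAcidOp ? currentTxnId : null;
      }

      // Mirrors the loadType ternary above: ACID destinations keep the old
      // deltas/base (the cleaner removes them later), so only a non-ACID
      // INSERT OVERWRITE maps to REPLACE_ALL.
      static LoadFileType chooseLoadType(boolean isInsertInto, boolean destTableIsAcid) {
        return (!isInsertInto && !destTableIsAcid)
            ? LoadFileType.REPLACE_ALL : LoadFileType.KEEP_EXISTING;
      }
    }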

http://git-wip-us.apache.org/repos/asf/hive/blob/42335b46/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
----------------------------------------------------------------------
diff --cc ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
index 1c220b8,7ff321f..6f21cae
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
@@@ -30,10 -29,7 +29,8 @@@ import org.apache.hadoop.hive.conf.Hive
  import org.apache.hadoop.hive.ql.exec.TableScanOperator;
  import org.apache.hadoop.hive.ql.exec.Task;
  import org.apache.hadoop.hive.ql.exec.TaskFactory;
 +import org.apache.hadoop.hive.ql.exec.Utilities;
  import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
- import org.apache.hadoop.hive.ql.io.rcfile.stats.PartialScanWork;
  import org.apache.hadoop.hive.ql.lib.Node;
  import org.apache.hadoop.hive.ql.lib.NodeProcessor;
  import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
@@@ -170,44 -160,5 +161,4 @@@ public class SparkProcessAnalyzeTable i
  
      return null;
    }
- 
-   /**
-    * handle partial scan command.
-    *
-    * It is composed of PartialScanTask followed by StatsTask.
-    */
-   private void handlePartialScanCommand(TableScanOperator tableScan, ParseContext parseContext,
-       StatsWork statsWork, GenSparkProcContext context, Task<StatsWork> statsTask)
-               throws SemanticException {
-     String aggregationKey = tableScan.getConf().getStatsAggPrefix();
-     StringBuilder aggregationKeyBuffer = new StringBuilder(aggregationKey);
-     List<Path> inputPaths = GenMapRedUtils.getInputPathsForPartialScan(tableScan, aggregationKeyBuffer);
-     aggregationKey = aggregationKeyBuffer.toString();
- 
-     // scan work
-     PartialScanWork scanWork = new PartialScanWork(inputPaths,
-         Utilities.getTableDesc(tableScan.getConf().getTableMetadata()));
-     scanWork.setMapperCannotSpanPartns(true);
-     scanWork.setAggKey(aggregationKey);
-     scanWork.setStatsTmpDir(tableScan.getConf().getTmpStatsDir(), parseContext.getConf());
- 
-     // stats work
-     statsWork.setPartialScanAnalyzeCommand(true);
- 
-     // partial scan task
-     DriverContext driverCxt = new DriverContext();
- 
-     @SuppressWarnings("unchecked")
-     Task<PartialScanWork> partialScanTask = TaskFactory.get(scanWork, parseContext.getConf());
-     partialScanTask.initialize(parseContext.getQueryState(), null, driverCxt,
-         tableScan.getCompilationOpContext());
-     partialScanTask.setWork(scanWork);
-     statsWork.setSourceTask(partialScanTask);
- 
-     // task dependency
-     context.rootTasks.remove(context.currentTask);
-     context.rootTasks.add(partialScanTask);
-     partialScanTask.addDependentTask(statsTask);
-   }
--
  }

http://git-wip-us.apache.org/repos/asf/hive/blob/42335b46/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
----------------------------------------------------------------------
diff --cc ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
index f246115,e15f59c..1fa7b40
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
@@@ -61,14 -66,11 +66,14 @@@ public class LoadTableDesc extends Load
    public LoadTableDesc(final Path sourcePath,
        final TableDesc table,
        final Map<String, String> partitionSpec,
-       final boolean replace,
+       final LoadFileType loadFileType,
        final AcidUtils.Operation writeType, Long currentTransactionId) {
      super(sourcePath, writeType);
 -    this.currentTransactionId = currentTransactionId;
 -    init(table, partitionSpec, loadFileType);
 +    if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
 +      Utilities.FILE_OP_LOGGER.trace("creating part LTD from " + sourcePath + 
" to "
 +        + ((table.getProperties() == null) ? "null" : table.getTableName()));
 +    }
-     init(table, partitionSpec, replace, currentTransactionId);
++    init(table, partitionSpec, loadFileType, currentTransactionId);
    }
  
    /**
@@@ -76,14 -78,14 +81,14 @@@
     * @param sourcePath
     * @param table
     * @param partitionSpec
-    * @param replace
+    * @param loadFileType
     */
    public LoadTableDesc(final Path sourcePath,
 -      final TableDesc table,
 -      final Map<String, String> partitionSpec,
 -      final LoadFileType loadFileType) {
 -    this(sourcePath, table, partitionSpec, loadFileType, AcidUtils.Operation.NOT_ACID,
 -        null);
 +                       final TableDesc table,
 +                       final Map<String, String> partitionSpec,
-                        final boolean replace,
++                       final LoadFileType loadFileType,
 +                       final Long txnId) {
-     this(sourcePath, table, partitionSpec, replace, AcidUtils.Operation.NOT_ACID, txnId);
++    this(sourcePath, table, partitionSpec, loadFileType, AcidUtils.Operation.NOT_ACID, txnId);
    }
  
    public LoadTableDesc(final Path sourcePath,
@@@ -100,37 -103,33 +106,39 @@@
     * @param partitionSpec
     */
    public LoadTableDesc(final Path sourcePath,
 -      final TableDesc table,
 -      final Map<String, String> partitionSpec) {
 +                       final org.apache.hadoop.hive.ql.plan.TableDesc table,
 +                       final Map<String, String> partitionSpec, Long txnId) {
-     this(sourcePath, table, partitionSpec, true, AcidUtils.Operation.NOT_ACID, txnId);
+     this(sourcePath, table, partitionSpec, LoadFileType.REPLACE_ALL,
 -            AcidUtils.Operation.NOT_ACID, null);
++      AcidUtils.Operation.NOT_ACID, txnId);
    }
  
    public LoadTableDesc(final Path sourcePath,
        final TableDesc table,
        final DynamicPartitionCtx dpCtx,
 -      final AcidUtils.Operation writeType, Long currentTransactionId) {
 +      final AcidUtils.Operation writeType,
 +      boolean isReplace, Long txnId) {
      super(sourcePath, writeType);
 +    if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
 +      Utilities.FILE_OP_LOGGER.trace("creating LTD from " + sourcePath + " to 
" + table.getTableName());
 +    }
      this.dpCtx = dpCtx;
 -    this.currentTransactionId = currentTransactionId;
++    LoadFileType lft = isReplace ?  LoadFileType.REPLACE_ALL :  LoadFileType.OVERWRITE_EXISTING;
      if (dpCtx != null && dpCtx.getPartSpec() != null && partitionSpec == null) {
-       init(table, dpCtx.getPartSpec(), isReplace, txnId);
 -      init(table, dpCtx.getPartSpec(), LoadFileType.REPLACE_ALL);
++      init(table, dpCtx.getPartSpec(), lft, txnId);
      } else {
-       init(table, new LinkedHashMap<>(), isReplace, txnId);
 -      init(table, new LinkedHashMap<>(), LoadFileType.REPLACE_ALL);
++      init(table, new LinkedHashMap<String, String>(), lft, txnId);
      }
    }
  
    private void init(
        final org.apache.hadoop.hive.ql.plan.TableDesc table,
        final Map<String, String> partitionSpec,
-       final boolean replace,
 -      final LoadFileType loadFileType) {
++      final LoadFileType loadFileType,
 +      Long txnId) {
      this.table = table;
      this.partitionSpec = partitionSpec;
-     this.replace = replace;
+     this.loadFileType = loadFileType;
 +    this.currentTransactionId = txnId;
    }
  
    @Explain(displayName = "table", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
@@@ -153,20 -152,15 +161,24 @@@
  
    @Explain(displayName = "replace")
    public boolean getReplace() {
-     return replace;
+     return (loadFileType == LoadFileType.REPLACE_ALL);
+   }
+ 
+   public LoadFileType getLoadFileType() {
+     return loadFileType;
    }
  
 +  @Explain(displayName = "micromanaged table")
 +  public Boolean isMmTableExplain() {
 +    return isMmTable() ? true : null;
 +  }
 +
 +  public boolean isMmTable() {
 +    return AcidUtils.isInsertOnlyTable(table.getProperties());
 +  }
 +
-   public void setReplace(boolean replace) {
-     this.replace = replace;
+   public void setLoadFileType(LoadFileType loadFileType) {
+     this.loadFileType = loadFileType;
    }
  
    public DynamicPartitionCtx getDPCtx() {
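
Taken together, the LoadTableDesc changes fold the old boolean replace flag
into the three-valued LoadFileType. A hedged sketch of the resulting mapping
(the constants and the getReplace() equivalence are from the diff; the helper
itself is illustrative):

    // Illustrative only: how the legacy booleans map onto LoadFileType.
    static LoadFileType fromLegacyFlags(boolean isReplace, boolean fromDpCtxCtor) {
      if (isReplace) {
        // getReplace() now reports true exactly when loadFileType == REPLACE_ALL
        return LoadFileType.REPLACE_ALL;
      }
      // The DynamicPartitionCtx constructor above picks OVERWRITE_EXISTING for
      // non-replace loads; other call sites pass KEEP_EXISTING explicitly.
      return fromDpCtxCtor ? LoadFileType.OVERWRITE_EXISTING : LoadFileType.KEEP_EXISTING;
    }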

http://git-wip-us.apache.org/repos/asf/hive/blob/42335b46/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/42335b46/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/42335b46/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
----------------------------------------------------------------------
diff --cc ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index 75ea16f,e523049..d5ab079
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@@ -140,7 -141,8 +141,8 @@@ public class TestExecDriver extends Tes
          db.dropTable(Warehouse.DEFAULT_DATABASE_NAME, src, true, true);
          db.createTable(src, cols, null, TextInputFormat.class,
              HiveIgnoreKeyTextOutputFormat.class);
-         db.loadTable(hadoopDataFile[i], src, false, true, false, false, false, null, 0, false);
+         db.loadTable(hadoopDataFile[i], src, LoadFileType.KEEP_EXISTING,
 -                true, false, false, false);
++           true, false, false, false, null, 0, false);
          i++;
        }
  
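The test change tracks the new Hive.loadTable signature from this merge: the
old boolean replace argument becomes a LoadFileType, and the call gains
trailing arguments (null, 0, false) whose names the diff does not show (given
the rest of the merge, they presumably carry transaction-related state). A
one-line sketch of the flag translation applied at this call site (helper
name invented):

    // Hypothetical adapter: the old 'false' replace flag becomes KEEP_EXISTING.
    static LoadFileType loadTypeFor(boolean replace) {
      return replace ? LoadFileType.REPLACE_ALL : LoadFileType.KEEP_EXISTING;
    }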

http://git-wip-us.apache.org/repos/asf/hive/blob/42335b46/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/42335b46/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/42335b46/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/42335b46/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/42335b46/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/42335b46/standalone-metastore/src/main/resources/package.jdo
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/42335b46/standalone-metastore/src/main/thrift/hive_metastore.thrift
----------------------------------------------------------------------
