This is an automated email from the ASF dual-hosted git repository.

dlych pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 6903fa35e46e395cecac18a39fc62e0588d2294d
Author: Murtadha Hubail <[email protected]>
AuthorDate: Thu Dec 3 22:25:08 2020 +0300

    [NO ISSUE][MD] Ignore index not exists on compensating ops
    
    - user model changes: no
    - storage format changes: no
    - interface changes: no
    
    Details:
    
    - When executing dataset drop compensating operations, ignore
      failures if the dataset files were already deleted.
    
    Change-Id: Ia4b3ff6b3c3cd1a3327f193c7398f983bd891064
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/9144
    Integration-Tests: Jenkins <[email protected]>
    Tested-by: Jenkins <[email protected]>
    Reviewed-by: Murtadha Hubail <[email protected]>
    Reviewed-by: Till Westmann <[email protected]>
---
 .../apache/asterix/app/translator/QueryTranslator.java    | 15 ++++++++++++---
 .../org/apache/asterix/metadata/entities/Dataset.java     | 12 +++++++-----
 .../org/apache/asterix/metadata/utils/DatasetUtil.java    |  9 ++++++++-
 3 files changed, 27 insertions(+), 9 deletions(-)

diff --git 
a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
 
b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index 607e23a..45f0a82 100644
--- 
a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ 
b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -227,6 +227,7 @@ import org.apache.hyracks.api.result.IResultSet;
 import org.apache.hyracks.api.result.ResultSetId;
 import org.apache.hyracks.control.cc.ClusterControllerService;
 import org.apache.hyracks.control.common.controllers.CCConfig;
+import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor.DropOption;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
@@ -871,7 +872,8 @@ public class QueryTranslator extends AbstractLangTranslator 
implements IStatemen
                 bActiveTxn = true;
                 metadataProvider.setMetadataTxnContext(mdTxnCtx);
                 try {
-                    JobSpecification jobSpec = DatasetUtil.dropDatasetJobSpec(dataset, metadataProvider);
+                    JobSpecification jobSpec =
+                            DatasetUtil.dropDatasetJobSpec(dataset, metadataProvider, EnumSet.of(DropOption.IF_EXISTS));
                     MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                     bActiveTxn = false;
                     runJob(hcc, jobSpec);
@@ -1654,6 +1656,7 @@ public class QueryTranslator extends 
AbstractLangTranslator implements IStatemen
         MutableBoolean bActiveTxn = new MutableBoolean(true);
         metadataProvider.setMetadataTxnContext(mdTxnCtx.getValue());
         List<JobSpecification> jobsToExecute = new ArrayList<>();
+        Dataset ds = null;
         try {
             // Check if the dataverse exists
             Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx.getValue(), dataverseName);
@@ -1669,7 +1672,7 @@ public class QueryTranslator extends 
AbstractLangTranslator implements IStatemen
                     throw new 
CompilationException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, dataverseName);
                 }
             }
-            Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
+            ds = metadataProvider.findDataset(dataverseName, datasetName);
             if (ds == null) {
                 if (ifExists) {
                     
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
@@ -1682,11 +1685,12 @@ public class QueryTranslator extends 
AbstractLangTranslator implements IStatemen
             validateDatasetState(metadataProvider, ds, sourceLoc);
 
             ds.drop(metadataProvider, mdTxnCtx, jobsToExecute, bActiveTxn, progress, hcc, dropCorrespondingNodeGroup,
-                    sourceLoc);
+                    sourceLoc, Collections.emptySet());
 
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
             return true;
         } catch (Exception e) {
+            LOGGER.error("failed to drop dataset; executing compensating operations", e);
             if (bActiveTxn.booleanValue()) {
                 abort(e, e, mdTxnCtx.getValue());
             }
@@ -1695,6 +1699,11 @@ public class QueryTranslator extends 
AbstractLangTranslator implements IStatemen
                 // #. execute compensation operations
                 // remove the all indexes in NC
                 try {
+                    if (ds != null) {
+                        jobsToExecute.clear();
+                        ds.drop(metadataProvider, mdTxnCtx, jobsToExecute, bActiveTxn, progress, hcc,
+                                dropCorrespondingNodeGroup, sourceLoc, EnumSet.of(DropOption.IF_EXISTS));
+                    }
                     for (JobSpecification jobSpec : jobsToExecute) {
                         JobUtils.runJob(hcc, jobSpec, true);
                     }
diff --git 
a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
 
b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
index 2171974..4e556a2 100644
--- 
a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
+++ 
b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Set;
 import java.util.stream.IntStream;
 
 import org.apache.asterix.common.api.IDatasetInfoProvider;
@@ -104,6 +105,7 @@ import org.apache.hyracks.api.io.FileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
+import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor.DropOption;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import 
org.apache.hyracks.storage.am.lsm.common.api.IFrameOperationCallbackFactory;
@@ -344,8 +346,8 @@ public class Dataset implements IMetadataEntity<Dataset>, 
IDataset {
      */
     public void drop(MetadataProvider metadataProvider, MutableObject<MetadataTransactionContext> mdTxnCtx,
             List<JobSpecification> jobsToExecute, MutableBoolean bActiveTxn, MutableObject<ProgressState> progress,
-            IHyracksClientConnection hcc, boolean dropCorrespondingNodeGroup, SourceLocation sourceLoc)
-            throws Exception {
+            IHyracksClientConnection hcc, boolean dropCorrespondingNodeGroup, SourceLocation sourceLoc,
+            Set<DropOption> options) throws Exception {
         Map<FeedConnectionId, Pair<JobSpecification, Boolean>> disconnectJobList = new HashMap<>();
         if (getDatasetType() == DatasetType.INTERNAL) {
             // #. prepare jobs to drop the datatset and the indexes in NC
@@ -353,11 +355,11 @@ public class Dataset implements IMetadataEntity<Dataset>, 
IDataset {
                     
MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx.getValue(), dataverseName, 
datasetName);
             for (int j = 0; j < indexes.size(); j++) {
                 if (indexes.get(j).isSecondaryIndex()) {
-                    jobsToExecute
-                            .add(IndexUtil.buildDropIndexJobSpec(indexes.get(j), metadataProvider, this, sourceLoc));
+                    jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(j), metadataProvider, this, options,
+                            sourceLoc));
                 }
             }
-            jobsToExecute.add(DatasetUtil.dropDatasetJobSpec(this, metadataProvider));
+            jobsToExecute.add(DatasetUtil.dropDatasetJobSpec(this, metadataProvider, options));
             // #. mark the existing dataset as PendingDropOp
             MetadataManager.INSTANCE.dropDataset(mdTxnCtx.getValue(), 
dataverseName, datasetName);
             MetadataManager.INSTANCE.addDataset(mdTxnCtx.getValue(),
diff --git 
a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
 
b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
index bf3f8cf..52a133d 100644
--- 
a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
+++ 
b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
@@ -20,6 +20,7 @@ package org.apache.asterix.metadata.utils;
 
 import java.io.DataOutput;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -289,6 +290,11 @@ public class DatasetUtil {
 
     public static JobSpecification dropDatasetJobSpec(Dataset dataset, MetadataProvider metadataProvider)
             throws AlgebricksException, ACIDException {
+        return dropDatasetJobSpec(dataset, metadataProvider, Collections.emptySet());
+    }
+
+    public static JobSpecification dropDatasetJobSpec(Dataset dataset, MetadataProvider metadataProvider,
+            Set<IndexDropOperatorDescriptor.DropOption> options) throws AlgebricksException, ACIDException {
         LOGGER.info("DROP DATASET: " + dataset);
         if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
             return 
RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
@@ -298,7 +304,8 @@ public class DatasetUtil {
                 metadataProvider.getSplitProviderAndConstraints(dataset);
         IIndexDataflowHelperFactory indexHelperFactory = new 
IndexDataflowHelperFactory(
                 
metadataProvider.getStorageComponentProvider().getStorageManager(), 
splitsAndConstraint.first);
-        IndexDropOperatorDescriptor primaryBtreeDrop = new IndexDropOperatorDescriptor(specPrimary, indexHelperFactory);
+        IndexDropOperatorDescriptor primaryBtreeDrop =
+                new IndexDropOperatorDescriptor(specPrimary, indexHelperFactory, options);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(specPrimary, primaryBtreeDrop,
                 splitsAndConstraint.second);
         specPrimary.addRoot(primaryBtreeDrop);

Reply via email to