This is an automated email from the ASF dual-hosted git repository.

dlych pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 41cbadfe5c7b084dff8a13d42fd1e4026b671874
Merge: 63e2a43 6904d36
Author: Dmitry Lychagin <[email protected]>
AuthorDate: Fri May 15 17:48:42 2020 -0700

    Merge commit '6904d36' from 'gerrit/mad-hatter'
    
    Change-Id: Ib118bb2d1b18a93412015b1b652684c3fbb11e52

 .../asterix/app/translator/QueryTranslator.java    | 42 ++++++++++++----------
 .../asterix/common/metadata/IMetadataLockUtil.java |  9 ++---
 .../asterix/metadata/utils/MetadataLockUtil.java   | 30 ++++++++++------
 3 files changed, 49 insertions(+), 32 deletions(-)

diff --cc 
asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index f2302f9,c2d50b3..95e67be
--- 
a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ 
b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@@ -582,74 -554,68 +582,87 @@@ public class QueryTranslator extends Ab
      }
  
      public void handleCreateDatasetStatement(MetadataProvider 
metadataProvider, Statement stmt,
 -            IHyracksClientConnection hcc, IRequestParameters 
requestParameters) throws CompilationException, Exception {
 -        MutableObject<ProgressState> progress = new 
MutableObject<>(ProgressState.NO_PROGRESS);
 +            IHyracksClientConnection hcc, IRequestParameters 
requestParameters) throws Exception {
          DatasetDecl dd = (DatasetDecl) stmt;
 -        SourceLocation sourceLoc = dd.getSourceLocation();
 -        String dataverseName = getActiveDataverse(dd.getDataverse());
 +        DataverseName dataverseName = 
getActiveDataverseName(dd.getDataverse());
          String datasetName = dd.getName().getValue();
 -        DatasetType dsType = dd.getDatasetType();
          TypeExpression itemTypeExpr = dd.getItemType();
-         DataverseName itemTypeDataverseName = null;
-         String itemTypeName = null;
 -        String itemTypeDataverseName, itemTypeName;
++        DataverseName itemTypeDataverseName;
++        String itemTypeName;
+         boolean itemTypeAnonymous;
          switch (itemTypeExpr.getTypeKind()) {
              case TYPEREFERENCE:
                  TypeReferenceExpression itemTypeRefExpr = 
(TypeReferenceExpression) itemTypeExpr;
 -                Identifier itemTypeDataverseIdent = 
itemTypeRefExpr.getIdent().first;
 -                itemTypeDataverseName = itemTypeDataverseIdent != null && 
itemTypeDataverseIdent.getValue() != null
 -                        ? itemTypeDataverseIdent.getValue() : dataverseName;
 +                Pair<DataverseName, Identifier> itemTypeIdent = 
itemTypeRefExpr.getIdent();
 +                itemTypeDataverseName = itemTypeIdent.first != null ? 
itemTypeIdent.first : dataverseName;
                  itemTypeName = itemTypeRefExpr.getIdent().second.getValue();
+                 itemTypeAnonymous = false;
                  break;
              case RECORD:
+                 itemTypeDataverseName = dataverseName;
+                 itemTypeName = DatasetUtil.createInlineTypeName(datasetName, 
false);
+                 itemTypeAnonymous = true;
                  break;
              default:
 -                throw new 
CompilationException(ErrorCode.COMPILATION_ILLEGAL_STATE, sourceLoc,
 +                throw new 
CompilationException(ErrorCode.COMPILATION_ILLEGAL_STATE, 
stmt.getSourceLocation(),
                          String.valueOf(itemTypeExpr.getTypeKind()));
          }
 -        String itemTypeFullyQualifiedName = itemTypeDataverseName + '.' + 
itemTypeName;
  
          TypeExpression metaItemTypeExpr = dd.getMetaItemType();
 -        String metaItemTypeDataverseName = null, metaItemTypeName = null, 
metaItemTypeFullyQualifiedName;
 +        DataverseName metaItemTypeDataverseName = null;
 +        String metaItemTypeName = null;
+         boolean metaItemTypeAnonymous;
          if (metaItemTypeExpr != null) {
              switch (metaItemTypeExpr.getTypeKind()) {
                  case TYPEREFERENCE:
                      TypeReferenceExpression metaItemTypeRefExpr = 
(TypeReferenceExpression) metaItemTypeExpr;
 -                    Identifier metaItemTypeDataverseIdent = 
metaItemTypeRefExpr.getIdent().first;
 +                    Pair<DataverseName, Identifier> metaItemTypeIdent = 
metaItemTypeRefExpr.getIdent();
                      metaItemTypeDataverseName =
 -                            metaItemTypeDataverseIdent != null && 
metaItemTypeDataverseIdent.getValue() != null
 -                                    ? metaItemTypeDataverseIdent.getValue() : 
dataverseName;
 +                            metaItemTypeIdent.first != null ? 
metaItemTypeIdent.first : dataverseName;
                      metaItemTypeName = 
metaItemTypeRefExpr.getIdent().second.getValue();
+                     metaItemTypeAnonymous = false;
                      break;
                  case RECORD:
+                     metaItemTypeDataverseName = dataverseName;
+                     metaItemTypeName = 
DatasetUtil.createInlineTypeName(datasetName, true);
+                     metaItemTypeAnonymous = true;
                      break;
                  default:
 -                    throw new 
CompilationException(ErrorCode.COMPILATION_ILLEGAL_STATE, sourceLoc,
 +                    throw new 
CompilationException(ErrorCode.COMPILATION_ILLEGAL_STATE, 
stmt.getSourceLocation(),
                              String.valueOf(metaItemTypeExpr.getTypeKind()));
              }
 -            metaItemTypeFullyQualifiedName = metaItemTypeDataverseName + '.' 
+ metaItemTypeName;
+         } else {
 -            metaItemTypeFullyQualifiedName = null;
+             metaItemTypeAnonymous = true; // doesn't matter
          }
  
          Identifier ngNameId = dd.getNodegroupName();
          String nodegroupName = ngNameId == null ? null : ngNameId.getValue();
          String compactionPolicy = dd.getCompactionPolicy();
 +        boolean defaultCompactionPolicy = compactionPolicy == null;
 +
 +        lockUtil.createDatasetBegin(lockManager, metadataProvider.getLocks(), 
dataverseName, datasetName,
-                 itemTypeDataverseName, itemTypeName, 
metaItemTypeDataverseName, metaItemTypeName, nodegroupName,
-                 compactionPolicy, defaultCompactionPolicy, 
dd.getDatasetDetailsDecl());
++                itemTypeDataverseName, itemTypeName, itemTypeAnonymous, 
metaItemTypeDataverseName, metaItemTypeName,
++                metaItemTypeAnonymous, nodegroupName, compactionPolicy, 
defaultCompactionPolicy, dd.getDatasetType(),
++                dd.getDatasetDetailsDecl());
 +        try {
 +            doCreateDatasetStatement(metadataProvider, dd, dataverseName, 
datasetName, itemTypeDataverseName,
 +                    itemTypeExpr, itemTypeName, metaItemTypeExpr, 
metaItemTypeDataverseName, metaItemTypeName, hcc,
 +                    requestParameters);
 +        } finally {
 +            metadataProvider.getLocks().unlock();
 +        }
 +    }
 +
 +    protected void doCreateDatasetStatement(MetadataProvider 
metadataProvider, DatasetDecl dd,
 +            DataverseName dataverseName, String datasetName, DataverseName 
itemTypeDataverseName,
 +            TypeExpression itemTypeExpr, String itemTypeName, TypeExpression 
metaItemTypeExpr,
 +            DataverseName metaItemTypeDataverseName, String metaItemTypeName, 
IHyracksClientConnection hcc,
 +            IRequestParameters requestParameters) throws Exception {
 +        MutableObject<ProgressState> progress = new 
MutableObject<>(ProgressState.NO_PROGRESS);
 +        SourceLocation sourceLoc = dd.getSourceLocation();
 +        DatasetType dsType = dd.getDatasetType();
 +        Identifier ngNameId = dd.getNodegroupName();
 +        String compactionPolicy = dd.getCompactionPolicy();
          Map<String, String> compactionPolicyProperties = 
dd.getCompactionPolicyProperties();
          String compressionScheme = metadataProvider.getCompressionManager()
                  
.getDdlOrDefaultCompressionScheme(dd.getDatasetCompressionScheme());
@@@ -674,8 -645,7 +687,8 @@@
                      Datatype itemTypeEntity = 
metadataProvider.findTypeEntity(itemTypeDataverseName, itemTypeName);
                      if (itemTypeEntity == null || 
itemTypeEntity.getIsAnonymous()) {
                          // anonymous types cannot be referred from CREATE 
DATASET
 -                        throw new AsterixException(ErrorCode.UNKNOWN_TYPE, 
sourceLoc, itemTypeFullyQualifiedName);
 +                        throw new AsterixException(ErrorCode.UNKNOWN_TYPE, 
sourceLoc,
-                                 itemTypeDataverseName + "." + itemTypeName);
++                                
DatasetUtil.getFullyQualifiedDisplayName(itemTypeDataverseName, itemTypeName));
                      }
                      itemType = itemTypeEntity.getDatatype();
                      validateDatasetItemType(dsType, itemType, false, 
sourceLoc);
@@@ -714,8 -680,8 +723,8 @@@
                                          
metadataProvider.findTypeEntity(metaItemTypeDataverseName, metaItemTypeName);
                                  if (metaItemTypeEntity == null || 
metaItemTypeEntity.getIsAnonymous()) {
                                      // anonymous types cannot be referred 
from CREATE DATASET
--                                    throw new 
AsterixException(ErrorCode.UNKNOWN_TYPE, sourceLoc,
-                                             metaItemTypeDataverseName + "." + 
metaItemTypeName);
 -                                            metaItemTypeFullyQualifiedName);
++                                    throw new 
AsterixException(ErrorCode.UNKNOWN_TYPE, sourceLoc, DatasetUtil
++                                            
.getFullyQualifiedDisplayName(metaItemTypeDataverseName, metaItemTypeName));
                                  }
                                  metaItemType = 
metaItemTypeEntity.getDatatype();
                                  validateDatasetItemType(dsType, metaItemType, 
true, sourceLoc);
@@@ -864,8 -840,8 +870,8 @@@
          }
      }
  
-     protected Map<String, String> createExternalDatasetProperties(DatasetDecl 
dd, MetadataProvider metadataProvider,
-             MetadataTransactionContext mdTxnCtx) throws AlgebricksException {
 -    protected Map<String, String> createExternalDatasetProperties(String 
dataverseName, DatasetDecl dd,
++    protected Map<String, String> 
createExternalDatasetProperties(DataverseName dataverseName, DatasetDecl dd,
+             MetadataProvider metadataProvider, MetadataTransactionContext 
mdTxnCtx) throws AlgebricksException {
          ExternalDetailsDecl externalDetails = (ExternalDetailsDecl) 
dd.getDatasetDetailsDecl();
          return externalDetails.getProperties();
      }
diff --cc 
asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IMetadataLockUtil.java
index ea7941e,0000000..b788c1a
mode 100644,000000..100644
--- 
a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IMetadataLockUtil.java
+++ 
b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IMetadataLockUtil.java
@@@ -1,131 -1,0 +1,132 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.asterix.common.metadata;
 +
 +import org.apache.asterix.common.api.IMetadataLockManager;
++import org.apache.asterix.common.config.DatasetConfig;
 +import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 +
 +public interface IMetadataLockUtil {
 +
 +    // Dataverse helpers
 +
 +    void createDataverseBegin(IMetadataLockManager lockManager, LockList 
locks, DataverseName dataverseName)
 +            throws AlgebricksException;
 +
 +    void dropDataverseBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName)
 +            throws AlgebricksException;
 +
 +    // Dataset helpers
 +
 +    void createDatasetBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName,
-             String datasetName, DataverseName itemTypeDataverseName, String 
itemTypeName,
-             DataverseName metaItemTypeDataverseName, String metaItemTypeName, 
String nodeGroupName,
-             String compactionPolicyName, boolean isDefaultCompactionPolicy, 
Object datasetDetails)
-             throws AlgebricksException;
++            String datasetName, DataverseName itemTypeDataverseName, String 
itemTypeName, boolean itemTypeAnonymous,
++            DataverseName metaItemTypeDataverseName, String metaItemTypeName, 
boolean metaItemTypeAnonymous,
++            String nodeGroupName, String compactionPolicyName, boolean 
isDefaultCompactionPolicy,
++            DatasetConfig.DatasetType datasetType, Object datasetDetails) 
throws AlgebricksException;
 +
 +    void dropDatasetBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName,
 +            String datasetName) throws AlgebricksException;
 +
 +    void modifyDatasetBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName,
 +            String datasetName) throws AlgebricksException;
 +
 +    void refreshDatasetBegin(IMetadataLockManager lockManager, LockList 
locks, DataverseName dataverseName,
 +            String datasetName) throws AlgebricksException;
 +
 +    void compactBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName, String datasetName)
 +            throws AlgebricksException;
 +
 +    void insertDeleteUpsertBegin(IMetadataLockManager lockManager, LockList 
locks, DataverseName dataverseName,
 +            String datasetName) throws AlgebricksException;
 +
 +    // Index helpers
 +
 +    void createIndexBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName,
 +            String datasetName) throws AlgebricksException;
 +
 +    void dropIndexBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName,
 +            String datasetName) throws AlgebricksException;
 +
 +    // Type helpers
 +
 +    void createTypeBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName, String typeName)
 +            throws AlgebricksException;
 +
 +    void dropTypeBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName, String typeName)
 +            throws AlgebricksException;
 +
 +    // Library helpers
 +
 +    void createLibraryBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName,
 +            String libraryName) throws AlgebricksException;
 +
 +    void dropLibraryBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName,
 +            String libraryName) throws AlgebricksException;
 +
 +    // Function helpers
 +
 +    void createFunctionBegin(IMetadataLockManager lockManager, LockList 
locks, DataverseName dataverseName,
 +            String functionName, String libraryName) throws 
AlgebricksException;
 +
 +    void dropFunctionBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName,
 +            String functionName) throws AlgebricksException;
 +
 +    // Adapter helpers
 +
 +    void createAdapterBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName,
 +            String adapterName, String libraryName) throws 
AlgebricksException;
 +
 +    void dropAdapterBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName,
 +            String adapterName) throws AlgebricksException;
 +
 +    // Synonym helpers
 +
 +    void createSynonymBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName,
 +            String synonymName) throws AlgebricksException;
 +
 +    void dropSynonymBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName,
 +            String synonymName) throws AlgebricksException;
 +
 +    // Feed helpers
 +
 +    void createFeedPolicyBegin(IMetadataLockManager lockManager, LockList 
locks, DataverseName dataverseName,
 +            String policyName) throws AlgebricksException;
 +
 +    void dropFeedPolicyBegin(IMetadataLockManager lockManager, LockList 
locks, DataverseName dataverseName,
 +            String policyName) throws AlgebricksException;
 +
 +    void createFeedBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName, String feedName)
 +            throws AlgebricksException;
 +
 +    void dropFeedBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName, String feedName)
 +            throws AlgebricksException;
 +
 +    void startFeedBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName, String feedName)
 +            throws AlgebricksException;
 +
 +    void stopFeedBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName, String feedName)
 +            throws AlgebricksException;
 +
 +    void connectFeedBegin(IMetadataLockManager lockManager, LockList locks, 
DataverseName dataverseName,
 +            String datasetName, String feedName) throws AlgebricksException;
 +
 +    void disconnectFeedBegin(IMetadataLockManager lockManager, LockList 
locks, DataverseName dataverseName,
 +            String datasetName, String feedName) throws AlgebricksException;
 +}
diff --cc 
asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataLockUtil.java
index 1b88a06,037a560..eab69e0
--- 
a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataLockUtil.java
+++ 
b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataLockUtil.java
@@@ -19,55 -19,41 +19,65 @@@
  package org.apache.asterix.metadata.utils;
  
  import org.apache.asterix.common.api.IMetadataLockManager;
++import org.apache.asterix.common.config.DatasetConfig;
 +import org.apache.asterix.common.metadata.DataverseName;
 +import org.apache.asterix.common.metadata.IMetadataLockUtil;
  import org.apache.asterix.common.metadata.LockList;
  import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
  
 -public class MetadataLockUtil {
 +public class MetadataLockUtil implements IMetadataLockUtil {
  
 -    private MetadataLockUtil() {
 +    @Override
 +    public void createDataverseBegin(IMetadataLockManager lockMgr, LockList 
locks, DataverseName dataverseName)
 +            throws AlgebricksException {
 +        lockMgr.acquireDataverseReadLock(locks, dataverseName);
      }
  
 -    public static void createDatasetBeginPre(IMetadataLockManager lockMgr, 
LockList locks, String dataverseName,
 -            String itemTypeDataverseName, String itemTypeFullyQualifiedName, 
boolean itemTypeAnonymous,
 -            String metaItemTypeDataverseName, String 
metaItemTypeFullyQualifiedName, boolean metaItemTypeAnonymous,
 -            String nodeGroupName, String compactionPolicyName, boolean 
isDefaultCompactionPolicy)
 +    @Override
 +    public void dropDataverseBegin(IMetadataLockManager lockMgr, LockList 
locks, DataverseName dataverseName)
              throws AlgebricksException {
 +        lockMgr.acquireDataverseWriteLock(locks, dataverseName);
 +    }
 +
 +    @Override
 +    public void createDatasetBegin(IMetadataLockManager lockMgr, LockList 
locks, DataverseName dataverseName,
-             String datasetName, DataverseName itemTypeDataverseName, String 
itemTypeName,
-             DataverseName metaItemTypeDataverseName, String metaItemTypeName, 
String nodeGroupName,
-             String compactionPolicyName, boolean isDefaultCompactionPolicy, 
Object datasetDetails)
-             throws AlgebricksException {
-         createDatasetBeginPre(lockMgr, locks, dataverseName, 
itemTypeDataverseName, itemTypeName,
-                 metaItemTypeDataverseName, metaItemTypeName, nodeGroupName, 
compactionPolicyName,
++            String datasetName, DataverseName itemTypeDataverseName, String 
itemTypeName, boolean itemTypeAnonymous,
++            DataverseName metaItemTypeDataverseName, String metaItemTypeName, 
boolean metaItemTypeAnonymous,
++            String nodeGroupName, String compactionPolicyName, boolean 
isDefaultCompactionPolicy,
++            DatasetConfig.DatasetType datasetType, Object datasetDetails) 
throws AlgebricksException {
++        createDatasetBeginPre(lockMgr, locks, dataverseName, 
itemTypeDataverseName, itemTypeName, itemTypeAnonymous,
++                metaItemTypeDataverseName, metaItemTypeName, 
metaItemTypeAnonymous, nodeGroupName, compactionPolicyName,
 +                isDefaultCompactionPolicy);
 +        lockMgr.acquireDatasetWriteLock(locks, dataverseName, datasetName);
 +    }
 +
 +    protected final void createDatasetBeginPre(IMetadataLockManager lockMgr, 
LockList locks,
 +            DataverseName dataverseName, DataverseName itemTypeDataverseName, 
String itemTypeName,
-             DataverseName metaItemTypeDataverseName, String metaItemTypeName, 
String nodeGroupName,
-             String compactionPolicyName, boolean isDefaultCompactionPolicy) 
throws AlgebricksException {
++            boolean itemTypeAnonymous, DataverseName 
metaItemTypeDataverseName, String metaItemTypeName,
++            boolean metaItemTypeAnonymous, String nodeGroupName, String 
compactionPolicyName,
++            boolean isDefaultCompactionPolicy) throws AlgebricksException {
          lockMgr.acquireDataverseReadLock(locks, dataverseName);
 -        if (!dataverseName.equals(itemTypeDataverseName)) {
 +        if (itemTypeDataverseName != null && 
!dataverseName.equals(itemTypeDataverseName)) {
              lockMgr.acquireDataverseReadLock(locks, itemTypeDataverseName);
          }
          if (metaItemTypeDataverseName != null && 
!metaItemTypeDataverseName.equals(dataverseName)
                  && !metaItemTypeDataverseName.equals(itemTypeDataverseName)) {
              lockMgr.acquireDataverseReadLock(locks, 
metaItemTypeDataverseName);
          }
-         if (itemTypeName != null) {
+         if (itemTypeAnonymous) {
+             // the datatype will be created
 -            lockMgr.acquireDataTypeWriteLock(locks, 
itemTypeFullyQualifiedName);
++            lockMgr.acquireDataTypeWriteLock(locks, itemTypeDataverseName, 
itemTypeName);
+         } else {
 -            lockMgr.acquireDataTypeReadLock(locks, 
itemTypeFullyQualifiedName);
 +            lockMgr.acquireDataTypeReadLock(locks, itemTypeDataverseName, 
itemTypeName);
          }
 -        if (metaItemTypeFullyQualifiedName != null
 -                && 
!metaItemTypeFullyQualifiedName.equals(itemTypeFullyQualifiedName)) {
 +        if (metaItemTypeDataverseName != null && 
!metaItemTypeDataverseName.equals(itemTypeDataverseName)
 +                && !metaItemTypeName.equals(itemTypeName)) {
-             lockMgr.acquireDataTypeReadLock(locks, metaItemTypeDataverseName, 
metaItemTypeName);
+             if (metaItemTypeAnonymous) {
+                 // the datatype will be created
 -                lockMgr.acquireDataTypeWriteLock(locks, 
metaItemTypeFullyQualifiedName);
++                lockMgr.acquireDataTypeWriteLock(locks, 
metaItemTypeDataverseName, metaItemTypeName);
+             } else {
 -                lockMgr.acquireDataTypeReadLock(locks, 
metaItemTypeFullyQualifiedName);
++                lockMgr.acquireDataTypeReadLock(locks, 
metaItemTypeDataverseName, metaItemTypeName);
+             }
          }
          if (nodeGroupName != null) {
              lockMgr.acquireNodeGroupReadLock(locks, nodeGroupName);

Reply via email to