This is an automated email from the ASF dual-hosted git repository.

elserj pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase-filesystem.git


The following commit(s) were added to refs/heads/master by this push:
     new 1669358  HBASE-25900 Hadoop 3.2 and 3.3 support (#25)
1669358 is described below

commit 16693581c356c695f91bbe45a28bcb62789ca4c0
Author: Josh Elser <els...@apache.org>
AuthorDate: Tue Oct 5 12:51:58 2021 -0400

    HBASE-25900 Hadoop 3.2 and 3.3 support (#25)
    
    Builds on top of SteveL's original changes. Adds indirection so
    that the correct S3ClientFactory can be included at test time. Provides
    s3a.xml contracts for each version to reflect what actually works. Skips
    the new tests that are known to fail on older, pre-3.3 Hadoop releases.
    
    
    Co-authored-by: Steve Loughran <ste...@apache.org>
    Co-authored-by: Peter Somogyi <psomo...@cloudera.com>
    Signed-off-by: Peter Somogyi <psomo...@cloudera.com>
---
 hadoop-testutils/pom.xml                           |  63 +++++++++
 .../org/apache/hadoop/hbase/oss/EmbeddedS3.java    |  69 +---------
 hadoop3-2-testutils/pom.xml                        |  56 ++++++++
 .../hbase/oss/Hadoop32EmbeddedS3ClientFactory.java |  50 +++++++
 hadoop3-3-testutils/pom.xml                        |  56 ++++++++
 .../hbase/oss/Hadoop33EmbeddedS3ClientFactory.java |  63 +++++++++
 hbase-oss/pom.xml                                  |  74 ++++++++++
 .../hbase/oss/HBaseObjectStoreSemanticsTest.java   |   4 +-
 .../org/apache/hadoop/hbase/oss/TestUtils.java     | 139 ++++++++++++++++++-
 .../hadoop/hbase/oss/contract/HBOSSContract.java   |   4 +-
 .../hbase/oss/contract/TestHBOSSContract.java      |  48 ++++++-
 .../oss/contract/TestHBOSSContractCreate.java      |  21 ++-
 .../oss/contract/TestHBOSSContractRenameS3A.java   |   7 +
 .../resources/contract/{ => hadoop-3.2}/s3a.xml    |  10 +-
 .../resources/contract/{ => hadoop-3.3}/s3a.xml    |  20 ++-
 pom.xml                                            | 149 ++++++---------------
 16 files changed, 641 insertions(+), 192 deletions(-)
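For context, the pom.xml changes below add Maven profiles keyed on the hadoop.profile property (3.2, 3.3, and a default that falls back to Hadoop 3.3 when the property is unset). A typical way to exercise them, sketched here as an assumption rather than taken from this commit, would be:

    # default profile: build and test against Hadoop 3.3
    mvn clean verify

    # test against Hadoop 3.2.x
    mvn clean verify -Dhadoop.profile=3.2

    # test against Hadoop 3.3.x explicitly
    mvn clean verify -Dhadoop.profile=3.3

The selected profile also sets the HBOSS_HADOOP_VERSION system property, which the tests use to pick the matching s3a.xml contract and S3ClientFactory.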

diff --git a/hadoop-testutils/pom.xml b/hadoop-testutils/pom.xml
new file mode 100644
index 0000000..53d45b0
--- /dev/null
+++ b/hadoop-testutils/pom.xml
@@ -0,0 +1,63 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hbase.filesystem</groupId>
+    <artifactId>hbase-filesystem</artifactId>
+    <version>1.0.0-alpha2-SNAPSHOT</version>
+  </parent>
+  <artifactId>hadoop-testutils</artifactId>
+  <name>Common test utilities across Hadoop versions</name>
+   <dependencies>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>${commons-lang3.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>${slf4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <!-- Banned import in HBase -->
+          <groupId>com.google.code.findbugs</groupId>
+          <artifactId>jsr305</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-aws</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-bundle</artifactId>
+      <version>${aws-java-sdk.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.yetus</groupId>
+      <artifactId>audience-annotations</artifactId>
+      <version>${audience-annotations.version}</version>
+    </dependency>
+  </dependencies>
+</project>
\ No newline at end of file
diff --git a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/EmbeddedS3.java b/hadoop-testutils/src/main/java/org/apache/hadoop/hbase/oss/EmbeddedS3.java
similarity index 82%
rename from hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/EmbeddedS3.java
rename to hadoop-testutils/src/main/java/org/apache/hadoop/hbase/oss/EmbeddedS3.java
index f5411b7..2284a6d 100644
--- a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/EmbeddedS3.java
+++ b/hadoop-testutils/src/main/java/org/apache/hadoop/hbase/oss/EmbeddedS3.java
@@ -15,14 +15,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.oss;
 
 import com.amazonaws.AmazonServiceException;
 import com.amazonaws.SdkClientException;
-import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.services.s3.AbstractAmazonS3;
-import com.amazonaws.services.s3.AmazonS3;
 import com.amazonaws.services.s3.model.Bucket;
 import com.amazonaws.services.s3.model.CopyObjectRequest;
 import com.amazonaws.services.s3.model.CopyObjectResult;
@@ -43,84 +40,26 @@ import com.amazonaws.services.s3.model.S3ObjectSummary;
 import java.io.File;
 import java.io.InputStream;
 import java.io.IOException;
-import java.net.URI;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.s3a.S3ClientFactory;
-import org.apache.hadoop.fs.s3a.s3guard.LocalMetadataStore;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.hadoop.hbase.oss.Constants.*;
-import static org.apache.hadoop.fs.s3a.Constants.*;
-
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class EmbeddedS3 {
 
-  public static boolean usingEmbeddedS3 = false;
-
-  private static final String BUCKET = "embedded";
-
-  public static void conditionalStart(Configuration conf) {
-    if (StringUtils.isEmpty(conf.get(S3_METADATA_STORE_IMPL))) {
-      conf.set(S3_METADATA_STORE_IMPL, LocalMetadataStore.class.getName());
-    }
-
-    boolean notConfigured = StringUtils.isEmpty(conf.get(DATA_URI));
-    if (notConfigured) {
-      usingEmbeddedS3 = true;
-      conf.set(S3_CLIENT_FACTORY_IMPL,
-            EmbeddedS3ClientFactory.class.getName());
-      conf.set(DATA_URI, "s3a://" + BUCKET);
-    } else {
-      usingEmbeddedS3 = false;
-    }
-  }
-
-  /**
-   * Replaces the default S3ClientFactory to inject an EmbeddedAmazonS3
-   * instance. This is currently a private API in Hadoop, but is the same method
-   * used by S3Guard's inconsistency-injection tests. The method signature
-   * defined in the interface varies depending on the Hadoop version.
-   *
-   * Due to compatibility purposes for both hadoop 2 and 3 main versions,
-   * we are omitting "@override" annotation from overridden methods.
-   */
-  public static class EmbeddedS3ClientFactory implements S3ClientFactory {
-
-    public AmazonS3 createS3Client(URI name) {
-      AmazonS3 s3 = new EmbeddedAmazonS3();
-      s3.createBucket(BUCKET);
-      return s3;
-    }
-
-    public AmazonS3 createS3Client(URI name,
-        String bucket,
-        AWSCredentialsProvider credentialSet,
-        String userAgentSuffix) {
-      AmazonS3 s3 = new EmbeddedAmazonS3();
-      s3.createBucket(bucket);
-      return s3;
-    }
-
-    public AmazonS3 createS3Client(URI name,
-        String bucket,
-        AWSCredentialsProvider credentialSet) {
-      return createS3Client(name);
-    }
-  }
+  public static final String BUCKET = "embedded";
 
   /**
    * Emulates an S3-connected client. This is the bare minimum implementation
@@ -172,7 +111,7 @@ public class EmbeddedS3 {
       }
     }
 
-    private Map<String, EmbeddedS3Object> bucket = new HashMap<>();
+    private Map<String, EmbeddedS3Object> bucket = new ConcurrentHashMap<>();
 
     private void simulateServerSideCopy() {
       try {
diff --git a/hadoop3-2-testutils/pom.xml b/hadoop3-2-testutils/pom.xml
new file mode 100644
index 0000000..4911f1a
--- /dev/null
+++ b/hadoop3-2-testutils/pom.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hbase.filesystem</groupId>
+    <artifactId>hbase-filesystem</artifactId>
+    <version>1.0.0-alpha2-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+  <artifactId>hadoop3-2-testutils</artifactId>
+  <name>Test utility classes for Hadoop 3.1.x and 3.2.x</name>
+  <packaging>jar</packaging>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>${slf4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase.filesystem</groupId>
+      <artifactId>hadoop-testutils</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-aws</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-bundle</artifactId>
+      <version>${aws-java-sdk.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.yetus</groupId>
+      <artifactId>audience-annotations</artifactId>
+      <version>${audience-annotations.version}</version>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hadoop3-2-testutils/src/main/java/org/apache/hadoop/hbase/oss/Hadoop32EmbeddedS3ClientFactory.java b/hadoop3-2-testutils/src/main/java/org/apache/hadoop/hbase/oss/Hadoop32EmbeddedS3ClientFactory.java
new file mode 100644
index 0000000..a20586e
--- /dev/null
+++ b/hadoop3-2-testutils/src/main/java/org/apache/hadoop/hbase/oss/Hadoop32EmbeddedS3ClientFactory.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.oss;
+
+import java.io.IOException;
+import java.net.URI;
+
+import org.apache.hadoop.fs.s3a.S3ClientFactory;
+import org.apache.hadoop.hbase.oss.EmbeddedS3.EmbeddedAmazonS3;
+
+import com.amazonaws.auth.AWSCredentialsProvider;
+import com.amazonaws.services.s3.AmazonS3;
+
+public class Hadoop32EmbeddedS3ClientFactory implements S3ClientFactory {
+  public AmazonS3 createS3Client(URI name) {
+    AmazonS3 s3 = new EmbeddedAmazonS3();
+    s3.createBucket(EmbeddedS3.BUCKET);
+    return s3;
+  }
+
+  public AmazonS3 createS3Client(URI name,
+      String bucket,
+      AWSCredentialsProvider credentialSet,
+      String userAgentSuffix) {
+    AmazonS3 s3 = new EmbeddedAmazonS3();
+    s3.createBucket(bucket);
+    return s3;
+  }
+
+  public AmazonS3 createS3Client(URI name,
+      String bucket,
+      AWSCredentialsProvider credentialSet) {
+    return createS3Client(name);
+  }
+}
\ No newline at end of file
diff --git a/hadoop3-3-testutils/pom.xml b/hadoop3-3-testutils/pom.xml
new file mode 100644
index 0000000..74b1efd
--- /dev/null
+++ b/hadoop3-3-testutils/pom.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hbase.filesystem</groupId>
+    <artifactId>hbase-filesystem</artifactId>
+    <version>1.0.0-alpha2-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+  <artifactId>hadoop3-3-testutils</artifactId>
+  <name>Test utility classes for Hadoop 3.3.1 and beyond</name>
+  <packaging>jar</packaging>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>${slf4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase.filesystem</groupId>
+      <artifactId>hadoop-testutils</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-aws</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-bundle</artifactId>
+      <version>${aws-java-sdk.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.yetus</groupId>
+      <artifactId>audience-annotations</artifactId>
+      <version>${audience-annotations.version}</version>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hadoop3-3-testutils/src/main/java/org/apache/hadoop/hbase/oss/Hadoop33EmbeddedS3ClientFactory.java b/hadoop3-3-testutils/src/main/java/org/apache/hadoop/hbase/oss/Hadoop33EmbeddedS3ClientFactory.java
new file mode 100644
index 0000000..e0cbcbe
--- /dev/null
+++ b/hadoop3-3-testutils/src/main/java/org/apache/hadoop/hbase/oss/Hadoop33EmbeddedS3ClientFactory.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.oss;
+
+import java.io.IOException;
+import java.net.URI;
+
+import org.apache.hadoop.fs.s3a.S3ClientFactory;
+import org.apache.hadoop.hbase.oss.EmbeddedS3.EmbeddedAmazonS3;
+
+import com.amazonaws.auth.AWSCredentialsProvider;
+import com.amazonaws.services.s3.AmazonS3;
+
+/**
+ * An S3ClientFactory for Hadoop 3.3 releases which have the change from
+ * HADOOP-13551. Builds on top of Hadoop32EmbeddedS3ClientFactory.
+ */
+public class Hadoop33EmbeddedS3ClientFactory implements S3ClientFactory {
+
+  public AmazonS3 createS3Client(URI name) {
+    AmazonS3 s3 = new EmbeddedAmazonS3();
+    s3.createBucket(EmbeddedS3.BUCKET);
+    return s3;
+  }
+
+  public AmazonS3 createS3Client(URI name,
+      String bucket,
+      AWSCredentialsProvider credentialSet,
+      String userAgentSuffix) {
+    AmazonS3 s3 = new EmbeddedAmazonS3();
+    s3.createBucket(bucket);
+    return s3;
+  }
+
+  public AmazonS3 createS3Client(URI name,
+      String bucket,
+      AWSCredentialsProvider credentialSet) {
+    return createS3Client(name);
+  }
+
+  public AmazonS3 createS3Client(URI uri,
+      S3ClientCreationParameters s3ClientCreationParameters)
+      throws IOException {
+    AmazonS3 s3 = new EmbeddedAmazonS3();
+    s3.createBucket(uri.getHost());
+    return s3;
+  }
+}
\ No newline at end of file
diff --git a/hbase-oss/pom.xml b/hbase-oss/pom.xml
index e346076..2667f38 100644
--- a/hbase-oss/pom.xml
+++ b/hbase-oss/pom.xml
@@ -56,6 +56,62 @@
         <fs.hboss.sync.impl>org.apache.hadoop.hbase.oss.sync.ZKTreeLockManager</fs.hboss.sync.impl>
       </properties>
     </profile>
+    <profile>
+      <id>hadoop3.2</id>
+      <activation>
+        <property>
+          <name>hadoop.profile</name>
+          <value>3.2</value>
+        </property>
+      </activation>
+      <properties>
+        <HBOSS_HADOOP_VERSION>3.2</HBOSS_HADOOP_VERSION>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hbase.filesystem</groupId>
+          <artifactId>hadoop3-2-testutils</artifactId>
+          <version>${project.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop3.3</id>
+      <activation>
+        <property>
+          <name>hadoop.profile</name>
+          <value>3.3</value>
+        </property>
+      </activation>
+      <properties>
+        <HBOSS_HADOOP_VERSION>3.3</HBOSS_HADOOP_VERSION>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hbase.filesystem</groupId>
+          <artifactId>hadoop3-3-testutils</artifactId>
+          <version>${project.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop-default</id>
+      <activation>
+        <property>
+          <name>!hadoop.profile</name>
+        </property>
+      </activation>
+      <properties>
+        <HBOSS_HADOOP_VERSION>3.3</HBOSS_HADOOP_VERSION>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hbase.filesystem</groupId>
+          <artifactId>hadoop3-3-testutils</artifactId>
+          <version>${project.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
   </profiles>
 
   <build>
@@ -67,6 +123,7 @@
         <configuration>
           <systemProperties>
             <fs.hboss.sync.impl>${fs.hboss.sync.impl}</fs.hboss.sync.impl>
+            <HBOSS_HADOOP_VERSION>${HBOSS_HADOOP_VERSION}</HBOSS_HADOOP_VERSION>
           </systemProperties>
         </configuration>
       </plugin>
@@ -355,6 +412,23 @@
       <version>${commons-io.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.assertj</groupId>
+      <artifactId>assertj-core</artifactId>
+      <version>${assertj.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase.filesystem</groupId>
+      <artifactId>hadoop-testutils</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-metrics</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
 
   </dependencies>
 
diff --git a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/HBaseObjectStoreSemanticsTest.java b/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/HBaseObjectStoreSemanticsTest.java
index 1c1c883..e11647b 100644
--- a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/HBaseObjectStoreSemanticsTest.java
+++ b/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/HBaseObjectStoreSemanticsTest.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hbase.oss;
 
+import static org.apache.hadoop.hbase.oss.TestUtils.addContract;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.oss.sync.TreeLockManager;
@@ -44,7 +46,7 @@ public class HBaseObjectStoreSemanticsTest {
   @Before
   public void setup() throws Exception {
     Configuration conf = new Configuration();
-    conf.addResource("contract/s3a.xml");
+    addContract(conf);
     hboss = TestUtils.getFileSystem(conf);
     sync = hboss.getLockManager();
     hboss.mkdirs(testPathRoot());
diff --git a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/TestUtils.java b/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/TestUtils.java
index 9e4f5d9..f2e7011 100644
--- a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/TestUtils.java
+++ b/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/TestUtils.java
@@ -18,12 +18,20 @@
 
 package org.apache.hadoop.hbase.oss;
 
+import static org.apache.hadoop.fs.s3a.Constants.S3_CLIENT_FACTORY_IMPL;
+import static org.apache.hadoop.fs.s3a.Constants.S3_METADATA_STORE_IMPL;
+import static org.apache.hadoop.hbase.oss.Constants.DATA_URI;
+
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.net.URL;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.s3a.s3guard.LocalMetadataStore;
 import org.apache.hadoop.hbase.oss.sync.EmbeddedZK;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
 import org.junit.Assume;
@@ -36,6 +44,21 @@ public class TestUtils {
   public static final Logger LOG =
         LoggerFactory.getLogger(TestUtils.class);
 
+  public static enum HadoopVersion {
+    HADOOP32("3.2"),
+    HADOOP33("3.3");
+
+    private final String versionIdentifier;
+
+    HadoopVersion(String versionIdentifier) {
+      this.versionIdentifier = versionIdentifier;
+    }
+
+    public String getIdentifier() {
+      return versionIdentifier;
+    }
+  }
+
   // This is defined by the Maven Surefire plugin configuration
   private static final String TEST_UNIQUE_FORK_ID = "test.unique.fork.id";
 
@@ -43,6 +66,92 @@ public class TestUtils {
 
   public static final String S3A = "s3a";
 
+  public static boolean usingEmbeddedS3 = false;
+
+  public static void conditionalStart(Configuration conf) {
+    if (StringUtils.isEmpty(conf.get(S3_METADATA_STORE_IMPL))) {
+      conf.set(S3_METADATA_STORE_IMPL, LocalMetadataStore.class.getName());
+    }
+
+    boolean notConfigured = StringUtils.isEmpty(conf.get(DATA_URI));
+    if (notConfigured) {
+      usingEmbeddedS3 = true;
+      conf.set(S3_CLIENT_FACTORY_IMPL, getEmbeddedS3ClientFactoryClassName());
+      conf.set(DATA_URI, "s3a://" + EmbeddedS3.BUCKET);
+    } else {
+      usingEmbeddedS3 = false;
+    }
+  }
+
+  public static void addContract(Configuration conf) {
+    final HadoopVersion version = getDesiredHadoopVersion();
+    String contractFile;
+    switch (version) {
+    case HADOOP32:
+      contractFile = "contract/hadoop-3.2/s3a.xml";
+      break;
+    case HADOOP33:
+      contractFile = "contract/hadoop-3.3/s3a.xml";
+      break;
+    default:
+      throw new RuntimeException("Unhandled HadoopVersion: " + version);
+    }
+    URL url = TestUtils.class.getClassLoader().getResource(contractFile);
+    if (url == null) {
+      throw new RuntimeException("Failed to find s3a contract file on 
classpath: " + contractFile);
+    }
+    LOG.info("Adding s3a contract definition: {}", contractFile);
+    conf.addResource(contractFile);
+  }
+
+  /**
+   * Returns the class name for the S3ClientFactory implementation for the
+   * given major version of Hadoop.
+   */
+  public static String getEmbeddedS3ClientFactoryClassName() {
+    final HadoopVersion version = getDesiredHadoopVersion();
+    switch (version) {
+    case HADOOP32:
+      return "org.apache.hadoop.hbase.oss.Hadoop32EmbeddedS3ClientFactory";
+    case HADOOP33:
+      return "org.apache.hadoop.hbase.oss.Hadoop33EmbeddedS3ClientFactory";
+    }
+
+    throw new RuntimeException("HadoopVersion " + version + " is not 
handled.");
+  }
+
+  /**
+   * Attempts to return a HadoopVersion enum value given the value of the system
+   * property {@code HBOSS_HADOOP_VERSION}. This system property is set via
+   * the pom.xml via the corresponding profile for each Hadoop version this project
+   * has support for.
+   */
+  static HadoopVersion getDesiredHadoopVersion() {
+    String hadoopVersPropValue = System.getProperty("HBOSS_HADOOP_VERSION");
+    if (hadoopVersPropValue == null) {
+      throw new RuntimeException("HBOSS_HADOOP_VERSION was not set as a system 
property.");
+    }
+    for (HadoopVersion version : HadoopVersion.values()) {
+      if (hadoopVersPropValue.equals(version.getIdentifier())) {
+        return version;
+      }
+    }
+
+    LOG.error("Found HBOSS_HADOOP_VERSION property set to '{}',"
+        + "but there is no corresponding HadoopVersion enum value", 
hadoopVersPropValue);
+    throw new RuntimeException("Unable to determine S3ClientFactory to 
instantiate");
+  }
+
+  public static boolean renameToExistingDestinationSupported() {
+    HadoopVersion version = getDesiredHadoopVersion();
+    // Hadoop 3.2 and below don't support the additional checks added
+    // by HADOOP-16721 around renames.
+    if (version == HadoopVersion.HADOOP32) {
+      return false;
+    }
+    return true;
+  }
+
   public static String getScheme(Configuration conf) {
     String dataUri = conf.get(Constants.DATA_URI);
     try {
@@ -79,13 +188,12 @@ public class TestUtils {
   }
 
   public static HBaseObjectStoreSemantics getFileSystem(Configuration conf) throws Exception {
-    // Newer versions of Hadoop will do this for us, but older ones won't
-    // This allows Maven properties, profiles, etc. to set the implementation
-    if (StringUtils.isEmpty(conf.get(Constants.SYNC_IMPL))) {
-      conf.set(Constants.SYNC_IMPL, System.getProperty(Constants.SYNC_IMPL));
-    }
+    // Prevent re-registration of the same MetricsSource
+    DefaultMetricsSystem.setMiniClusterMode(true);
 
-    EmbeddedS3.conditionalStart(conf);
+    patchFileSystemImplementation(conf);
+
+    conditionalStart(conf);
     synchronized (TestUtils.class) {
       if (zk == null) {
         zk = new EmbeddedZK();
@@ -106,6 +214,25 @@ public class TestUtils {
     }
   }
 
+  /**
+   * Pick up the fs.hboss.sync.impl value from the JVM system property,
+   * which is how it is passed down from maven.
+   * If this isn't set, fall back to the local tree lock.
+   * That enables IDE test runs.
+   * @param conf configuration to patch.
+   */
+  private static void patchFileSystemImplementation(Configuration conf) {
+    // Newer versions of Hadoop will do this for us, but older ones won't
+    // This allows Maven properties, profiles, etc. to set the implementation
+    if (StringUtils.isEmpty(conf.get(Constants.SYNC_IMPL))) {
+      String property = System.getProperty(Constants.SYNC_IMPL);
+      if (property == null) {
+        property = "org.apache.hadoop.hbase.oss.sync.LocalTreeLockManager";
+      }
+      conf.set(Constants.SYNC_IMPL, property);
+    }
+  }
+
   public static void cleanup(HBaseObjectStoreSemantics hboss) throws Exception {
     if (hboss != null) {
       hboss.close();
diff --git a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/HBOSSContract.java b/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/HBOSSContract.java
index 1dd6a46..d37252d 100644
--- a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/HBOSSContract.java
+++ b/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/HBOSSContract.java
@@ -31,6 +31,8 @@ import org.apache.yetus.audience.InterfaceStability;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.hadoop.hbase.oss.TestUtils.addContract;
+
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class HBOSSContract extends AbstractFSContract {
@@ -47,7 +49,7 @@ public class HBOSSContract extends AbstractFSContract {
   public HBOSSContract(Configuration conf) {
     super(conf);
     this.conf = conf;
-    addConfResource("contract/s3a.xml");
+    addContract(conf);
   }
 
   /**
diff --git a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/TestHBOSSContract.java b/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/TestHBOSSContract.java
index 26c5a96..96adb7c 100644
--- a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/TestHBOSSContract.java
+++ b/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/TestHBOSSContract.java
@@ -18,8 +18,10 @@
 
 package org.apache.hadoop.hbase.oss.contract;
 
+import java.io.FileNotFoundException;
 import java.lang.reflect.Method;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileSystemContractBaseTest;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.oss.HBaseObjectStoreSemantics;
@@ -31,6 +33,11 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestName;
 
+import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
+import static org.apache.hadoop.hbase.oss.TestUtils.addContract;
+import static org.apache.hadoop.test.LambdaTestUtils.intercept;
+import static org.junit.Assume.assumeTrue;
+
 public class TestHBOSSContract extends FileSystemContractBaseTest {
 
   private Path basePath;
@@ -47,7 +54,7 @@ public class TestHBOSSContract extends FileSystemContractBaseTest {
   public void setUp() throws Exception {
     nameThread();
     conf = new Configuration();
-    conf.addResource("contract/s3a.xml");
+    addContract(conf);
     fs = TestUtils.getFileSystem(conf);
     Assume.assumeNotNull(fs);
     HBaseObjectStoreSemantics hboss = (HBaseObjectStoreSemantics)fs;
@@ -122,4 +129,43 @@ public class TestHBOSSContract extends FileSystemContractBaseTest {
       method.invoke(this, (Object[]) null);
     }
   }
+
+  @Test
+  public void testRenameDirectoryMoveToNonExistentDirectory()
+      throws Exception {
+    skip("does not fail on S3A since HADOOP-16721");
+  }
+
+  @Test
+  public void testRenameFileMoveToNonExistentDirectory() throws Exception {
+    skip("does not fail on S3A since HADOOP-16721");
+  }
+
+  @Test
+  public void testRenameDirectoryAsExistingFile() throws Exception {
+    assumeTrue(renameSupported());
+    assumeTrue(TestUtils.renameToExistingDestinationSupported());
+
+    Path src = path("testRenameDirectoryAsExistingFile/dir");
+    fs.mkdirs(src);
+    Path dst = path("testRenameDirectoryAsExistingFileNew/newfile");
+    createFile(dst);
+    intercept(FileAlreadyExistsException.class,
+        () -> rename(src, dst, false, true, true));
+  }
+
+  @Test
+  public void testRenameFileAsExistingFile() throws Exception {
+    assumeTrue(TestUtils.renameToExistingDestinationSupported());
+    intercept(FileAlreadyExistsException.class,
+        () -> super.testRenameFileAsExistingFile());
+  }
+
+  @Test
+  public void testRenameNonExistentPath() throws Exception {
+    assumeTrue(TestUtils.renameToExistingDestinationSupported());
+    intercept(FileNotFoundException.class,
+        () -> super.testRenameNonExistentPath());
+
+  }
 }
diff --git a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/TestHBOSSContractCreate.java b/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/TestHBOSSContractCreate.java
index 1f24ac4..5351cae 100644
--- a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/TestHBOSSContractCreate.java
+++ b/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/TestHBOSSContractCreate.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hbase.oss.contract;
 
+import java.lang.reflect.Method;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
@@ -43,6 +45,11 @@ public class TestHBOSSContractCreate extends AbstractContractCreateTest {
   @Test
   @Override
   public void testCreatedFileIsVisibleOnFlush() throws Throwable {
+    skipIfFilesNotVisibleDuringCreation();
+    super.testCreatedFileIsVisibleOnFlush();
+  }
+
+  private void skipIfFilesNotVisibleDuringCreation() {
     Configuration conf = createConfiguration();
     try {
       TestUtils.getFileSystem(conf);
@@ -53,7 +60,6 @@ public class TestHBOSSContractCreate extends AbstractContractCreateTest {
     // HBOSS satisfies the contract that this test checks for, but it also
     // relies on flush, which s3a still does not support.
     Assume.assumeFalse(TestUtils.fsIs(TestUtils.S3A, conf));
-    super.testCreatedFileIsVisibleOnFlush();
   }
 
   @Test
@@ -85,4 +91,17 @@ public class TestHBOSSContractCreate extends AbstractContractCreateTest {
       }
     }
   }
+
+  public void testSyncable() throws Throwable {
+    skipIfFilesNotVisibleDuringCreation();
+    // testSyncable() only exists in >=Hadoop-3.3.1. Selectively skip this test when
+    // the method doesn't exist.
+    try {
+      Method testSyncable = AbstractContractCreateTest.class.getMethod("testSyncable");
+      // super.testSyncable()
+      testSyncable.invoke(this);
+    } catch (NoSuchMethodException e) {
+      Assume.assumeTrue("testSyncable does not exist on the parent, skipping 
test", false);
+    }
+  }
 }
diff --git a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/TestHBOSSContractRenameS3A.java b/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/TestHBOSSContractRenameS3A.java
index cb991fb..00bf25c 100644
--- a/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/TestHBOSSContractRenameS3A.java
+++ b/hbase-oss/src/test/java/org/apache/hadoop/hbase/oss/contract/TestHBOSSContractRenameS3A.java
@@ -26,6 +26,8 @@ import org.apache.hadoop.fs.contract.AbstractContractRenameTest;
 import org.apache.hadoop.fs.contract.ContractTestUtils;
 import org.apache.hadoop.hbase.oss.TestUtils;
 
+import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
+
 /**
  * There is an S3A-specific extension of AbstractContractRenameTest, and this
  * class implements the same modifications for HBOSS-on-S3A.
@@ -69,4 +71,9 @@ public class TestHBOSSContractRenameS3A extends AbstractContractRenameTest {
     boolean rename = fs.rename(srcDir, destDir);
     assertFalse("s3a doesn't support rename to non-empty directory", rename);
   }
+
+  //@Override
+  public void testRenameFileUnderFileSubdir() throws Exception {
+    skip("Rename deep paths under files is allowed");
+  }
 }
diff --git a/hbase-oss/src/test/resources/contract/s3a.xml b/hbase-oss/src/test/resources/contract/hadoop-3.2/s3a.xml
similarity index 100%
copy from hbase-oss/src/test/resources/contract/s3a.xml
copy to hbase-oss/src/test/resources/contract/hadoop-3.2/s3a.xml
index 98389a1..4f8e132 100644
--- a/hbase-oss/src/test/resources/contract/s3a.xml
+++ b/hbase-oss/src/test/resources/contract/hadoop-3.2/s3a.xml
@@ -61,6 +61,11 @@
   </property>
 
   <property>
+    <name>fs.contract.rename-overwrites-dest</name>
+    <value>false</value>
+  </property>
+
+  <property>
     <name>fs.contract.rename-remove-dest-if-empty-dir</name>
     <value>true</value>
   </property>
@@ -121,11 +126,6 @@
   </property>
 
   <property>
-    <name>fs.contract.rename-overwrites-dest</name>
-    <value>false</value>
-  </property>
-
-  <property>
     <name>fs.s3a.change.detection.version.required</name>
     <value>false</value>
   </property>
diff --git a/hbase-oss/src/test/resources/contract/s3a.xml b/hbase-oss/src/test/resources/contract/hadoop-3.3/s3a.xml
similarity index 87%
rename from hbase-oss/src/test/resources/contract/s3a.xml
rename to hbase-oss/src/test/resources/contract/hadoop-3.3/s3a.xml
index 98389a1..9ae26d7 100644
--- a/hbase-oss/src/test/resources/contract/s3a.xml
+++ b/hbase-oss/src/test/resources/contract/hadoop-3.3/s3a.xml
@@ -26,6 +26,10 @@
       fs.contract.supports-atomic-directory-delete = true
       fs.contract.supports-atomic-rename = true
 
+    fs.contract.is-blobstore tells the tests "don't expect it to be visible
+    during creation"
+
+    "fs.contract.create-visibility-delayed."
     Note that fs.contract.is-blobstore appears to be identical in meaning to
     fs.contract.create-visibility-delayed.
   -->
@@ -54,10 +58,24 @@
     <name>fs.contract.is-case-sensitive</name>
     <value>true</value>
   </property>
+  <property>
+    <name>fs.contract.rename-creates-dest-dirs</name>
+    <value>true</value>
+  </property>
 
   <property>
     <name>fs.contract.rename-returns-false-if-source-missing</name>
-    <value>true</value>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.contract.rename-overwrites-dest</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.contract.rename-returns-false-if-dest-exists</name>
+    <value>false</value>
   </property>
 
   <property>
diff --git a/pom.xml b/pom.xml
index 969e774..2cd24a6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -45,6 +45,7 @@
     <!-- XXX If you change these versions please ensure the bundled LICENSE/NOTICE in
          hbase-oss is correct
      -->
+    <assertj.version>3.12.2</assertj.version>
     <audience-annotations.version>0.5.0</audience-annotations.version>
     <aws-java-sdk.version>1.11.1026</aws-java-sdk.version>
     <commons-io.version>2.5</commons-io.version>
@@ -52,10 +53,9 @@
     <curator.version>4.2.0</curator.version>
     <enforcer.version>3.0.0-M3</enforcer.version>
     <extra.enforcer.version>1.2</extra.enforcer.version>
-    <hadoop2.version>2.9.2</hadoop2.version>
-    <hadoop3.version>3.2.2</hadoop3.version>
-    <hbase1.version>1.4.10</hbase1.version>
-    <hbase2.version>2.3.6</hbase2.version>
+    <hadoop32.version>3.2.2</hadoop32.version>
+    <hadoop33.version>3.3.1</hadoop33.version>
+    <hbase.version>2.3.6</hbase.version>
     <hbase-thirdparty.version>3.5.1</hbase-thirdparty.version>
     <junit.version>4.12</junit.version>
     <log4j.version>1.2.17</log4j.version>
@@ -67,8 +67,26 @@
 
   <modules>
     <module>hbase-oss</module>
+    <module>hadoop-testutils</module>
   </modules>
 
+  <!-- For testing against ZK -->
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-zookeeper</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-zookeeper</artifactId>
+      <version>${hbase.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
   <build>
     <pluginManagement>
       <plugins>
@@ -101,27 +119,33 @@
 
   <profiles>
     <profile>
-      <id>hadoop2</id>
+      <id>hadoop3.2</id>
       <activation>
         <property>
           <name>hadoop.profile</name>
-          <value>2</value>
+          <value>3.2</value>
         </property>
       </activation>
+      <modules>
+        <module>hadoop3-2-testutils</module>
+      </modules>
       <properties>
-        <hadoop.version>${hadoop2.version}</hadoop.version>
+        <hadoop.version>${hadoop32.version}</hadoop.version>
       </properties>
     </profile>
     <profile>
-      <id>hadoop3</id>
+      <id>hadoop3.3</id>
       <activation>
         <property>
           <name>hadoop.profile</name>
-          <value>3</value>
+          <value>3.3</value>
         </property>
       </activation>
+      <modules>
+        <module>hadoop3-3-testutils</module>
+      </modules>
       <properties>
-        <hadoop.version>${hadoop3.version}</hadoop.version>
+        <hadoop.version>${hadoop33.version}</hadoop.version>
       </properties>
     </profile>
     <profile>
@@ -131,109 +155,12 @@
           <name>!hadoop.profile</name>
         </property>
       </activation>
+      <modules>
+        <module>hadoop3-3-testutils</module>
+      </modules>
       <properties>
-        <hadoop.version>${hadoop3.version}</hadoop.version>
-      </properties>
-    </profile>
-    <profile>
-      <id>hbase-default</id>
-      <activation>
-        <property>
-          <name>!hbase.profile</name>
-        </property>
-      </activation>
-      <properties>
-        <hbase.version>${hbase2.version}</hbase.version>
-      </properties>
-      <!-- For testing against ZK -->
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-zookeeper</artifactId>
-          <version>${hbase2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-zookeeper</artifactId>
-          <version>${hbase2.version}</version>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>hbase2</id>
-      <activation>
-        <property>
-          <name>hbase.profile</name>
-          <value>2</value>
-        </property>
-      </activation>
-      <properties>
-        <hbase.version>${hbase2.version}</hbase.version>
-      </properties>
-      <!-- For testing against ZK -->
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-zookeeper</artifactId>
-          <version>${hbase2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-zookeeper</artifactId>
-          <version>${hbase2.version}</version>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>hbase1</id>
-      <activation>
-        <property>
-          <name>hbase.profile</name>
-          <value>1</value>
-        </property>
-      </activation>
-      <properties>
-        <hbase.version>${hbase1.version}</hbase.version>
+        <hadoop.version>${hadoop33.version}</hadoop.version>
       </properties>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hbase.thirdparty</groupId>
-          <artifactId>hbase-shaded-miscellaneous</artifactId>
-          <version>${hbase-thirdparty.version}</version>
-        </dependency>
-        <!-- For testing against ZK -->
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-testing-util</artifactId>
-          <version>${hbase1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-testing-util</artifactId>
-          <version>${hbase1.version}</version>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minikdc</artifactId>
-          <version>${hadoop.version}</version>
-          <exclusions>
-            <exclusion>
-              <groupId>org.apache.directory.jdbm</groupId>
-              <artifactId>apacheds-jdbm1</artifactId>
-            </exclusion>
-          </exclusions>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
     </profile>
     <!-- this profile should match the name of the release profile in the root asf pom -->
     <profile>
