Repository: hadoop
Updated Branches:
  refs/heads/trunk fcd94eeab -> a16aa2f60


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestChunkStreams.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestChunkStreams.java
 
b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestChunkStreams.java
index 177694c..7dd31da 100644
--- 
a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestChunkStreams.java
+++ 
b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestChunkStreams.java
@@ -30,6 +30,7 @@ import java.io.OutputStream;
 import java.io.IOException;
 import java.util.ArrayList;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.assertEquals;
 
 /**
@@ -62,7 +63,7 @@ public class TestChunkStreams {
       assertEquals(0, groupOutputStream.getByteOffset());
 
       String dataString = RandomStringUtils.randomAscii(500);
-      byte[] data = dataString.getBytes();
+      byte[] data = dataString.getBytes(UTF_8);
       groupOutputStream.write(data, 0, data.length);
       assertEquals(500, groupOutputStream.getByteOffset());
 
@@ -95,7 +96,8 @@ public class TestChunkStreams {
       assertEquals(0, groupOutputStream.getByteOffset());
 
       // first writes of 100 bytes should succeed
-      groupOutputStream.write(RandomStringUtils.randomAscii(100).getBytes());
+      groupOutputStream.write(RandomStringUtils.randomAscii(100)
+          .getBytes(UTF_8));
       assertEquals(100, groupOutputStream.getByteOffset());
 
       // second writes of 500 bytes should fail, as there should be only 400
@@ -104,7 +106,8 @@ public class TestChunkStreams {
       // other add more informative error code rather than exception, need to
       // change this part.
       exception.expect(Exception.class);
-      groupOutputStream.write(RandomStringUtils.randomAscii(500).getBytes());
+      groupOutputStream.write(RandomStringUtils.randomAscii(500)
+          .getBytes(UTF_8));
       assertEquals(100, groupOutputStream.getByteOffset());
     }
   }
@@ -115,7 +118,7 @@ public class TestChunkStreams {
       ArrayList<ChunkInputStream> inputStreams = new ArrayList<>();
 
       String dataString = RandomStringUtils.randomAscii(500);
-      byte[] buf = dataString.getBytes();
+      byte[] buf = dataString.getBytes(UTF_8);
       int offset = 0;
       for (int i = 0; i < 5; i++) {
         int tempOffset = offset;
@@ -126,12 +129,12 @@ public class TestChunkStreams {
                   new ByteArrayInputStream(buf, tempOffset, 100);
 
               @Override
-              public void seek(long pos) throws IOException {
+              public synchronized void seek(long pos) throws IOException {
                 throw new UnsupportedOperationException();
               }
 
               @Override
-              public long getPos() throws IOException {
+              public synchronized long getPos() throws IOException {
                 return pos;
               }
 
@@ -142,12 +145,13 @@ public class TestChunkStreams {
               }
 
               @Override
-              public int read() throws IOException {
+              public synchronized int read() throws IOException {
                 return in.read();
               }
 
               @Override
-              public int read(byte[] b, int off, int len) throws IOException {
+              public synchronized  int read(byte[] b, int off, int len)
+                  throws IOException {
                 int readLen = in.read(b, off, len);
                 pos += readLen;
                 return readLen;
@@ -162,7 +166,7 @@ public class TestChunkStreams {
       int len = groupInputStream.read(resBuf, 0, 500);
 
       assertEquals(500, len);
-      assertEquals(dataString, new String(resBuf));
+      assertEquals(dataString, new String(resBuf, UTF_8));
     }
   }
 
@@ -172,7 +176,7 @@ public class TestChunkStreams {
       ArrayList<ChunkInputStream> inputStreams = new ArrayList<>();
 
       String dataString = RandomStringUtils.randomAscii(500);
-      byte[] buf = dataString.getBytes();
+      byte[] buf = dataString.getBytes(UTF_8);
       int offset = 0;
       for (int i = 0; i < 5; i++) {
         int tempOffset = offset;
@@ -183,28 +187,29 @@ public class TestChunkStreams {
                   new ByteArrayInputStream(buf, tempOffset, 100);
 
               @Override
-              public void seek(long pos) throws IOException {
+              public synchronized void seek(long pos) throws IOException {
                 throw new UnsupportedOperationException();
               }
 
               @Override
-              public long getPos() throws IOException {
+              public synchronized long getPos() throws IOException {
                 return pos;
               }
 
               @Override
-              public boolean seekToNewSource(long targetPos)
+              public synchronized boolean seekToNewSource(long targetPos)
                   throws IOException {
                 throw new UnsupportedOperationException();
               }
 
               @Override
-              public int read() throws IOException {
+              public synchronized int read() throws IOException {
                 return in.read();
               }
 
               @Override
-              public int read(byte[] b, int off, int len) throws IOException {
+              public synchronized int read(byte[] b, int off, int len)
+                  throws IOException {
                 int readLen = in.read(b, off, len);
                 pos += readLen;
                 return readLen;
@@ -222,14 +227,14 @@ public class TestChunkStreams {
       assertEquals(60, groupInputStream.getRemainingOfIndex(3));
       assertEquals(340, len);
       assertEquals(dataString.substring(0, 340),
-          new String(resBuf).substring(0, 340));
+          new String(resBuf, UTF_8).substring(0, 340));
 
       // read following 300 bytes, but only 200 left
       len = groupInputStream.read(resBuf, 340, 260);
       assertEquals(4, groupInputStream.getCurrentStreamIndex());
       assertEquals(0, groupInputStream.getRemainingOfIndex(4));
       assertEquals(160, len);
-      assertEquals(dataString, new String(resBuf).substring(0, 500));
+      assertEquals(dataString, new String(resBuf, UTF_8).substring(0, 500));
 
       // further read should get EOF
       len = groupInputStream.read(resBuf, 0, 1);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
 
b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
index d722155..55843e1 100644
--- 
a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
+++ 
b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.ozone.om;
 
-import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Set;
@@ -49,6 +48,8 @@ import org.rocksdb.RocksDB;
 import org.rocksdb.Statistics;
 import org.rocksdb.StatsLevel;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
+
 /**
  * Test class for @{@link KeyManagerImpl}.
  * */
@@ -93,11 +94,11 @@ public class TestKeyManagerImpl {
     Mockito.when(metadataManager.getLock())
         .thenReturn(new OzoneManagerLock(conf));
     Mockito.when(metadataManager.getVolumeKey(VOLUME_NAME))
-        .thenReturn(VOLUME_NAME.getBytes());
+        .thenReturn(VOLUME_NAME.getBytes(UTF_8));
     Mockito.when(metadataManager.getBucketKey(VOLUME_NAME, BUCKET_NAME))
-        .thenReturn(BUCKET_NAME.getBytes());
+        .thenReturn(BUCKET_NAME.getBytes(UTF_8));
     Mockito.when(metadataManager.getOpenKeyBytes(VOLUME_NAME, BUCKET_NAME,
-        KEY_NAME, 1)).thenReturn(KEY_NAME.getBytes());
+        KEY_NAME, 1)).thenReturn(KEY_NAME.getBytes(UTF_8));
   }
 
   private void setupRocksDb() throws Exception {
@@ -129,11 +130,11 @@ public class TestKeyManagerImpl {
 
     rdbStore = new RDBStore(folder.newFolder(), options, configSet);
     rdbTable = rdbStore.getTable("testTable");
-    rdbTable.put(VOLUME_NAME.getBytes(),
-        RandomStringUtils.random(10).getBytes(StandardCharsets.UTF_8));
-    rdbTable.put(BUCKET_NAME.getBytes(),
-        RandomStringUtils.random(10).getBytes(StandardCharsets.UTF_8));
-    rdbTable.put(KEY_NAME.getBytes(), keyData.toByteArray());
+    rdbTable.put(VOLUME_NAME.getBytes(UTF_8),
+        RandomStringUtils.random(10).getBytes(UTF_8));
+    rdbTable.put(BUCKET_NAME.getBytes(UTF_8),
+        RandomStringUtils.random(10).getBytes(UTF_8));
+    rdbTable.put(KEY_NAME.getBytes(UTF_8), keyData.toByteArray());
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/OzoneFileSystem.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/OzoneFileSystem.java
 
b/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/OzoneFileSystem.java
index 78b6e5d..6077b32 100644
--- 
a/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/OzoneFileSystem.java
+++ 
b/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/OzoneFileSystem.java
@@ -252,6 +252,7 @@ public class OzoneFileSystem extends FileSystem {
       LOG.trace("rename from:{} to:{}", srcKey, dstKey);
     }
 
+    @Override
     boolean processKey(String key) throws IOException {
       String newKeyName = dstKey.concat(key.substring(srcKey.length()));
       bucket.renameKey(key, newKeyName);
@@ -370,6 +371,7 @@ public class OzoneFileSystem extends FileSystem {
       }
     }
 
+    @Override
     boolean processKey(String key) throws IOException {
       if (key.equals("")) {
         LOG.trace("Skipping deleting root directory");
@@ -496,6 +498,7 @@ public class OzoneFileSystem extends FileSystem {
      * @return always returns true
      * @throws IOException
      */
+    @Override
     boolean processKey(String key) throws IOException {
       Path keyPath = new Path(OZONE_URI_DELIMITER + key);
       if (key.equals(getPathKey())) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/ozonefs/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFileInterfaces.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/ozonefs/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFileInterfaces.java
 
b/hadoop-ozone/ozonefs/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFileInterfaces.java
index 0eb8dce..d32c25a 100644
--- 
a/hadoop-ozone/ozonefs/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFileInterfaces.java
+++ 
b/hadoop-ozone/ozonefs/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFileInterfaces.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.ozone.web.utils.OzoneUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Time;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.apache.hadoop.fs.ozone.Constants.OZONE_DEFAULT_USER;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
@@ -176,7 +177,7 @@ public class TestOzoneFileInterfaces {
       byte[] buffer = new byte[stringLen];
       // This read will not change the offset inside the file
       int readBytes = inputStream.read(0, buffer, 0, buffer.length);
-      String out = new String(buffer, 0, buffer.length);
+      String out = new String(buffer, 0, buffer.length, UTF_8);
       assertEquals(data, out);
       assertEquals(readBytes, buffer.length);
       assertEquals(0, inputStream.getPos());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/ozonefs/src/test/java/org/apache/hadoop/fs/ozone/package-info.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/ozonefs/src/test/java/org/apache/hadoop/fs/ozone/package-info.java
 
b/hadoop-ozone/ozonefs/src/test/java/org/apache/hadoop/fs/ozone/package-info.java
new file mode 100644
index 0000000..51284c2
--- /dev/null
+++ 
b/hadoop-ozone/ozonefs/src/test/java/org/apache/hadoop/fs/ozone/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *
+ */
+/**
+ * Ozone FS Contract tests.
+ */
+package org.apache.hadoop.fs.ozone;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/AuthorizationHeaderV2.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/AuthorizationHeaderV2.java
 
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/AuthorizationHeaderV2.java
index 8e745f2..dfafc3a 100644
--- 
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/AuthorizationHeaderV2.java
+++ 
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/AuthorizationHeaderV2.java
@@ -49,6 +49,7 @@ public class AuthorizationHeaderV2 {
    *
    * @throws OS3Exception
    */
+  @SuppressWarnings("StringSplitter")
   public void parseHeader() throws OS3Exception {
     String[] split = authHeader.split(" ");
     if (split.length != 2) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/AuthorizationHeaderV4.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/AuthorizationHeaderV4.java
 
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/AuthorizationHeaderV4.java
index 88c64ca..c3f7072 100644
--- 
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/AuthorizationHeaderV4.java
+++ 
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/AuthorizationHeaderV4.java
@@ -60,6 +60,7 @@ public class AuthorizationHeaderV4 {
    * Signature=db81b057718d7c1b3b8dffa29933099551c51d787b3b13b9e0f9ebed45982bf2
    * @throws OS3Exception
    */
+  @SuppressWarnings("StringSplitter")
   public void parseAuthHeader() throws OS3Exception {
     String[] split = authHeader.split(" ");
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/Credential.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/Credential.java
 
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/Credential.java
index 19699a0..8db10a8 100644
--- 
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/Credential.java
+++ 
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/header/Credential.java
@@ -54,6 +54,7 @@ public class Credential {
    *
    * @throws OS3Exception
    */
+  @SuppressWarnings("StringSplitter")
   public void parseCredential() throws OS3Exception {
     String[] split = credential.split("/");
     if (split.length == 5) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/ObjectStoreStub.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/ObjectStoreStub.java
 
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/ObjectStoreStub.java
index de15a40..6c85b81 100644
--- 
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/ObjectStoreStub.java
+++ 
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/client/ObjectStoreStub.java
@@ -209,6 +209,7 @@ public class ObjectStoreStub extends ObjectStore {
   }
 
   @Override
+  @SuppressWarnings("StringSplitter")
   public String getOzoneVolumeName(String s3BucketName) throws IOException {
     if (bucketVolumeMap.get(s3BucketName) == null) {
       throw new IOException("S3_BUCKET_NOT_FOUND");
@@ -217,6 +218,7 @@ public class ObjectStoreStub extends ObjectStore {
   }
 
   @Override
+  @SuppressWarnings("StringSplitter")
   public String getOzoneBucketName(String s3BucketName) throws IOException {
     if (bucketVolumeMap.get(s3BucketName) == null) {
       throw new IOException("S3_BUCKET_NOT_FOUND");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketDelete.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketDelete.java
 
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketDelete.java
index 5114a47..ea574d4 100644
--- 
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketDelete.java
+++ 
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestBucketDelete.java
@@ -63,7 +63,7 @@ public class TestBucketDelete {
   @Test
   public void testBucketEndpoint() throws Exception {
     Response response = bucketEndpoint.delete(bucketName);
-    assertEquals(response.getStatus(), HttpStatus.SC_NO_CONTENT);
+    assertEquals(HttpStatus.SC_NO_CONTENT, response.getStatus());
 
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestMultiDeleteRequestUnmarshaller.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestMultiDeleteRequestUnmarshaller.java
 
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestMultiDeleteRequestUnmarshaller.java
index b3b1be0..c15a128 100644
--- 
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestMultiDeleteRequestUnmarshaller.java
+++ 
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestMultiDeleteRequestUnmarshaller.java
@@ -23,6 +23,8 @@ import java.io.ByteArrayInputStream;
 import java.io.IOException;
 
 import org.junit.Assert;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.*;
 import org.junit.Test;
 
@@ -40,7 +42,7 @@ public class TestMultiDeleteRequestUnmarshaller {
                 + ".com/doc/2006-03-01/\"><Object>key1</Object><Object>key2"
                 + "</Object><Object>key3"
                 + "</Object></Delete>")
-                .getBytes());
+                .getBytes(UTF_8));
 
     //WHEN
     MultiDeleteRequest multiDeleteRequest =
@@ -58,7 +60,7 @@ public class TestMultiDeleteRequestUnmarshaller {
             ("<Delete><Object>key1</Object><Object>key2"
                 + "</Object><Object>key3"
                 + "</Object></Delete>")
-                .getBytes());
+                .getBytes(UTF_8));
 
     //WHEN
     MultiDeleteRequest multiDeleteRequest =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectGet.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectGet.java
 
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectGet.java
index 2d0504d..2426ecc 100644
--- 
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectGet.java
+++ 
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectGet.java
@@ -37,6 +37,8 @@ import org.junit.Assert;
 import org.junit.Test;
 import org.mockito.Mockito;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
+
 /**
  * Test get object.
  */
@@ -54,15 +56,16 @@ public class TestObjectGet {
     OzoneBucket bucket =
         volume.getBucket("b1");
     OzoneOutputStream keyStream =
-        bucket.createKey("key1", CONTENT.getBytes().length);
-    keyStream.write(CONTENT.getBytes());
+        bucket.createKey("key1", CONTENT.getBytes(UTF_8).length);
+    keyStream.write(CONTENT.getBytes(UTF_8));
     keyStream.close();
 
     ObjectEndpoint rest = new ObjectEndpoint();
     rest.setClient(client);
     HttpHeaders headers = Mockito.mock(HttpHeaders.class);
     rest.setHeaders(headers);
-    ByteArrayInputStream body = new ByteArrayInputStream(CONTENT.getBytes());
+    ByteArrayInputStream body =
+        new ByteArrayInputStream(CONTENT.getBytes(UTF_8));
 
     //WHEN
     rest.get("b1", "key1", body);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectHead.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectHead.java
 
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectHead.java
index 6c166d7..8a8c015 100644
--- 
a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectHead.java
+++ 
b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectHead.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
 import org.apache.hadoop.ozone.s3.exception.OS3Exception;
 
 import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
+import static java.nio.charset.StandardCharsets.UTF_8;
 import org.apache.commons.lang3.RandomStringUtils;
 import org.junit.Assert;
 import org.junit.Before;
@@ -69,9 +70,9 @@ public class TestObjectHead {
     //GIVEN
     String value = RandomStringUtils.randomAlphanumeric(32);
     OzoneOutputStream out = bucket.createKey("key1",
-        value.getBytes().length, ReplicationType.STAND_ALONE,
+        value.getBytes(UTF_8).length, ReplicationType.STAND_ALONE,
         ReplicationFactor.ONE);
-    out.write(value.getBytes());
+    out.write(value.getBytes(UTF_8));
     out.close();
 
     //WHEN
@@ -79,7 +80,7 @@ public class TestObjectHead {
 
     //THEN
     Assert.assertEquals(200, response.getStatus());
-    Assert.assertEquals(value.getBytes().length,
+    Assert.assertEquals(value.getBytes(UTF_8).length,
         Long.parseLong(response.getHeaderString("Content-Length")));
 
     DateTimeFormatter.RFC_1123_DATE_TIME
@@ -91,7 +92,8 @@ public class TestObjectHead {
   public void testHeadFailByBadName() throws Exception {
     //Head an object that doesn't exist.
     try {
-      keyEndpoint.head(bucketName, "badKeyName");
+      Response response =  keyEndpoint.head(bucketName, "badKeyName");
+      Assert.assertEquals(404, response.getStatus());
     } catch (OS3Exception ex) {
       Assert.assertTrue(ex.getCode().contains("NoSuchObject"));
       Assert.assertTrue(ex.getErrorMessage().contains("object does not 
exist"));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java
 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java
index d73e37e..184b075 100644
--- 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java
+++ 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java
@@ -286,7 +286,7 @@ public final class RandomKeyGenerator implements 
Callable<Void> {
     long maxValue;
 
     currentValue = () -> numberOfKeysAdded.get();
-    maxValue = numOfVolumes *
+    maxValue = (long) numOfVolumes *
         numOfBuckets *
         numOfKeys;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/freon/TestFreonWithDatanodeFastRestart.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/freon/TestFreonWithDatanodeFastRestart.java
 
b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/freon/TestFreonWithDatanodeFastRestart.java
index 149d65e..a91e190 100644
--- 
a/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/freon/TestFreonWithDatanodeFastRestart.java
+++ 
b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/freon/TestFreonWithDatanodeFastRestart.java
@@ -88,8 +88,8 @@ public class TestFreonWithDatanodeFastRestart {
     String expectedSnapFile =
         storage.getSnapshotFile(termIndexBeforeRestart.getTerm(),
             termIndexBeforeRestart.getIndex()).getAbsolutePath();
-    Assert.assertEquals(snapshotInfo.getFile().getPath().toString(),
-        expectedSnapFile);
+    Assert.assertEquals(expectedSnapFile,
+        snapshotInfo.getFile().getPath().toString());
     Assert.assertEquals(termInSnapshot, termIndexBeforeRestart);
 
     // After restart the term index might have progressed to apply pending

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/freon/package-info.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/freon/package-info.java
 
b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/freon/package-info.java
new file mode 100644
index 0000000..f7cb075
--- /dev/null
+++ 
b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/freon/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *
+ */
+/**
+ * Freon Ozone Load Generator.
+ */
+package org.apache.hadoop.ozone.freon;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/om/TestOmSQLCli.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/om/TestOmSQLCli.java 
b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/om/TestOmSQLCli.java
index 1dc3cab..88fcb7b 100644
--- 
a/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/om/TestOmSQLCli.java
+++ 
b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/om/TestOmSQLCli.java
@@ -45,10 +45,10 @@ import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.UUID;
 
@@ -182,7 +182,7 @@ public class TestOmSQLCli {
     String sql = "SELECT * FROM volumeList";
     ResultSet rs = executeQuery(conn, sql);
     List<String> expectedValues =
-        new LinkedList<>(Arrays.asList(volumeName0, volumeName1));
+        new ArrayList<>(Arrays.asList(volumeName0, volumeName1));
     while (rs.next()) {
       String userNameRs = rs.getString("userName");
       String volumeNameRs = rs.getString("volumeName");
@@ -194,7 +194,7 @@ public class TestOmSQLCli {
     sql = "SELECT * FROM volumeInfo";
     rs = executeQuery(conn, sql);
     expectedValues =
-        new LinkedList<>(Arrays.asList(volumeName0, volumeName1));
+        new ArrayList<>(Arrays.asList(volumeName0, volumeName1));
     while (rs.next()) {
       String adName = rs.getString("adminName");
       String ownerName = rs.getString("ownerName");
@@ -208,7 +208,7 @@ public class TestOmSQLCli {
     sql = "SELECT * FROM aclInfo";
     rs = executeQuery(conn, sql);
     expectedValues =
-        new LinkedList<>(Arrays.asList(volumeName0, volumeName1));
+        new ArrayList<>(Arrays.asList(volumeName0, volumeName1));
     while (rs.next()) {
       String adName = rs.getString("adminName");
       String ownerName = rs.getString("ownerName");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16aa2f6/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/om/package-info.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/om/package-info.java 
b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/om/package-info.java
new file mode 100644
index 0000000..595708c
--- /dev/null
+++ 
b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/om/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *
+ */
+/**
+ * OM to SQL Converter. Currently broken.
+ */
+package org.apache.hadoop.ozone.om;
\ No newline at end of file


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org

Reply via email to