This is an automated email from the ASF dual-hosted git repository.

iemejia pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/avro.git


The following commit(s) were added to refs/heads/master by this push:
     new c552c17  AVRO-2336: Use Java Standard Charsets - Part 2
c552c17 is described below

commit c552c1746ff9bac97512fc7db2523f3cff1bd15d
Author: Beluga Behr <[email protected]>
AuthorDate: Thu Mar 28 11:55:27 2019 -0400

    AVRO-2336: Use Java Standard Charsets - Part 2
---
 .../src/main/java/org/apache/avro/Protocol.java    |  5 ++--
 .../main/java/org/apache/avro/SchemaBuilder.java   |  5 ++--
 .../java/org/apache/avro/file/DataFileWriter.java  |  3 ++-
 .../java/org/apache/avro/io/BinaryEncoder.java     |  3 ++-
 .../main/java/org/apache/avro/io/JsonDecoder.java  |  5 ++--
 .../main/java/org/apache/avro/io/JsonEncoder.java  |  3 ++-
 .../java/org/apache/avro/io/ResolvingDecoder.java  |  6 ++---
 .../avro/io/parsing/ResolvingGrammarGenerator.java |  5 ++--
 .../java/org/apache/avro/TestNestedRecords.java    |  2 +-
 .../TestReadingWritingDataInEvolvedSchemas.java    |  7 +++---
 .../java/org/apache/avro/io/TestBlockingIO.java    | 20 ++++++++--------
 .../org/apache/avro/ipc/netty/TestNettyServer.java |  4 ++--
 .../java/org/apache/avro/ipc/SaslSocketServer.java |  8 +++----
 .../org/apache/avro/ipc/SaslSocketTransceiver.java | 10 +++-----
 .../java/org/apache/avro/RPCMetaTestPlugin.java    | 27 +++++++++++-----------
 .../apache/avro/mapred/AvroTextOutputFormat.java   | 12 +++++-----
 .../org/apache/avro/mapreduce/TestFsInput.java     | 10 ++++----
 .../apache/avro/tool/SchemaNormalizationTool.java  |  3 ++-
 .../main/java/org/apache/avro/tool/ToTextTool.java |  3 ++-
 .../org/apache/avro/tool/TrevniMetadataTool.java   |  3 ++-
 .../org/apache/avro/tool/TestTextFileTools.java    |  3 ++-
 .../apache/trevni/avro/AvroTrevniOutputFormat.java |  4 ++--
 .../avro/mapreduce/AvroTrevniRecordWriterBase.java |  5 ++--
 .../src/main/java/org/apache/trevni/MetaData.java  |  9 +++-----
 .../main/java/org/apache/trevni/OutputBuffer.java  |  6 ++---
 25 files changed, 85 insertions(+), 86 deletions(-)

diff --git a/lang/java/avro/src/main/java/org/apache/avro/Protocol.java 
b/lang/java/avro/src/main/java/org/apache/avro/Protocol.java
index cbdf3f0..71f8548 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/Protocol.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/Protocol.java
@@ -21,6 +21,7 @@ import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.InputStream;
 import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
 import java.io.IOException;
 import java.security.MessageDigest;
 import java.util.ArrayList;
@@ -450,7 +451,7 @@ public class Protocol extends JsonProperties {
   public byte[] getMD5() {
     if (md5 == null)
       try {
-        md5 = 
MessageDigest.getInstance("MD5").digest(this.toString().getBytes("UTF-8"));
+        md5 = 
MessageDigest.getInstance("MD5").digest(this.toString().getBytes(StandardCharsets.UTF_8));
       } catch (Exception e) {
         throw new AvroRuntimeException(e);
       }
@@ -478,7 +479,7 @@ public class Protocol extends JsonProperties {
   /** Read a protocol from a Json string. */
   public static Protocol parse(String string) {
     try {
-      return parse(Schema.FACTORY.createParser(new 
ByteArrayInputStream(string.getBytes("UTF-8"))));
+      return parse(Schema.FACTORY.createParser(new 
ByteArrayInputStream(string.getBytes(StandardCharsets.UTF_8))));
     } catch (IOException e) {
       throw new AvroRuntimeException(e);
     }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java 
b/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
index 102dfd2..bb851d5 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
@@ -19,6 +19,7 @@ package org.apache.avro;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -2708,11 +2709,11 @@ public class SchemaBuilder {
         byte[] data = new byte[bytes.remaining()];
         bytes.get(data);
         bytes.reset(); // put the buffer back the way we got it
-        s = new String(data, "ISO-8859-1");
+        s = new String(data, StandardCharsets.ISO_8859_1);
         char[] quoted = 
BufferRecyclers.getJsonStringEncoder().quoteAsString(s);
         s = "\"" + new String(quoted) + "\"";
       } else if (o instanceof byte[]) {
-        s = new String((byte[]) o, "ISO-8859-1");
+        s = new String((byte[]) o, StandardCharsets.ISO_8859_1);
         char[] quoted = 
BufferRecyclers.getJsonStringEncoder().quoteAsString(s);
         s = '\"' + new String(quoted) + '\"';
       } else {
diff --git 
a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java 
b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
index 4ad5519..591ec66 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
@@ -28,6 +28,7 @@ import java.io.Flushable;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.HashMap;
@@ -222,7 +223,7 @@ public class DataFileWriter<D> implements Closeable, 
Flushable {
     this.meta.putAll(reader.getHeader().meta);
     byte[] codecBytes = this.meta.get(DataFileConstants.CODEC);
     if (codecBytes != null) {
-      String strCodec = new String(codecBytes, "UTF-8");
+      String strCodec = new String(codecBytes, StandardCharsets.UTF_8);
       this.codec = CodecFactory.fromString(strCodec).createInstance();
     } else {
       this.codec = CodecFactory.nullCodec().createInstance();
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/BinaryEncoder.java 
b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryEncoder.java
index b9ec6e0..9d10a34 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/BinaryEncoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryEncoder.java
@@ -19,6 +19,7 @@ package org.apache.avro.io;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.avro.util.Utf8;
 
@@ -51,7 +52,7 @@ public abstract class BinaryEncoder extends Encoder {
       writeZero();
       return;
     }
-    byte[] bytes = string.getBytes("UTF-8");
+    byte[] bytes = string.getBytes(StandardCharsets.UTF_8);
     writeInt(bytes.length);
     writeFixed(bytes, 0, bytes.length);
   }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java 
b/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java
index 75b1111..308a8c6 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java
@@ -23,6 +23,7 @@ import java.io.InputStream;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -62,8 +63,6 @@ public class JsonDecoder extends ParsingDecoder implements 
Parser.ActionHandler
     public JsonParser origParser = null;
   }
 
-  static final String CHARSET = "ISO-8859-1";
-
   private JsonDecoder(Symbol root, InputStream in) throws IOException {
     super(root);
     configure(in);
@@ -266,7 +265,7 @@ public class JsonDecoder extends ParsingDecoder implements 
Parser.ActionHandler
   }
 
   private byte[] readByteArray() throws IOException {
-    byte[] result = in.getText().getBytes(CHARSET);
+    byte[] result = in.getText().getBytes(StandardCharsets.ISO_8859_1);
     return result;
   }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java 
b/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java
index aa3f20e..d5d7909 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java
@@ -20,6 +20,7 @@ package org.apache.avro.io;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.BitSet;
 
 import org.apache.avro.AvroTypeException;
@@ -206,7 +207,7 @@ public class JsonEncoder extends ParsingEncoder implements 
Parser.ActionHandler
   }
 
   private void writeByteArray(byte[] bytes, int start, int len) throws 
IOException {
-    out.writeString(new String(bytes, start, len, JsonDecoder.CHARSET));
+    out.writeString(new String(bytes, start, len, 
StandardCharsets.ISO_8859_1));
   }
 
   @Override
diff --git 
a/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java 
b/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java
index 04fb7a6..d474632 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java
@@ -19,7 +19,7 @@ package org.apache.avro.io;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.avro.AvroTypeException;
 import org.apache.avro.Schema;
@@ -210,13 +210,11 @@ public class ResolvingDecoder extends ValidatingDecoder {
     }
   }
 
-  private static final Charset UTF8 = Charset.forName("UTF-8");
-
   @Override
   public String readString() throws IOException {
     Symbol actual = parser.advance(Symbol.STRING);
     if (actual == Symbol.BYTES) {
-      return new String(in.readBytes(null).array(), UTF8);
+      return new String(in.readBytes(null).array(), StandardCharsets.UTF_8);
     } else {
       assert actual == Symbol.STRING;
       return in.readString();
diff --git 
a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
 
b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
index 602b667..dd60309 100644
--- 
a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
+++ 
b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
@@ -19,6 +19,7 @@ package org.apache.avro.io.parsing;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -444,7 +445,7 @@ public class ResolvingGrammarGenerator extends 
ValidatingGrammarGenerator {
     case FIXED:
       if (!n.isTextual())
         throw new AvroTypeException("Non-string default value for fixed: " + 
n);
-      byte[] bb = n.textValue().getBytes("ISO-8859-1");
+      byte[] bb = n.textValue().getBytes(StandardCharsets.ISO_8859_1);
       if (bb.length != s.getFixedSize()) {
         bb = Arrays.copyOf(bb, s.getFixedSize());
       }
@@ -458,7 +459,7 @@ public class ResolvingGrammarGenerator extends 
ValidatingGrammarGenerator {
     case BYTES:
       if (!n.isTextual())
         throw new AvroTypeException("Non-string default value for bytes: " + 
n);
-      e.writeBytes(n.textValue().getBytes("ISO-8859-1"));
+      e.writeBytes(n.textValue().getBytes(StandardCharsets.ISO_8859_1));
       break;
     case INT:
       if (!n.isNumber())
diff --git 
a/lang/java/avro/src/test/java/org/apache/avro/TestNestedRecords.java 
b/lang/java/avro/src/test/java/org/apache/avro/TestNestedRecords.java
index 85da030..06ab017 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestNestedRecords.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestNestedRecords.java
@@ -79,7 +79,7 @@ public class TestNestedRecords {
         // this field should be safely ignored
         "    \"extraField\":\"extraValue\"\n" + " },\n" + " 
\"parentField2\":\"parentValue2\"\n" + "}";
 
-    final ByteArrayInputStream inputStream = new 
ByteArrayInputStream(inputAsExpected.getBytes());
+    final ByteArrayInputStream inputStream = new 
ByteArrayInputStream(inputAsExpected.getBytes(UTF_8));
 
     final JsonDecoder decoder = DecoderFactory.get().jsonDecoder(parent, 
inputStream);
     final DatumReader<Object> reader = new GenericDatumReader<>(parent);
diff --git 
a/lang/java/avro/src/test/java/org/apache/avro/TestReadingWritingDataInEvolvedSchemas.java
 
b/lang/java/avro/src/test/java/org/apache/avro/TestReadingWritingDataInEvolvedSchemas.java
index 9296f7c..415d679 100644
--- 
a/lang/java/avro/src/test/java/org/apache/avro/TestReadingWritingDataInEvolvedSchemas.java
+++ 
b/lang/java/avro/src/test/java/org/apache/avro/TestReadingWritingDataInEvolvedSchemas.java
@@ -23,6 +23,7 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 
@@ -268,7 +269,7 @@ public class TestReadingWritingDataInEvolvedSchemas {
     Record record = defaultRecordWithSchema(writer, FIELD_A, "42");
     byte[] encoded = encodeGenericBlob(record);
     ByteBuffer actual = (ByteBuffer) decodeGenericBlob(BYTES_RECORD, writer, 
encoded).get(FIELD_A);
-    assertArrayEquals("42".getBytes("UTF-8"), actual.array());
+    assertArrayEquals("42".getBytes(StandardCharsets.UTF_8), actual.array());
   }
 
   @Test
@@ -278,13 +279,13 @@ public class TestReadingWritingDataInEvolvedSchemas {
     Record record = defaultRecordWithSchema(writer, FIELD_A, goeran);
     byte[] encoded = encodeGenericBlob(record);
     ByteBuffer actual = (ByteBuffer) decodeGenericBlob(BYTES_RECORD, writer, 
encoded).get(FIELD_A);
-    assertArrayEquals(goeran.getBytes("UTF-8"), actual.array());
+    assertArrayEquals(goeran.getBytes(StandardCharsets.UTF_8), actual.array());
   }
 
   @Test
   public void asciiBytesWrittenWithUnionSchemaIsConvertedToStringSchema() 
throws Exception {
     Schema writer = UNION_STRING_BYTES_RECORD;
-    ByteBuffer buf = ByteBuffer.wrap("42".getBytes("UTF-8"));
+    ByteBuffer buf = ByteBuffer.wrap("42".getBytes(StandardCharsets.UTF_8));
     Record record = defaultRecordWithSchema(writer, FIELD_A, buf);
     byte[] encoded = encodeGenericBlob(record);
     CharSequence read = (CharSequence) decodeGenericBlob(STRING_RECORD, 
writer, encoded).get(FIELD_A);
diff --git 
a/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java 
b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java
index 8c9a991..36f32d1 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java
@@ -23,6 +23,7 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayDeque;
 import java.util.Arrays;
 import java.util.Collection;
@@ -35,7 +36,6 @@ import org.junit.runners.Parameterized;
 
 @RunWith(Parameterized.class)
 public class TestBlockingIO {
-  private static final String UTF_8 = "UTF-8";
 
   private final int iSize;
   private final int iDepth;
@@ -55,9 +55,9 @@ public class TestBlockingIO {
     public Tests(int bufferSize, int depth, String input) throws IOException {
 
       this.depth = depth;
-      byte[] in = input.getBytes("UTF-8");
+      byte[] in = input.getBytes(StandardCharsets.UTF_8);
       JsonFactory f = new JsonFactory();
-      JsonParser p = f.createParser(new 
ByteArrayInputStream(input.getBytes("UTF-8")));
+      JsonParser p = f.createParser(new 
ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8)));
 
       ByteArrayOutputStream os = new ByteArrayOutputStream();
       EncoderFactory factory = new 
EncoderFactory().configureBlockSize(bufferSize);
@@ -92,13 +92,13 @@ public class TestBlockingIO {
           continue;
         case VALUE_STRING: {
           String s = parser.getText();
-          int n = s.getBytes(UTF_8).length;
+          int n = s.getBytes(StandardCharsets.UTF_8).length;
           checkString(s, input, n);
           break;
         }
         case FIELD_NAME: {
           String s = parser.getCurrentName();
-          int n = s.getBytes(UTF_8).length;
+          int n = s.getBytes(StandardCharsets.UTF_8).length;
           checkString(s, input, n);
           continue;
         }
@@ -149,14 +149,14 @@ public class TestBlockingIO {
             input.skipBytes();
           } else {
             String s = parser.getText();
-            int n = s.getBytes(UTF_8).length;
+            int n = s.getBytes(StandardCharsets.UTF_8).length;
             checkString(s, input, n);
           }
           break;
         }
         case FIELD_NAME: {
           String s = parser.getCurrentName();
-          int n = s.getBytes(UTF_8).length;
+          int n = s.getBytes(StandardCharsets.UTF_8).length;
           checkString(s, input, n);
           continue;
         }
@@ -261,7 +261,7 @@ public class TestBlockingIO {
   private static void checkString(String s, Decoder input, int n) throws 
IOException {
     ByteBuffer buf = input.readBytes(null);
     assertEquals(n, buf.remaining());
-    String s2 = new String(buf.array(), buf.position(), buf.remaining(), 
UTF_8);
+    String s2 = new String(buf.array(), buf.position(), buf.remaining(), 
StandardCharsets.UTF_8);
     assertEquals(s, s2);
   }
 
@@ -298,7 +298,7 @@ public class TestBlockingIO {
           cos.startItem();
           counts[stackTop]++;
         }
-        byte[] bb = p.getText().getBytes(UTF_8);
+        byte[] bb = p.getText().getBytes(StandardCharsets.UTF_8);
         cos.writeBytes(bb);
         break;
       case START_OBJECT:
@@ -315,7 +315,7 @@ public class TestBlockingIO {
         cos.setItemCount(1);
         cos.startItem();
         counts[stackTop]++;
-        cos.writeBytes(p.getCurrentName().getBytes(UTF_8));
+        cos.writeBytes(p.getCurrentName().getBytes(StandardCharsets.UTF_8));
         break;
       default:
         throw new RuntimeException("Unsupported: " + p.getCurrentToken());
diff --git 
a/lang/java/ipc-netty/src/test/java/org/apache/avro/ipc/netty/TestNettyServer.java
 
b/lang/java/ipc-netty/src/test/java/org/apache/avro/ipc/netty/TestNettyServer.java
index 0a05d3d..db65ca7 100644
--- 
a/lang/java/ipc-netty/src/test/java/org/apache/avro/ipc/netty/TestNettyServer.java
+++ 
b/lang/java/ipc-netty/src/test/java/org/apache/avro/ipc/netty/TestNettyServer.java
@@ -24,7 +24,7 @@ import static org.junit.Assert.assertEquals;
 
 import java.net.InetSocketAddress;
 import java.net.Socket;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
@@ -171,7 +171,7 @@ public class TestNettyServer {
     Socket sock = new Socket();
     sock.connect(sockAddr);
     OutputStream out = sock.getOutputStream();
-    out.write(msg.getBytes(Charset.forName("UTF-8")));
+    out.write(msg.getBytes(StandardCharsets.UTF_8));
     out.flush();
     byte[] buf = new byte[2048];
     int bytesRead = sock.getInputStream().read(buf);
diff --git 
a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketServer.java 
b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketServer.java
index fa97ce0..645df04 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketServer.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketServer.java
@@ -22,6 +22,8 @@ import java.io.IOException;
 import java.util.Map;
 import java.net.SocketAddress;
 import java.nio.channels.SocketChannel;
+import java.nio.charset.StandardCharsets;
+
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslServer;
 import javax.security.sasl.SaslException;
@@ -87,11 +89,7 @@ public class SaslSocketServer extends SocketServer {
 
     @Override
     public byte[] evaluateResponse(byte[] response) throws SaslException {
-      try {
-        this.user = new String(response, "UTF-8");
-      } catch (IOException e) {
-        throw new SaslException(e.toString());
-      }
+      this.user = new String(response, StandardCharsets.UTF_8);
       return null;
     }
 
diff --git 
a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java 
b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java
index 95a5ddd..aac4192 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java
@@ -155,7 +155,7 @@ public class SaslSocketTransceiver extends Transceiver {
           response = sasl.evaluate(frame.array());
           status = sasl.isComplete() ? Status.COMPLETE : Status.CONTINUE;
         } catch (SaslException e) {
-          response = e.toString().getBytes("UTF-8");
+          response = e.toString().getBytes(StandardCharsets.UTF_8);
           status = Status.FAIL;
         }
         write(status, response != null ? ByteBuffer.wrap(response) : EMPTY);
@@ -272,7 +272,7 @@ public class SaslSocketTransceiver extends Transceiver {
   }
 
   private void write(Status status, String response) throws IOException {
-    write(status, ByteBuffer.wrap(response.getBytes("UTF-8")));
+    write(status, ByteBuffer.wrap(response.getBytes(StandardCharsets.UTF_8)));
   }
 
   private void write(Status status, ByteBuffer response) throws IOException {
@@ -399,11 +399,7 @@ public class SaslSocketTransceiver extends Transceiver {
 
     @Override
     public byte[] evaluateChallenge(byte[] challenge) throws SaslException {
-      try {
-        return System.getProperty("user.name").getBytes("UTF-8");
-      } catch (IOException e) {
-        throw new SaslException(e.toString());
-      }
+      return System.getProperty("user.name").getBytes(StandardCharsets.UTF_8);
     }
 
     @Override
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java 
b/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java
index b8fd1ad..9a98fac 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java
@@ -18,6 +18,7 @@
 package org.apache.avro;
 
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 
 import org.junit.Assert;
@@ -43,7 +44,7 @@ public final class RPCMetaTestPlugin extends RPCPlugin {
 
   @Override
   public void clientStartConnect(RPCContext context) {
-    ByteBuffer buf = ByteBuffer.wrap("ap".getBytes());
+    ByteBuffer buf = ByteBuffer.wrap("ap".getBytes(StandardCharsets.UTF_8));
     context.requestHandshakeMeta().put(key, buf);
   }
 
@@ -62,11 +63,11 @@ public final class RPCMetaTestPlugin extends RPCPlugin {
     Assert.assertNotNull(buf);
     Assert.assertNotNull(buf.array());
 
-    String partialstr = new String(buf.array());
+    String partialstr = new String(buf.array(), StandardCharsets.UTF_8);
     Assert.assertNotNull(partialstr);
     Assert.assertEquals("partial string mismatch", "ap", partialstr);
 
-    buf = ByteBuffer.wrap((partialstr + "ac").getBytes());
+    buf = ByteBuffer.wrap((partialstr + 
"ac").getBytes(StandardCharsets.UTF_8));
     Assert.assertTrue(buf.remaining() > 0);
     context.responseHandshakeMeta().put(key, buf);
   }
@@ -86,11 +87,11 @@ public final class RPCMetaTestPlugin extends RPCPlugin {
     Assert.assertNotNull(buf);
     Assert.assertNotNull(buf.array());
 
-    String partialstr = new String(buf.array());
+    String partialstr = new String(buf.array(), StandardCharsets.UTF_8);
     Assert.assertNotNull(partialstr);
     Assert.assertEquals("partial string mismatch", "apac", partialstr);
 
-    buf = ByteBuffer.wrap((partialstr + "he").getBytes());
+    buf = ByteBuffer.wrap((partialstr + 
"he").getBytes(StandardCharsets.UTF_8));
     Assert.assertTrue(buf.remaining() > 0);
     handshakeMeta.put(key, buf);
 
@@ -99,7 +100,7 @@ public final class RPCMetaTestPlugin extends RPCPlugin {
 
   @Override
   public void clientSendRequest(RPCContext context) {
-    ByteBuffer buf = ByteBuffer.wrap("ap".getBytes());
+    ByteBuffer buf = ByteBuffer.wrap("ap".getBytes(StandardCharsets.UTF_8));
     context.requestCallMeta().put(key, buf);
     Assert.assertNotNull(context.getMessage());
     Assert.assertNotNull(context.getRequestPayload());
@@ -121,11 +122,11 @@ public final class RPCMetaTestPlugin extends RPCPlugin {
     Assert.assertNotNull(buf);
     Assert.assertNotNull(buf.array());
 
-    String partialstr = new String(buf.array());
+    String partialstr = new String(buf.array(), StandardCharsets.UTF_8);
     Assert.assertNotNull(partialstr);
     Assert.assertEquals("partial string mismatch", "ap", partialstr);
 
-    buf = ByteBuffer.wrap((partialstr + "a").getBytes());
+    buf = ByteBuffer.wrap((partialstr + "a").getBytes(StandardCharsets.UTF_8));
     Assert.assertTrue(buf.remaining() > 0);
     meta.put(key, buf);
   }
@@ -144,11 +145,11 @@ public final class RPCMetaTestPlugin extends RPCPlugin {
     Assert.assertNotNull(buf);
     Assert.assertNotNull(buf.array());
 
-    String partialstr = new String(buf.array());
+    String partialstr = new String(buf.array(), StandardCharsets.UTF_8);
     Assert.assertNotNull(partialstr);
     Assert.assertEquals("partial string mismatch", "apa", partialstr);
 
-    buf = ByteBuffer.wrap((partialstr + "c").getBytes());
+    buf = ByteBuffer.wrap((partialstr + "c").getBytes(StandardCharsets.UTF_8));
     Assert.assertTrue(buf.remaining() > 0);
     context.responseCallMeta().put(key, buf);
   }
@@ -165,11 +166,11 @@ public final class RPCMetaTestPlugin extends RPCPlugin {
     Assert.assertNotNull(buf);
     Assert.assertNotNull(buf.array());
 
-    String partialstr = new String(buf.array());
+    String partialstr = new String(buf.array(), StandardCharsets.UTF_8);
     Assert.assertNotNull(partialstr);
     Assert.assertEquals("partial string mismatch", "apac", partialstr);
 
-    buf = ByteBuffer.wrap((partialstr + "he").getBytes());
+    buf = ByteBuffer.wrap((partialstr + 
"he").getBytes(StandardCharsets.UTF_8));
     Assert.assertTrue(buf.remaining() > 0);
     context.responseCallMeta().put(key, buf);
 
@@ -184,7 +185,7 @@ public final class RPCMetaTestPlugin extends RPCPlugin {
     Assert.assertNotNull(keybuf);
     Assert.assertTrue("key BB had nothing remaining", keybuf.remaining() > 0);
 
-    String str = new String(keybuf.array());
+    String str = new String(keybuf.array(), StandardCharsets.UTF_8);
     Assert.assertEquals("apache", str);
   }
 
diff --git 
a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java
 
b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java
index 9a9c658..53283d3 100644
--- 
a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java
+++ 
b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java
@@ -22,6 +22,7 @@ import static org.apache.avro.mapred.AvroOutputFormat.EXT;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.avro.Schema;
 import org.apache.avro.file.DataFileWriter;
@@ -42,15 +43,14 @@ import org.apache.hadoop.util.Progressable;
  */
 public class AvroTextOutputFormat<K, V> extends FileOutputFormat<K, V> {
 
-  private static final String UTF8 = "UTF-8";
-
   @Override
   public RecordWriter<K, V> getRecordWriter(FileSystem ignore, JobConf job, 
String name, Progressable prog)
       throws IOException {
 
     Schema schema = Schema.create(Schema.Type.BYTES);
 
-    final byte[] keyValueSeparator = 
job.get("mapreduce.output.textoutputformat.separator", "\t").getBytes(UTF8);
+    final byte[] keyValueSeparator = 
job.get("mapreduce.output.textoutputformat.separator", "\t")
+        .getBytes(StandardCharsets.UTF_8);
 
     final DataFileWriter<ByteBuffer> writer = new DataFileWriter<>(new 
ReflectDatumWriter<>());
 
@@ -96,7 +96,7 @@ public class AvroTextOutputFormat<K, V> extends 
FileOutputFormat<K, V> {
         Text to = (Text) o;
         return ByteBuffer.wrap(to.getBytes(), 0, to.getLength());
       } else {
-        return ByteBuffer.wrap(o.toString().getBytes(UTF8));
+        return ByteBuffer.wrap(o.toString().getBytes(StandardCharsets.UTF_8));
       }
     }
 
@@ -108,7 +108,7 @@ public class AvroTextOutputFormat<K, V> extends 
FileOutputFormat<K, V> {
         keyBytes = tkey.getBytes();
         keyLength = tkey.getLength();
       } else {
-        keyBytes = key.toString().getBytes(UTF8);
+        keyBytes = key.toString().getBytes(StandardCharsets.UTF_8);
         keyLength = keyBytes.length;
       }
       if (value instanceof Text) {
@@ -116,7 +116,7 @@ public class AvroTextOutputFormat<K, V> extends 
FileOutputFormat<K, V> {
         valBytes = tval.getBytes();
         valLength = tval.getLength();
       } else {
-        valBytes = value.toString().getBytes(UTF8);
+        valBytes = value.toString().getBytes(StandardCharsets.UTF_8);
         valLength = valBytes.length;
       }
       ByteBuffer buf = ByteBuffer.allocate(keyLength + sep.length + valLength);
diff --git 
a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestFsInput.java 
b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestFsInput.java
index 17f77a5..b7d83ef 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestFsInput.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestFsInput.java
@@ -27,7 +27,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.avro.mapred.FsInput;
 import org.apache.hadoop.conf.Configuration;
@@ -51,8 +51,8 @@ public class TestFsInput {
     conf.set("fs.default.name", "file:///");
     file = new File(DIR.getRoot(), "file.txt");
 
-    try (PrintWriter out = new PrintWriter(
-        new OutputStreamWriter(new FileOutputStream(file), 
Charset.forName("UTF-8")))) {
+    try (
+        PrintWriter out = new PrintWriter(new OutputStreamWriter(new 
FileOutputStream(file), StandardCharsets.UTF_8))) {
       out.print(FILE_CONTENTS);
     }
     fsInput = new FsInput(new Path(file.getPath()), conf);
@@ -94,7 +94,7 @@ public class TestFsInput {
 
   @Test
   public void testRead() throws Exception {
-    byte[] expectedBytes = FILE_CONTENTS.getBytes(Charset.forName("UTF-8"));
+    byte[] expectedBytes = FILE_CONTENTS.getBytes(StandardCharsets.UTF_8);
     byte[] actualBytes = new byte[expectedBytes.length];
     int actualByteCount = fsInput.read(actualBytes, 0, actualBytes.length);
 
@@ -105,7 +105,7 @@ public class TestFsInput {
   @Test
   public void testSeek() throws Exception {
     int seekPos = FILE_CONTENTS.length() / 2;
-    byte[] fileContentBytes = FILE_CONTENTS.getBytes(Charset.forName("UTF-8"));
+    byte[] fileContentBytes = FILE_CONTENTS.getBytes(StandardCharsets.UTF_8);
     byte expectedByte = fileContentBytes[seekPos];
     fsInput.seek(seekPos);
     byte[] readBytes = new byte[1];
diff --git 
a/lang/java/tools/src/main/java/org/apache/avro/tool/SchemaNormalizationTool.java
 
b/lang/java/tools/src/main/java/org/apache/avro/tool/SchemaNormalizationTool.java
index bcf3609..58a0cba 100644
--- 
a/lang/java/tools/src/main/java/org/apache/avro/tool/SchemaNormalizationTool.java
+++ 
b/lang/java/tools/src/main/java/org/apache/avro/tool/SchemaNormalizationTool.java
@@ -21,6 +21,7 @@ import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.InputStream;
 import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 import joptsimple.OptionParser;
@@ -61,7 +62,7 @@ public class SchemaNormalizationTool implements Tool {
 
     String canonicalForm = SchemaNormalization.toParsingForm(schema);
 
-    outStream.write(canonicalForm.getBytes());
+    outStream.write(canonicalForm.getBytes(StandardCharsets.UTF_8));
 
     Util.close(inStream);
     Util.close(outStream);
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/ToTextTool.java 
b/lang/java/tools/src/main/java/org/apache/avro/tool/ToTextTool.java
index d1c47ff..f8bdebf 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/ToTextTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/ToTextTool.java
@@ -22,6 +22,7 @@ import java.io.BufferedOutputStream;
 import java.io.InputStream;
 import java.io.PrintStream;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 import joptsimple.OptionParser;
@@ -34,7 +35,7 @@ import org.apache.avro.generic.GenericDatumReader;
 /** Reads an avro data file into a plain text file. */
 public class ToTextTool implements Tool {
   private static final String TEXT_FILE_SCHEMA = "\"bytes\"";
-  private static final byte[] LINE_SEPARATOR = System.getProperty("line.separator").getBytes();
+  private static final byte[] LINE_SEPARATOR = System.getProperty("line.separator").getBytes(StandardCharsets.UTF_8);
 
   @Override
   public String getName() {
diff --git 
a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java 
b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java
index 24d9010..51dc8a0 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java
@@ -20,6 +20,7 @@ package org.apache.avro.tool;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 
@@ -104,7 +105,7 @@ public class TrevniMetadataTool implements Tool {
   private void dump(MetaData<?> meta) throws IOException {
     generator.writeStartObject();
     for (Map.Entry<String, byte[]> e : meta.entrySet())
-      generator.writeStringField(e.getKey(), new String(e.getValue(), "ISO-8859-1"));
+      generator.writeStringField(e.getKey(), new String(e.getValue(), StandardCharsets.ISO_8859_1));
     generator.writeEndObject();
   }
 
diff --git 
a/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java 
b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java
index ca53154..33547c1 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java
@@ -28,6 +28,7 @@ import java.io.BufferedOutputStream;
 import java.io.BufferedInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Random;
@@ -43,7 +44,7 @@ import org.junit.rules.TemporaryFolder;
 public class TestTextFileTools {
   private static final int COUNT = 
Integer.parseInt(System.getProperty("test.count", "10"));
 
-  private static final byte[] LINE_SEP = System.getProperty("line.separator").getBytes();
+  private static final byte[] LINE_SEP = System.getProperty("line.separator").getBytes(StandardCharsets.UTF_8);
   private static File linesFile;
   private static ByteBuffer[] lines;
   static Schema schema;
diff --git 
a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java
 
b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java
index 36874a3..bda5696 100644
--- 
a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java
+++ 
b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java
@@ -20,6 +20,7 @@ package org.apache.trevni.avro;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 
 import org.apache.hadoop.io.NullWritable;
@@ -36,7 +37,6 @@ import org.apache.avro.reflect.ReflectData;
 import org.apache.avro.mapred.AvroJob;
 import org.apache.avro.mapred.AvroWrapper;
 
-import org.apache.trevni.MetaData;
 import org.apache.trevni.ColumnFileMetaData;
 
 /**
@@ -105,7 +105,7 @@ public class AvroTrevniOutputFormat<T> extends FileOutputFormat<AvroWrapper<T>,
     final ColumnFileMetaData meta = new ColumnFileMetaData();
     for (Map.Entry<String, String> e : job)
       if (e.getKey().startsWith(META_PREFIX))
-        meta.put(e.getKey().substring(META_PREFIX.length()), e.getValue().getBytes(MetaData.UTF8));
+        meta.put(e.getKey().substring(META_PREFIX.length()), e.getValue().getBytes(StandardCharsets.UTF_8));
     return meta;
   }
 
diff --git 
a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java
 
b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java
index f2e5039..9bbec31 100644
--- 
a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java
+++ 
b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java
@@ -20,6 +20,7 @@ package org.apache.trevni.avro.mapreduce;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Map.Entry;
 
 import org.apache.avro.Schema;
@@ -31,7 +32,6 @@ import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.trevni.ColumnFileMetaData;
-import org.apache.trevni.MetaData;
 import org.apache.trevni.avro.AvroColumnWriter;
 
 /**
@@ -127,7 +127,8 @@ public abstract class AvroTrevniRecordWriterBase<K, V, T> extends RecordWriter<K
 
     for (Entry<String, String> confEntry : configuration) {
       if (confEntry.getKey().startsWith(META_PREFIX))
-        meta.put(confEntry.getKey().substring(META_PREFIX.length()), confEntry.getValue().getBytes(MetaData.UTF8));
+        meta.put(confEntry.getKey().substring(META_PREFIX.length()),
+            confEntry.getValue().getBytes(StandardCharsets.UTF_8));
     }
 
     return meta;
diff --git 
a/lang/java/trevni/core/src/main/java/org/apache/trevni/MetaData.java 
b/lang/java/trevni/core/src/main/java/org/apache/trevni/MetaData.java
index 123013b..a0fa6e6 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/MetaData.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/MetaData.java
@@ -18,7 +18,6 @@
 package org.apache.trevni;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.LinkedHashMap;
@@ -31,8 +30,6 @@ public class MetaData<T extends MetaData> extends LinkedHashMap<String, byte[]>
   static final String CODEC_KEY = RESERVED_KEY_PREFIX + "codec";
   static final String CHECKSUM_KEY = RESERVED_KEY_PREFIX + "checksum";
 
-  public static final Charset UTF8 = Charset.forName("UTF-8");
-
   private MetaData<?> defaults;
 
   void setDefaults(MetaData defaults) {
@@ -68,7 +65,7 @@ public class MetaData<T extends MetaData> extends LinkedHashMap<String, byte[]>
       value = defaults.get(key);
     if (value == null)
       return null;
-    return new String(value, UTF8);
+    return new String(value, StandardCharsets.UTF_8);
   }
 
   /** Return the value of a metadata property as a long. */
@@ -97,11 +94,11 @@ public class MetaData<T extends MetaData> extends LinkedHashMap<String, byte[]>
 
   /** Set a metadata property to a String value. */
   public T set(String key, String value) {
-    return set(key, value.getBytes(UTF8));
+    return set(key, value.getBytes(StandardCharsets.UTF_8));
   }
 
   T setReserved(String key, String value) {
-    put(key, value.getBytes(UTF8));
+    put(key, value.getBytes(StandardCharsets.UTF_8));
     return (T) this;
   }
 
diff --git 
a/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java 
b/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java
index 87feb1e..cfbe6b1 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java
@@ -19,8 +19,8 @@ package org.apache.trevni;
 
 import java.io.IOException;
 import java.io.ByteArrayOutputStream;
-import java.nio.charset.Charset;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 
 /** Used to write values. */
@@ -97,10 +97,8 @@ class OutputBuffer extends ByteArrayOutputStream {
     writeInt(length);
   }
 
-  private static final Charset UTF8 = Charset.forName("UTF-8");
-
   public void writeString(String string) throws IOException {
-    byte[] bytes = string.getBytes(UTF8);
+    byte[] bytes = string.getBytes(StandardCharsets.UTF_8);
     writeInt(bytes.length);
     write(bytes, 0, bytes.length);
   }

Reply via email to