This is an automated email from the ASF dual-hosted git repository.
yhu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git
The following commit(s) were added to refs/heads/master by this push:
new 676c998dec7 Enable StringCharset Error Prone Check (#37767)
676c998dec7 is described below
commit 676c998dec78e878d54ad21cde46f91cc9a598b7
Author: Radosław Stankiewicz <[email protected]>
AuthorDate: Fri Mar 6 03:43:24 2026 +0100
Enable StringCharset Error Prone Check (#37767)
---
.../org/apache/beam/gradle/BeamModulePlugin.groovy | 1 -
.../examples/complete/game/injector/Injector.java | 6 ++++--
.../beam/sdk/schemas/JsonSchemaConversionTest.java | 15 ++++++++-------
.../org/apache/beam/sdk/io/kafka/KafkaDlqTest.java | 14 ++++++++------
.../beam/sdk/io/kafka/KafkaIOExternalTest.java | 12 ++++++++----
.../KafkaWriteSchemaTransformProviderTest.java | 22 +++++++++++-----------
6 files changed, 39 insertions(+), 31 deletions(-)
diff --git
a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
index 06c29c781bc..fe0f1ab8bc9 100644
--- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
+++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
@@ -1578,7 +1578,6 @@ class BeamModulePlugin implements Plugin<Project> {
"NarrowCalculation",
"NullableTypeParameter",
"NullableWildcard",
- "StringCharset",
"SuperCallToObjectMethod",
"UnnecessaryLongToIntConversion",
"UnusedVariable",
diff --git
a/examples/java/src/main/java/org/apache/beam/examples/complete/game/injector/Injector.java
b/examples/java/src/main/java/org/apache/beam/examples/complete/game/injector/Injector.java
index c6eb55d497b..c0d5d45e663 100644
---
a/examples/java/src/main/java/org/apache/beam/examples/complete/game/injector/Injector.java
+++
b/examples/java/src/main/java/org/apache/beam/examples/complete/game/injector/Injector.java
@@ -17,6 +17,8 @@
*/
package org.apache.beam.examples.complete.game.injector;
+import static java.nio.charset.StandardCharsets.UTF_8;
+
import com.google.api.services.pubsub.Pubsub;
import com.google.api.services.pubsub.model.PublishRequest;
import com.google.api.services.pubsub.model.PubsubMessage;
@@ -327,7 +329,7 @@ class Injector {
for (int i = 0; i < Math.max(1, numMessages); i++) {
Long currTime = System.currentTimeMillis();
String message = generateEvent(currTime, delayInMillis);
- PubsubMessage pubsubMessage = new PubsubMessage().encodeData(message.getBytes("UTF-8"));
+ PubsubMessage pubsubMessage = new PubsubMessage().encodeData(message.getBytes(UTF_8));
pubsubMessage.setAttributes(
ImmutableMap.of(
GameConstants.TIMESTAMP_ATTRIBUTE,
@@ -350,7 +352,7 @@ class Injector {
PrintWriter out =
new PrintWriter(
new OutputStreamWriter(
- new BufferedOutputStream(new FileOutputStream(fileName, true)), "UTF-8"));
+ new BufferedOutputStream(new FileOutputStream(fileName, true)), UTF_8));
try {
for (int i = 0; i < Math.max(1, numMessages); i++) {
diff --git
a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/JsonSchemaConversionTest.java
b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/JsonSchemaConversionTest.java
index e21c8930df1..e53a21634ca 100644
---
a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/JsonSchemaConversionTest.java
+++
b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/JsonSchemaConversionTest.java
@@ -17,6 +17,7 @@
*/
package org.apache.beam.sdk.schemas;
+import static java.nio.charset.StandardCharsets.UTF_8;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
@@ -189,7 +190,7 @@ public class JsonSchemaConversionTest {
public void testBasicJsonSchemaToBeamSchema() throws IOException {
try (InputStream inputStream =
getClass().getResourceAsStream("/json-schema/basic_json_schema.json"))
{
- String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), "UTF-8");
+ String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), UTF_8);
Schema parsedSchema =
JsonUtils.beamSchemaFromJsonSchema(stringJsonSchema);
assertThat(
@@ -209,7 +210,7 @@ public class JsonSchemaConversionTest {
public void testNestedStructsJsonSchemaToBeamSchema() throws IOException {
try (InputStream inputStream =
getClass().getResourceAsStream("/json-schema/nested_arrays_objects_json_schema.json"))
{
- String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), "UTF-8");
+ String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), UTF_8);
Schema parsedSchema =
JsonUtils.beamSchemaFromJsonSchema(stringJsonSchema);
assertThat(parsedSchema.getFieldNames(), containsInAnyOrder("fruits",
"vegetables"));
@@ -230,7 +231,7 @@ public class JsonSchemaConversionTest {
public void testArrayNestedArrayObjectJsonSchemaToBeamSchema() throws
IOException {
try (InputStream inputStream =
getClass().getResourceAsStream("/json-schema/array_nested_array_json_schema.json"))
{
- String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), "UTF-8");
+ String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), UTF_8);
Schema parsedSchema =
JsonUtils.beamSchemaFromJsonSchema(stringJsonSchema);
assertThat(parsedSchema.getFieldNames(),
containsInAnyOrder("complexMatrix"));
@@ -252,7 +253,7 @@ public class JsonSchemaConversionTest {
try (InputStream inputStream =
getClass()
.getResourceAsStream("/json-schema/object_nested_object_and_array_json_schema.json"))
{
- String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), "UTF-8");
+ String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), UTF_8);
Schema parsedSchema =
JsonUtils.beamSchemaFromJsonSchema(stringJsonSchema);
assertThat(parsedSchema.getFieldNames(),
containsInAnyOrder("classroom"));
@@ -285,7 +286,7 @@ public class JsonSchemaConversionTest {
public void testArrayWithNestedRefsBeamSchema() throws IOException {
try (InputStream inputStream =
getClass().getResourceAsStream("/json-schema/ref_with_ref_json_schema.json")) {
- String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), "UTF-8");
+ String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), UTF_8);
Schema parsedSchema =
JsonUtils.beamSchemaFromJsonSchema(stringJsonSchema);
assertEquals("vegetables",
Iterables.getOnlyElement(parsedSchema.getFieldNames()));
@@ -327,7 +328,7 @@ public class JsonSchemaConversionTest {
public void testUnsupportedTupleArrays() throws IOException {
try (InputStream inputStream =
getClass().getResourceAsStream("/json-schema/unsupported_tuple_arrays.json")) {
- String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), "UTF-8");
+ String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), UTF_8);
IllegalArgumentException thrownException =
assertThrows(
@@ -347,7 +348,7 @@ public class JsonSchemaConversionTest {
public void testUnsupportedNestedTupleArrays() throws IOException {
try (InputStream inputStream =
getClass().getResourceAsStream("/json-schema/unsupported_nested_tuple_array.json"))
{
- String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), "UTF-8");
+ String stringJsonSchema = new String(ByteStreams.toByteArray(inputStream), UTF_8);
IllegalArgumentException thrownException =
assertThrows(
diff --git
a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaDlqTest.java
b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaDlqTest.java
index e65d9591a0b..c702e44797a 100644
---
a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaDlqTest.java
+++
b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaDlqTest.java
@@ -17,6 +17,8 @@
*/
package org.apache.beam.sdk.io.kafka;
+import static java.nio.charset.StandardCharsets.UTF_8;
+
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -69,9 +71,9 @@ public class KafkaDlqTest {
try {
messages =
Arrays.asList(
- "{\"name\":\"a\"}".getBytes("UTF8"),
- "{\"name\":\"b\"}".getBytes("UTF8"),
- "{\"name\":\"c\"}".getBytes("UTF8"));
+ "{\"name\":\"a\"}".getBytes(UTF_8),
+ "{\"name\":\"b\"}".getBytes(UTF_8),
+ "{\"name\":\"c\"}".getBytes(UTF_8));
} catch (Exception e) {
}
PCollection<byte[]> input = p.apply(Create.of(messages));
@@ -93,9 +95,9 @@ public class KafkaDlqTest {
try {
messagesWithError =
Arrays.asList(
- "{\"error\":\"a\"}".getBytes("UTF8"),
- "{\"error\":\"b\"}".getBytes("UTF8"),
- "{\"error\":\"c\"}".getBytes("UTF8"));
+ "{\"error\":\"a\"}".getBytes(UTF_8),
+ "{\"error\":\"b\"}".getBytes(UTF_8),
+ "{\"error\":\"c\"}".getBytes(UTF_8));
} catch (Exception e) {
}
PCollection<byte[]> input = p.apply(Create.of(messagesWithError));
diff --git
a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOExternalTest.java
b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOExternalTest.java
index 1973f95ddc2..42b81fc2d59 100644
---
a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOExternalTest.java
+++
b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOExternalTest.java
@@ -185,11 +185,13 @@ public class KafkaIOExternalTest {
assertEquals(12345, byteArrayKafkaRecord.timestamp);
assertEquals(KafkaTimestampType.LOG_APPEND_TIME.id,
byteArrayKafkaRecord.timestampTypeId);
assertEquals(KafkaTimestampType.LOG_APPEND_TIME.name,
byteArrayKafkaRecord.timestampTypeName);
- assertEquals("dummyKey", new String(byteArrayKafkaRecord.key, "UTF-8"));
- assertEquals("dummyValue", new String(byteArrayKafkaRecord.value, "UTF-8"));
+ assertEquals("dummyKey", new String(byteArrayKafkaRecord.key, StandardCharsets.UTF_8));
+ assertEquals("dummyValue", new String(byteArrayKafkaRecord.value, StandardCharsets.UTF_8));
assertEquals(1, byteArrayKafkaRecord.headers.size());
assertEquals("dummyHeaderKey", byteArrayKafkaRecord.headers.get(0).key);
- assertEquals("dummyHeaderVal", new String(byteArrayKafkaRecord.headers.get(0).value, "UTF-8"));
+ assertEquals(
+ "dummyHeaderVal",
+ new String(byteArrayKafkaRecord.headers.get(0).value, StandardCharsets.UTF_8));
}
@Test
@@ -212,7 +214,9 @@ public class KafkaIOExternalTest {
assertNull(byteArrayKafkaRecord.value);
assertEquals(1, byteArrayKafkaRecord.headers.size());
assertEquals("dummyHeaderKey", byteArrayKafkaRecord.headers.get(0).key);
- assertEquals("dummyHeaderVal", new String(byteArrayKafkaRecord.headers.get(0).value, "UTF-8"));
+ assertEquals(
+ "dummyHeaderVal",
+ new String(byteArrayKafkaRecord.headers.get(0).value, StandardCharsets.UTF_8));
}
@Test
diff --git
a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaWriteSchemaTransformProviderTest.java
b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaWriteSchemaTransformProviderTest.java
index 98cdb0636c2..fed783f70a0 100644
---
a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaWriteSchemaTransformProviderTest.java
+++
b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaWriteSchemaTransformProviderTest.java
@@ -17,10 +17,10 @@
*/
package org.apache.beam.sdk.io.kafka;
+import static java.nio.charset.StandardCharsets.UTF_8;
import static
org.apache.beam.sdk.io.kafka.KafkaWriteSchemaTransformProvider.getRowToRawBytesFunction;
import static org.junit.Assert.assertEquals;
-import java.io.UnsupportedEncodingException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -114,15 +114,15 @@ public class KafkaWriteSchemaTransformProviderTest {
RAW_ROWS =
Arrays.asList(
Row.withSchema(BEAM_RAW_SCHEMA)
- .withFieldValue("payload", "a".getBytes("UTF8"))
+ .withFieldValue("payload", "a".getBytes(UTF_8))
.build(),
Row.withSchema(BEAM_RAW_SCHEMA)
- .withFieldValue("payload", "b".getBytes("UTF8"))
+ .withFieldValue("payload", "b".getBytes(UTF_8))
.build(),
Row.withSchema(BEAM_RAW_SCHEMA)
- .withFieldValue("payload", "c".getBytes("UTF8"))
+ .withFieldValue("payload", "c".getBytes(UTF_8))
.build());
- } catch (UnsupportedEncodingException e) {
+ } catch (Exception e) {
throw new RuntimeException(e);
}
}
@@ -146,9 +146,9 @@ public class KafkaWriteSchemaTransformProviderTest {
public void testKafkaErrorFnSuccess() throws Exception {
List<KV<byte[], byte[]>> msg =
Arrays.asList(
- KV.of(new byte[1], "{\"name\":\"a\"}".getBytes("UTF8")),
- KV.of(new byte[1], "{\"name\":\"b\"}".getBytes("UTF8")),
- KV.of(new byte[1], "{\"name\":\"c\"}".getBytes("UTF8")));
+ KV.of(new byte[1], "{\"name\":\"a\"}".getBytes(UTF_8)),
+ KV.of(new byte[1], "{\"name\":\"b\"}".getBytes(UTF_8)),
+ KV.of(new byte[1], "{\"name\":\"c\"}".getBytes(UTF_8)));
PCollection<Row> input = p.apply(Create.of(ROWS));
Schema errorSchema = ErrorHandling.errorSchema(BEAMSCHEMA);
@@ -168,9 +168,9 @@ public class KafkaWriteSchemaTransformProviderTest {
public void testKafkaErrorFnRawSuccess() throws Exception {
List<KV<byte[], byte[]>> msg =
Arrays.asList(
- KV.of(new byte[1], "a".getBytes("UTF8")),
- KV.of(new byte[1], "b".getBytes("UTF8")),
- KV.of(new byte[1], "c".getBytes("UTF8")));
+ KV.of(new byte[1], "a".getBytes(UTF_8)),
+ KV.of(new byte[1], "b".getBytes(UTF_8)),
+ KV.of(new byte[1], "c".getBytes(UTF_8)));
PCollection<Row> input = p.apply(Create.of(RAW_ROWS));
Schema errorSchema = ErrorHandling.errorSchema(BEAM_RAW_SCHEMA);