This is an automated email from the ASF dual-hosted git repository.

fokko pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/parquet-mr.git


The following commit(s) were added to refs/heads/master by this push:
     new 697875381 PARQUET-2397: Make use of `isEmpty` (#1220)
697875381 is described below

commit 697875381fb483fbbb546fed11905228e8486b4c
Author: Fokko Driesprong <[email protected]>
AuthorDate: Wed Nov 29 05:04:08 2023 -0800

    PARQUET-2397: Make use of `isEmpty` (#1220)
---
 .../src/main/java/org/apache/parquet/avro/AvroWriteSupport.java       | 4 ++--
 .../java/org/apache/parquet/cli/commands/ColumnMaskingCommand.java    | 2 +-
 .../main/java/org/apache/parquet/cli/commands/ColumnSizeCommand.java  | 2 +-
 .../java/org/apache/parquet/cli/commands/PruneColumnsCommand.java     | 2 +-
 .../parquet/internal/filter2/columnindex/ColumnIndexFilter.java       | 2 +-
 .../java/org/apache/parquet/io/ExpectationValidatingConverter.java    | 2 +-
 .../main/java/org/apache/parquet/crypto/FileEncryptionProperties.java | 2 +-
 .../org/apache/parquet/format/converter/ParquetMetadataConverter.java | 4 ++--
 .../main/java/org/apache/parquet/hadoop/rewrite/ParquetRewriter.java  | 2 +-
 .../crypto/propertiesfactory/SchemaCryptoPropertiesFactory.java       | 2 +-
 .../java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java | 3 ++-
 .../java/org/apache/parquet/hadoop/rewrite/ParquetRewriterTest.java   | 4 ++--
 .../test/java/org/apache/parquet/hadoop/util/ColumnEncryptorTest.java | 2 +-
 .../src/main/java/org/apache/parquet/pig/TupleWriteSupport.java       | 2 +-
 .../src/main/java/org/apache/parquet/pig/summary/MapSummaryData.java  | 2 +-
 .../src/test/java/org/apache/parquet/proto/utils/WriteUsingMR.java    | 2 +-
 .../main/java/org/apache/parquet/thrift/ThriftRecordConverter.java    | 2 +-
 .../thrift/projection/deprecated/DeprecatedFieldProjectionFilter.java | 3 ++-
 18 files changed, 23 insertions(+), 21 deletions(-)

diff --git a/parquet-avro/src/main/java/org/apache/parquet/avro/AvroWriteSupport.java b/parquet-avro/src/main/java/org/apache/parquet/avro/AvroWriteSupport.java
index 692e3fac0..bcd124be6 100644
--- a/parquet-avro/src/main/java/org/apache/parquet/avro/AvroWriteSupport.java
+++ b/parquet-avro/src/main/java/org/apache/parquet/avro/AvroWriteSupport.java
@@ -225,7 +225,7 @@ public class AvroWriteSupport<T> extends WriteSupport<T> {
     Type valueType = innerGroup.getType(1);
 
     recordConsumer.startGroup(); // group wrapper (original type MAP)
-    if (map.size() > 0) {
+    if (!map.isEmpty()) {
       recordConsumer.startField(MAP_REPEATED_NAME, 0);
 
       for (Map.Entry<CharSequence, V> entry : map.entrySet()) {
@@ -659,7 +659,7 @@ public class AvroWriteSupport<T> extends WriteSupport<T> {
   private class ThreeLevelListWriter extends ListWriter {
     @Override
     protected void writeCollection(GroupType type, Schema schema, 
Collection<?> collection) {
-      if (collection.size() > 0) {
+      if (!collection.isEmpty()) {
         recordConsumer.startField(LIST_REPEATED_NAME, 0);
         GroupType repeatedType = type.getType(0).asGroupType();
         Type elementType = repeatedType.getType(0);
diff --git a/parquet-cli/src/main/java/org/apache/parquet/cli/commands/ColumnMaskingCommand.java b/parquet-cli/src/main/java/org/apache/parquet/cli/commands/ColumnMaskingCommand.java
index c984ec7a7..f6c189b84 100644
--- a/parquet-cli/src/main/java/org/apache/parquet/cli/commands/ColumnMaskingCommand.java
+++ b/parquet-cli/src/main/java/org/apache/parquet/cli/commands/ColumnMaskingCommand.java
@@ -85,7 +85,7 @@ public class ColumnMaskingCommand extends BaseCommand {
     Preconditions.checkArgument(input != null && output != null,
       "Both input and output parquet file paths are required.");
 
-    Preconditions.checkArgument(cols != null && cols.size() > 0,
+    Preconditions.checkArgument(cols != null && !cols.isEmpty(),
       "columns cannot be null or empty");
 
     MaskMode maskMode = MaskMode.fromString(mode);
diff --git a/parquet-cli/src/main/java/org/apache/parquet/cli/commands/ColumnSizeCommand.java b/parquet-cli/src/main/java/org/apache/parquet/cli/commands/ColumnSizeCommand.java
index 2a59d0bfd..2da4b8542 100644
--- a/parquet-cli/src/main/java/org/apache/parquet/cli/commands/ColumnSizeCommand.java
+++ b/parquet-cli/src/main/java/org/apache/parquet/cli/commands/ColumnSizeCommand.java
@@ -74,7 +74,7 @@ public class ColumnSizeCommand extends BaseCommand {
     Map<String, Float> columnRatio = getColumnRatio(columnSizes);
 
     // If user defined columns, only print out size for those columns
-    if (columns != null && columns.size() > 0) {
+    if (columns != null && !columns.isEmpty()) {
       for (String inputColumn : columns) {
         long size = 0;
         float ratio = 0;
diff --git a/parquet-cli/src/main/java/org/apache/parquet/cli/commands/PruneColumnsCommand.java b/parquet-cli/src/main/java/org/apache/parquet/cli/commands/PruneColumnsCommand.java
index cb4e0e792..3f8a1f1f6 100644
--- a/parquet-cli/src/main/java/org/apache/parquet/cli/commands/PruneColumnsCommand.java
+++ b/parquet-cli/src/main/java/org/apache/parquet/cli/commands/PruneColumnsCommand.java
@@ -62,7 +62,7 @@ public class PruneColumnsCommand extends BaseCommand {
     Preconditions.checkArgument(input != null && output != null,
       "Both input and output parquet file paths are required.");
 
-    Preconditions.checkArgument(cols != null && cols.size() > 0,
+    Preconditions.checkArgument(cols != null && !cols.isEmpty(),
       "columns cannot be null or empty");
 
     Path inPath = new Path(input);
diff --git a/parquet-column/src/main/java/org/apache/parquet/internal/filter2/columnindex/ColumnIndexFilter.java b/parquet-column/src/main/java/org/apache/parquet/internal/filter2/columnindex/ColumnIndexFilter.java
index 935c25942..e4486823b 100644
--- a/parquet-column/src/main/java/org/apache/parquet/internal/filter2/columnindex/ColumnIndexFilter.java
+++ b/parquet-column/src/main/java/org/apache/parquet/internal/filter2/columnindex/ColumnIndexFilter.java
@@ -192,7 +192,7 @@ public class ColumnIndexFilter implements 
Visitor<RowRanges> {
   @Override
   public RowRanges visit(And and) {
     RowRanges leftResult = and.getLeft().accept(this);
-    if (leftResult.getRanges().size() == 0) {
+    if (leftResult.getRanges().isEmpty()) {
       return leftResult;
     }
 
diff --git a/parquet-column/src/test/java/org/apache/parquet/io/ExpectationValidatingConverter.java b/parquet-column/src/test/java/org/apache/parquet/io/ExpectationValidatingConverter.java
index 3e4731586..c756bed73 100644
--- a/parquet-column/src/test/java/org/apache/parquet/io/ExpectationValidatingConverter.java
+++ b/parquet-column/src/test/java/org/apache/parquet/io/ExpectationValidatingConverter.java
@@ -152,7 +152,7 @@ public class ExpectationValidatingConverter extends 
RecordMaterializer<Void> {
 
   private String path(List<GroupType> path, Type type) {
     StringBuilder pathString = new StringBuilder();
-    if (path.size() > 0) {
+    if (!path.isEmpty()) {
       for (int i = 1; i < path.size(); i++) {
         pathString.append(path.get(i).getName()).append('.');
       }
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/crypto/FileEncryptionProperties.java b/parquet-hadoop/src/main/java/org/apache/parquet/crypto/FileEncryptionProperties.java
index 208d53e7a..13e7a8471 100755
--- a/parquet-hadoop/src/main/java/org/apache/parquet/crypto/FileEncryptionProperties.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/crypto/FileEncryptionProperties.java
@@ -56,7 +56,7 @@ public class FileEncryptionProperties {
       throw new IllegalArgumentException("Wrong footer key length " + 
footerKey.length);
     }
     if (null != columnPropertyMap) {
-      if (columnPropertyMap.size() == 0) {
+      if (columnPropertyMap.isEmpty()) {
         throw new IllegalArgumentException("No encrypted columns");
       }
     } else {
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/format/converter/ParquetMetadataConverter.java b/parquet-hadoop/src/main/java/org/apache/parquet/format/converter/ParquetMetadataConverter.java
index 09b21538e..b976ad9ad 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/format/converter/ParquetMetadataConverter.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/format/converter/ParquetMetadataConverter.java
@@ -1247,7 +1247,7 @@ public class ParquetMetadataConverter {
     long preStartIndex = 0;
     long preCompressedSize = 0;
     boolean firstColumnWithMetadata = true;
-    if (rowGroups != null && rowGroups.size() > 0) {
+    if (rowGroups != null && !rowGroups.isEmpty()) {
       firstColumnWithMetadata = 
rowGroups.get(0).getColumns().get(0).isSetMeta_data();
     }
     for (RowGroup rowGroup : rowGroups) {
@@ -1321,7 +1321,7 @@ public class ParquetMetadataConverter {
     long preStartIndex = 0;
     long preCompressedSize = 0;
     boolean firstColumnWithMetadata = true;
-    if (rowGroups != null && rowGroups.size() > 0) {
+    if (rowGroups != null && !rowGroups.isEmpty()) {
       firstColumnWithMetadata = 
rowGroups.get(0).getColumns().get(0).isSetMeta_data();
     }
     for (RowGroup rowGroup : rowGroups) {
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/rewrite/ParquetRewriter.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/rewrite/ParquetRewriter.java
index 0de0009f5..8f774b9fc 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/rewrite/ParquetRewriter.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/rewrite/ParquetRewriter.java
@@ -648,7 +648,7 @@ public class ParquetRewriter implements Closeable {
       } else {
         List<Type> childFields = ((GroupType) field).getFields();
         List<Type> prunedFields = pruneColumnsInFields(childFields, 
currentPath, prunePaths);
-        if (prunedFields.size() > 0) {
+        if (!prunedFields.isEmpty()) {
           prunedField = ((GroupType) field).withNewFields(prunedFields);
         }
       }
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/crypto/propertiesfactory/SchemaCryptoPropertiesFactory.java b/parquet-hadoop/src/test/java/org/apache/parquet/crypto/propertiesfactory/SchemaCryptoPropertiesFactory.java
index 446fc6d98..7d65acadd 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/crypto/propertiesfactory/SchemaCryptoPropertiesFactory.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/crypto/propertiesfactory/SchemaCryptoPropertiesFactory.java
@@ -70,7 +70,7 @@ public class SchemaCryptoPropertiesFactory implements 
EncryptionPropertiesFactor
       getColumnEncryptionProperties(path, columnPropertyMap, conf);
     }
 
-    if (columnPropertyMap.size() == 0) {
+    if (columnPropertyMap.isEmpty()) {
       log.debug("No column is encrypted. Returning null so that Parquet can 
skip. Empty properties will cause Parquet exception");
       return null;
     }
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java
index 188a79643..4f74a1d4c 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java
@@ -20,6 +20,7 @@ package org.apache.parquet.hadoop.example;
 
 import static java.lang.Thread.sleep;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 
@@ -134,7 +135,7 @@ public class TestInputOutputFormat {
     @Override
     public org.apache.parquet.hadoop.api.ReadSupport.ReadContext 
init(InitContext context) {
       Set<String> counts = context.getKeyValueMetadata().get("my.count");
-      assertTrue("counts: " + counts, counts.size() > 0);
+      assertFalse("counts: " + counts, counts.isEmpty());
       return super.init(context);
     }
   }
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/rewrite/ParquetRewriterTest.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/rewrite/ParquetRewriterTest.java
index e66486531..da16ec190 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/rewrite/ParquetRewriterTest.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/rewrite/ParquetRewriterTest.java
@@ -286,7 +286,7 @@ public class ParquetRewriterTest {
 
     // Verify column encryption
     ParquetMetadata metaData = getFileMetaData(outputFile, 
fileDecryptionProperties);
-    assertTrue(metaData.getBlocks().size() > 0);
+    assertFalse(metaData.getBlocks().isEmpty());
     List<ColumnChunkMetaData> columns = 
metaData.getBlocks().get(0).getColumns();
     Set<String> set = new HashSet<>(Arrays.asList(encryptColumns));
     for (ColumnChunkMetaData column : columns) {
@@ -429,7 +429,7 @@ public class ParquetRewriterTest {
 
     // Verify the column is encrypted
     ParquetMetadata metaData = getFileMetaData(outputFile, 
fileDecryptionProperties);
-    assertTrue(metaData.getBlocks().size() > 0);
+    assertFalse(metaData.getBlocks().isEmpty());
     Set<String> encryptedColumns = new 
HashSet<>(Arrays.asList(encryptColumns));
     for (BlockMetaData blockMetaData : metaData.getBlocks()) {
       List<ColumnChunkMetaData> columns = blockMetaData.getColumns();
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/util/ColumnEncryptorTest.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/util/ColumnEncryptorTest.java
index 978a1d98b..ef3a7902f 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/util/ColumnEncryptorTest.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/util/ColumnEncryptorTest.java
@@ -125,7 +125,7 @@ public class ColumnEncryptorTest {
       EncDecProperties.getFileEncryptionProperties(encryptColumns, 
ParquetCipher.AES_GCM_CTR_V1, false));
 
     ParquetMetadata metaData = 
getParquetMetadata(EncDecProperties.getFileDecryptionProperties());
-    assertTrue(metaData.getBlocks().size() > 0);
+    assertFalse(metaData.getBlocks().isEmpty());
     List<ColumnChunkMetaData> columns = 
metaData.getBlocks().get(0).getColumns();
     Set<String> set = new HashSet<>(Arrays.asList(encryptColumns));
     for (ColumnChunkMetaData column : columns) {
diff --git a/parquet-pig/src/main/java/org/apache/parquet/pig/TupleWriteSupport.java b/parquet-pig/src/main/java/org/apache/parquet/pig/TupleWriteSupport.java
index fd1bb39cd..926cea944 100644
--- a/parquet-pig/src/main/java/org/apache/parquet/pig/TupleWriteSupport.java
+++ b/parquet-pig/src/main/java/org/apache/parquet/pig/TupleWriteSupport.java
@@ -147,7 +147,7 @@ public class TupleWriteSupport extends WriteSupport<Tuple> {
         @SuppressWarnings("unchecked") // I know
         Map<String, Object> map = (Map<String, Object>)t.get(i);
         recordConsumer.startGroup();
-        if (map.size() > 0) {
+        if (!map.isEmpty()) {
           recordConsumer.startField(mapType.getName(), 0);
           Set<Entry<String, Object>> entrySet = map.entrySet();
           for (Entry<String, Object> entry : entrySet) {
diff --git a/parquet-pig/src/main/java/org/apache/parquet/pig/summary/MapSummaryData.java b/parquet-pig/src/main/java/org/apache/parquet/pig/summary/MapSummaryData.java
index a8775c888..1d8b37780 100644
--- a/parquet-pig/src/main/java/org/apache/parquet/pig/summary/MapSummaryData.java
+++ b/parquet-pig/src/main/java/org/apache/parquet/pig/summary/MapSummaryData.java
@@ -37,7 +37,7 @@ public class MapSummaryData extends SummaryData {
     super.add(m);
     size.add(m.size());
     FieldSchema field = getField(schema, 0);
-    if (m.size() > 0 && key == null) {
+    if (!m.isEmpty() && key == null) {
       key = new FieldSummaryData();
       key.setName(getName(field));
       value = new FieldSummaryData();
diff --git a/parquet-protobuf/src/test/java/org/apache/parquet/proto/utils/WriteUsingMR.java b/parquet-protobuf/src/test/java/org/apache/parquet/proto/utils/WriteUsingMR.java
index 90bb3fd75..0380d08cf 100644
--- a/parquet-protobuf/src/test/java/org/apache/parquet/proto/utils/WriteUsingMR.java
+++ b/parquet-protobuf/src/test/java/org/apache/parquet/proto/utils/WriteUsingMR.java
@@ -64,7 +64,7 @@ public class WriteUsingMR {
   public static class WritingMapper extends Mapper<LongWritable, Text, Void, 
Message> {
 
     public void run(Context context) throws IOException, InterruptedException {
-      if (inputMessages == null || inputMessages.size() == 0) {
+      if (inputMessages == null || inputMessages.isEmpty()) {
         throw new RuntimeException("No mock data given");
       } else {
         for (Message msg : inputMessages) {
diff --git a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftRecordConverter.java b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftRecordConverter.java
index d0649212f..238f32bdd 100644
--- a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftRecordConverter.java
+++ b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftRecordConverter.java
@@ -733,7 +733,7 @@ public class ThriftRecordConverter<T> extends 
RecordMaterializer<T> {
 
     @Override
     public void end() {
-      if (elementEvents.size() > 0) {
+      if (!elementEvents.isEmpty()) {
         listEvents.addAll(elementEvents);
       } else {
         nullElementCount += 1;
diff --git a/parquet-thrift/src/main/java/org/apache/parquet/thrift/projection/deprecated/DeprecatedFieldProjectionFilter.java b/parquet-thrift/src/main/java/org/apache/parquet/thrift/projection/deprecated/DeprecatedFieldProjectionFilter.java
index ab32ddd07..08debf1b7 100644
--- a/parquet-thrift/src/main/java/org/apache/parquet/thrift/projection/deprecated/DeprecatedFieldProjectionFilter.java
+++ b/parquet-thrift/src/main/java/org/apache/parquet/thrift/projection/deprecated/DeprecatedFieldProjectionFilter.java
@@ -74,8 +74,9 @@ public class DeprecatedFieldProjectionFilter implements 
FieldProjectionFilter {
 
   @Override
   public boolean keep(FieldsPath path) {
-    if (filterPatterns.size() == 0)
+    if (filterPatterns.isEmpty()) {
       return true;
+    }
 
     for (PathGlobPatternStatus pattern : filterPatterns) {
       if (pattern.matches(path.toDelimitedString("/")))

Reply via email to