This is an automated email from the ASF dual-hosted git repository.
gangwu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/parquet-mr.git
The following commit(s) were added to refs/heads/master by this push:
new ed308ff61 PARQUET-2419: Reduce noisy logging when running test suite
(#1253)
ed308ff61 is described below
commit ed308ff61db00224fc2738cab1354bebe274b036
Author: Atour <[email protected]>
AuthorDate: Fri Jan 26 16:27:45 2024 +0100
PARQUET-2419: Reduce noisy logging when running test suite (#1253)
---
.github/workflows/ci-hadoop2.yml | 2 +-
.github/workflows/ci-hadoop3.yml | 2 +-
.github/workflows/vector-plugins.yml | 2 +-
.../org/apache/parquet/column/mem/TestMemPageStore.java | 8 ++++++--
.../parquet/io/ExpectationValidatingRecordConsumer.java | 1 -
.../java/org/apache/parquet/encodings/FileEncodingsIT.java | 9 +++++++--
.../apache/parquet/hadoop/DeprecatedInputFormatTest.java | 9 +++++++--
.../org/apache/parquet/hadoop/TestDirectCodecFactory.java | 6 +++++-
.../java/org/apache/parquet/statistics/TestStatistics.java | 7 ++++++-
.../test/java/org/apache/parquet/pig/TestParquetLoader.java | 7 ++++++-
.../java/org/apache/parquet/pig/summary/TestSummary.java | 13 ++++++++-----
.../apache/parquet/hadoop/thrift/TestInputOutputFormat.java | 5 +++--
.../org/apache/parquet/thrift/TestParquetWriteProtocol.java | 2 +-
pom.xml | 13 ++++++-------
14 files changed, 58 insertions(+), 28 deletions(-)
diff --git a/.github/workflows/ci-hadoop2.yml b/.github/workflows/ci-hadoop2.yml
index d2ce4be2d..ebc905f8a 100644
--- a/.github/workflows/ci-hadoop2.yml
+++ b/.github/workflows/ci-hadoop2.yml
@@ -53,4 +53,4 @@ jobs:
run: |
EXTRA_JAVA_TEST_ARGS=$(mvn help:evaluate
-Dexpression=extraJavaTestArgs -q -DforceStdout)
export MAVEN_OPTS="$MAVEN_OPTS $EXTRA_JAVA_TEST_ARGS"
- mvn verify --batch-mode -P hadoop2 javadoc:javadoc -Pci-test
+ mvn verify --batch-mode -P hadoop2 javadoc:javadoc
diff --git a/.github/workflows/ci-hadoop3.yml b/.github/workflows/ci-hadoop3.yml
index 54fa60c97..6a9ed78a5 100644
--- a/.github/workflows/ci-hadoop3.yml
+++ b/.github/workflows/ci-hadoop3.yml
@@ -54,4 +54,4 @@ jobs:
run: |
EXTRA_JAVA_TEST_ARGS=$(mvn help:evaluate
-Dexpression=extraJavaTestArgs -q -DforceStdout)
export MAVEN_OPTS="$MAVEN_OPTS $EXTRA_JAVA_TEST_ARGS"
- mvn verify --batch-mode javadoc:javadoc -Pci-test
+ mvn verify --batch-mode javadoc:javadoc
diff --git a/.github/workflows/vector-plugins.yml
b/.github/workflows/vector-plugins.yml
index f3abad9c2..73dd5e23d 100644
--- a/.github/workflows/vector-plugins.yml
+++ b/.github/workflows/vector-plugins.yml
@@ -54,4 +54,4 @@ jobs:
run: |
EXTRA_JAVA_TEST_ARGS=$(mvn help:evaluate
-Dexpression=extraJavaTestArgs -q -DforceStdout)
export MAVEN_OPTS="$MAVEN_OPTS $EXTRA_JAVA_TEST_ARGS"
- mvn verify --batch-mode -Pvector-plugins javadoc:javadoc -Pci-test
-pl
parquet-plugins/parquet-encoding-vector,parquet-plugins/parquet-plugins-benchmarks
-am
+ mvn verify --batch-mode -Pvector-plugins javadoc:javadoc -pl
parquet-plugins/parquet-encoding-vector,parquet-plugins/parquet-plugins-benchmarks
-am
diff --git
a/parquet-column/src/test/java/org/apache/parquet/column/mem/TestMemPageStore.java
b/parquet-column/src/test/java/org/apache/parquet/column/mem/TestMemPageStore.java
index dabd0a517..6628305e8 100644
---
a/parquet-column/src/test/java/org/apache/parquet/column/mem/TestMemPageStore.java
+++
b/parquet-column/src/test/java/org/apache/parquet/column/mem/TestMemPageStore.java
@@ -31,9 +31,13 @@ import org.apache.parquet.column.page.mem.MemPageStore;
import org.apache.parquet.column.statistics.LongStatistics;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class TestMemPageStore {
+ private static final Logger LOG =
LoggerFactory.getLogger(TestMemPageStore.class);
+
private String[] path = {"foo", "bar"};
@Test
@@ -48,12 +52,12 @@ public class TestMemPageStore {
pageWriter.writePage(BytesInput.from(new byte[735]), 209, stats,
BIT_PACKED, BIT_PACKED, PLAIN);
PageReader pageReader = memPageStore.getPageReader(col);
long totalValueCount = pageReader.getTotalValueCount();
- System.out.println(totalValueCount);
+ LOG.info(String.valueOf(totalValueCount));
int total = 0;
do {
DataPage readPage = pageReader.readPage();
total += readPage.getValueCount();
- System.out.println(readPage);
+ LOG.info(readPage.toString());
// TODO: assert
} while (total < totalValueCount);
}
diff --git
a/parquet-column/src/test/java/org/apache/parquet/io/ExpectationValidatingRecordConsumer.java
b/parquet-column/src/test/java/org/apache/parquet/io/ExpectationValidatingRecordConsumer.java
index f72a1658c..e21ebb6e1 100644
---
a/parquet-column/src/test/java/org/apache/parquet/io/ExpectationValidatingRecordConsumer.java
+++
b/parquet-column/src/test/java/org/apache/parquet/io/ExpectationValidatingRecordConsumer.java
@@ -33,7 +33,6 @@ public final class ExpectationValidatingRecordConsumer
extends RecordConsumer {
}
private void validate(String got) {
- // System.out.println(" \"" + got + "\";");
assertEquals("event #" + count, expectations.pop(), got);
++count;
}
diff --git
a/parquet-hadoop/src/test/java/org/apache/parquet/encodings/FileEncodingsIT.java
b/parquet-hadoop/src/test/java/org/apache/parquet/encodings/FileEncodingsIT.java
index f14adb2f9..416495601 100644
---
a/parquet-hadoop/src/test/java/org/apache/parquet/encodings/FileEncodingsIT.java
+++
b/parquet-hadoop/src/test/java/org/apache/parquet/encodings/FileEncodingsIT.java
@@ -64,6 +64,8 @@ import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class contains test cases to validate each data type encoding.
@@ -72,6 +74,9 @@ import org.junit.runners.Parameterized;
*/
@RunWith(Parameterized.class)
public class FileEncodingsIT {
+
+ private static final Logger LOG =
LoggerFactory.getLogger(FileEncodingsIT.class);
+
private static final int RANDOM_SEED = 1;
private static final int RECORD_COUNT = 2000000;
private static final int FIXED_LENGTH = 60;
@@ -156,7 +161,7 @@ public class FileEncodingsIT {
* This loop will make sure to test future writer versions added to
WriterVersion enum.
*/
for (WriterVersion writerVersion : WriterVersion.values()) {
- System.out.println(String.format(
+ LOG.info(String.format(
"Testing %s/%s/%s encodings using ROW_GROUP_SIZE=%d PAGE_SIZE=%d",
writerVersion, this.paramTypeName, this.compression,
TEST_ROW_GROUP_SIZE, TEST_PAGE_SIZE));
@@ -182,7 +187,7 @@ public class FileEncodingsIT {
* This loop will make sure to test future writer versions added to
WriterVersion enum.
*/
for (WriterVersion writerVersion : WriterVersion.values()) {
- System.out.println(String.format(
+ LOG.info(String.format(
"Testing %s/%s/%s + DICTIONARY encodings using ROW_GROUP_SIZE=%d
PAGE_SIZE=%d",
writerVersion, this.paramTypeName, this.compression,
TEST_ROW_GROUP_SIZE, TEST_PAGE_SIZE));
diff --git
a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/DeprecatedInputFormatTest.java
b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/DeprecatedInputFormatTest.java
index 5f3420824..0b446e665 100644
---
a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/DeprecatedInputFormatTest.java
+++
b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/DeprecatedInputFormatTest.java
@@ -66,12 +66,17 @@ import org.apache.parquet.hadoop.util.ContextUtil;
import org.apache.parquet.schema.MessageTypeParser;
import org.junit.Before;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* DeprecatedParquetInputFormat is used by cascading. It initializes the
recordReader using an initialize method with
* different parameters than ParquetInputFormat
*/
public class DeprecatedInputFormatTest {
+
+ private static final Logger LOG =
LoggerFactory.getLogger(DeprecatedInputFormatTest.class);
+
final Path parquetPath = new
Path("target/test/example/TestInputOutputFormat/parquet");
final Path inputPath = new
Path("src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java");
final Path outputPath = new
Path("target/test/example/TestInputOutputFormat/out");
@@ -317,10 +322,10 @@ public class DeprecatedInputFormatTest {
private void waitForJob(Job job) throws InterruptedException, IOException {
while (!job.isComplete()) {
- System.out.println("waiting for job " + job.getJobName());
+ LOG.info("waiting for job " + job.getJobName());
sleep(100);
}
- System.out.println("status for job " + job.getJobName() + ": " +
(job.isSuccessful() ? "SUCCESS" : "FAILURE"));
+ LOG.info("status for job " + job.getJobName() + ": " + (job.isSuccessful()
? "SUCCESS" : "FAILURE"));
if (!job.isSuccessful()) {
throw new RuntimeException("job failed " + job.getJobName());
}
diff --git
a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestDirectCodecFactory.java
b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestDirectCodecFactory.java
index 60108178f..e5b87a7e9 100644
---
a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestDirectCodecFactory.java
+++
b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestDirectCodecFactory.java
@@ -36,9 +36,13 @@ import
org.apache.parquet.compression.CompressionCodecFactory.BytesInputDecompre
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.junit.Assert;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class TestDirectCodecFactory {
+ private static final Logger LOG =
LoggerFactory.getLogger(TestDirectCodecFactory.class);
+
private enum Decompression {
ON_HEAP,
OFF_HEAP,
@@ -121,7 +125,7 @@ public class TestDirectCodecFactory {
final String msg = String.format(
"Failure while testing Codec: %s, OnHeapCompressionInput: %s,
Decompression Mode: %s, Data Size: %d",
codec.name(), useOnHeapCompression, decomp.name(), size);
- System.out.println(msg);
+ LOG.error(msg);
throw new RuntimeException(msg, e);
} finally {
if (rawBuf != null) {
diff --git
a/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestStatistics.java
b/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestStatistics.java
index 8562cf933..0915b4644 100644
---
a/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestStatistics.java
+++
b/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestStatistics.java
@@ -71,8 +71,13 @@ import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class TestStatistics {
+
+ private static final Logger LOG =
LoggerFactory.getLogger(TestStatistics.class);
+
private static final int MEGABYTE = 1 << 20;
private static final long RANDOM_SEED = 1441990701846L; //
System.currentTimeMillis();
@@ -481,7 +486,7 @@ public class TestStatistics {
File file = folder.newFile("test_file.parquet");
file.delete();
- System.out.println(String.format("RANDOM SEED: %s", RANDOM_SEED));
+ LOG.info(String.format("RANDOM SEED: %s", RANDOM_SEED));
Random random = new Random(RANDOM_SEED);
diff --git
a/parquet-pig/src/test/java/org/apache/parquet/pig/TestParquetLoader.java
b/parquet-pig/src/test/java/org/apache/parquet/pig/TestParquetLoader.java
index 0ed1a887b..e32720e8a 100644
--- a/parquet-pig/src/test/java/org/apache/parquet/pig/TestParquetLoader.java
+++ b/parquet-pig/src/test/java/org/apache/parquet/pig/TestParquetLoader.java
@@ -46,8 +46,13 @@ import org.apache.pig.data.Tuple;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.junit.Assert;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class TestParquetLoader {
+
+ private static final Logger LOG =
LoggerFactory.getLogger(TestParquetLoader.class);
+
@Test
public void testSchema() throws Exception {
String location = "garbage";
@@ -243,7 +248,7 @@ public class TestParquetLoader {
+ DataType.findTypeName(types[(i + 4) % types.length]) + "," + " b:"
+ DataType.findTypeName(types[(i + 5) % types.length]) + "');";
- System.out.println("Query: " + query);
+ LOG.info("Query: " + query);
pigServer.registerQuery(query);
pigServer.registerQuery("STORE B into 'out" + i + "' using
mock.Storage();");
pigServer.executeBatch();
diff --git
a/parquet-pig/src/test/java/org/apache/parquet/pig/summary/TestSummary.java
b/parquet-pig/src/test/java/org/apache/parquet/pig/summary/TestSummary.java
index 7f8d771fa..8adea91e9 100644
--- a/parquet-pig/src/test/java/org/apache/parquet/pig/summary/TestSummary.java
+++ b/parquet-pig/src/test/java/org/apache/parquet/pig/summary/TestSummary.java
@@ -35,9 +35,13 @@ import org.apache.pig.data.DataBag;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class TestSummary {
+ private static final Logger LOG = LoggerFactory.getLogger(TestSummary.class);
+
private static final TupleFactory tf = TupleFactory.getInstance();
private static final BagFactory bf = BagFactory.getInstance();
@@ -103,9 +107,8 @@ public class TestSummary {
private void validate(String result, int factor) throws IOException {
TupleSummaryData s = SummaryData.fromJSON(result, TupleSummaryData.class);
- // System.out.println(SummaryData.toPrettyJSON(s));
assertEquals(9 * factor, s.getCount());
- assertEquals(1 * factor, s.getFields().get(0).getNull().longValue());
+ assertEquals(factor, s.getFields().get(0).getNull().longValue());
assertEquals(7 * factor, s.getFields().get(0).getBag().getCount());
assertEquals(
18 * factor,
@@ -141,10 +144,10 @@ public class TestSummary {
pigServer.registerQuery("A = LOAD 'in' USING mock.Storage();");
pigServer.registerQuery("B = FOREACH (GROUP A ALL) GENERATE " +
Summary.class.getName() + "(A);");
pigServer.registerQuery("STORE B INTO 'out' USING mock.Storage();");
- System.out.println(data.get("out").get(0).get(0));
+ LOG.info(String.valueOf(data.get("out").get(0).get(0)));
TupleSummaryData s =
SummaryData.fromJSON((String) data.get("out").get(0).get(0),
TupleSummaryData.class);
- System.out.println(s);
+ LOG.info(String.valueOf(s));
}
@Test
@@ -163,7 +166,7 @@ public class TestSummary {
pigServer.registerQuery("STORE B INTO 'out' USING mock.Storage();");
TupleSummaryData s =
SummaryData.fromJSON((String) data.get("out").get(0).get(0),
TupleSummaryData.class);
- System.out.println(s);
+ LOG.info(String.valueOf(s));
assertEquals(0, s.getFields().get(1).getNumber().getValue().getMax(), 0);
}
}
diff --git
a/parquet-thrift/src/test/java/org/apache/parquet/hadoop/thrift/TestInputOutputFormat.java
b/parquet-thrift/src/test/java/org/apache/parquet/hadoop/thrift/TestInputOutputFormat.java
index f2885ebb3..9615e9c30 100644
---
a/parquet-thrift/src/test/java/org/apache/parquet/hadoop/thrift/TestInputOutputFormat.java
+++
b/parquet-thrift/src/test/java/org/apache/parquet/hadoop/thrift/TestInputOutputFormat.java
@@ -54,6 +54,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestInputOutputFormat {
+
private static final Logger LOG =
LoggerFactory.getLogger(TestInputOutputFormat.class);
public static AddressBook nextAddressbook(int i) {
@@ -244,11 +245,11 @@ public class TestInputOutputFormat {
private void read(String outputPath, int expected) throws
FileNotFoundException, IOException {
final BufferedReader out = new BufferedReader(new FileReader(new
File(outputPath.toString())));
- String lineOut = null;
+ String lineOut;
int lineNumber = 0;
while ((lineOut = out.readLine()) != null) {
lineOut = lineOut.substring(lineOut.indexOf("\t") + 1);
- System.out.println(lineOut);
+ LOG.info(lineOut);
++lineNumber;
}
out.close();
diff --git
a/parquet-thrift/src/test/java/org/apache/parquet/thrift/TestParquetWriteProtocol.java
b/parquet-thrift/src/test/java/org/apache/parquet/thrift/TestParquetWriteProtocol.java
index 248de8d90..9e562bf73 100644
---
a/parquet-thrift/src/test/java/org/apache/parquet/thrift/TestParquetWriteProtocol.java
+++
b/parquet-thrift/src/test/java/org/apache/parquet/thrift/TestParquetWriteProtocol.java
@@ -65,6 +65,7 @@ import org.slf4j.LoggerFactory;
import thrift.test.OneOfEach;
public class TestParquetWriteProtocol {
+
private static final Logger LOG =
LoggerFactory.getLogger(TestParquetWriteProtocol.class);
@Test
@@ -697,7 +698,6 @@ public class TestParquetWriteProtocol {
private void validateThrift(Configuration configuration, String[]
expectations, TBase<?, ?> a) throws TException {
final ThriftSchemaConverter thriftSchemaConverter = new
ThriftSchemaConverter(configuration);
- // System.out.println(a);
final Class<TBase<?, ?>> class1 = (Class<TBase<?, ?>>) a.getClass();
final MessageType schema = thriftSchemaConverter.convert(class1);
LOG.info("{}", schema);
diff --git a/pom.xml b/pom.xml
index 0b945076f..4d104ae1c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -107,8 +107,8 @@
<commons-lang3.version>3.14.0</commons-lang3.version>
<!-- properties for the profiles -->
- <surefire.argLine> </surefire.argLine>
- <surefire.logLevel>INFO</surefire.logLevel>
+ <surefire.argLine>-XX:MaxJavaStackTraceDepth=8</surefire.argLine>
+ <surefire.logLevel>ERROR</surefire.logLevel>
<!-- Resource intensive tests are enabled by default but disabled in the CI
environment -->
<enableResourceIntensiveTests>true</enableResourceIntensiveTests>
@@ -713,13 +713,12 @@
</build>
</profile>
- <!-- Profile for CI tests to have less output -->
+ <!-- Profile for tests to have more output -->
<profile>
- <id>ci-test</id>
+ <id>verbose-test</id>
<properties>
- <surefire.logLevel>WARN</surefire.logLevel>
- <surefire.argLine>-XX:MaxJavaStackTraceDepth=10</surefire.argLine>
- <enableResourceIntensiveTests>false</enableResourceIntensiveTests>
+ <surefire.logLevel>INFO</surefire.logLevel>
+ <surefire.argLine>-XX:MaxJavaStackTraceDepth=1024</surefire.argLine>
</properties>
</profile>