This is an automated email from the ASF dual-hosted git repository.
xushiyan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new f922837 [HUDI-1950] Fix Azure CI failure in TestParquetUtils (#2984)
f922837 is described below
commit f92283706408e25c48ed63abcbc9d07dc855106d
Author: Raymond Xu <[email protected]>
AuthorDate: Tue Jun 15 03:45:17 2021 -0700
[HUDI-1950] Fix Azure CI failure in TestParquetUtils (#2984)
* fix azure pipeline configs
* add pentaho.org in maven repositories
* Make sure file paths with scheme in TestParquetUtils
* add azure build status to README
---
README.md | 1 +
azure-pipelines.yml | 50 ++++------------------
.../apache/hudi/common/util/TestParquetUtils.java | 11 ++---
pom.xml | 4 ++
4 files changed, 19 insertions(+), 47 deletions(-)
diff --git a/README.md b/README.md
index c961001..4cb363a 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,7 @@ Hudi manages the storage of large analytical datasets on DFS (Cloud stores, HDFS
<https://hudi.apache.org/>
[](https://travis-ci.com/apache/hudi)
+[](https://dev.azure.com/apache-hudi-ci-org/apache-hudi-ci/_build/latest?definitionId=3&branchName=master)
[](https://www.apache.org/licenses/LICENSE-2.0.html)
[](http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.apache.hudi%22)
[](https://join.slack.com/t/apache-hudi/shared_invite/enQtODYyNDAxNzc5MTg2LTE5OTBlYmVhYjM0N2ZhOTJjOWM4YzBmMWU2MjZjMGE4NDc5ZDFiOGQ2N2VkYTVkNzU3ZDQ4OTI1NmFmYWQ0NzE)
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index cf2343d..2fdf429 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -34,16 +34,8 @@ stages:
jobs:
- job: unit_tests_spark_client
steps:
- - task: Maven@3
- inputs:
- mavenPomFile: 'pom.xml'
- goals: 'clean install'
- options: -DskipTests
- publishJUnitResults: false
- javaHomeOption: 'JDKVersion'
- jdkVersionOption: '1.8'
- jdkArchitectureOption: 'x64'
- mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
+ - script: |
+ mvn $(MAVEN_OPTS) clean install -DskipTests
- task: Cache@2
inputs:
key: 'maven | "$(Agent.OS)" | **/pom.xml'
@@ -66,16 +58,8 @@ stages:
mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
- job: unit_tests_utilities
steps:
- - task: Maven@3
- inputs:
- mavenPomFile: 'pom.xml'
- goals: 'clean install'
- options: -DskipTests
- publishJUnitResults: false
- javaHomeOption: 'JDKVersion'
- jdkVersionOption: '1.8'
- jdkArchitectureOption: 'x64'
- mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
+ - script: |
+ mvn $(MAVEN_OPTS) clean install -DskipTests
- task: Cache@2
inputs:
key: 'maven | "$(Agent.OS)" | **/pom.xml'
@@ -98,16 +82,8 @@ stages:
mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
- job: unit_tests_other_modules
steps:
- - task: Maven@3
- inputs:
- mavenPomFile: 'pom.xml'
- goals: 'clean install'
- options: -DskipTests
- publishJUnitResults: false
- javaHomeOption: 'JDKVersion'
- jdkVersionOption: '1.8'
- jdkArchitectureOption: 'x64'
- mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
+ - script: |
+ mvn $(MAVEN_OPTS) clean install -DskipTests
- task: Cache@2
inputs:
key: 'maven | "$(Agent.OS)" | **/pom.xml'
@@ -130,18 +106,8 @@ stages:
mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
- job: functional_tests
steps:
- - task: Maven@3
- inputs:
- mavenPomFile: 'pom.xml'
- goals: 'test'
- options: -Pfunctional-tests
- publishJUnitResults: false
- testResultsFiles: '**/surefire-reports/TEST-*.xml'
- testRunTitle: 'functional tests'
- javaHomeOption: 'JDKVersion'
- jdkVersionOption: '1.8'
- jdkArchitectureOption: 'x64'
- mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
+ - script: |
+ mvn $(MAVEN_OPTS) -Pfunctional-tests test
- job: integration_tests
steps:
- script: |
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/util/TestParquetUtils.java b/hudi-common/src/test/java/org/apache/hudi/common/util/TestParquetUtils.java
index b997554..6735c10 100644
--- a/hudi-common/src/test/java/org/apache/hudi/common/util/TestParquetUtils.java
+++ b/hudi-common/src/test/java/org/apache/hudi/common/util/TestParquetUtils.java
@@ -58,7 +58,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
*/
public class TestParquetUtils extends HoodieCommonTestHarness {
- private ParquetUtils parquetUtils = new ParquetUtils();
+ private ParquetUtils parquetUtils;
public static List<Arguments> bloomFilterTypeCodes() {
return Arrays.asList(
@@ -70,6 +70,7 @@ public class TestParquetUtils extends HoodieCommonTestHarness {
@BeforeEach
public void setup() {
initPath();
+ parquetUtils = new ParquetUtils();
}
@ParameterizedTest
@@ -80,7 +81,7 @@ public class TestParquetUtils extends HoodieCommonTestHarness {
rowKeys.add(UUID.randomUUID().toString());
}
- String filePath = Paths.get(basePath, "test.parquet").toString();
+ String filePath = Paths.get(basePath, "test.parquet").toUri().toString();
writeParquetFile(typeCode, filePath, rowKeys);
// Read and verify
@@ -110,7 +111,7 @@ public class TestParquetUtils extends HoodieCommonTestHarness {
}
}
- String filePath = Paths.get(basePath, "test.parquet").toString();
+ String filePath = Paths.get(basePath, "test.parquet").toUri().toString();
writeParquetFile(typeCode, filePath, rowKeys);
// Read and verify
@@ -136,7 +137,7 @@ public class TestParquetUtils extends HoodieCommonTestHarness {
expected.add(new HoodieKey(rowKey, partitionPath));
}
- String filePath = basePath + "/test.parquet";
+ String filePath = Paths.get(basePath, "test.parquet").toUri().toString();
Schema schema = HoodieAvroUtils.getRecordKeyPartitionPathSchema();
writeParquetFile(typeCode, filePath, rowKeys, schema, true, partitionPath);
@@ -152,7 +153,7 @@ public class TestParquetUtils extends HoodieCommonTestHarness {
@Test
public void testReadCounts() throws Exception {
- String filePath = basePath + "/test.parquet";
+ String filePath = Paths.get(basePath, "test.parquet").toUri().toString();
List<String> rowKeys = new ArrayList<>();
for (int i = 0; i < 123; i++) {
rowKeys.add(UUID.randomUUID().toString());
diff --git a/pom.xml b/pom.xml
index 014426e..a8054c2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1051,6 +1051,10 @@
<id>confluent</id>
<url>https://packages.confluent.io/maven/</url>
</repository>
+ <repository>
+ <id>pentaho.org</id>
+      <url>https://public.nexus.pentaho.org/repository/proxy-public-3rd-party-release/</url>
+ </repository>
</repositories>
<profiles>