This is an automated email from the ASF dual-hosted git repository.

abhishek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git


The following commit(s) were added to refs/heads/master by this push:
     new 4868ef9529 Enable Arm builds (#12451)
4868ef9529 is described below

commit 4868ef952977e2cb527900eff59188687a2b2183
Author: Will Xu <[email protected]>
AuthorDate: Tue Apr 26 07:44:40 2022 -0700

    Enable Arm builds (#12451)
    
    This PR enables ARM builds on Travis. I've ported over the changes from
    @martin-g on reducing heap requirements for some of the tests to ensure
    they run well on Travis ARM instances.
---
 .travis.yml                                        | 24 +++++++++++++++++++++-
 .../druid/java/util/metrics/MonitorsTest.java      | 10 +++++++++
 .../druid/java/util/metrics/SigarLoadTest.java     | 13 +++++++++++-
 .../java/util/metrics/SigarPidDiscovererTest.java  |  6 ++++++
 .../hadoop/DatasourceIngestionSpecTest.java        |  8 ++++++++
 .../virtual/ExpressionVectorSelectorsTest.java     |  3 +--
 6 files changed, 60 insertions(+), 4 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 7c4fc85890..af43cbe0ec 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -186,7 +186,7 @@ jobs:
       name: "(openjdk11) packaging check"
       stage: Tests - phase 2
       jdk: openjdk11
-    
+
     - <<: *package
       name: "(openjdk15) packaging check"
       stage: Tests - phase 2
@@ -382,6 +382,28 @@ jobs:
       after_success:
         - (cd web-console && travis_retry npm run codecov)  # retry in case of 
network error
 
+    - name: "Build and test on ARM64 CPU architecture (1)"
+      stage: Tests - phase 2
+      arch: arm64-graviton2
+      dist: focal
+      virt: vm
+      group: edge
+      jdk: openjdk11
+      env:
+        - MAVEN_PROJECTS='core,indexing-hadoop,indexing-service,processing'
+      script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Ddruid.console.skip=true 
-DargLine=-Xmx3000m -T1C
+
+    - name: "Build and test on ARM64 CPU architecture (2)"
+      stage: Tests - phase 2
+      arch: arm64-graviton2
+      dist: focal
+      virt: vm
+      group: edge
+      jdk: openjdk11
+      env:
+        - MAVEN_PROJECTS='core,sql,server,services'
+      script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Ddruid.console.skip=true 
-DargLine=-Xmx3000m -T1C
+
     - name: "web console end-to-end test"
       before_install: *setup_generate_license
       install: web-console/script/druid build
diff --git 
a/core/src/test/java/org/apache/druid/java/util/metrics/MonitorsTest.java 
b/core/src/test/java/org/apache/druid/java/util/metrics/MonitorsTest.java
index 872af3dca2..6f58fa47d4 100644
--- a/core/src/test/java/org/apache/druid/java/util/metrics/MonitorsTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/metrics/MonitorsTest.java
@@ -23,12 +23,22 @@ import com.google.common.collect.ImmutableMap;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.emitter.core.Event;
 import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Before;
 import org.junit.Test;
 
 import java.util.List;
 
 public class MonitorsTest
 {
+  private static final String CPU_ARCH = System.getProperty("os.arch");
+
+  @Before
+  public void before()
+  {
+    // Do not run the tests on ARM64. Sigar library has no binaries for ARM64
+    Assume.assumeFalse("aarch64".equals(CPU_ARCH));
+  }
 
   @Test
   public void testSetFeed()
diff --git 
a/core/src/test/java/org/apache/druid/java/util/metrics/SigarLoadTest.java 
b/core/src/test/java/org/apache/druid/java/util/metrics/SigarLoadTest.java
index 42e1003dd4..40d7dada3c 100644
--- a/core/src/test/java/org/apache/druid/java/util/metrics/SigarLoadTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/metrics/SigarLoadTest.java
@@ -19,12 +19,23 @@
 
 package org.apache.druid.java.util.metrics;
 
-import junit.framework.Assert;
 import org.hyperic.sigar.Sigar;
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Before;
 import org.junit.Test;
 
 public class SigarLoadTest
 {
+  private static final String CPU_ARCH = System.getProperty("os.arch");
+
+  @Before
+  public void before()
+  {
+    // Do not run the tests on ARM64. Sigar library has no binaries for ARM64
+    Assume.assumeFalse("aarch64".equals(CPU_ARCH));
+  }
+
   @Test
   public void testSigarLoad()
   {
diff --git 
a/core/src/test/java/org/apache/druid/java/util/metrics/SigarPidDiscovererTest.java
 
b/core/src/test/java/org/apache/druid/java/util/metrics/SigarPidDiscovererTest.java
index 26357f8dc9..32711ec029 100644
--- 
a/core/src/test/java/org/apache/druid/java/util/metrics/SigarPidDiscovererTest.java
+++ 
b/core/src/test/java/org/apache/druid/java/util/metrics/SigarPidDiscovererTest.java
@@ -19,13 +19,19 @@
 
 package org.apache.druid.java.util.metrics;
 
+import org.junit.Assume;
 import org.junit.Test;
 
 public class SigarPidDiscovererTest
 {
+  private static final String CPU_ARCH = System.getProperty("os.arch");
+
   @Test
   public void simpleTest()
   {
+    // Do not run the tests on ARM64. Sigar library has no binaries for ARM64
+    Assume.assumeFalse("aarch64".equals(CPU_ARCH));
+
     // Just make sure we don't crash
     SigarPidDiscoverer.instance().getPid();
   }
diff --git 
a/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceIngestionSpecTest.java
 
b/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceIngestionSpecTest.java
index 98da95fc77..f2f6131225 100644
--- 
a/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceIngestionSpecTest.java
+++ 
b/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceIngestionSpecTest.java
@@ -22,12 +22,14 @@ package org.apache.druid.indexer.hadoop;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
+import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.query.filter.SelectorDimFilter;
 import org.apache.druid.segment.TestHelper;
 import org.apache.druid.timeline.DataSegment;
 import org.joda.time.Interval;
 import org.junit.Assert;
+import org.junit.Before;
 import org.junit.Test;
 
 import java.util.List;
@@ -38,6 +40,12 @@ public class DatasourceIngestionSpecTest
 {
   private static final ObjectMapper MAPPER = TestHelper.makeJsonMapper();
 
+  @Before
+  public void before()
+  {
+    NullHandling.initializeForTests();
+  }
+
   @Test
   public void testSingleIntervalSerde() throws Exception
   {
diff --git 
a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java
 
b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java
index 54fc20deed..a7ca0ad079 100644
--- 
a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java
+++ 
b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java
@@ -20,7 +20,6 @@
 package org.apache.druid.segment.virtual;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.granularity.Granularities;
 import org.apache.druid.java.util.common.guava.Sequence;
 import org.apache.druid.java.util.common.io.Closer;
@@ -262,7 +261,7 @@ public class ExpressionVectorSelectorsTest
           int rows = 0;
           while (!nonVectorized.isDone()) {
             Assert.assertEquals(
-                StringUtils.format("Failed at row %s", rows),
+                "Failed at row " + rows,
                 nonSelector.getObject(),
                 results.get(rows)
             );


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to