This is an automated email from the ASF dual-hosted git repository.

chiyang10000 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hawq.git


The following commit(s) were added to refs/heads/master by this push:
     new 89ed32f  HAWQ-1783. Add GitHub Action Workflow for Build on Linux
89ed32f is described below

commit 89ed32fe8dceb185b0d3dd65c89d717045ae597c
Author: Chiyang Wan <chiyang10...@gmail.com>
AuthorDate: Mon Feb 22 13:27:50 2021 +0800

    HAWQ-1783. Add GitHub Action Workflow for Build on Linux
    
    - the abs function is declared in math.h
    - without passing in --dependency=, the bootstrap of libhdfs3 and
      libyarn just search their dependencies in /usr/local/hawq/, which just
      inherits from the default_prefix.
    
    With toolchain.sh provided, it is easy to set up the development env on
    both macOS and Linux. And there will be follow-up commits which apply
    and test it on Docker images.
---
 .github/workflows/build.yml                        | 73 +++++++++++-------
 .github/workflows/scripts/download/.gitignore      |  1 +
 .github/workflows/scripts/init_linux.sh            | 64 +++++++++++++++
 .github/workflows/scripts/toolchain.sh             | 90 ++++++++++++++++++++++
 .../src/dbcommon/function/arithmetic-function.h    |  2 +
 depends/libhdfs3/Makefile                          |  2 +-
 depends/libyarn/Makefile                           |  2 +-
 7 files changed, 204 insertions(+), 30 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 41de4ac..d16ec37 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -18,9 +18,14 @@ name: Apache HAWQ
 on: [push, pull_request]
 
 jobs:
-  build-on-macOS:
+  Build-and-Test:
 
-    runs-on: macOS-10.15
+    strategy:
+      fail-fast: false
+      matrix:
+        platform: [ubuntu-latest, macos-latest]
+
+    runs-on: ${{ matrix.platform }}
 
     steps:
 
@@ -28,67 +33,79 @@ jobs:
 
     - name: lint check
       run: |
-        csrutil status
         ulimit -a
         mvn apache-rat:check
 
+    - name: initilize OS
+      run: |
+        case $(uname -s) in
+          Darwin) .github/workflows/scripts/init_macos.sh ;;
+          Linux)  .github/workflows/scripts/init_linux.sh ;;
+        esac
+
     - name: install thirdparty
       run: |
-        # download prebuilt libraries
-        curl -sL 
http://yum.oushu-tech.com/oushurepo/yumrepo/internal/linux/toolchain/dependency-Darwin.tar.xz
 | tar -xJ -C $GITHUB_WORKSPACE
-        for file in $(find $GITHUB_WORKSPACE/dependency-Darwin/package/bin 
-name '*' -type f) $(find $GITHUB_WORKSPACE/dependency-Darwin/package/lib -name 
'*.dylib' -type f); do
-          if [[ $(file $file | grep Mach-O) ]]; then
-            install_name_tool -add_rpath 
$GITHUB_WORKSPACE/dependency-Darwin/package/lib $file;
-          fi
-        done
-        install_name_tool -add_rpath 
$GITHUB_WORKSPACE/dependency-Darwin/package/lib/perl5/5.28.0/darwin-thread-multi-2level/CORE/
 $GITHUB_WORKSPACE/dependency-Darwin/package/bin/perl
-        rm -rf $GITHUB_WORKSPACE/dependency-Darwin/package/include/hdfs
-        rm -rf $GITHUB_WORKSPACE/dependency-Darwin/package/lib/libhdfs3*
+        set -x
+        source .github/workflows/scripts/toolchain.sh
 
     - name: configure
       timeout-minutes: 10
       run: |
-        source $GITHUB_WORKSPACE/dependency-Darwin/package/env.sh
+        source .github/workflows/scripts/toolchain.sh
         export CFLAGS="$CFLAGS -w"
-        export LDFLAGS="$LDFLAGS 
-Wl,-rpath,$GITHUB_WORKSPACE/dependency-Darwin/package/lib"
+        export LDFLAGS="$LDFLAGS 
-Wl,-rpath,$HAWQ_TOOLCHAIN_PATH/dependency/package/lib"
 
         ./configure --enable-debug --prefix=/tmp/hawq || cat config.log
         test -f config.status
 
     - name: build hawq
       run: |
-        source $GITHUB_WORKSPACE/dependency-Darwin/package/env.sh
-        make -j$(sysctl -n hw.ncpu)
-        make -j$(sysctl -n hw.ncpu) install
+        source .github/workflows/scripts/toolchain.sh
+        make
+        make install
 
     - name: build feature-test
       run: |
-        source $GITHUB_WORKSPACE/dependency-Darwin/package/env.sh
-        make -j$(sysctl -n hw.ncpu) feature-test
+        source .github/workflows/scripts/toolchain.sh
+        make feature-test
 
-    - name: test executable
+    - name: package HAWQ on Linux
+      if: runner.os == 'Linux'
       run: |
-        for file in $(find /tmp/hawq/bin -name '*' -type f); do
+        source .github/workflows/scripts/toolchain.sh
+        cp -r $HAWQ_TOOLCHAIN_PATH/dependency/package/lib/* /tmp/hawq/lib
+
+    - name: package HAWQ on macOS
+      if: runner.os == 'macOS'
+      run: |
+        source .github/workflows/scripts/toolchain.sh
+        for file in $(find /tmp/hawq/bin /tmp/hawq/lib -name '*' -type f); do
           if [[ $(file $file | grep Mach-O) ]]; then
             install_name_tool -add_rpath /tmp/hawq/lib $file;
           fi
         done
+        cp -r $HAWQ_TOOLCHAIN_PATH/dependency/package/lib/* /tmp/hawq/lib
+
+    - name: test executable
+      run: |
         source /tmp/hawq/greenplum_path.sh
         postgres -V
         src/test/feature/feature-test --gtest_list_tests
 
     - name: install HDFS
       run: |
-        export HOMEBREW_NO_INSTALL_CLEANUP=1
-        brew install hadoop
-
-    - name: initilize macOS
-      run: .github/workflows/scripts/init_macos.sh
+        wget -nv 
https://archive.apache.org/dist/hadoop/common/hadoop-3.3.0/hadoop-3.3.0.tar.gz
+        sudo install -o $USER -d /usr/local/hadoop-3.3.0
+        sudo ln -snf hadoop-3.3.0 /usr/local/hadoop
+        sudo tee /usr/local/bin/hdfs <<<'exec /usr/local/hadoop/bin/hdfs $@'
+        sudo chmod a+x /usr/local/bin/hdfs
+        tar xf hadoop-3.3.0.tar.gz -C /usr/local/
 
     - name: initilize HDFS
       run: |
-        export HADOOP_HOME=/usr/local/opt/hadoop/libexec
+        export HADOOP_HOME=/usr/local/hadoop/
         .github/workflows/scripts/init_hdfs.sh
+        sudo -u $USER hdfs dfs -ls /
 
     - name: initilize HAWQ
       run: |
diff --git a/.github/workflows/scripts/download/.gitignore 
b/.github/workflows/scripts/download/.gitignore
new file mode 100644
index 0000000..57c53d5
--- /dev/null
+++ b/.github/workflows/scripts/download/.gitignore
@@ -0,0 +1 @@
+./*
diff --git a/.github/workflows/scripts/init_linux.sh 
b/.github/workflows/scripts/init_linux.sh
new file mode 100755
index 0000000..357adf9
--- /dev/null
+++ b/.github/workflows/scripts/init_linux.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+set -e
+
+
+
+# Setup passphraseless ssh
+test -f ~/.ssh/id_rsa || ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
+cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
+chmod go-w ~
+chmod 0700 ~/.ssh
+chmod 0600 ~/.ssh/authorized_keys
+
+tee -a ~/.ssh/config <<EOF_ssh_config
+Host *
+   StrictHostKeyChecking no
+   UserKnownHostsFile=/dev/null
+EOF_ssh_config
+
+ssh -v localhost whoami
+
+# Configure system kernel state
+sudo tee /etc/sysctl.conf << EOF_sysctl
+kernel.shmmax = 1000000000
+kernel.shmmni = 4096
+kernel.shmall = 4000000000
+kernel.sem = 250 512000 100 2048
+kernel.sysrq = 1
+kernel.core_uses_pid = 1
+kernel.msgmnb = 65536
+kernel.msgmax = 65536
+kernel.msgmni = 2048
+net.ipv4.tcp_syncookies = 0
+net.ipv4.conf.default.accept_source_route = 0
+net.ipv4.tcp_max_syn_backlog = 200000
+net.ipv4.conf.all.arp_filter = 1
+net.ipv4.ip_local_port_range = 10000 65535
+net.core.netdev_max_backlog = 200000
+net.netfilter.nf_conntrack_max = 524288
+fs.nr_open = 3000000
+kernel.threads-max = 798720
+kernel.pid_max = 798720
+
+net.core.rmem_max=2097152
+net.core.wmem_max=2097152
+net.core.somaxconn=4096
+EOF_sysctl
+sudo sysctl -p
+
+# Add data folder
+sudo install -o $USER -d /tmp/db_data/
diff --git a/.github/workflows/scripts/toolchain.sh 
b/.github/workflows/scripts/toolchain.sh
new file mode 100644
index 0000000..f7339c8
--- /dev/null
+++ b/.github/workflows/scripts/toolchain.sh
@@ -0,0 +1,90 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+export HAWQ_TOOLCHAIN_PATH=$(cd "$( dirname "${BASH_SOURCE[0]-$0}" )" && 
pwd)/download
+
+# GitHub Release for third-party package
+# https://github.com/apache/hawq/releases/tag/thirdparty
+REPO=https://github.com/apache/hawq/releases/download/thirdparty/
+
+
+
+###
+### macOS
+###
+if [[ $(uname -s) == Darwin ]]; then
+  if [ ! -d $HAWQ_TOOLCHAIN_PATH/dependency-Darwin ]; then
+    (cd $HAWQ_TOOLCHAIN_PATH && curl -OL $REPO/dependency-Darwin.tar.xz)
+    (cd $HAWQ_TOOLCHAIN_PATH && tar xJf dependency-Darwin.tar.xz -C 
$HAWQ_TOOLCHAIN_PATH)
+    ln -snf dependency-Darwin $HAWQ_TOOLCHAIN_PATH/dependency
+
+    for file in $(find $HAWQ_TOOLCHAIN_PATH/dependency/package/bin -name '*' 
-type f) $(find $HAWQ_TOOLCHAIN_PATH/dependency/package/lib -name '*.dylib' 
-type f); do
+      if [[ $(file $file | grep Mach-O) ]]; then
+        install_name_tool -add_rpath 
$HAWQ_TOOLCHAIN_PATH/dependency/package/lib $file || true
+      fi
+    done
+    install_name_tool -add_rpath 
$HAWQ_TOOLCHAIN_PATH/dependency/package/lib/perl5/5.28.0/darwin-thread-multi-2level/CORE/
 $HAWQ_TOOLCHAIN_PATH/dependency/package/bin/perl
+  fi
+
+  export MAKEFLAGS=-j$(sysctl -n hw.ncpu)
+fi
+
+
+
+###
+### Linux
+###
+if [[ $(uname -s) == Linux ]]; then
+  if [ ! -d $HAWQ_TOOLCHAIN_PATH/gcc ]; then
+    (cd $HAWQ_TOOLCHAIN_PATH && curl -OL 
$REPO/gcc-7.4.0-x86_64-linux-sles11.4.tar.xz)
+    (cd $HAWQ_TOOLCHAIN_PATH && tar xJf gcc-7.4.0-x86_64-linux-sles11.4.tar.xz 
-C $HAWQ_TOOLCHAIN_PATH)
+    ln -snf gcc-7.4.0-x86_64-linux-sles11.4 $HAWQ_TOOLCHAIN_PATH/gcc
+  fi
+  if [ ! -d $HAWQ_TOOLCHAIN_PATH/cmake ]; then
+    (cd $HAWQ_TOOLCHAIN_PATH && curl -OL 
$REPO/cmake-3.12.4-Linux-x86_64.tar.gz)
+    (cd $HAWQ_TOOLCHAIN_PATH && tar xzf cmake-3.12.4-Linux-x86_64.tar.gz -C 
$HAWQ_TOOLCHAIN_PATH)
+    ln -snf cmake-3.12.4-Linux-x86_64 $HAWQ_TOOLCHAIN_PATH/cmake
+  fi
+  if [ ! -d $HAWQ_TOOLCHAIN_PATH/dependency-gcc-x86_64-Linux/ ]; then
+    (cd $HAWQ_TOOLCHAIN_PATH && curl -OL 
$REPO/dependency-gcc-x86_64-Linux.tar.gz)
+    (cd $HAWQ_TOOLCHAIN_PATH && tar xzf dependency-gcc-x86_64-Linux.tar.gz -C 
$HAWQ_TOOLCHAIN_PATH)
+    ln -snf dependency-gcc-x86_64-Linux $HAWQ_TOOLCHAIN_PATH/dependency
+  fi
+
+  export PATH=$HAWQ_TOOLCHAIN_PATH/gcc/bin:$HAWQ_TOOLCHAIN_PATH/cmake/bin:$PATH
+  export LD_LIBRARY_PATH=$HAWQ_TOOLCHAIN_PATH/gcc/lib64/:$LD_LIBRARY_PATH
+
+  export 
CPATH=$HAWQ_TOOLCHAIN_PATH/gcc/include/c++/7.4.0/:$HAWQ_TOOLCHAIN_PATH/gcc/include/c++/7.4.0/x86_64-pc-linux-gnu/
+  export CPATH=$CPATH:/usr/include/x86_64-linux-gnu/
+  export 
LIBRARY_PATH=$HAWQ_TOOLCHAIN_PATH/gcc/lib64/:/usr/lib/x86_64-linux-gnu/
+
+  unset CPPFLAGS
+  export CFLAGS='-std=gnu11 -fno-use-linker-plugin'
+  export CXXFLAGS='-fpermissive -fno-use-linker-plugin'
+  unset LDFLAGS
+
+  export CC=gcc
+  export CXX=g++
+  export LD=ld
+
+  export MAKEFLAGS=-j$(nproc)
+fi
+
+
+
+###
+rm -rf $HAWQ_TOOLCHAIN_PATH/dependency/package/include/hdfs
+rm -rf $HAWQ_TOOLCHAIN_PATH/dependency/package/lib/libhdfs3*
+source $HAWQ_TOOLCHAIN_PATH/dependency/package/env.sh
diff --git a/depends/dbcommon/src/dbcommon/function/arithmetic-function.h 
b/depends/dbcommon/src/dbcommon/function/arithmetic-function.h
index 8377454..023e542 100644
--- a/depends/dbcommon/src/dbcommon/function/arithmetic-function.h
+++ b/depends/dbcommon/src/dbcommon/function/arithmetic-function.h
@@ -20,6 +20,8 @@
 #ifndef DBCOMMON_SRC_DBCOMMON_FUNCTION_ARITHMETIC_FUNCTION_H_
 #define DBCOMMON_SRC_DBCOMMON_FUNCTION_ARITHMETIC_FUNCTION_H_
 
+#include <math.h>
+
 #include <cfloat>
 #include <cmath>
 #include <functional>
diff --git a/depends/libhdfs3/Makefile b/depends/libhdfs3/Makefile
index a60d8a8..ae5e2aa 100644
--- a/depends/libhdfs3/Makefile
+++ b/depends/libhdfs3/Makefile
@@ -66,7 +66,7 @@ pre-config:
        mkdir -p build; \
        cd build; \
        if [ ! -f libhdfs3_build_timestamp ]; then \
-               $(abs_top_srcdir)/$(subdir)/bootstrap --prefix=$(prefix) 
$(PRE_CFG_ARG) && touch libhdfs3_build_timestamp; \
+               $(abs_top_srcdir)/$(subdir)/bootstrap --prefix=$(prefix) 
--dependency=$(prefix) $(PRE_CFG_ARG) && touch libhdfs3_build_timestamp; \
        fi
 
 else
diff --git a/depends/libyarn/Makefile b/depends/libyarn/Makefile
index c6754a5..d667901 100644
--- a/depends/libyarn/Makefile
+++ b/depends/libyarn/Makefile
@@ -66,7 +66,7 @@ pre-config:
        mkdir -p build; \
        cd build; \
        if [ ! -f libyarn_build_timestamp ]; then \
-               $(abs_top_srcdir)/$(subdir)/bootstrap --prefix=$(prefix) 
$(PRE_CFG_ARG) && touch libyarn_build_timestamp; \
+               $(abs_top_srcdir)/$(subdir)/bootstrap --prefix=$(prefix) 
--dependency=$(prefix) $(PRE_CFG_ARG) && touch libyarn_build_timestamp; \
        fi
 
 else

Reply via email to