This is an automated email from the ASF dual-hosted git repository.

guyuqi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/bigtop.git


The following commit(s) were added to refs/heads/master by this push:
     new d120a926 BIGTOP-3706. Bump Hadoop to 3.3.3. (#916)
d120a926 is described below

commit d120a92611fb42578aa84dd6bbec0b025d13b7e6
Author: Masatake Iwasaki <[email protected]>
AuthorDate: Wed Jun 22 21:12:14 2022 +0900

    BIGTOP-3706. Bump Hadoop to 3.3.3. (#916)
    
    * BIGTOP-3706. Bump Hadoop to 3.3.3.
    
    * using CapacityScheduler to make smoke-tests run by the root user work 
with the default configuration.
---
 bigtop-packages/src/common/hadoop/do-component-build         |  8 ++------
 bigtop-packages/src/common/hadoop/install_hadoop.sh          |  5 +----
 .../src/common/hadoop/patch10-MAPREDUCE-7373.diff            | 12 ------------
 .../common/hadoop/patch7-remove-phantomjs-in-yarn-ui.diff    | 11 ++++++-----
 bigtop-packages/src/deb/hadoop/hadoop-httpfs.install         |  1 -
 bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec             |  1 -
 bigtop.bom                                                   |  2 +-
 provisioner/docker/docker-hadoop.sh                          |  7 +------
 8 files changed, 11 insertions(+), 36 deletions(-)

diff --git a/bigtop-packages/src/common/hadoop/do-component-build 
b/bigtop-packages/src/common/hadoop/do-component-build
index 0a52d78e..948b1489 100644
--- a/bigtop-packages/src/common/hadoop/do-component-build
+++ b/bigtop-packages/src/common/hadoop/do-component-build
@@ -39,8 +39,6 @@ if [ $HOSTTYPE = "powerpc64le" ] ; then
         #cleanup
         rm -rf ${LEVELDBJNI_HOME}
         rm -rf ${LEVELDB_HOME}
-        mvn install:install-file -DgroupId=com.google.protobuf 
-DartifactId=protoc -Dversion=2.5.0 \
-            -Dclassifier=linux-ppcle_64 -Dpackaging=exe 
-Dfile=/usr/local/bin/protoc
 fi
 ## BIGTOP-2288
 
@@ -103,8 +101,6 @@ EOF
         #cleanup
         rm -rf ${LEVELDBJNI_HOME}
         rm -rf ${LEVELDB_HOME}
-        mvn install:install-file -DgroupId=com.google.protobuf 
-DartifactId=protoc -Dversion=2.5.0 \
-            -Dclassifier=linux-aarch_64 -Dpackaging=exe 
-Dfile=/usr/local/bin/protoc
 fi
 ## BIGTOP-3027
 
@@ -122,13 +118,13 @@ mkdir build/src
  
 # Build artifacts
 MAVEN_OPTS="-Dzookeeper.version=$ZOOKEEPER_VERSION "
-MAVEN_OPTS+="-DskipTests -DskipTest -DskipITs "
+MAVEN_OPTS+="-DskipTests -DskipITs "
 
 # Include common Maven Deployment logic
 . $(dirname ${0})/maven_deploy.sh
 
 # Build artifacts
-mvn $ANT_OPTS $BUNDLE_SNAPPY -Pdist -Pnative -Psrc -Pyarn-ui -Dtar 
${MAVEN_OPTS} install package ${EXTRA_GOALS} "$@"
+mvn $BUNDLE_SNAPPY -Pdist -Pnative -Psrc -Pyarn-ui -Dtar ${MAVEN_OPTS} install 
${EXTRA_GOALS} "$@"
 mvn site site:stage ${MAVEN_OPTS} $@
 
 (cd build ; tar --strip-components=1 -xzvf  
../hadoop-dist/target/hadoop-${HADOOP_VERSION}.tar.gz)
diff --git a/bigtop-packages/src/common/hadoop/install_hadoop.sh 
b/bigtop-packages/src/common/hadoop/install_hadoop.sh
index 68152a42..ca92a295 100755
--- a/bigtop-packages/src/common/hadoop/install_hadoop.sh
+++ b/bigtop-packages/src/common/hadoop/install_hadoop.sh
@@ -199,15 +199,13 @@ install -d -m 0755 ${HADOOP_DIR}/lib
 cp ${BUILD_DIR}/share/hadoop/common/lib/*.jar ${HADOOP_DIR}/lib
 install -d -m 0755 ${HADOOP_DIR}/tools/lib
 cp ${BUILD_DIR}/share/hadoop/tools/lib/*.jar ${HADOOP_DIR}/tools/lib
-install -d -m 0755 ${MAPREDUCE_DIR}/lib
-cp ${BUILD_DIR}/share/hadoop/mapreduce/lib/*.jar ${MAPREDUCE_DIR}/lib
 install -d -m 0755 ${HDFS_DIR}/lib 
 cp ${BUILD_DIR}/share/hadoop/hdfs/lib/*.jar ${HDFS_DIR}/lib
 install -d -m 0755 ${YARN_DIR}/lib
 cp ${BUILD_DIR}/share/hadoop/yarn/lib/*.jar ${YARN_DIR}/lib
 install -d -m 0755 ${YARN_DIR}/timelineservice/lib
 cp ${BUILD_DIR}/share/hadoop/yarn/timelineservice/lib/*.jar 
${YARN_DIR}/timelineservice/lib
-chmod 644 ${HADOOP_DIR}/lib/*.jar ${MAPREDUCE_DIR}/lib/*.jar 
${HDFS_DIR}/lib/*.jar ${YARN_DIR}/lib/*.jar 
${YARN_DIR}/timelineservice/lib/*.jar
+chmod 644 ${HADOOP_DIR}/lib/*.jar ${HDFS_DIR}/lib/*.jar ${YARN_DIR}/lib/*.jar 
${YARN_DIR}/timelineservice/lib/*.jar
 
 # Install webapps
 cp -ra ${BUILD_DIR}/share/hadoop/hdfs/webapps ${HDFS_DIR}/
@@ -354,7 +352,6 @@ install -d -m 0755 
$PREFIX/var/{log,run,lib}/hadoop-mapreduce
 for DIR in ${HADOOP_DIR} ${HDFS_DIR} ${YARN_DIR} ${MAPREDUCE_DIR} ; do
   (cd $DIR &&
    rm -fv *-sources.jar
-   rm -fv lib/hadoop-*.jar
    for j in hadoop-*.jar; do
      if [[ $j =~ hadoop-(.*)-${HADOOP_VERSION}.jar ]]; then
        name=${BASH_REMATCH[1]}
diff --git a/bigtop-packages/src/common/hadoop/patch10-MAPREDUCE-7373.diff 
b/bigtop-packages/src/common/hadoop/patch10-MAPREDUCE-7373.diff
deleted file mode 100644
index 51229a38..00000000
--- a/bigtop-packages/src/common/hadoop/patch10-MAPREDUCE-7373.diff
+++ /dev/null
@@ -1,12 +0,0 @@
-diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
-index ae3b9c6029e..4c32838afb0 100644
---- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
-+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
-@@ -27,6 +27,7 @@ set(GTEST_SRC_DIR 
${CMAKE_SOURCE_DIR}/../../../../hadoop-common-project/hadoop-c
- # Add extra compiler and linker flags.
- # -Wno-sign-compare
- hadoop_add_compiler_flags("-DNDEBUG -DSIMPLE_MEMCPY -fno-strict-aliasing 
-fsigned-char")
-+set(CMAKE_CXX_STANDARD 11)
- 
- # Source location.
- set(SRC main/native)
diff --git 
a/bigtop-packages/src/common/hadoop/patch7-remove-phantomjs-in-yarn-ui.diff 
b/bigtop-packages/src/common/hadoop/patch7-remove-phantomjs-in-yarn-ui.diff
index a5ae472b..8c075bb0 100644
--- a/bigtop-packages/src/common/hadoop/patch7-remove-phantomjs-in-yarn-ui.diff
+++ b/bigtop-packages/src/common/hadoop/patch7-remove-phantomjs-in-yarn-ui.diff
@@ -1,8 +1,8 @@
 diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-ui/src/main/webapp/yarn.lock 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-ui/src/main/webapp/yarn.lock
-index a9b3ab758c3..94b6f4b5e04 100644
+index 43b2fc7d8c0..fe968cbd49e 100644
 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-ui/src/main/webapp/yarn.lock
 +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-ui/src/main/webapp/yarn.lock
-@@ -1531,8 +1531,6 @@ em-helpers@^0.8.0:
+@@ -1912,8 +1912,6 @@ em-helpers@^0.8.0:
      ember-cli-htmlbars "^1.0.1"
      ember-cli-less "^1.4.0"
      source-map "^0.5.6"
@@ -11,7 +11,7 @@ index a9b3ab758c3..94b6f4b5e04 100644
  
  [email protected]:
    version "0.12.0"
-@@ -1541,8 +1539,6 @@ [email protected]:
+@@ -1923,8 +1921,6 @@ [email protected]:
      ember-cli-htmlbars "^1.0.1"
      ember-cli-less "^1.4.0"
      source-map "^0.5.6"
@@ -20,13 +20,14 @@ index a9b3ab758c3..94b6f4b5e04 100644
  
  [email protected]:
    version "1.0.2"
-@@ -4250,20 +4246,6 @@ performance-now@^2.1.0:
-   version "2.1.0"
+@@ -5148,21 +5144,6 @@ performance-now@^2.1.0:
    resolved 
"https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b";
+   integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=
  
 [email protected]:
 -  version "2.1.13"
 -  resolved 
"https://registry.yarnpkg.com/phantomjs-prebuilt/-/phantomjs-prebuilt-2.1.13.tgz#66556ad9e965d893ca5a7dc9e763df7e8697f76d";
+-  integrity sha1-ZlVq2ell2JPKWn3J52PffoaX920=
 -  dependencies:
 -    es6-promise "~4.0.3"
 -    extract-zip "~1.5.0"
diff --git a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install 
b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install
index 86196379..c8bb78fd 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install
+++ b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install
@@ -1,5 +1,4 @@
 /etc/hadoop/conf.empty/httpfs-env.sh
 /etc/hadoop/conf.empty/httpfs-log4j.properties
-/etc/hadoop/conf.empty/httpfs-signature.secret
 /etc/hadoop/conf.empty/httpfs-site.xml
 /var/lib/hadoop-httpfs
diff --git a/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec 
b/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
index 0c1f0eca..4394bfc7 100644
--- a/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
+++ b/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
@@ -765,7 +765,6 @@ fi
 %config(noreplace) /etc/default/%{name}-httpfs
 %config(noreplace) %{etc_hadoop}/conf.empty/httpfs-env.sh
 %config(noreplace) %{etc_hadoop}/conf.empty/httpfs-log4j.properties
-%config(noreplace) %{etc_hadoop}/conf.empty/httpfs-signature.secret
 %config(noreplace) %{etc_hadoop}/conf.empty/httpfs-site.xml
 %{initd_dir}/%{name}-httpfs
 %attr(0775,httpfs,httpfs) %{run_httpfs}
diff --git a/bigtop.bom b/bigtop.bom
index c6700e82..1a4f6854 100644
--- a/bigtop.bom
+++ b/bigtop.bom
@@ -145,7 +145,7 @@ bigtop {
     'hadoop' {
       name    = 'hadoop'
       relNotes = 'Apache Hadoop'
-      version { base = '3.2.3'; pkg = base; release = 1 }
+      version { base = '3.3.3'; pkg = base; release = 1 }
       tarball { destination = "${name}-${version.base}.tar.gz"
                 source      = "${name}-${version.base}-src.tar.gz" }
       url     { download_path = "/$name/common/$name-${version.base}"
diff --git a/provisioner/docker/docker-hadoop.sh 
b/provisioner/docker/docker-hadoop.sh
index 7049a12f..69b33c08 100755
--- a/provisioner/docker/docker-hadoop.sh
+++ b/provisioner/docker/docker-hadoop.sh
@@ -157,11 +157,6 @@ generate-config() {
         elasticsearch_bootstrap_system_call_filter=true
     fi
 
-    # Using FairScheduler instead of CapacityScheduler here is a workaround 
for BIGTOP-3406.
-    # Due to the default setting of the 
yarn.scheduler.capacity.maximum-am-resource-percent
-    # property defined in capacity-scheduler.xml (=0.1), some oozie jobs are 
not assigned
-    # enough resource to succeed. But this property can't be set via hiera for 
now,
-    # so we use FairScheduler as an easy workaround.
     cat > ./config/hieradata/site.yaml << EOF
 bigtop::hadoop_head_node: $1
 hadoop::hadoop_storage_dirs: [/data/1, /data/2]
@@ -169,7 +164,7 @@ bigtop::bigtop_repo_uri: $2
 bigtop::bigtop_repo_gpg_check: $gpg_check
 hadoop_cluster_node::cluster_components: $3
 hadoop_cluster_node::cluster_nodes: [$node_list]
-hadoop::common_yarn::yarn_resourcemanager_scheduler_class: 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler
+hadoop::common_yarn::yarn_resourcemanager_scheduler_class: 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler
 elasticsearch::bootstrap::system_call_filter: 
$elasticsearch_bootstrap_system_call_filter
 EOF
 }

Reply via email to