This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch HBASE-15867
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/HBASE-15867 by this push:
     new 51aa28d  HBASE-15867 Disable nightly jobs and add flaky-tests directory to fix the jenkins pipeline job
51aa28d is described below

commit 51aa28db77ed4baafc10903581c1f402461e81c7
Author: Duo Zhang <zhang...@apache.org>
AuthorDate: Fri Aug 16 09:45:13 2019 +0800

    HBASE-15867 Disable nightly jobs and add flaky-tests directory to fix the jenkins pipeline job
---
 dev-support/Jenkinsfile                     | 525 ----------------------------
 dev-support/flaky-tests/findHangingTests.py | 115 ++++++
 2 files changed, 115 insertions(+), 525 deletions(-)

diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
deleted file mode 100644
index e8bf8c9..0000000
--- a/dev-support/Jenkinsfile
+++ /dev/null
@@ -1,525 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-pipeline {
-  agent {
-    node {
-      label 'ubuntu'
-    }
-  }
-  triggers {
-    cron('@daily')
-  }
-  options {
-    buildDiscarder(logRotator(numToKeepStr: '30'))
-    timeout (time: 9, unit: 'HOURS')
-    timestamps()
-    skipDefaultCheckout()
-  }
-  environment {
-    YETUS_RELEASE = '0.7.0'
-    // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
-    OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
-    OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
-    OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
-    OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'
-
-    PROJECT = 'hbase'
-    PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
-    PERSONALITY_FILE = 'tools/personality.sh'
-    // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag.
-    AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
-    WHITESPACE_IGNORE_LIST = '.*/generated/.*'
-    // output from surefire; sadly the archive function in yetus only works on file names.
-    ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
-    // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
-    TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
-    // Flaky urls for different branches. Replace '-' and '.' in branch name by '_' because those
-    // characters are not allowed in bash variable name.
-    // Not excluding flakies from the nightly build for now.
-    // EXCLUDE_TESTS_URL_master = 'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
-    // EXCLUDE_TESTS_URL_branch_2 = 'https://builds.apache.org/job/HBase-Find-Flaky-Tests-branch2.0/lastSuccessfulBuild/artifact/excludes/'
-  }
-  parameters {
-    booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
-
-    Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
-    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
-  }
-  stages {
-    stage ('yetus install') {
-      steps {
-        sh  '''#!/usr/bin/env bash
-echo "Ensure we have a copy of Apache Yetus."
-if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
-  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
-  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
-  if [ ! -d "${YETUS_DIR}" ]; then
-    echo "New download of Apache Yetus version ${YETUS_RELEASE}."
-    rm -rf "${WORKSPACE}/.gpg"
-    mkdir -p "${WORKSPACE}/.gpg"
-    chmod -R 700 "${WORKSPACE}/.gpg"
-
-    echo "install yetus project KEYS"
-    curl -L --fail -o "${WORKSPACE}/KEYS_YETUS" https://dist.apache.org/repos/dist/release/yetus/KEYS
-    gpg --homedir "${WORKSPACE}/.gpg" --import "${WORKSPACE}/KEYS_YETUS"
-
-    echo "download yetus release ${YETUS_RELEASE}"
-    curl -L --fail -O "https://dist.apache.org/repos/dist/release/yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz";
-    curl -L --fail -O "https://dist.apache.org/repos/dist/release/yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz.asc";
-    echo "verifying yetus release"
-    gpg --homedir "${WORKSPACE}/.gpg" --verify "yetus-${YETUS_RELEASE}-bin.tar.gz.asc"
-    mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
-  else
-    echo "Reusing cached download of Apache Yetus version ${YETUS_RELEASE}."
-  fi
-else
-  YETUS_DIR="${WORKSPACE}/yetus-git"
-  rm -rf "${YETUS_DIR}"
-  echo "downloading from github"
-  curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
-fi
-if [ ! -d "${YETUS_DIR}" ]; then
-  echo "unpacking yetus into '${YETUS_DIR}'"
-  mkdir -p "${YETUS_DIR}"
-  gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
-fi
-        '''
-        // Set up the file we need at PERSONALITY_FILE location
-        dir ("tools") {
-          sh """#!/usr/bin/env bash
-echo "Downloading Project personality."
-curl -L  -o personality.sh "${env.PROJECT_PERSONALITY}"
-          """
-        }
-        stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
-      }
-    }
-    stage ('init health results') {
-      steps {
-        // stash with given name for all tests we might run, so that we can unstash all of them even if
-        // we skip some due to e.g. branch-specific JDK or Hadoop support
-        stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
-        stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
-        stash name: 'hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP2}/doesn't-match"
-        stash name: 'hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP3}/doesn't-match"
-        stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
-      }
-    }
-    stage ('health checks') {
-      parallel {
-        stage ('yetus general check') {
-          agent {
-            node {
-              label 'Hadoop'
-            }
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-            // TODO does hadoopcheck need to be jdk specific?
-            // Should be things that work with multijdk
-            TESTS = 'all,-unit,-findbugs'
-            // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not
-            // doing multijdk there.
-            MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64'
-            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
-            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
-          }
-          steps {
-            unstash 'yetus'
-            dir('component') {
-              checkout scm
-            }
-            sh '''#!/usr/bin/env bash
-              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
-              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
-              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
-'''
-            // TODO roll this into the hbase_nightly_yetus script
-            sh '''#!/usr/bin/env bash
-              rm -rf "${OUTPUT_DIR}/commentfile}"
-              declare -i status=0
-              if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
-                echo '(/) {color:green}+1 general checks{color}' >> "${OUTPUT_DIR}/commentfile"
-              else
-                echo '(x) {color:red}-1 general checks{color}' >> "${OUTPUT_DIR}/commentfile"
-                status=1
-              fi
-              echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
-              exit "${status}"
-            '''
-          }
-          post {
-            always {
-              stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
-              // Has to be relative to WORKSPACE.
-              archive "${env.OUTPUT_DIR_RELATIVE}/*"
-              archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
-              publishHTML target: [
-                allowMissing: true,
-                keepAll: true,
-                alwaysLinkToLastBuild: true,
-                // Has to be relative to WORKSPACE
-                reportDir: "${env.OUTPUT_DIR_RELATIVE}",
-                reportFiles: 'console-report.html',
-                reportName: 'General Nightly Build Report'
-              ]
-            }
-          }
-        }
-        stage ('yetus jdk7 checks') {
-          agent {
-            node {
-              label 'Hadoop'
-            }
-          }
-          when {
-            branch 'branch-1*'
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-            TESTS = 'mvninstall,compile,javac,unit,htmlout'
-            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
-            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
-            // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already.
-          }
-          steps {
-            unstash 'yetus'
-            dir('component') {
-              checkout scm
-            }
-            sh '''#!/usr/bin/env bash
-              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
-              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
-              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
-'''
-            sh '''#!/usr/bin/env bash
-              rm -rf "${OUTPUT_DIR}/commentfile}"
-              declare -i status=0
-              if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
-                echo '(/) {color:green}+1 jdk7 checks{color}' >> "${OUTPUT_DIR}/commentfile"
-              else
-                echo '(x) {color:red}-1 jdk7 checks{color}' >> "${OUTPUT_DIR}/commentfile"
-                status=1
-              fi
-              echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
-              exit "${status}"
-            '''
-          }
-          post {
-            always {
-              stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
-              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
-              // zip surefire reports.
-              sh '''#!/bin/bash -e
-                if [ -d "${OUTPUT_DIR}/archiver" ]; then
-                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
-                  if [[ 0 -ne ${count} ]]; then
-                    echo "zipping ${count} archived files"
-                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
-                  else
-                    echo "No archived files, skipping compressing."
-                  fi
-                else
-                  echo "No archiver directory, skipping compressing."
-                fi
-'''
-              // Has to be relative to WORKSPACE.
-              archive "${env.OUTPUT_DIR_RELATIVE}/*"
-              archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
-              publishHTML target: [
-                allowMissing         : true,
-                keepAll              : true,
-                alwaysLinkToLastBuild: true,
-                // Has to be relative to WORKSPACE.
-                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
-                reportFiles          : 'console-report.html',
-                reportName           : 'JDK7 Nightly Build Report'
-              ]
-            }
-          }
-        }
-        stage ('yetus jdk8 hadoop2 checks') {
-          agent {
-            node {
-              label 'Hadoop'
-            }
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-            TESTS = 'mvninstall,compile,javac,unit,findbugs,htmlout'
-            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
-            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
-            // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
-            // and is needed on branches that do both jdk7 and jdk8
-            SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
-          }
-          steps {
-            unstash 'yetus'
-            dir('component') {
-              checkout scm
-            }
-            sh '''#!/usr/bin/env bash
-              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
-              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
-              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
-'''
-            sh '''#!/usr/bin/env bash
-              rm -rf "${OUTPUT_DIR}/commentfile}"
-              declare -i status=0
-              if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
-                echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' >> "${OUTPUT_DIR}/commentfile"
-              else
-                echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >> "${OUTPUT_DIR}/commentfile"
-                status=1
-              fi
-              echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
-              exit "${status}"
-            '''
-          }
-          post {
-            always {
-              stash name: 'hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
-              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
-              // zip surefire reports.
-              sh '''#!/bin/bash -e
-                if [ -d "${OUTPUT_DIR}/archiver" ]; then
-                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
-                  if [[ 0 -ne ${count} ]]; then
-                    echo "zipping ${count} archived files"
-                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
-                  else
-                    echo "No archived files, skipping compressing."
-                  fi
-                else
-                  echo "No archiver directory, skipping compressing."
-                fi
-'''
-              // Has to be relative to WORKSPACE.
-              archive "${env.OUTPUT_DIR_RELATIVE}/*"
-              archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
-              publishHTML target: [
-                allowMissing         : true,
-                keepAll              : true,
-                alwaysLinkToLastBuild: true,
-                // Has to be relative to WORKSPACE.
-                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
-                reportFiles          : 'console-report.html',
-                reportName           : 'JDK8 Nightly Build Report (Hadoop2)'
-              ]
-            }
-          }
-        }
-        stage ('yetus jdk8 hadoop3 checks') {
-          agent {
-            node {
-              label 'Hadoop'
-            }
-          }
-          when {
-            not {
-              branch 'branch-1*'
-            }
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-            TESTS = 'mvninstall,compile,javac,unit,htmlout'
-            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
-            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
-            // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
-            // and is needed on branches that do both jdk7 and jdk8
-            SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
-            // Activates hadoop 3.0 profile in maven runs.
-            HADOOP_PROFILE = '3.0'
-          }
-          steps {
-            unstash 'yetus'
-            dir('component') {
-              checkout scm
-            }
-            sh '''#!/usr/bin/env bash
-              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
-              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
-              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
-'''
-            sh '''#!/usr/bin/env bash
-              rm -rf "${OUTPUT_DIR}/commentfile}"
-              declare -i status=0
-              if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
-                echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' >> "${OUTPUT_DIR}/commentfile"
-              else
-                echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >> "${OUTPUT_DIR}/commentfile"
-                status=1
-              fi
-              echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
-              exit "${status}"
-            '''
-          }
-          post {
-            always {
-              stash name: 'hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
-              // Not sure how two junit test reports will work. Disabling this for now.
-              // junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
-              // zip surefire reports.
-              sh '''#!/bin/bash -e
-                if [ -d "${OUTPUT_DIR}/archiver" ]; then
-                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
-                  if [[ 0 -ne ${count} ]]; then
-                    echo "zipping ${count} archived files"
-                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
-                  else
-                    echo "No archived files, skipping compressing."
-                  fi
-                else
-                  echo "No archiver directory, skipping compressing."
-                fi
-'''
-              // Has to be relative to WORKSPACE.
-              archive "${env.OUTPUT_DIR_RELATIVE}/*"
-              archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
-              publishHTML target: [
-                allowMissing         : true,
-                keepAll              : true,
-                alwaysLinkToLastBuild: true,
-                // Has to be relative to WORKSPACE.
-                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
-                reportFiles          : 'console-report.html',
-                reportName           : 'JDK8 Nightly Build Report (Hadoop3)'
-              ]
-            }
-          }
-        }
-        // This is meant to mimic what a release manager will do to create RCs.
-        // See http://hbase.apache.org/book.html#maven.release
-        stage ('create source tarball') {
-          tools {
-            maven 'Maven (latest)'
-            // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
-            jdk "JDK 1.8 (latest)"
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-          }
-          steps {
-            sh '''#!/bin/bash -e
-              echo "Setting up directories"
-              rm -rf "output-srctarball" && mkdir "output-srctarball"
-              rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
-              rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
-              rm -rf ".m2-for-src" && mkdir ".m2-for-src"
-'''
-            dir('component') {
-              checkout scm
-            }
-            sh '''#!/usr/bin/env bash
-              rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
-              "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
-'''
-            sh """#!/bin/bash -e
-              if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
-                  --intermediate-file-dir output-srctarball \
-                  --unpack-temp-dir unpacked_src_tarball \
-                  --maven-m2-initial .m2-for-repo \
-                  --maven-m2-src-build .m2-for-src \
-                  --clean-source-checkout \
-                  "${env.BASEDIR}" ; then
-                echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
-              else
-                echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
-              fi
-"""
-          }
-          post {
-            always {
-              stash name: 'srctarball-result', includes: "output-srctarball/commentfile"
-              archive 'output-srctarball/*'
-            }
-          }
-        }
-      }
-    }
-  }
-  post {
-    always {
-      script {
-         try {
-           unstash 'general-result'
-           unstash 'jdk7-result'
-           unstash 'hadoop2-result'
-           unstash 'hadoop3-result'
-           unstash 'srctarball-result'
-           sh "printenv"
-           def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
-                          "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
-                          "${env.OUTPUT_DIR_RELATIVE_HADOOP2}/commentfile",
-                          "${env.OUTPUT_DIR_RELATIVE_HADOOP3}/commentfile",
-                          'output-srctarball/commentfile']
-           echo env.BRANCH_NAME
-           echo env.BUILD_URL
-           echo currentBuild.result
-           echo currentBuild.durationString
-           def comment = "Results for branch ${env.BRANCH_NAME}\n"
-           comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
-           if (currentBuild.result == "SUCCESS") {
-              comment += "(/) *{color:green}+1 overall{color}*\n"
-           } else {
-              comment += "(x) *{color:red}-1 overall{color}*\n"
-              // Ideally get the committer our of the change and @ mention them in the per-jira comment
-           }
-           comment += "----\ndetails (if available):\n\n"
-           echo ""
-           echo "[DEBUG] trying to aggregate step-wise results"
-           comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
-           echo "[INFO] Comment:"
-           echo comment
-           echo ""
-           echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
-           getJirasToComment(currentBuild).each { currentIssue ->
-             jiraComment issueKey: currentIssue, body: comment
-           }
-        } catch (Exception exception) {
-          echo "Got exception: ${exception}"
-          echo "    ${exception.getStackTrace()}"
-        }
-      }
-    }
-  }
-}
-import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
-@NonCPS
-List<String> getJirasToComment(RunWrapper thisBuild) {
-  def seenJiras = []
-  thisBuild.changeSets.each { cs ->
-    cs.getItems().each { change ->
-      CharSequence msg = change.msg
-      echo "change: ${change}"
-      echo "     ${msg}"
-      echo "     ${change.commitId}"
-      echo "     ${change.author}"
-      echo ""
-      msg.eachMatch("HBASE-[0-9]+") { currentIssue ->
-        echo "[DEBUG] found jira key: ${currentIssue}"
-        if (currentIssue in seenJiras) {
-          echo "[DEBUG] already commented on ${currentIssue}."
-        } else {
-          echo "[INFO] commenting on ${currentIssue}."
-          seenJiras << currentIssue
-        }
-      }
-    }
-  }
-  return seenJiras
-}
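
For reference, the removed getJirasToComment() helper above scans each change set's commit message for HBASE-NNN keys and the build then comments once per unique key. A rough sketch of that de-duplication, rendered in Python purely for illustration (not part of this commit):

    import re

    def jiras_to_comment(commit_messages):
        # Collect HBASE-NNN keys across commit messages, first occurrence only,
        # mirroring the seenJiras list kept by the removed Groovy helper.
        seen = []
        for msg in commit_messages:
            for key in re.findall(r"HBASE-[0-9]+", msg):
                if key not in seen:
                    seen.append(key)
        return seen

    print(jiras_to_comment(["HBASE-15867 Disable nightly jobs", "HBASE-15867 follow-up"]))
    # -> ['HBASE-15867']
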
diff --git a/dev-support/flaky-tests/findHangingTests.py b/dev-support/flaky-tests/findHangingTests.py
new file mode 100755
index 0000000..328516e
--- /dev/null
+++ b/dev-support/flaky-tests/findHangingTests.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+##
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=invalid-name
+# To disable 'invalid constant name' warnings.
+
+"""
+# Script to find hanging test from Jenkins build output
+# usage: ./findHangingTests.py <url of Jenkins build console>
+"""
+
+import re
+import sys
+import requests
+
+# If any of these strings appear in the console output, it's a build one should probably ignore
+# for analyzing failed/hanging tests.
+BAD_RUN_STRINGS = [
+    "Slave went offline during the build",  # Machine went down, can't do anything about it.
+    "The forked VM terminated without properly saying goodbye",  # JVM crashed.
+]
+
+
+def get_bad_tests(console_url):
+    """
+    Returns [[all tests], [failed tests], [timeout tests], [hanging tests]] if successfully gets
+    the build information.
+    If there is error getting console text or if there are blacklisted strings in console text,
+    then returns None.
+    """
+    response = requests.get(console_url)
+    if response.status_code != 200:
+        print "Error getting consoleText. Response = {} {}".format(
+            response.status_code, response.reason)
+        return
+
+    # All tests: All testcases which were run.
+    # Hanging test: A testcase which started but never finished.
+    # Failed test: Testcase which encountered any kind of failure. It can be failing atomic tests,
+    #   timed out tests, etc
+    # Timeout test: A Testcase which encountered timeout. Naturally, all timeout tests will be
+    #   included in failed tests.
+    all_tests_set = set()
+    hanging_tests_set = set()
+    failed_tests_set = set()
+    timeout_tests_set = set()
+    for line in response.content.splitlines():
+        result1 = re.findall("Running org.apache.hadoop.hbase.(.*)", line)
+        if len(result1) == 1:
+            test_case = result1[0]
+            if test_case in all_tests_set:
+                print ("ERROR! Multiple tests with same name '{}'. Might get wrong results "
+                       "for this test.".format(test_case))
+            else:
+                hanging_tests_set.add(test_case)
+                all_tests_set.add(test_case)
+        result2 = re.findall("Tests run:.*?- in org.apache.hadoop.hbase.(.*)", line)
+        if len(result2) == 1:
+            test_case = result2[0]
+            if "FAILURE!" in line:
+                failed_tests_set.add(test_case)
+            if test_case not in hanging_tests_set:
+                print ("ERROR! No test '{}' found in hanging_tests. Might get wrong results "
+                       "for this test. This may also happen if maven is set to retry failing "
+                       "tests.".format(test_case))
+            else:
+                hanging_tests_set.remove(test_case)
+        result3 = re.match("^\\s+(\\w*).*\\sTestTimedOut", line)
+        if result3:
+            test_case = result3.group(1)
+            timeout_tests_set.add(test_case)
+        for bad_string in BAD_RUN_STRINGS:
+            if re.match(".*" + bad_string + ".*", line):
+                print "Bad string found in build:\n > {}".format(line)
+    print "Result > total tests: {:4}   failed : {:4}  timedout : {:4}  hanging : {:4}".format(
+        len(all_tests_set), len(failed_tests_set), len(timeout_tests_set), len(hanging_tests_set))
+    return [all_tests_set, failed_tests_set, timeout_tests_set, hanging_tests_set]
+
+if __name__ == "__main__":
+    if len(sys.argv) != 2:
+        print "ERROR : Provide the jenkins job console URL as the only argument."
+        sys.exit(1)
+
+    print "Fetching {}".format(sys.argv[1])
+    result = get_bad_tests(sys.argv[1])
+    if not result:
+        sys.exit(1)
+    [all_tests, failed_tests, timedout_tests, hanging_tests] = result
+
+    print "Found {} hanging tests:".format(len(hanging_tests))
+    for test in hanging_tests:
+        print test
+    print "\n"
+    print "Found {} failed tests of which {} timed out:".format(
+        len(failed_tests), len(timedout_tests))
+    for test in failed_tests:
+        print "{0} {1}".format(test, ("(Timed Out)" if test in timedout_tests else ""))
+
+    print ("\nA test may have had 0 or more atomic test failures before it timed out. So a "
+           "'Timed Out' test may have other errors too.")

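For reference, besides the command-line usage shown in the docstring (./findHangingTests.py <url of Jenkins build console>), the relocated module can also be used programmatically through get_bad_tests(). A minimal sketch; the job URL, build number, and sys.path handling below are placeholders and not part of this commit:

    import sys

    # Assumes this snippet runs from an hbase checkout root, so the new
    # flaky-tests directory can be imported as a module.
    sys.path.append("dev-support/flaky-tests")
    import findHangingTests

    CONSOLE = "https://builds.apache.org/job/HBase-Flaky-Tests/123/consoleText"
    result = findHangingTests.get_bad_tests(CONSOLE)
    if result:
        all_tests, failed, timedout, hanging = result
        print("hanging tests: {}".format(sorted(hanging)))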