This is an automated email from the ASF dual-hosted git repository.

hutran pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-gobblin.git


The following commit(s) were added to refs/heads/master by this push:
     new 7ecd278  [GOBBLIN-821] Adding Codecov
7ecd278 is described below

commit 7ecd2784cfc13c060adea98a0c71394ba30e6458
Author: autumnust <[email protected]>
AuthorDate: Sun Jul 14 17:34:52 2019 -0700

    [GOBBLIN-821] Adding Codecov
    
    Closes #2684 from autumnust/codecov
---
 .codecov_bash | 1285 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 .travis.yml   |    3 +-
 README.md     |    2 +-
 3 files changed, 1288 insertions(+), 2 deletions(-)

diff --git a/.codecov_bash b/.codecov_bash
new file mode 100644
index 0000000..523fb30
--- /dev/null
+++ b/.codecov_bash
@@ -0,0 +1,1285 @@
#!/usr/bin/env bash

# Codecov global report uploader (bash).
# -e: abort on the first unhandled error; +o pipefail: a failing stage in a
# pipeline must NOT fail the script (several pipelines below are best-effort).
set -e +o pipefail

VERSION="1b98dd4"

# Upload target and state. Variables suffixed "_o" hold explicit command-line
# overrides; the bare names hold auto-detected values.
url="https://codecov.io"
url_o=""
env="$CODECOV_ENV"
pr_o=""
pr=""
job=""
build_url=""
service=""
build_o=""
token=""
commit_o=""
search_in=""
search_in_o=""
tag_o=""
tag=""
flags=""
exit_with=0
branch_o=""
slug_o=""
dump="0"
clean="0"
branch=""
commit=""
include_cov=""
exclude_cov=""
# default Xcode DerivedData location; overridable with -D
ddp="$(echo ~)/Library/Developer/Xcode/DerivedData"
xp=""
files=""
cacert="$CODECOV_CA_BUNDLE"
gcov_ignore=""
gcov_include=""
# feature toggles (1 = enabled); flipped off via -X <feature>
ft_gcov="1"
ft_coveragepy="1"
ft_fix="1"
ft_search="1"
ft_s3="1"
ft_xcode="1"
ft_network="1"
# project root: prefer git, then mercurial, then the current directory
_git_root=$(git rev-parse --show-toplevel 2>/dev/null || hg root 2>/dev/null || echo $PWD)
git_root="$_git_root"
remote_addr=""
if [ "$git_root" = "$PWD" ];
then
  git_root="."
fi

proj_root="$git_root"
gcov_exe="gcov"
gcov_arg=""

# ANSI color escapes used by say(); blanked out by -K
b="\033[0;36m"
g="\033[0;32m"
r="\033[0;31m"
e="\033[0;90m"
x="\033[0m"
+
# Print usage/help text to stdout. Expands $VERSION via the unquoted heredoc.
# (Fixed typos: "uplaod" -> "upload", "reduces" -> "reduce".)
show_help() {
cat << EOF

                Codecov Bash $VERSION

          Global report uploading tool for Codecov
    Contribute at https://github.com/codecov/codecov-bash


    -h          Display this help and exit
    -f FILE     Target file(s) to upload

                 -f "path/to/file"     only upload this file
                                       skips searching unless provided patterns below
                 -f "!*.bar"           ignore all files at pattern *.bar
                 -f "*.foo"            include all files at pattern *.foo

    -s DIR       Directory to search for coverage reports.
                 Already searches project root and artifact folders.
    -t TOKEN     Set the private repository token
                 (option) set environment variable CODECOV_TOKEN=:uuid

                 -t @/path/to/token_file
                 -t uuid

    -e ENV       Specify environment variables to be included with this build
                 Also accepting environment variables: CODECOV_ENV=VAR,VAR2

                 -e VAR,VAR2

    -X feature   Toggle functionalities

                 -X gcov          Disable gcov
                 -X coveragepy    Disable python coverage
                 -X fix           Disable report fixing
                 -X search        Disable searching for reports
                 -X xcode         Disable xcode processing
                 -X network       Disable uploading the file network

    -R root dir  Used when not in git/hg project to identify project root directory
    -F flag      Flag the upload to group coverage metrics

                 -F unittests        This upload is only unittests
                 -F integration      This upload is only integration tests
                 -F ui,chrome        This upload is Chrome - UI tests

    -c           Move discovered coverage reports to the trash
    -Z           Exit with 1 if not successful. Default will Exit with 0

    -- xcode --
    -D           Custom Derived Data Path for Coverage.profdata and gcov processing
                 Default '~/Library/Developer/Xcode/DerivedData'
    -J           Specify packages to build coverage.
                 This can significantly reduce time to build coverage reports.

                 -J 'MyAppName'      Will match "MyAppName" and "MyAppNameTests"
                 -J '^ExampleApp$'   Will match only "ExampleApp" not "ExampleAppTests"

    -- gcov --
    -g GLOB      Paths to ignore during gcov gathering
    -G GLOB      Paths to include during gcov gathering
    -p dir       Project root directory
                 Also used when preparing gcov
    -x gcovexe   gcov executable to run. Defaults to 'gcov'
    -a gcovargs  extra arguments to pass to gcov

    -- Override CI Environment Variables --
       These variables are automatically detected by popular CI providers

    -B branch    Specify the branch name
    -C sha       Specify the commit sha
    -P pr        Specify the pull request number
    -b build     Specify the build number
    -T tag       Specify the git tag

    -- Enterprise --
    -u URL       Set the target url for Enterprise customers
                 Not required when retrieving the bash uploader from your CCE
                 (option) Set environment variable CODECOV_URL=https://my-hosted-codecov.com
    -r SLUG      owner/repo slug used instead of the private repo token in Enterprise
                 (option) set environment variable CODECOV_SLUG=:owner/:repo
                 (option) set in your codecov.yml "codecov.slug"
    -S PATH      File path to your cacert.pem file used to verify ssl with Codecov Enterprise (optional)
                 (option) Set environment variable: CODECOV_CA_BUNDLE="/path/to/ca.pem"

    -- Debugging --
    -d           Dont upload and dump to stdin
    -K           Remove color from the output
    -v           Verbose mode

EOF
}
+
+
# Print a (possibly color-coded) message; -e expands the ANSI escape
# sequences embedded by callers ($g, $r, $e, $x).
say() {
  echo -e "$1"
}
+
+
# URL-encode $1 by delegating to curl: the value is submitted with
# --data-urlencode against an empty URL and the encoded effective URL is
# read back via -w; cut strips the leading "/?" and sed drops the encoded
# trailing newline (%0A) introduced by the echo pipe.
# NOTE(review): requires the curl binary on PATH.
urlencode() {
  echo "$1" | curl -Gso /dev/null -w %{url_effective} --data-urlencode @- "" | cut -c 3- | sed -e 's/%0A//'
}
+
+
# Build per-target llvm-cov text reports for every .app/.framework/.xctest
# bundle found beside the given profile data.
#   $1 - path to a Coverage.profdata file
#   $2 - optional regex; when non-empty, only matching project names build
# Fix: read -r, so backslashes in bundle paths are not mangled.
swiftcov() {
  _dir=$(dirname "$1")
  for _type in app framework xctest
  do
    find "$_dir" -name "*.$_type" | while read -r f
    do
      _proj=${f##*/}
      _proj=${_proj%."$_type"}
      if [ "$2" = "" ] || [ "$(echo "$_proj" | grep -i "$2")" != "" ];
      then
        say "    $g+$x Building reports for $_proj $_type"
        # binary lives either at the bundle root or under Contents/MacOS
        dest=$([ -f "$f/$_proj" ] && echo "$f/$_proj" || echo "$f/Contents/MacOS/$_proj")
        _proj_name=$(echo "$_proj" | sed -e 's/[[:space:]]//g')
        xcrun llvm-cov show -instr-profile "$1" "$dest" > "$_proj_name.$_type.coverage.txt" \
         || say "    ${r}x>${x} llvm-cov failed to produce results for $dest"
      fi
    done
  done
}
+
+
# Credits to: https://gist.github.com/pkuczynski/8665367
# Flatten a simple YAML file into shell-style assignments, e.g.
#   codecov:
#     token: "x"    ->   <prefix>codecov_token="x"
#   $1 - path to the yaml file
#   $2 - optional prefix prepended to each emitted variable name
# NOTE: only the top-level key (vname[0]) is reflected in nested names.
# Fix: quote "$1" so file paths containing spaces work.
parse_yaml() {
   local prefix=$2
   local s='[[:space:]]*' w='[a-zA-Z0-9_]*' fs=$(echo @|tr @ '\034')
   sed -ne "s|^\($s\)\($w\)$s:$s\"\(.*\)\"$s\$|\1$fs\2$fs\3|p" \
        -e "s|^\($s\)\($w\)$s:$s\(.*\)$s\$|\1$fs\2$fs\3|p" "$1" |
   awk -F$fs '{
      indent = length($1)/2;
      vname[indent] = $2;
      for (i in vname) {if (i > indent) {delete vname[i]}}
      if (length($3) > 0) {
         vn=""; if (indent > 0) {vn=(vn)(vname[0])("_")}
         printf("%s%s%s=\"%s\"\n", "'$prefix'",vn, $2, $3);
      }
   }'
}
+
+
# Command-line parsing. Options mirror the help text in show_help();
# variables ending in "_o" record explicit user overrides that later take
# precedence over auto-detected CI values.
if [ $# != 0 ];
then
  while getopts "a:b:B:cC:dD:e:f:F:g:G:hJ:Kp:P:r:R:s:S:t:T:u:vx:X:Z" o
  do
    case "$o" in
      "a")
        # extra arguments forwarded to gcov
        gcov_arg=$OPTARG
        ;;
      "b")
        build_o="$OPTARG"
        ;;
      "B")
        branch_o="$OPTARG"
        ;;
      "c")
        # move discovered coverage reports to the trash afterwards
        clean="1"
        ;;
      "C")
        commit_o="$OPTARG"
        ;;
      "d")
        # dump the payload to stdout instead of uploading
        dump="1"
        ;;
      "D")
        # custom Xcode DerivedData path
        ddp="$OPTARG"
        ;;
      "e")
        env="$env,$OPTARG"
        ;;
      "f")
        # "!glob" excludes, a pattern containing * includes,
        # a plain path disables searching and is uploaded verbatim
        if [ "${OPTARG::1}" = "!" ];
        then
          exclude_cov="$exclude_cov -not -path '${OPTARG:1}'"

        elif [[ "$OPTARG" = *"*"* ]];
        then
          include_cov="$include_cov -or -name '$OPTARG'"

        else
          ft_search=0
          if [ "$files" = "" ];
          then
            files="$OPTARG"
          else
            # newline-separated list of explicit files
            files="$files
$OPTARG"
          fi
        fi
        ;;
      "F")
        # accumulate comma-separated upload flags
        if [ "$flags" = "" ];
        then
          flags="$OPTARG"
        else
          flags="$flags,$OPTARG"
        fi
        ;;
      "g")
        gcov_ignore="$gcov_ignore -not -path '$OPTARG'"
        ;;
      "G")
        gcov_include="$gcov_include -path '$OPTARG'"
        ;;
      "h")
        show_help
        exit 0;
        ;;
      "J")
        # xcode project name patterns, OR-ed together for grep (\| alternation)
        if [ "$xp" = "" ];
        then
          xp="$OPTARG"
        else
          xp="$xp\|$OPTARG"
        fi
        ;;
      "K")
        # disable colored output by blanking the escape variables
        b=""
        g=""
        r=""
        e=""
        x=""
        ;;
      "p")
        proj_root="$OPTARG"
        ;;
      "P")
        pr_o="$OPTARG"
        ;;
      "r")
        slug_o="$OPTARG"
        ;;
      "R")
        git_root="$OPTARG"
        ;;
      "s")
        # extra directories to search, space separated
        if [ "$search_in_o" = "" ];
        then
          search_in_o="$OPTARG"
        else
          search_in_o="$search_in_o $OPTARG"
        fi
        ;;
      "S")
        cacert="--cacert \"$OPTARG\""
        ;;
      "t")
        # "@/path" reads the token from a file; otherwise the literal value
        if [ "${OPTARG::1}" = "@" ];
        then
          token=$(cat "${OPTARG:1}" | tr -d ' \n')
        else
          token="$OPTARG"
        fi
        ;;
      "T")
        tag_o="$OPTARG"
        ;;
      "u")
        # strip a trailing slash from the enterprise url
        url_o=$(echo "$OPTARG" | sed -e 's/\/$//')
        ;;
      "v")
        # verbose: trace every command from here on
        set -x
        ;;
      "x")
        gcov_exe=$OPTARG
        ;;
      "X")
        # feature toggles
        if [ "$OPTARG" = "gcov" ];
        then
          ft_gcov="0"
        elif [ "$OPTARG" = "coveragepy" ] || [ "$OPTARG" = "py" ];
        then
          ft_coveragepy="0"
        elif [ "$OPTARG" = "fix" ] || [ "$OPTARG" = "fixes" ];
        then
          ft_fix="0"
        elif [ "$OPTARG" = "xcode" ];
        then
          ft_xcode="0"
        elif [ "$OPTARG" = "search" ];
        then
          ft_search="0"
        elif [ "$OPTARG" = "network" ];
        then
          ft_network="0"
        fi
        ;;
      "Z")
        # exit non-zero on failure (default is to always exit 0)
        exit_with=1
        ;;
    esac
  done
fi
+
+say "
+  _____          _
+ / ____|        | |
+| |     ___   __| | ___  ___ _____   __
+| |    / _ \\ / _\` |/ _ \\/ __/ _ \\ \\ / /
+| |___| (_) | (_| |  __/ (_| (_) \\ V /
+ \\_____\\___/ \\__,_|\\___|\\___\\___/ \\_/
+                              Bash-$VERSION
+
+"
+
+search_in="$proj_root"
+
# Detect the CI provider from its characteristic environment variables and
# collect branch/commit/build/pr/slug metadata for the upload query.
# Fix: two occurrences of the typo "remove_addr" (TeamCity, GitLab) now
# correctly assign "remote_addr", which is declared earlier and consumed by
# the slug-derivation logic below.
if [ "$JENKINS_URL" != "" ];
then
  say "$e==>$x Jenkins CI detected."
  # https://wiki.jenkins-ci.org/display/JENKINS/Building+a+software+project
  # https://wiki.jenkins-ci.org/display/JENKINS/GitHub+pull+request+builder+plugin#GitHubpullrequestbuilderplugin-EnvironmentVariables
  service="jenkins"
  # prefer PR source branch/commit from the GitHub PR builder plugin
  branch=$([ ! -z "$ghprbSourceBranch" ] && echo "$ghprbSourceBranch" || echo "$GIT_BRANCH")
  commit=$([ ! -z "$ghprbActualCommit" ] && echo "$ghprbActualCommit" || echo "$GIT_COMMIT")
  build="$BUILD_NUMBER"
  pr="$ghprbPullId"
  build_url=$(urlencode "$BUILD_URL")

elif [ "$CI" = "true" ] && [ "$TRAVIS" = "true" ] && [ "$SHIPPABLE" != "true" ];
then
  say "$e==>$x Travis CI detected."
  # http://docs.travis-ci.com/user/ci-environment/#Environment-variables
  service="travis"
  branch="$TRAVIS_BRANCH"
  commit="$TRAVIS_COMMIT"
  build="$TRAVIS_JOB_NUMBER"
  pr="$TRAVIS_PULL_REQUEST"
  job="$TRAVIS_JOB_ID"
  slug="$TRAVIS_REPO_SLUG"
  tag="$TRAVIS_TAG"
  env="$env,TRAVIS_OS_NAME"

  # also report the language version (e.g. TRAVIS_PYTHON_VERSION)
  language=$(printenv | grep "TRAVIS_.*_VERSION" | head -1)
  if [ "$language" != "" ];
  then
    env="$env,${language%=*}"
  fi

elif [ "$CI" = "true" ] && [ "$CI_NAME" = "codeship" ];
then
  say "$e==>$x Codeship CI detected."
  # https://www.codeship.io/documentation/continuous-integration/set-environment-variables/
  service="codeship"
  branch="$CI_BRANCH"
  build="$CI_BUILD_NUMBER"
  build_url=$(urlencode "$CI_BUILD_URL")
  commit="$CI_COMMIT_ID"

elif [ "$TEAMCITY_VERSION" != "" ];
then
  say "$e==>$x TeamCity CI detected."
  # https://confluence.jetbrains.com/display/TCD8/Predefined+Build+Parameters
  # https://confluence.jetbrains.com/plugins/servlet/mobile#content/view/74847298
  if [ "$TEAMCITY_BUILD_BRANCH" = '' ];
  then
    echo "    Teamcity does not automatically make build parameters available as environment variables."
    echo "    Add the following environment parameters to the build configuration"
    echo "    env.TEAMCITY_BUILD_BRANCH = %teamcity.build.branch%"
    echo "    env.TEAMCITY_BUILD_ID = %teamcity.build.id%"
    echo "    env.TEAMCITY_BUILD_URL = %teamcity.serverUrl%/viewLog.html?buildId=%teamcity.build.id%"
    echo "    env.TEAMCITY_BUILD_COMMIT = %system.build.vcs.number%"
    echo "    env.TEAMCITY_BUILD_REPOSITORY = %vcsroot.<YOUR TEAMCITY VCS NAME>.url%"
  fi
  service="teamcity"
  branch="$TEAMCITY_BUILD_BRANCH"
  build="$TEAMCITY_BUILD_ID"
  build_url=$(urlencode "$TEAMCITY_BUILD_URL")
  if [ "$TEAMCITY_BUILD_COMMIT" != "" ];
  then
    commit="$TEAMCITY_BUILD_COMMIT"
  else
    commit="$BUILD_VCS_NUMBER"
  fi
  # fix: was "remove_addr", silently discarding the repository url
  remote_addr="$TEAMCITY_BUILD_REPOSITORY"

elif [ "$CI" = "true" ] && [ "$CIRCLECI" = "true" ];
then
  say "$e==>$x Circle CI detected."
  # https://circleci.com/docs/environment-variables
  service="circleci"
  branch="$CIRCLE_BRANCH"
  build="$CIRCLE_BUILD_NUM"
  job="$CIRCLE_NODE_INDEX"
  slug="$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME"
  pr="$CIRCLE_PR_NUMBER"
  commit="$CIRCLE_SHA1"
  search_in="$search_in $CIRCLE_ARTIFACTS $CIRCLE_TEST_REPORTS"

elif [ "$BUDDYBUILD_BRANCH" != "" ];
then
  say "$e==>$x buddybuild detected"
  # http://docs.buddybuild.com/v6/docs/custom-prebuild-and-postbuild-steps
  service="buddybuild"
  branch="$BUDDYBUILD_BRANCH"
  build="$BUDDYBUILD_BUILD_NUMBER"
  build_url="https://dashboard.buddybuild.com/public/apps/$BUDDYBUILD_APP_ID/build/$BUDDYBUILD_BUILD_ID"
  # BUDDYBUILD_TRIGGERED_BY
  if [ "$ddp" = "$(echo ~)/Library/Developer/Xcode/DerivedData" ];
  then
    ddp="/private/tmp/sandbox/${BUDDYBUILD_APP_ID}/bbtest"
  fi

elif [ "${bamboo_planRepository_revision}" != "" ];
then
  say "$e==>$x Bamboo detected"
  # https://confluence.atlassian.com/bamboo/bamboo-variables-289277087.html#Bamboovariables-Build-specificvariables
  service="bamboo"
  commit="${bamboo_planRepository_revision}"
  branch="${bamboo_planRepository_branch}"
  build="${bamboo_buildNumber}"
  build_url="${bamboo_buildResultsUrl}"
  remote_addr="${bamboo_planRepository_repositoryUrl}"

elif [ "$CI" = "true" ] && [ "$BITRISE_IO" = "true" ];
then
  # http://devcenter.bitrise.io/faq/available-environment-variables/
  say "$e==>$x Bitrise CI detected."
  service="bitrise"
  branch="$BITRISE_GIT_BRANCH"
  build="$BITRISE_BUILD_NUMBER"
  build_url=$(urlencode "$BITRISE_BUILD_URL")
  pr="$BITRISE_PULL_REQUEST"
  if [ "$GIT_CLONE_COMMIT_HASH" != "" ];
  then
    commit="$GIT_CLONE_COMMIT_HASH"
  fi

elif [ "$CI" = "true" ] && [ "$SEMAPHORE" = "true" ];
then
  say "$e==>$x Semaphore CI detected."
  # https://semaphoreapp.com/docs/available-environment-variables.html
  service="semaphore"
  branch="$BRANCH_NAME"
  build="$SEMAPHORE_BUILD_NUMBER.$SEMAPHORE_CURRENT_THREAD"
  pr="$PULL_REQUEST_NUMBER"
  slug="$SEMAPHORE_REPO_SLUG"
  commit="$REVISION"
  env="$env,SEMAPHORE_TRIGGER_SOURCE"

elif [ "$CI" = "true" ] && [ "$BUILDKITE" = "true" ];
then
  say "$e==>$x Buildkite CI detected."
  # https://buildkite.com/docs/guides/environment-variables
  service="buildkite"
  branch="$BUILDKITE_BRANCH"
  build="$BUILDKITE_BUILD_NUMBER.$BUILDKITE_JOB_ID"
  build_url=$(urlencode "$BUILDKITE_BUILD_URL")
  slug="$BUILDKITE_PROJECT_SLUG"
  commit="$BUILDKITE_COMMIT"

elif [ "$CI" = "true" ] && [ "$DRONE" = "true" ];
then
  say "$e==>$x Drone CI detected."
  # http://docs.drone.io/env.html
  # drone commits are not full shas
  service="drone.io"
  branch="$DRONE_BRANCH"
  build="$DRONE_BUILD_NUMBER"
  build_url=$(urlencode "${DRONE_BUILD_URL:-$CI_BUILD_URL}")
  pr="$DRONE_PULL_REQUEST"
  job="$DRONE_JOB_NUMBER"
  tag="$DRONE_TAG"

elif [ "$CI" = "True" ] && [ "$APPVEYOR" = "True" ];
then
  say "$e==>$x Appveyor CI detected."
  # http://www.appveyor.com/docs/environment-variables
  service="appveyor"
  branch="$APPVEYOR_REPO_BRANCH"
  build=$(urlencode "$APPVEYOR_JOB_ID")
  pr="$APPVEYOR_PULL_REQUEST_NUMBER"
  job="$APPVEYOR_ACCOUNT_NAME%2F$APPVEYOR_PROJECT_SLUG%2F$APPVEYOR_BUILD_VERSION"
  slug="$APPVEYOR_REPO_NAME"
  commit="$APPVEYOR_REPO_COMMIT"

elif [ "$CI" = "true" ] && [ "$WERCKER_GIT_BRANCH" != "" ];
then
  say "$e==>$x Wercker CI detected."
  # http://devcenter.wercker.com/articles/steps/variables.html
  service="wercker"
  branch="$WERCKER_GIT_BRANCH"
  build="$WERCKER_MAIN_PIPELINE_STARTED"
  slug="$WERCKER_GIT_OWNER/$WERCKER_GIT_REPOSITORY"
  commit="$WERCKER_GIT_COMMIT"

elif [ "$CI" = "true" ] && [ "$MAGNUM" = "true" ];
then
  say "$e==>$x Magnum CI detected."
  # https://magnum-ci.com/docs/environment
  service="magnum"
  branch="$CI_BRANCH"
  build="$CI_BUILD_NUMBER"
  commit="$CI_COMMIT"

elif [ "$CI" = "true" ] && [ "$SNAP_CI" = "true" ];
then
  say "$e==>$x Snap CI detected."
  # https://docs.snap-ci.com/environment-variables/
  service="snap"
  branch=$([ "$SNAP_BRANCH" != "" ] && echo "$SNAP_BRANCH" || echo "$SNAP_UPSTREAM_BRANCH")
  build="$SNAP_PIPELINE_COUNTER"
  job="$SNAP_STAGE_NAME"
  pr="$SNAP_PULL_REQUEST_NUMBER"
  commit=$([ "$SNAP_COMMIT" != "" ] && echo "$SNAP_COMMIT" || echo "$SNAP_UPSTREAM_COMMIT")
  env="$env,DISPLAY"

elif [ "$SHIPPABLE" = "true" ];
then
  say "$e==>$x Shippable CI detected."
  # http://docs.shippable.com/ci_configure/
  service="shippable"
  branch=$([ "$HEAD_BRANCH" != "" ] && echo "$HEAD_BRANCH" || echo "$BRANCH")
  build="$BUILD_NUMBER"
  build_url=$(urlencode "$BUILD_URL")
  pr="$PULL_REQUEST"
  slug="$REPO_FULL_NAME"
  commit="$COMMIT"

elif [ "$TDDIUM" = "true" ];
then
  say "Solano CI detected."
  # http://docs.solanolabs.com/Setup/tddium-set-environment-variables/
  service="solano"
  commit="$TDDIUM_CURRENT_COMMIT"
  branch="$TDDIUM_CURRENT_BRANCH"
  build="$TDDIUM_TID"
  pr="$TDDIUM_PR_ID"

elif [ "$GREENHOUSE" = "true" ];
then
  say "$e==>$x Greenhouse CI detected."
  # http://docs.greenhouseci.com/docs/environment-variables-files
  service="greenhouse"
  branch="$GREENHOUSE_BRANCH"
  build="$GREENHOUSE_BUILD_NUMBER"
  build_url=$(urlencode "$GREENHOUSE_BUILD_URL")
  pr="$GREENHOUSE_PULL_REQUEST"
  commit="$GREENHOUSE_COMMIT"
  search_in="$search_in $GREENHOUSE_EXPORT_DIR"

elif [ "$GITLAB_CI" != "" ];
then
  say "$e==>$x GitLab CI detected."
  # http://doc.gitlab.com/ce/ci/variables/README.html
  service="gitlab"
  branch="$CI_BUILD_REF_NAME"
  build="$CI_BUILD_ID"
  # fix: was "remove_addr", silently discarding the repository url
  remote_addr="$CI_BUILD_REPO"
  commit="$CI_BUILD_REF"

else
  # no provider recognized: fall back to generic VCS_* / CI_* variables
  say "${r}x>${x} No CI provider detected."

  commit="$VCS_COMMIT_ID"
  branch="$VCS_BRANCH_NAME"
  pr="$VCS_PULL_REQUEST"
  slug="$VCS_SLUG"
  build_url="$CI_BUILD_URL"
  build="$CI_BUILD_ID"

fi
+
say "    ${e}project root:${x} $git_root"

# Resolve the branch name: an explicit GIT_BRANCH env var always wins;
# otherwise, when the CI detection above produced nothing, ask git (or hg).
if [ "$GIT_BRANCH" != "" ]; then
  branch="$GIT_BRANCH"

elif [ "$branch" = "" ]; then
  branch=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || hg branch 2>/dev/null || echo "")
  # a detached HEAD is not a meaningful branch name
  if [ "$branch" = "HEAD" ]; then
    branch=""
  fi
fi
+
# Resolve the commit sha unless -C provided one explicitly. GitHub's
# synthetic "Merge X into Y" commits are unwrapped to the real commit X.
# NOTE(review): tr -d '[[:space:]]' strips ALL whitespace (and literal
# brackets), so the spaced regex below looks unmatchable as written —
# behavior preserved verbatim; confirm against upstream before changing.
if [ "$commit_o" = "" ]; then
  # merge commit -> actual commit
  mc=$(git log -1 --pretty=%B 2>/dev/null | tr -d '[[:space:]]' || true)
  if [[ "$mc" =~ ^Merge[[:space:]][a-z0-9]{40}[[:space:]]into[[:space:]][a-z0-9]{40}$ ]]; then
    say "    (DEPRECIATING SOON) Learn more at http://docs.codecov.io/docs/merge-commits"
    # Merge xxx into yyy
    say "    Fixing merge commit sha"
    commit=$(echo "$mc" | cut -d' ' -f2)
  elif [ "$GIT_COMMIT" != "" ]; then
    commit="$GIT_COMMIT"
  elif [ "$commit" = "" ]; then
    commit=$(git log -1 --format="%H" 2>/dev/null || hg id -i --debug 2>/dev/null | tr -d '+' || echo "")
  fi
else
  commit="$commit_o"
fi
+
# Environment-variable configuration: token, enterprise url, and slug.
if [ "$CODECOV_TOKEN" != "" ] && [ "$token" = "" ]; then
  say "${e}-->${x} token set from env"
  token="$CODECOV_TOKEN"
fi

if [ "$CODECOV_URL" != "" ] && [ "$url_o" = "" ]; then
  say "${e}-->${x} url set from env"
  url_o=$(echo "$CODECOV_URL" | sed -e 's/\/$//')
fi

if [ "$CODECOV_SLUG" != "" ]; then
  say "${e}-->${x} slug set from env"
  slug_o="$CODECOV_SLUG"

elif [ "$slug" = "" ]; then
  # derive owner/repo from the remote url (https or ssh form)
  if [ "$remote_addr" = "" ]; then
    remote_addr=$(git config --get remote.origin.url || hg paths default || echo '')
  fi
  if [ "$remote_addr" != "" ]; then
    if echo "$remote_addr" | grep -q "//"; then
      # https
      slug=$(echo "$remote_addr" | cut -d / -f 4,5 | sed -e 's/\.git$//')
    else
      # ssh
      slug=$(echo "$remote_addr" | cut -d : -f 2 | sed -e 's/\.git$//')
    fi
  fi
  # "/" means both owner and repo were empty
  if [ "$slug" = "/" ]; then
    slug=""
  fi
fi
+
# Locate a committed codecov yaml (first match wins) and let it provide
# token / url / slug when they were not supplied elsewhere.
# Fix: quote "$yaml" in the parse_yaml call so paths with spaces work.
yaml=$(cd "$git_root" && \
       git ls-files "*codecov.yml" "*codecov.yaml" 2>/dev/null \
       || hg locate "*codecov.yml" "*codecov.yaml" 2>/dev/null \
       || echo '')
yaml=$(echo "$yaml" | head -1)

if [ "$yaml" != "" ];
then
  say "    ${e}Yaml found at:${x} $yaml"
  config=$(parse_yaml "$yaml" || echo '')

  # TODO validate the yaml here

  if [ "$(echo "$config" | grep 'codecov_token="')" != "" ] && [ "$token" = "" ];
  then
    say "${e}-->${x} token set from yaml"
    token="$(echo "$config" | grep 'codecov_token="' | sed -e 's/codecov_token="//' | sed -e 's/"\.*//')"
  fi

  if [ "$(echo "$config" | grep 'codecov_url="')" != "" ] && [ "$url_o" = "" ];
  then
    say "${e}-->${x} url set from yaml"
    url_o="$(echo "$config" | grep 'codecov_url="' | sed -e 's/codecov_url="//' | sed -e 's/"\.*//')"
  fi

  if [ "$(echo "$config" | grep 'codecov_slug="')" != "" ] && [ "$slug_o" = "" ];
  then
    say "${e}-->${x} slug set from yaml"
    slug_o="$(echo "$config" | grep 'codecov_slug="' | sed -e 's/codecov_slug="//' | sed -e 's/"\.*//')"
  fi
else
  say "    ${g}Yaml not found, that's ok! Learn more at${x} ${b}http://docs.codecov.io/docs/codecov-yaml${x}"

fi
+
+if [ "$branch_o" != "" ];
+then
+  branch=$(urlencode "$branch_o")
+else
+  branch=$(urlencode "$branch")
+fi
+
+query="branch=$branch\
+       &commit=$commit\
+       &build=$([ "$build_o" = "" ] && echo "$build" || echo "$build_o")\
+       &build_url=$build_url\
+       &tag=$([ "$tag_o" = "" ] && echo "$tag" || echo "$tag_o")\
+       &slug=$([ "$slug_o" = "" ] && urlencode "$slug" || urlencode "$slug_o")\
+       &yaml=$(urlencode "$yaml")\
+       &service=$service\
+       &flags=$flags\
+       &pr=$([ "$pr_o" = "" ] && echo "$pr" || echo "$pr_o")\
+       &job=$job"
+
# Search for coverage reports, unless disabled by -X search / explicit -f.
# Fix: removed a duplicated "-not -name '*.csv'" exclusion from the pattern.
if [ "$ft_search" = "1" ];
then
  # detect bower components location from .bowerrc so it can be excluded below
  bower_components="bower_components"
  bower_rc=$(cd "$git_root" && cat .bowerrc 2>/dev/null || echo "")
  if [ "$bower_rc" != "" ];
  then
    bower_components=$(echo "$bower_rc" | tr -d '\n' | grep '"directory"' | cut -d'"' -f4 | sed -e 's/\/$//')
    if [ "$bower_components" = "" ];
    then
      bower_components="bower_components"
    fi
  fi

  # Swift Coverage
  if [ "$ft_xcode" = "1" ] && [ -d "$ddp" ];
  then
    say "${e}==>${x} Processing Xcode reports"
    say "    DerivedData folder: $ddp"
    profdata_files=$(find "$ddp" -name '*.profdata' 2>/dev/null || echo '')
    if [ "$profdata_files" != "" ];
    then
      # xcode via profdata
      if [ "$xp" = "" ];
      then
        # xp=$(xcodebuild -showBuildSettings 2>/dev/null | grep -i "^\s*PRODUCT_NAME" | sed -e 's/.*= \(.*\)/\1/')
        # say " ${e}->${x} Speed up Xcode processing by adding ${e}-J '$xp'${x}"
        say "    ${g}hint${x} Speed up Swift processing by using use -J 'AppName' (accepting regexp)"
      fi
      while read -r profdata;
      do
        if [ "$profdata" != "" ];
        then
          swiftcov "$profdata" "$xp"
        fi
      done <<< "$profdata_files"
    else
      say "    ${e}->${x} No Swift coverage found"
    fi

    # Obj-C Gcov Coverage
    if [ "$ft_gcov" = "1" ];
    then
      say "    ${e}->${x} Running $gcov_exe for Obj-C"
      bash -c "find $ddp -type f -name '*.gcda' $gcov_include $gcov_ignore -exec $gcov_exe -pbcu $gcov_arg {} +" || true
    fi
  fi

  # Gcov Coverage
  if [ "$ft_gcov" = "1" ];
  then
    say "${e}==>${x} Running gcov in $proj_root ${e}(disable via -X gcov)${x}"
    bash -c "find $proj_root -type f -name '*.gcno' $gcov_include $gcov_ignore -exec $gcov_exe -pb $gcov_arg {} +" || true
  else
    say "${e}==>${x} gcov disabled"
  fi

  # Python Coverage
  if [ "$ft_coveragepy" = "1" ];
  then
    if [ ! -f coverage.xml ];
    then
      if which coverage >/dev/null 2>&1;
      then
        say "${e}==>${x} Python coveragepy exists ${e}disable via -X coveragepy${x}"

        dotcoverage=$(find "$git_root" -name '.coverage' -or -name '.coverage.*' | head -1 || echo '')
        if [ "$dotcoverage" != "" ];
        then
          cd "$(dirname "$dotcoverage")"
          if [ ! -f .coverage ];
          then
            say "    ${e}->${x} Running coverage combine"
            coverage combine
          fi
          say "    ${e}->${x} Running coverage xml"
          if [ "$(coverage xml -i)" != "No data to report." ];
          then
            files="$files
$PWD/coverage.xml"
          else
            say "    ${r}No data to report.${x}"
          fi
          cd "$proj_root"
        else
          say "    ${r}No .coverage file found.${x}"
        fi
      else
        say "${e}==>${x} Python coveragepy not found"
      fi
    fi
  else
    say "${e}==>${x} Python coveragepy disabled"
  fi

  if [ "$search_in_o" != "" ];
  then
    # location override
    search_in="$search_in_o"
  fi

  say "$e==>$x Searching for coverage reports in:"
  for _path in $search_in
  do
    say "    ${g}+${x} $_path"
  done

  # The find expression is built as a string and eval'd so that the
  # include/exclude fragments assembled from -f/-g/-G keep their quoting.
  patterns="find $search_in -type f \( -name '*coverage*.*' \
                     -or -name 'nosetests.xml' \
                     -or -name 'jacoco*.xml' \
                     -or -name 'clover.xml' \
                     -or -name 'report.xml' \
                     -or -name '*.codecov.*' \
                     -or -name 'codecov.*' \
                     -or -name 'cobertura.xml' \
                     -or -name 'excoveralls.json' \
                     -or -name 'luacov.report.out' \
                     -or -name 'coverage-final.json' \
                     -or -name 'naxsi.info' \
                     -or -name 'lcov.info' \
                     -or -name 'lcov.dat' \
                     -or -name '*.lcov' \
                     -or -name '*.clover' \
                     -or -name 'cover.out' \
                     -or -name 'gcov.info' \
                     -or -name '*.gcov' \
                     -or -name '*.lst' \
                     $include_cov \) \
                    $exclude_cov \
                    -not -name '*.profdata' \
                    -not -name 'coverage-summary.json' \
                    -not -name 'phpunit-code-coverage.xml' \
                    -not -name 'remapInstanbul.coverage*.json' \
                    -not -name '*codecov.yml' \
                    -not -name '*.serialized' \
                    -not -name '.coverage*' \
                    -not -name '.*coveragerc' \
                    -not -name '*.sh' \
                    -not -name '*.bat' \
                    -not -name '*.ps1' \
                    -not -name '*.rst' \
                    -not -name '*.sbt' \
                    -not -name '*.xcoverage.*' \
                    -not -name '*.gz' \
                    -not -name '*.conf' \
                    -not -name '*.p12' \
                    -not -name '*.csv' \
                    -not -name '*.rsp' \
                    -not -name '*.m4' \
                    -not -name '*.am' \
                    -not -name '*.template' \
                    -not -name '*.cp' \
                    -not -name '*.bw' \
                    -not -name '*.crt' \
                    -not -name '*.log' \
                    -not -name '*.cmake' \
                    -not -name '*.pth' \
                    -not -name '*.in' \
                    -not -name '*.jar*' \
                    -not -name '*.pom*' \
                    -not -name '*.png' \
                    -not -name '*.jpg' \
                    -not -name '*.sql' \
                    -not -name '*.jpeg' \
                    -not -name '*.svg' \
                    -not -name '*.gif' \
                    -not -name '*.snapshot' \
                    -not -name '*.mak*' \
                    -not -name '*.bash' \
                    -not -name '*.data' \
                    -not -name '*.py' \
                    -not -name '*.class' \
                    -not -name '*.xcconfig' \
                    -not -name '*.ec' \
                    -not -name '*.coverage' \
                    -not -name '*.pyc' \
                    -not -name '*.cfg' \
                    -not -name '*.egg' \
                    -not -name '*.ru' \
                    -not -name '*.css' \
                    -not -name '*.less' \
                    -not -name '*.pyo' \
                    -not -name '*.whl' \
                    -not -name '*.html' \
                    -not -name '*.ftl' \
                    -not -name '*.erb' \
                    -not -name '*.rb' \
                    -not -name '*.js' \
                    -not -name '*.jade' \
                    -not -name '*.db' \
                    -not -name '*.md' \
                    -not -name '*.cpp' \
                    -not -name '*.gradle' \
                    -not -name '*.tar.tz' \
                    -not -name '*.scss' \
                    -not -name 'include.lst' \
                    -not -name 'fullLocaleNames.lst' \
                    -not -name 'inputFiles.lst' \
                    -not -name 'createdFiles.lst' \
                    -not -name 'scoverage.measurements.*' \
                    -not -name 'test_*_coverage.txt' \
                    -not -name 'testrunner-coverage*' \
                    -not -path '*/vendor/*' \
                    -not -path '*/htmlcov/*' \
                    -not -path '*/home/cainus/*' \
                    -not -path '*/virtualenv/*' \
                    -not -path '*/js/generated/coverage/*' \
                    -not -path '*/.virtualenv/*' \
                    -not -path '*/virtualenvs/*' \
                    -not -path '*/.virtualenvs/*' \
                    -not -path '*/.env/*' \
                    -not -path '*/.envs/*' \
                    -not -path '*/env/*' \
                    -not -path '*/envs/*' \
                    -not -path '*/.venv/*' \
                    -not -path '*/.venvs/*' \
                    -not -path '*/venv/*' \
                    -not -path '*/venvs/*' \
                    -not -path '*/.git/*' \
                    -not -path '*/.hg/*' \
                    -not -path '*/.tox/*' \
                    -not -path '*/__pycache__/*' \
                    -not -path '*/.egg-info*' \
                    -not -path '*/$bower_components/*' \
                    -not -path '*/node_modules/*' \
                    -not -path '*/conftest_*.c.gcov' 2>/dev/null"
  files=$(eval "$patterns" || echo '')

elif [ "$include_cov" != "" ];
then
  # searching disabled by an explicit -f, but -f '*.glob' patterns were given
  files=$(eval "find $search_in -type f \( ${include_cov:5} \)$exclude_cov 2>/dev/null" || echo '')
fi
+
+num_of_files=$(echo "$files" | wc -l | tr -d ' ')
+if [ "$num_of_files" != '' ] && [ "$files" != '' ];
+then
+  say "    ${e}->${x} Found $num_of_files reports"
+fi
+
+# no files found
+if [ "$files" = "" ];
+then
+  say "${r}-->${x} No coverage report found."
+  say "    Please visit 
${b}http://docs.codecov.io/docs/supported-languages${x}";
+  exit ${exit_with};
+fi
+
+if [ "$ft_network" == "1" ];
+then
+  say "${e}==>${x} Detecting git/mercurial file structure"
+  network=$(cd "$git_root" && git ls-files 2>/dev/null || hg locate 
2>/dev/null || echo "")
+  if [ "$network" = "" ];
+  then
+    network=$(find "$git_root" -type f \
+                   -not -path '*/virtualenv/*' \
+                   -not -path '*/.virtualenv/*' \
+                   -not -path '*/virtualenvs/*' \
+                   -not -path '*/.virtualenvs/*' \
+                   -not -path '*.png' \
+                   -not -path '*.gif' \
+                   -not -path '*.jpg' \
+                   -not -path '*.jpeg' \
+                   -not -path '*.md' \
+                   -not -path '*/.env/*' \
+                   -not -path '*/.envs/*' \
+                   -not -path '*/env/*' \
+                   -not -path '*/envs/*' \
+                   -not -path '*/.venv/*' \
+                   -not -path '*/.venvs/*' \
+                   -not -path '*/venv/*' \
+                   -not -path '*/venvs/*' \
+                   -not -path '*/build/lib/*' \
+                   -not -path '*/.git/*' \
+                   -not -path '*/.egg-info/*' \
+                   -not -path '*/shunit2-2.1.6/*' \
+                   -not -path '*/vendor/*' \
+                   -not -path '*/js/generated/coverage/*' \
+                   -not -path '*/__pycache__/*' \
+                   -not -path '*/node_modules/*' \
+                   -not -path "*/$bower_components/*" 2>/dev/null || echo '')
+  fi
+fi
+
+upload_file=`mktemp /tmp/codecov.XXXXXX`
+
+cleanup() {
+    rm -f $upload_file
+}
+
+trap cleanup INT ABRT TERM
+
+if [ "$env" != "" ];
+then
+  inc_env=""
+  say "${e}==>${x} Appending build variables"
+  for varname in $(echo "$env" | tr ',' ' ')
+  do
+    if [ "$varname" != "" ];
+    then
+      say "    ${g}+${x} $varname"
+      inc_env="${inc_env}${varname}=$(eval echo "\$${varname}")
+"
+    fi
+  done
+
+echo "$inc_env<<<<<< ENV" >> $upload_file
+fi
+
+if [ "$ft_network" == "1" ];
+then
+  echo "$network
+<<<<<< network" >> $upload_file
+fi
+
+fr=0
+say "${e}==>${x} Reading reports"
+while IFS='' read -r file;
+do
+  # read the coverage file
+  if [ "$(echo "$file" | tr -d ' ')" != '' ];
+  then
+    if [ -f "$file" ];
+    then
+      report_len=$(wc -c < "$file")
+      if [ "$report_len" -ne 0 ];
+      then
+        say "    ${g}+${x} $file ${e}bytes=$(echo "$report_len" | tr -d ' 
')${x}"
+        # append to upload
+        echo "# path=$(echo "$file" | sed "s|^$git_root/||")" >> $upload_file
+        cat "$file" >> $upload_file
+        echo "<<<<<< EOF" >> $upload_file
+        fr=1
+        if [ "$clean" = "1" ];
+        then
+          rm "$file"
+        fi
+      else
+        say "    ${r}-${x} Skipping empty file $file"
+      fi
+    else
+      say "    ${r}-${x} file not found at $file"
+    fi
+  fi
+done <<< "$(echo -e "$files")"
+
+if [ "$fr" = "0" ];
+then
+  say "${r}-->${x} No coverage data found."
+  say "    Please visit 
${b}http://docs.codecov.io/docs/supported-languages${x}";
+  say "    search for your projects language to learn how to collect reports."
+  exit ${exit_with};
+fi
+
+if [ "$ft_fix" = "1" ];
+then
+  say "${e}==>${x} Appending adjustments"
+  say "    ${b}http://docs.codecov.io/docs/fixing-reports${x}";
+  adjustments=""
+
+  if echo "$network" | grep -m1 '.kt$' 1>/dev/null;
+  then
+    # skip brackets and comments
+    adjustments="$adjustments
+$(find "$git_root" -type f -name '*.kt' -exec grep -nIH '^[[:space:]]*}$' {} 
\; | cut -f1-2 -d: 2>/dev/null || echo '')
+$(find "$git_root" -type f -name '*.kt' -exec grep -nIH '^/\*' {} \; | cut 
-f1-2 -d: 2>/dev/null || echo '')"
+  fi
+
+  if echo "$network" | grep -m1 '.go$' 1>/dev/null;
+  then
+    # skip empty lines, comments, and brackets
+    adjustments="$adjustments
+$(find "$git_root" -type f -not -path '*/vendor/*' -name '*.go' -exec grep 
-nIH \
+      -e '^[[:space:]]*$' \
+      -e '^[[:space:]]*//.*' \
+      -e '^[[:space:]]*/\*' \
+      -e '^[[:space:]]*\*/' \
+      -e '^[[:space:]]*}$' \
+      {} \; | cut -f1-2 -d: 2>/dev/null || echo '')"
+  fi
+
+  if echo "$network" | grep -m1 '.jsx$' 1>/dev/null;
+  then
+    # skip empty lines, comments, and brackets
+    adjustments="$adjustments
+$(find "$git_root" -type f -name '*.jsx' -exec grep -nIH \
+      -e '^[[:space:]]*$' \
+      -e '^[[:space:]]*//.*' \
+      -e '^[[:space:]]*/\*' \
+      -e '^[[:space:]]*\*/' \
+      -e '^[[:space:]]*}$' \
+      {} \; | cut -f1-2 -d: 2>/dev/null || echo '')"
+  fi
+
+  if echo "$network" | grep -m1 '.php$' 1>/dev/null;
+  then
+    # skip empty lines, comments, and brackets
+    adjustments="$adjustments
+$(find "$git_root" -type f -not -path '*/vendor/*' -name '*.php' -exec grep 
-nIH \
+      -e '^[[:space:]]*[\{\}\\[][[:space:]]*$' \
+      -e '^[[:space:]]*);[[:space:]]*$' \
+      -e '^[[:space:]]*][[:space:]]*$' \
+      {} \; | cut -f1-2 -d: 2>/dev/null || echo '')"
+  fi
+
+  if echo "$network" | grep -m1 '\(.cpp\|.h\|.cxx\|.c\|.hpp\|.m\)$' 
1>/dev/null;
+  then
+    # skip brackets
+    adjustments="$adjustments
+$(find "$git_root" -type f \( -name '*.h' -or -name '*.cpp' -or -name '*.cxx' 
-or -name '*.m' -or -name '*.c' -or -name '*.hpp' \) -exec grep -nIH 
'^[[:space:]]*}' {} \; | cut -f1-2 -d: 2>/dev/null || echo '')"
+  fi
+
+  # join files into single line
+  found=$(echo "$adjustments" | wc -l | tr -d ' ')
+  adjustments=$(echo "$adjustments" | awk 'BEGIN { FS=":" }
+                                           $1!=key { if (key!="") print out ; 
key=$1 ; out=$0 ; next }
+                                           { out=out","$2 }
+                                           END { print out }')
+
+  if echo "$network" | grep -m1 '\(.cpp\|.h\|.cxx\|.c\|.hpp\|.m\)$' 
1>/dev/null;
+  then
+    # skip LCOV_EXCL
+    adjustments="$adjustments
+$(find "$git_root" -type f \( -name '*.h' -or -name '*.cpp' -or -name '*.cxx' 
-or -name '*.m' -or -name '*.c' -or -name '*.hpp' \) -exec grep -nIH '// 
LCOV_EXCL' {} \; 2>/dev/null || echo '')"
+  fi
+
+  if echo "$network" | grep -m1 '.kt$' 1>/dev/null;
+  then
+    # last line in file
+    adjustments="$adjustments
+$(find "$git_root" -type f -name '*.kt' -exec wc -l {} \; | while read l; do 
echo "EOF: $l"; done 2>/dev/null || echo '')"
+  fi
+
+  if [ "$found" != "1" ];
+  then
+    say "    ${g}+${x} Found $found adjustments"
+    echo "# path=fixes
+$adjustments
+<<<<<< EOF" >> $upload_file
+  else
+    say "    ${e}->${x} Found 0 adjustments"
+  fi
+fi
+
+if [ "$url_o" != "" ];
+then
+  url="$url_o"
+fi
+# trim whitespace from query
+
+if [ "$dump" != "0" ];
+then
+  echo "$url/upload/v4?$(echo "package=bash-$VERSION&token=$token&$query" | tr 
-d ' ')"
+  cat $upload_file
+else
+
+  query=$(echo "${query}" | tr -d ' ')
+  say "${e}==>${x} Uploading reports"
+  say "    ${e}url:${x} $url"
+  say "    ${e}query:${x} $query"
+
+  # now add token to query
+  query=$(echo "package=bash-$VERSION&token=$token&$query" | tr -d ' ')
+
+  if [ "$ft_s3" = "1" ];
+  then
+    say "    ${e}->${x} Pinging Codecov"
+    res=$(curl -sX $cacert POST "$url/upload/v4?$query" -H 'Accept: 
text/plain' || true)
+    # a good replay is "https://codecov.io"; + "\n" + 
"https://codecov.s3.amazonaws.com/...";
+    status=$(echo "$res" | head -1 | grep 'HTTP ' | cut -d' ' -f2)
+    if [ "$status" = "" ];
+    then
+      s3target=$(echo "$res" | sed -n 2p)
+      say "    ${e}->${x} Uploading to S3 $(echo "$s3target" | cut -c1-32)"
+      s3=$(curl -fisX PUT --data-binary @$upload_file \
+                -H 'Content-Type: text/plain' \
+                -H 'x-amz-acl: public-read' \
+                -H 'x-amz-storage-class: REDUCED_REDUNDANCY' \
+                "$s3target" || true)
+      if [ "$s3" != "" ];
+      then
+        say "    ${g}->${x} View reports at ${b}$(echo "$res" | sed -n 1p)${x}"
+        exit 0
+      else
+        say "    ${r}X>${x} Failed to upload to S3"
+      fi
+    elif [ "$status" = "400" ];
+    then
+        # 400 Error
+        say "${g}${res}${x}"
+        exit ${exit_with}
+    fi
+  fi
+
+  say "    ${e}->${x} Uploading to Codecov"
+  i="0"
+  while [ $i -lt 4 ]
+  do
+    i=$[$i+1]
+
+    res=$(curl -sX POST $cacert --data-binary @$upload_file 
"$url/upload/v2?$query" -H 'Accept: text/plain' || echo 'HTTP 500')
+    # HTTP 200
+    # http://....
+    status=$(echo "$res" | head -1 | cut -d' ' -f2)
+    if [ "$status" = "" ];
+    then
+      say "    View reports at ${b}$(echo "$res" | head -2 | tail -1)${x}"
+      exit 0
+
+    elif [ "${status:0:1}" = "5" ];
+    then
+      say "    ${e}->${x} Sleeping for 10s and trying again..."
+      sleep 10
+
+    else
+      say "    ${g}${res}${x}"
+      exit 0
+      exit ${exit_with}
+    fi
+
+  done
+
+fi
+
+say "    ${r}X> Failed to upload coverage reports${x}"
+exit ${exit_with}
+
+# EOF
diff --git a/.travis.yml b/.travis.yml
index 38efe05..a719386 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -31,7 +31,8 @@ before_script:
 
 script: ./travis/test.sh
 
-after_success: ./gradlew coveralls
+after_success:
+  - bash <(cat .codecov_bash)
 after_failure: ./travis/junit-errors-to-stdout.sh
 
 
diff --git a/README.md b/README.md
index 1300ea2..5fb5860 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# Apache Gobblin [![Build 
Status](https://secure.travis-ci.org/linkedin/gobblin.png)](https://travis-ci.org/linkedin/gobblin)
 [![Documentation 
Status](https://readthedocs.org/projects/gobblin/badge/?version=latest)](http://gobblin.readthedocs.org/en/latest/?badge=latest)
+# Apache Gobblin [![Build 
Status](https://api.travis-ci.org/apache/incubator-gobblin.svg?branch=master)](https://travis-ci.org/apache/incubator-gobblin)
 [![Documentation 
Status](https://readthedocs.org/projects/gobblin/badge/?version=latest)](http://gobblin.readthedocs.org/en/latest/?badge=latest)
 
[![codecov.io](https://codecov.io/github/apache/incubator-gobblin/branch/master/graph/badge.svg)](https://codecov.io/github/apache/incubator-gobblin)
 
 Apache Gobblin is a universal data ingestion framework for extracting, 
transforming, and loading large volume of data from a variety of data sources, 
e.g., databases, rest APIs, FTP/SFTP servers, filers, etc., onto Hadoop. Apache 
Gobblin handles the common routine tasks required for all data ingestion ETLs, 
including job/task scheduling, task partitioning, error handling, state 
management, data quality checking, data publishing, etc. Gobblin ingests data 
from different data sources in th [...]
 

Reply via email to