Github user nchammas commented on a diff in the pull request:
https://github.com/apache/spark/pull/6770#discussion_r32330878
--- Diff: build/mvn ---
@@ -105,28 +108,16 @@ install_scala() {
SCALA_LIBRARY="$(cd "$(dirname ${scala_bin})/../lib" &&
pwd)/scala-library.jar"
}
-# Determines if a given application is already installed. If not, will
attempt
-# to install
-## Arg1 - application name
-## Arg2 - Alternate path to local install under build/ dir
-check_and_install_app() {
- # create the local environment variable in uppercase
- local app_bin="`echo $1 | awk '{print toupper(\$0)}'`_BIN"
- # some black magic to set the generated app variable (i.e. MVN_BIN) into
the
- # environment
- eval "${app_bin}=`which $1 2>/dev/null`"
-
- if [ -z "`which $1 2>/dev/null`" ]; then
- install_$1
- fi
-}
-
# Setup healthy defaults for the Zinc port if none were provided from
# the environment
ZINC_PORT=${ZINC_PORT:-"3030"}
-# Check and install all applications necessary to build Spark
-check_and_install_app "mvn"
+# Install Maven if necessary
+MVN_BIN="$(command -v mvn)"
+
+if [ ! "$MVN_BIN" ]; then
+ install_mvn
--- End diff --
> I believe we'd also need to require Maven 3.3 in the build, and in the
enforcer plugin, to actually fail if it is used with a local version that's
not high enough?
@srowen - We can always install Maven at this step and ignore any existing
Maven installs. Would that be sufficient?
---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes to enable it, or if the feature is enabled but not working,
please contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]