This is an automated email from the ASF dual-hosted git repository.
baunsgaard pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/systemds.git
The following commit(s) were added to refs/heads/main by this push:
new d43c492433 [MINOR] Windows update and setup
d43c492433 is described below
commit d43c492433bc6c87ba54afce3f393098d7a9a5fc
Author: baunsgaard <[email protected]>
AuthorDate: Wed Feb 1 18:40:08 2023 +0100
[MINOR] Windows update and setup
Update to java 11 in install guide and add windows install guide.
Closes #1778
---
docs/site/install.md | 82 +++++++++++----------
docs/site/windows-source-installation.md | 1 -
src/test/config/hadoop_bin_windows/bin/hadoop | 15 +++-
src/test/config/hadoop_bin_windows/bin/hadoop.cmd | 12 ++-
src/test/config/hadoop_bin_windows/bin/hadoop.dll | Bin 85504 -> 96256 bytes
src/test/config/hadoop_bin_windows/bin/hadoop.exp | Bin 17633 -> 24775 bytes
src/test/config/hadoop_bin_windows/bin/hadoop.lib | Bin 29676 -> 41422 bytes
src/test/config/hadoop_bin_windows/bin/hadoop.pdb | Bin 486400 -> 839680 bytes
src/test/config/hadoop_bin_windows/bin/hdfs | 16 +++-
src/test/config/hadoop_bin_windows/bin/hdfs.dll | Bin 63488 -> 0 bytes
src/test/config/hadoop_bin_windows/bin/hdfs.exp | Bin 10678 -> 0 bytes
src/test/config/hadoop_bin_windows/bin/hdfs.lib | Bin 373450 -> 0 bytes
src/test/config/hadoop_bin_windows/bin/hdfs.pdb | Bin 371712 -> 0 bytes
.../config/hadoop_bin_windows/bin/libwinutils.lib | Bin 1246294 -> 1597574 bytes
src/test/config/hadoop_bin_windows/bin/mapred | 14 +++-
.../config/hadoop_bin_windows/bin/winutils.exe | Bin 112640 -> 118784 bytes
.../config/hadoop_bin_windows/bin/winutils.pdb | Bin 904192 -> 1355776 bytes
src/test/config/hadoop_bin_windows/bin/yarn | 40 +++++++++-
src/test/config/hadoop_bin_windows/bin/yarn.cmd | 6 ++
19 files changed, 137 insertions(+), 49 deletions(-)
diff --git a/docs/site/install.md b/docs/site/install.md
index 4cdbebe4e3..dba533e1f7 100644
--- a/docs/site/install.md
+++ b/docs/site/install.md
@@ -24,31 +24,47 @@ limitations under the License.
This guide helps in the install and setup of SystemDS from source code.
- [Windows Guide](#windows)
-- [Ubuntu/Linux Guide](#ubuntu-2004)
+- [Ubuntu/Linux Guide](#ubuntu-2204)
- [Mac Guide](#mac)
-## Windows
+Once the setup for your platform is done, skip to the common part of building the system.
-[Developer Guide](windows-source-installation)
+---
+
+## 1. Install
---
-## Ubuntu 20.04
+### Windows
+
+First set up Java and Maven to compile the system. Note that the required Java version is 11; we suggest using OpenJDK 11.
+
+- <https://openjdk.org/>
+- <https://maven.apache.org/download.cgi>
+
+Set up the environment variables JAVA_HOME and MAVEN_HOME, and add JAVA_HOME/bin and MAVEN_HOME/bin to the PATH environment variable. An example of setting JAVA_HOME can be found here: <https://www.thewindowsclub.com/set-java_home-in-windows-10>
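+
+A minimal sketch of these variables, assuming a POSIX shell such as Git Bash and hypothetical install locations:
+
+```bash
+# The paths below are example locations; point them at your actual
+# JDK 11 and Maven installations.
+export JAVA_HOME="/c/Program Files/Java/jdk-11"
+export MAVEN_HOME="/c/tools/apache-maven"
+export PATH="$JAVA_HOME/bin:$MAVEN_HOME/bin:$PATH"
+```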
+
+To run the system, we also have to set up some Hadoop- and Spark-specific libraries. These can be found in the SystemDS repository: either copy the files out, or add 'src/test/config/hadoop_bin_windows/bin' to PATH. Just like JAVA_HOME, set a HADOOP_HOME environment variable to this directory without the bin part, and add %HADOOP_HOME%/bin to PATH.
+
+Finally, if you want to run SystemDS from the command line, add a SYSTEMDS_ROOT environment variable that points to the repository root, and add its bin folder to PATH.
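+
+Continuing the sketch above, with the repository checked out at a hypothetical /c/workspace/systemds:
+
+```bash
+# HADOOP_HOME points at the directory that contains bin, not at bin itself;
+# the checkout location is only an example.
+export HADOOP_HOME="/c/workspace/systemds/src/test/config/hadoop_bin_windows"
+export SYSTEMDS_ROOT="/c/workspace/systemds"
+export PATH="$HADOOP_HOME/bin:$SYSTEMDS_ROOT/bin:$PATH"
+```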
+
+To make the build go faster, set the IDE or environment variables for Java to '-Xmx16g -Xms16g -Xmn1600m', with the memory set to something close to the maximum memory of the device you are using.
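+
+One way to apply these flags to Maven builds, as a sketch for a POSIX shell, is the MAVEN_OPTS variable:
+
+```bash
+# A 16 GB heap assumes the machine has more than 16 GB of RAM; scale the
+# sizes down on smaller devices.
+export MAVEN_OPTS="-Xmx16g -Xms16g -Xmn1600m"
+mvn clean package   # the flags now apply to the JVM running the build
+```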
+
+Before you start editing the files, remember to import the code style formatting into the IDE, to keep the changes to the files consistent.
+
+A suggested starting point is to run some of the component tests from your IDE.
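+
+The component tests can also be run from the command line via Maven's Surefire plugin; the class name below is a placeholder, pick any concrete test class under src/test/java:
+
+```bash
+# 'SomeComponentTest' is a placeholder name; substitute a real test class.
+mvn test -Dtest=SomeComponentTest
+```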
+
+---
-### Java and Maven
+### Ubuntu 22.04
-First setup java and maven to compile the system note that the java version is 1.8.
+First set up Java and Maven to compile the system. Note that the Java version is 11.
```bash
-sudo apt install openjdk-8-jdk-headless
+sudo apt install openjdk-11-jdk
sudo apt install maven
```
-Note: To update the `java` command to `openjdk-8` run:
-```sh
-update-alternatives --set java /usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java
-```
-
Verify the install with:
```bash
@@ -59,9 +75,10 @@ mvn -version
This should return something like:
```bash
-openjdk version "1.8.0_252"
-OpenJDK Runtime Environment (build 1.8.0_252-8u252-b09-1ubuntu1-b09)
-OpenJDK 64-Bit Server VM (build 25.252-b09, mixed mode)
+openjdk 11.0.17 2022-10-18
+OpenJDK Runtime Environment (build 11.0.17+8-post-Ubuntu-1ubuntu220.04)
+OpenJDK 64-Bit Server VM (build 11.0.17+8-post-Ubuntu-1ubuntu220.04, mixed mode, sharing)
+
Apache Maven 3.6.3
Maven home: /usr/share/maven
Java version: 1.8.0_252, vendor: Private Build, runtime: /usr/lib/jvm/java-8-openjdk-amd64/jre
@@ -69,9 +86,9 @@ Default locale: en_US, platform encoding: UTF-8
OS name: "linux", version: "5.4.0-37-generic", arch: "amd64", family: "unix"
```
-### Testing
+#### Testing
-R is required to be install to run the test suite, since many tests are constructed to comprare output with common R packages.
+R is required to be installed to run the test suite, since many tests are constructed to compare output with common R packages.
One option to install this is to follow the guide on the following link:
<https://linuxize.com/post/how-to-install-r-on-ubuntu-20-04/>
At the time of writing the commands to install R 4.0.2 are:
@@ -83,25 +100,23 @@ sudo add-apt-repository 'deb https://cloud.r-project.org/bin/linux/ubuntu focal-
sudo apt install r-base
```
-Optionally, you need to install the R depedencies for integration tests, like this:
+Optionally, install the R dependencies for integration tests like this:
(use `sudo` mode if the script couldn't write to local R library)
```bash
Rscript ./src/test/scripts/installDependencies.R
```
-See [Build the project](#Build%20the%20project) to compile the code from here.
-
---
-## MAC
+### Mac
Prerequisite install homebrew on the device.
```bash
# To allow relative paths:
brew install coreutils
-# To install open jdk 8.
+# To install open jdk 11.
brew tap adoptopenjdk/openjdk
-brew cask install adoptopenjdk8
+brew cask install adoptopenjdk11
# Install maven to enable compilation of SystemDS.
@@ -115,37 +130,26 @@ java --version
mvn --version
```
-This should print something like:
+This should print the Java and Maven versions.
-```bash
-Java version: 1.8.0_242, vendor: AdoptOpenJDK, runtime: /Library/Java/JavaVirtualMachines/adoptopenjdk-8.jdk/Contents/Home/jre
-Default locale: ru_UA, platform encoding: UTF-8
-OS name: "mac os x", version: "10.15.5", arch: "x86_64", family: "mac"
+Note that if you have multiple __java__ versions installed, you have to change the used version to 11, for __both java and javadoc__. This is done by setting the environment variable JAVA_HOME to the install path of OpenJDK 11:
-Apache Maven 3.6.3 (cecedd343002696d0abb50b32b541b8a6ba2883f)
-Maven home: /usr/local/Cellar/maven/3.6.3_1/libexec
-```
-
-Note that if you have multiple __java__ versions installed then you have to change the used version to 8, on __both java and javadoc__. This is done by setting the environment variable JAVA_HOME to the install path of open JDK 8 :
-
-``` bash
-export JAVA_HOME=`/usr/libexec/java_home -v 1.8`
+```bash
+export JAVA_HOME=`/usr/libexec/java_home -v 11`
```
For running all tests [r-base](https://cran.r-project.org/bin/macosx/) has to be installed as well since this is used as a secondary system to verify the correctness of our code, but it is not a requirement to enable building the project.
-Optionally, you need to install the R depedencies for integration tests, like this:
+Optionally, install the R dependencies for integration tests like this:
(use `sudo` mode if the script couldn't write to local R library)
```bash
Rscript ./src/test/scripts/installDependencies.R
```
-See [Build the project](#Build%20the%20project) to compile the code from here.
-
---
-## Build the project
+## 2. Build the project
To compile the project use:
diff --git a/docs/site/windows-source-installation.md b/docs/site/windows-source-installation.md
index d503c0100b..9bb0f1acba 100644
--- a/docs/site/windows-source-installation.md
+++ b/docs/site/windows-source-installation.md
@@ -27,7 +27,6 @@ These instructions will help you build Apache SystemDS from source code, which i
and algorithms development. The following conventions will be used to refer to directories on your machine:
* `<USER_HOME>` is your home directory.
-* `<JDK_18_HOME>` is the root directory for the 1.8 JDK.
* `<MAVEN_HOME>` is the root directory for the Apache Maven source code.
* `<SYSTEMDS_HOME>` is the root directory for the SystemDS source code.
* `<SPARK_HOME>` is the root directory for the Apache Spark source code.
diff --git a/src/test/config/hadoop_bin_windows/bin/hadoop b/src/test/config/hadoop_bin_windows/bin/hadoop
index 38346002cc..882c8588bf 100644
--- a/src/test/config/hadoop_bin_windows/bin/hadoop
+++ b/src/test/config/hadoop_bin_windows/bin/hadoop
@@ -15,8 +15,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# The name of the script being executed.
+HADOOP_SHELL_EXECNAME="hadoop"
MYNAME="${BASH_SOURCE-$0}"
-HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
## @description build up the hadoop command's usage text.
## @audience public
@@ -44,6 +45,7 @@ function hadoop_usage
hadoop_add_subcommand "key" client "manage keys via the KeyProvider"
hadoop_add_subcommand "trace" client "view and modify Hadoop tracing
settings"
hadoop_add_subcommand "version" client "print the version"
+ hadoop_add_subcommand "kdiag" client "Diagnose Kerberos Problems"
hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
}
@@ -124,6 +126,10 @@ function hadoopcmd_case
echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
+ if [[ -n "${QATESTMODE}" ]]; then
+ echo "MYNAME=${MYNAME}"
+ echo "HADOOP_SHELL_EXECNAME=${HADOOP_SHELL_EXECNAME}"
+ fi
exit 0
;;
fs)
@@ -133,6 +139,10 @@ function hadoopcmd_case
if [[ -n "${YARN_OPTS}" ]] || [[ -n "${YARN_CLIENT_OPTS}" ]]; then
hadoop_error "WARNING: Use \"yarn jar\" to launch YARN applications."
fi
+ if [[ -z $1 || $1 = "--help" ]]; then
+ echo "Usage: hadoop jar <jar> [mainClass] args..."
+ exit 0
+ fi
HADOOP_CLASSNAME=org.apache.hadoop.util.RunJar
;;
jnipath)
@@ -143,6 +153,9 @@ function hadoopcmd_case
kerbname)
HADOOP_CLASSNAME=org.apache.hadoop.security.HadoopKerberosName
;;
+ kdiag)
+ HADOOP_CLASSNAME=org.apache.hadoop.security.KDiag
+ ;;
key)
HADOOP_CLASSNAME=org.apache.hadoop.crypto.key.KeyShell
;;
diff --git a/src/test/config/hadoop_bin_windows/bin/hadoop.cmd b/src/test/config/hadoop_bin_windows/bin/hadoop.cmd
index a21ebe658c..04e5039d19 100644
--- a/src/test/config/hadoop_bin_windows/bin/hadoop.cmd
+++ b/src/test/config/hadoop_bin_windows/bin/hadoop.cmd
@@ -149,7 +149,7 @@ call :updatepath %HADOOP_BIN_PATH%
exit /b
)
- set corecommands=fs version jar checknative conftest distch distcp daemonlog archive classpath credential kerbname key trace
+ set corecommands=fs version jar checknative conftest distch distcp daemonlog archive classpath credential kerbname key trace kdiag
for %%i in ( %corecommands% ) do (
if %hadoop-command% == %%i set corecommand=true
)
@@ -189,6 +189,11 @@ call :updatepath %HADOOP_BIN_PATH%
) else if defined YARN_CLIENT_OPTS (
@echo WARNING: Use "yarn jar" to launch YARN applications.
)
+ @rem if --help option is used, no need to call command
+ if [!hadoop-command-arguments[%1%]!]==["--help"] (
+ @echo Usage: hadoop jar <jar> [mainClass] args...
+ goto :eof
+ )
set CLASS=org.apache.hadoop.util.RunJar
goto :eof
@@ -231,6 +236,10 @@ call :updatepath %HADOOP_BIN_PATH%
set CLASS=org.apache.hadoop.security.HadoopKerberosName
goto :eof
+:kdiag
+ set CLASS=org.apache.hadoop.security.KDiag
+ goto :eof
+
:key
set CLASS=org.apache.hadoop.crypto.key.KeyShell
goto :eof
@@ -307,6 +316,7 @@ call :updatepath %HADOOP_BIN_PATH%
@echo credential interact with credential providers
@echo jnipath prints the java.library.path
@echo kerbname show auth_to_local principal conversion
+ @echo kdiag diagnose kerberos problems
@echo key manage keys via the KeyProvider
@echo trace view and modify Hadoop tracing settings
@echo daemonlog get/set the log level for each daemon
diff --git a/src/test/config/hadoop_bin_windows/bin/hadoop.dll b/src/test/config/hadoop_bin_windows/bin/hadoop.dll
index 1409cb8bed..441d3edd7d 100644
Binary files a/src/test/config/hadoop_bin_windows/bin/hadoop.dll and b/src/test/config/hadoop_bin_windows/bin/hadoop.dll differ
diff --git a/src/test/config/hadoop_bin_windows/bin/hadoop.exp b/src/test/config/hadoop_bin_windows/bin/hadoop.exp
index cdcd127b05..9a86d4f2d6 100644
Binary files a/src/test/config/hadoop_bin_windows/bin/hadoop.exp and b/src/test/config/hadoop_bin_windows/bin/hadoop.exp differ
diff --git a/src/test/config/hadoop_bin_windows/bin/hadoop.lib b/src/test/config/hadoop_bin_windows/bin/hadoop.lib
index 7544e8d490..2d080f79c3 100644
Binary files a/src/test/config/hadoop_bin_windows/bin/hadoop.lib and b/src/test/config/hadoop_bin_windows/bin/hadoop.lib differ
diff --git a/src/test/config/hadoop_bin_windows/bin/hadoop.pdb b/src/test/config/hadoop_bin_windows/bin/hadoop.pdb
index aa3cd7ca50..7faed176f0 100644
Binary files a/src/test/config/hadoop_bin_windows/bin/hadoop.pdb and b/src/test/config/hadoop_bin_windows/bin/hadoop.pdb differ
diff --git a/src/test/config/hadoop_bin_windows/bin/hdfs b/src/test/config/hadoop_bin_windows/bin/hdfs
index a37c39f220..94426a561f 100644
--- a/src/test/config/hadoop_bin_windows/bin/hdfs
+++ b/src/test/config/hadoop_bin_windows/bin/hdfs
@@ -15,8 +15,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# The name of the script being executed.
+HADOOP_SHELL_EXECNAME="hdfs"
MYNAME="${BASH_SOURCE-$0}"
-HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
## @description build up the hdfs command's usage text.
## @audience public
@@ -61,7 +62,8 @@ function hadoop_usage
hadoop_add_subcommand "portmap" daemon "run a portmap service"
hadoop_add_subcommand "secondarynamenode" daemon "run the DFS secondary
namenode"
hadoop_add_subcommand "snapshotDiff" client "diff two snapshots of a
directory or diff the current directory contents with a snapshot"
- hadoop_add_subcommand "storagepolicies" admin "list/get/set block storage
policies"
+ hadoop_add_subcommand "storagepolicies" admin
"list/get/set/satisfyStoragePolicy block storage policies"
+ hadoop_add_subcommand "sps" daemon "run external storagepolicysatisfier"
hadoop_add_subcommand "version" client "print the version"
hadoop_add_subcommand "zkfc" daemon "run the ZK Failover Controller daemon"
hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" false
@@ -126,6 +128,10 @@ function hdfscmd_case
echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
+ if [[ -n "${QATESTMODE}" ]]; then
+ echo "MYNAME=${MYNAME}"
+ echo "HADOOP_SHELL_EXECNAME=${HADOOP_SHELL_EXECNAME}"
+ fi
exit 0
;;
ec)
@@ -196,6 +202,10 @@ function hdfscmd_case
storagepolicies)
HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.StoragePolicyAdmin
;;
+ sps)
+ HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
+ HADOOP_CLASSNAME=org.apache.hadoop.hdfs.server.sps.ExternalStoragePolicySatisfier
+ ;;
version)
HADOOP_CLASSNAME=org.apache.hadoop.util.VersionInfo
;;
@@ -269,4 +279,4 @@ fi
hadoop_subcommand_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"
# everything is in globals at this point, so call the generic handler
-hadoop_generic_java_subcmd_handler
\ No newline at end of file
+hadoop_generic_java_subcmd_handler
diff --git a/src/test/config/hadoop_bin_windows/bin/hdfs.dll b/src/test/config/hadoop_bin_windows/bin/hdfs.dll
deleted file mode 100644
index a6767b3058..0000000000
Binary files a/src/test/config/hadoop_bin_windows/bin/hdfs.dll and /dev/null differ
diff --git a/src/test/config/hadoop_bin_windows/bin/hdfs.exp b/src/test/config/hadoop_bin_windows/bin/hdfs.exp
deleted file mode 100644
index a5265ad5c6..0000000000
Binary files a/src/test/config/hadoop_bin_windows/bin/hdfs.exp and /dev/null differ
diff --git a/src/test/config/hadoop_bin_windows/bin/hdfs.lib b/src/test/config/hadoop_bin_windows/bin/hdfs.lib
deleted file mode 100644
index 48aa3634e4..0000000000
Binary files a/src/test/config/hadoop_bin_windows/bin/hdfs.lib and /dev/null differ
diff --git a/src/test/config/hadoop_bin_windows/bin/hdfs.pdb b/src/test/config/hadoop_bin_windows/bin/hdfs.pdb
deleted file mode 100644
index e97e101c34..0000000000
Binary files a/src/test/config/hadoop_bin_windows/bin/hdfs.pdb and /dev/null differ
diff --git a/src/test/config/hadoop_bin_windows/bin/libwinutils.lib b/src/test/config/hadoop_bin_windows/bin/libwinutils.lib
index 83c9a6af89..39e74bb14a 100644
Binary files a/src/test/config/hadoop_bin_windows/bin/libwinutils.lib and b/src/test/config/hadoop_bin_windows/bin/libwinutils.lib differ
diff --git a/src/test/config/hadoop_bin_windows/bin/mapred b/src/test/config/hadoop_bin_windows/bin/mapred
index f66f563aae..9773ec89de 100644
--- a/src/test/config/hadoop_bin_windows/bin/mapred
+++ b/src/test/config/hadoop_bin_windows/bin/mapred
@@ -15,8 +15,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# The name of the script being executed.
+HADOOP_SHELL_EXECNAME="mapred"
MYNAME="${BASH_SOURCE-$0}"
-HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
+
## @description build up the mapred command's usage text.
## @audience public
@@ -32,7 +34,9 @@ function hadoop_usage
hadoop_add_subcommand "pipes" client "run a Pipes job"
hadoop_add_subcommand "queue" client "get information regarding JobQueues"
hadoop_add_subcommand "sampler" client "sampler"
+ hadoop_add_subcommand "frameworkuploader" admin "mapreduce framework upload"
hadoop_add_subcommand "version" client "print the version"
+ hadoop_add_subcommand "minicluster" client "CLI MiniCluster"
hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
}
@@ -92,9 +96,17 @@ function mapredcmd_case
sampler)
HADOOP_CLASSNAME=org.apache.hadoop.mapred.lib.InputSampler
;;
+ frameworkuploader)
+ HADOOP_CLASSNAME=org.apache.hadoop.mapred.uploader.FrameworkUploader
+ ;;
version)
HADOOP_CLASSNAME=org.apache.hadoop.util.VersionInfo
;;
+ minicluster)
+ hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_DIR}/timelineservice"'/*'
+ hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_DIR}/test"'/*'
+ HADOOP_CLASSNAME=org.apache.hadoop.mapreduce.MiniHadoopClusterManager
+ ;;
*)
HADOOP_CLASSNAME="${subcmd}"
if ! hadoop_validate_classname "${HADOOP_CLASSNAME}"; then
diff --git a/src/test/config/hadoop_bin_windows/bin/winutils.exe b/src/test/config/hadoop_bin_windows/bin/winutils.exe
index 1b341e586e..75be699559 100644
Binary files a/src/test/config/hadoop_bin_windows/bin/winutils.exe and b/src/test/config/hadoop_bin_windows/bin/winutils.exe differ
diff --git a/src/test/config/hadoop_bin_windows/bin/winutils.pdb b/src/test/config/hadoop_bin_windows/bin/winutils.pdb
index 97d4aab918..6378e09085 100644
Binary files a/src/test/config/hadoop_bin_windows/bin/winutils.pdb and b/src/test/config/hadoop_bin_windows/bin/winutils.pdb differ
diff --git a/src/test/config/hadoop_bin_windows/bin/yarn b/src/test/config/hadoop_bin_windows/bin/yarn
index 0f19989efc..8290fcda8d 100644
--- a/src/test/config/hadoop_bin_windows/bin/yarn
+++ b/src/test/config/hadoop_bin_windows/bin/yarn
@@ -15,8 +15,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# The name of the script being executed.
+HADOOP_SHELL_EXECNAME="yarn"
MYNAME="${BASH_SOURCE-$0}"
-HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
## @description build up the yarn command's usage text.
## @audience public
@@ -31,7 +32,7 @@ function hadoop_usage
hadoop_add_option "--hosts filename" "list of hosts to use in worker mode"
hadoop_add_option "--workers" "turn on worker mode"
- hadoop_add_subcommand "application" client "prints application(s) report/kill application"
+ hadoop_add_subcommand "app|application" client "prints application(s) report/kill application/manage long running application"
hadoop_add_subcommand "applicationattempt" client "prints applicationattempt(s) report"
hadoop_add_subcommand "classpath" client "prints the class path needed to get the hadoop jar and the required libraries"
hadoop_add_subcommand "cluster" client "prints cluster information"
@@ -44,6 +45,7 @@ function hadoop_usage
hadoop_add_subcommand "nodemanager" daemon "run a nodemanager on each worker"
hadoop_add_subcommand "proxyserver" daemon "run the web app proxy server"
hadoop_add_subcommand "queue" client "prints queue information"
+ hadoop_add_subcommand "registrydns" daemon "run the registry DNS server"
hadoop_add_subcommand "resourcemanager" daemon "run the ResourceManager"
hadoop_add_subcommand "rmadmin" admin "admin tools"
hadoop_add_subcommand "router" daemon "run the Router daemon"
@@ -53,6 +55,7 @@ function hadoop_usage
hadoop_add_subcommand "timelinereader" client "run the timeline reader
server"
hadoop_add_subcommand "timelineserver" daemon "run the timeline server"
hadoop_add_subcommand "top" client "view cluster information"
+ hadoop_add_subcommand "nodeattributes" client "node attributes cli client"
hadoop_add_subcommand "version" client "print the version"
hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
}
@@ -68,10 +71,18 @@ function yarncmd_case
shift
case ${subcmd} in
- application|applicationattempt|container)
+ app|application|applicationattempt|container)
HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.ApplicationCLI
set -- "${subcmd}" "$@"
HADOOP_SUBCMD_ARGS=("$@")
+ local sld="${HADOOP_YARN_HOME}/${YARN_DIR},\
+${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR},\
+${HADOOP_HDFS_HOME}/${HDFS_DIR},\
+${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR},\
+${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR},\
+${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"
+ hadoop_translate_cygwin_path sld
+ hadoop_add_param HADOOP_OPTS service.libdir "-Dservice.libdir=${sld}"
;;
classpath)
hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
@@ -111,6 +122,8 @@ function yarncmd_case
;;
nodemanager)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
+ hadoop_add_classpath "$HADOOP_YARN_HOME/$YARN_DIR/timelineservice/*"
+ hadoop_add_classpath "$HADOOP_YARN_HOME/$YARN_DIR/timelineservice/lib/*"
HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.nodemanager.NodeManager'
# Backwards compatibility
if [[ -n "${YARN_NODEMANAGER_HEAPSIZE}" ]]; then
@@ -128,13 +141,28 @@ function yarncmd_case
queue)
HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.QueueCLI
;;
+ registrydns)
+ HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
+ HADOOP_SECURE_CLASSNAME='org.apache.hadoop.registry.server.dns.PrivilegedRegistryDNSStarter'
+ HADOOP_CLASSNAME='org.apache.hadoop.registry.server.dns.RegistryDNSServer'
+ ;;
resourcemanager)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
+ hadoop_add_classpath "$HADOOP_YARN_HOME/$YARN_DIR/timelineservice/*"
+ hadoop_add_classpath "$HADOOP_YARN_HOME/$YARN_DIR/timelineservice/lib/*"
HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.resourcemanager.ResourceManager'
# Backwards compatibility
if [[ -n "${YARN_RESOURCEMANAGER_HEAPSIZE}" ]]; then
HADOOP_HEAPSIZE_MAX="${YARN_RESOURCEMANAGER_HEAPSIZE}"
fi
+ local sld="${HADOOP_YARN_HOME}/${YARN_DIR},\
+${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR},\
+${HADOOP_HDFS_HOME}/${HDFS_DIR},\
+${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR},\
+${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR},\
+${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"
+ hadoop_translate_cygwin_path sld
+ hadoop_add_param HADOOP_OPTS service.libdir "-Dservice.libdir=${sld}"
;;
rmadmin)
HADOOP_CLASSNAME='org.apache.hadoop.yarn.client.cli.RMAdminCLI'
@@ -155,8 +183,14 @@ function yarncmd_case
;;
timelinereader)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
+ hadoop_add_classpath "$HADOOP_YARN_HOME/$YARN_DIR/timelineservice/*"
+ hadoop_add_classpath "$HADOOP_YARN_HOME/$YARN_DIR/timelineservice/lib/*"
HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderServer'
;;
+ nodeattributes)
+ HADOOP_SUBCMD_SUPPORTDAEMONIZATION="false"
+ HADOOP_CLASSNAME='org.apache.hadoop.yarn.client.cli.NodeAttributesCLI'
+ ;;
timelineserver)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer'
diff --git a/src/test/config/hadoop_bin_windows/bin/yarn.cmd b/src/test/config/hadoop_bin_windows/bin/yarn.cmd
index fed3d90a7c..e1ac11287f 100644
--- a/src/test/config/hadoop_bin_windows/bin/yarn.cmd
+++ b/src/test/config/hadoop_bin_windows/bin/yarn.cmd
@@ -219,6 +219,8 @@ goto :eof
:resourcemanager
set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\rm-config\log4j.properties
+ set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\%YARN_DIR%\timelineservice\*
+ set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\%YARN_DIR%\timelineservice\lib\*
set CLASS=org.apache.hadoop.yarn.server.resourcemanager.ResourceManager
set YARN_OPTS=%YARN_OPTS% %YARN_RESOURCEMANAGER_OPTS%
if defined YARN_RESOURCEMANAGER_HEAPSIZE (
@@ -248,6 +250,8 @@ goto :eof
:timelinereader
set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\timelineserver-config\log4j.properties
+ set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\%YARN_DIR%\timelineservice\*
+ set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\%YARN_DIR%\timelineservice\lib\*
set CLASS=org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderServer
set YARN_OPTS=%YARN_OPTS% %YARN_TIMELINEREADER_OPTS%
goto :eof
@@ -260,6 +264,8 @@ goto :eof
:nodemanager
set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\nm-config\log4j.properties
+ set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\%YARN_DIR%\timelineservice\*
+ set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\%YARN_DIR%\timelineservice\lib\*
set CLASS=org.apache.hadoop.yarn.server.nodemanager.NodeManager
set YARN_OPTS=%YARN_OPTS% -server %HADOOP_NODEMANAGER_OPTS%
if defined YARN_NODEMANAGER_HEAPSIZE (