keith-turner commented on a change in pull request #270: Support Accumulo
installs on Microsoft Azure
URL: https://github.com/apache/fluo-muchos/pull/270#discussion_r314097718
##########
File path: ansible/roles/hadoop/tasks/main.yml
##########
@@ -34,17 +34,52 @@
with_items:
- workers
when: hadoop_major_version == '3'
-- name: "copy spark yarn shuffle jar to hadoop lib"
- command: cp {{ spark_home }}/yarn/spark-{{ spark_version }}-yarn-shuffle.jar
{{ hadoop_home }}/share/hadoop/yarn/lib/ creates={{ hadoop_home
}}/share/hadoop/yarn/lib/spark-{{ spark_version }}-yarn-shuffle.jar
- when: "'spark' in groups"
+- name: "configure hadoop to send metrics"
+ template: src={{ item }} dest={{ hadoop_home }}/etc/hadoop/{{ item }}
+ with_items:
+ - hadoop-metrics2.properties
- name: "setup hadoop short circuit socket dir"
file: path=/var/lib/hadoop-hdfs state=directory owner={{ cluster_user }}
group={{ cluster_group }} mode=0755
become: yes
- name: "Configure hadoop log dir"
replace:
path: "{{ hadoop_home }}/etc/hadoop/hadoop-env.sh"
regexp: '.*export\s+HADOOP_LOG_DIR.*'
- replace: "export HADOOP_LOG_DIR={{ worker_data_dirs[0] }}/logs/hadoop"
+ replace: "export HADOOP_LOG_DIR={{ worker_data_dirs[1] }}/logs/hadoop"
+- name: "Configure max log size to 5g for Azure log analytics integration"
+ replace:
+ path: "{{ hadoop_home }}/etc/hadoop/log4j.properties"
+ regexp: 'hadoop.log.maxfilesize=256MB'
+ replace: "hadoop.log.maxfilesize=5g"
+ when: cluster_type == 'azure'
+- name: "Configure hadoop pid dir"
+ replace:
+ path: "{{ hadoop_home }}/etc/hadoop/hadoop-env.sh"
+ regexp: '.*export\s+HADOOP_PID_DIR.*'
+ replace: "export HADOOP_PID_DIR={{ worker_data_dirs[1] }}/hadoop"
+- name: "Configure namenode heap"
+ replace:
+ path: "{{ hadoop_home }}/etc/hadoop/hadoop-env.sh"
+ regexp: '.*export\s+HADOOP_NAMENODE_OPTS.*'
+ replace: "export HADOOP_NAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8
-XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70
-XX:+UseCMSInitiatingOccupancyOnly {{ namenode_heap }}
-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS}
-Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} -verbose:gc
-XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xloggc:{{
worker_data_dirs[1] }}/logs/hadoop/gc.log-`date +'%Y%m%d%H%M'` -XX:ErrorFile={{
worker_data_dirs[1] }}/logs/hadoop/hs_err_pid%p.log
-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath={{ worker_data_dirs[1]
}}/logs/hadoop\""
Review comment:
These settings may be OK; I have not cross-referenced them all to ensure
they are good for JDK 11 and JDK 8. My overall concern is that I run tests with
(Hadoop 2 or Hadoop 3) and (JDK 8 or JDK 11), and I find myself wondering
whether these specific settings will work with all combos.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services