[ https://issues.apache.org/jira/browse/METRON-2162?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Nick Allen updated METRON-2162:
-------------------------------
    Description: 
When deploying Metron on CentOS 7 with Ambari 2.7.3 and HDP 3.1, the following error occurs.
{code:java}
$ cd metron-deployment/development/centos7/
$ vagrant up

...

TASK [ambari_config : Deploy cluster with Ambari; http://node1:8080] ***********

The full traceback is:
  File "/tmp/ansible_LTBkrV/ansible_module_ambari_cluster_state.py", line 211, in main
    if not blueprint_exists(ambari_url, username, password, blueprint_name):
  File "/tmp/ansible_LTBkrV/ansible_module_ambari_cluster_state.py", line 334, in blueprint_exists
    blueprints = get_blueprints(ambari_url, user, password)
  File "/tmp/ansible_LTBkrV/ansible_module_ambari_cluster_state.py", line 315, in get_blueprints
    services = json.loads(r.content)
  File "/usr/lib64/python2.7/json/__init__.py", line 338, in loads
    return _default_decoder.decode(s)
  File "/usr/lib64/python2.7/json/decoder.py", line 366, in decode
    obj, end = self.raw_decode(s, idx=_w(s, 0).end())
  File "/usr/lib64/python2.7/json/decoder.py", line 384, in raw_decode
    raise ValueError("No JSON object could be decoded")

fatal: [node1]: FAILED! => {
"changed": false,
"invocation": {
"module_args": {
"blueprint_name": "metron_blueprint",
"blueprint_var": {
"groups": [
{
"cardinality": 1,
"components": [
{
"name": "NAMENODE"
},
{
"name": "SECONDARY_NAMENODE"
},
{
"name": "RESOURCEMANAGER"
},
{
"name": "HISTORYSERVER"
},
{
"name": "ZOOKEEPER_SERVER"
},
{
"name": "NIMBUS"
},
{
"name": "STORM_UI_SERVER"
},
{
"name": "DRPC_SERVER"
},
{
"name": "HBASE_MASTER"
},
{
"name": "HBASE_CLIENT"
},
{
"name": "APP_TIMELINE_SERVER"
},
{
"name": "DATANODE"
},
{
"name": "HDFS_CLIENT"
},
{
"name": "NODEMANAGER"
},
{
"name": "YARN_CLIENT"
},
{
"name": "MAPREDUCE2_CLIENT"
},
{
"name": "ZOOKEEPER_CLIENT"
},
{
"name": "SUPERVISOR"
},
{
"name": "KAFKA_BROKER"
},
{
"name": "HBASE_REGIONSERVER"
},
{
"name": "KIBANA_MASTER"
},
{
"name": "METRON_INDEXING"
},
{
"name": "METRON_PROFILER"
},
{
"name": "METRON_PCAP"
},
{
"name": "METRON_ENRICHMENT_MASTER"
},
{
"name": "METRON_PARSERS"
},
{
"name": "METRON_REST"
},
{
"name": "METRON_MANAGEMENT_UI"
},
{
"name": "METRON_ALERTS_UI"
},
{
"name": "ES_MASTER"
}
],
"configurations": [],
"name": "host_group_1"
}
],
"required_configurations": [
{
"metron-env": {
"es_hosts": "node1",
"solr_zookeeper_url": "node1:9983",
"storm_rest_addr": "http://node1:8744";,
"zeppelin_server_url": "node1:9995"
}
},
{
"metron-rest-env": {
"metron_jdbc_driver": "org.h2.Driver",
"metron_jdbc_password": "root",
"metron_jdbc_platform": "h2",
"metron_jdbc_url": "jdbc:h2:file:~/metrondb",
"metron_jdbc_username": "root"
}
},
{
"kibana-env": {
"kibana_default_application": "dashboard/AV-YpDmwdXwc6Ua9Muh9",
"kibana_es_url": "http://node1:9200";,
"kibana_log_dir": "/var/log/kibana",
"kibana_pid_dir": "/var/run/kibana",
"kibana_server_host": "0.0.0.0",
"kibana_server_port": 5000
}
}
],
"stack_name": "HDP",
"stack_version": "3.1"
},
"cluster_name": "metron_cluster",
"cluster_state": "present",
"configurations": [
{
"zoo.cfg": {
"dataDir": "/hadoop/zookeeper"
}
},
{
"hadoop-env": {
"dtnode_heapsize": 512,
"hadoop_heapsize": 1024,
"namenode_heapsize": 2048,
"namenode_opt_permsize": "128m"
}
},
{
"hbase-env": {
"hbase_master_heapsize": 512,
"hbase_regionserver_heapsize": 512,
"hbase_regionserver_xmn_max": 512
}
},
{
"hdfs-site": {
"dfs.datanode.data.dir": "/hadoop/hdfs/data",
"dfs.journalnode.edits.dir": "/hadoop/hdfs/journalnode",
"dfs.namenode.checkpoint.dir": "/hadoop/hdfs/namesecondary",
"dfs.namenode.name.dir": "/hadoop/hdfs/namenode",
"dfs.replication": 1
}
},
{
"yarn-env": {
"apptimelineserver_heapsize": 512,
"min_user_id": 500,
"nodemanager_heapsize": 512,
"resourcemanager_heapsize": 1024,
"yarn_heapsize": 512
}
},
{
"mapred-env": {
"jobhistory_heapsize": 256
}
},
{
"mapred-site": {
"mapreduce.jobhistory.recovery.store.leveldb.path": "/hadoop/mapreduce/jhs",
"mapreduce.map.java.opts": "-Xmx1024m",
"mapreduce.map.memory.mb": 1229,
"mapreduce.reduce.java.opts": "-Xmx1024m",
"mapreduce.reduce.memory.mb": 1229
}
},
{
"yarn-site": {
"yarn.nodemanager.local-dirs": "/hadoop/yarn/local",
"yarn.nodemanager.log-dirs": "/hadoop/yarn/log",
"yarn.nodemanager.resource.memory-mb": 4096,
"yarn.timeline-service.leveldb-state-store.path": "/hadoop/yarn/timeline",
"yarn.timeline-service.leveldb-timeline-store.path": "/hadoop/yarn/timeline"
}
},
{
"storm-site": {
"nimbus.childopts": "-Xmx1024m _JAAS_PLACEHOLDER",
"storm.cluster.metrics.consumer.register": "[{\"class\": 
\"org.apache.storm.metric.LoggingMetricsConsumer\"}]",
"storm.local.dir": "/hadoop/storm",
"supervisor.childopts": "-Xmx256m _JAAS_PLACEHOLDER",
"supervisor.slots.ports": "[6700, 6701, 6702, 6703, 6704, 6705]",
"topology.classpath": "/etc/hbase/conf:/etc/hadoop/conf",
"topology.metrics.consumer.register": "[{\"class\": 
\"org.apache.storm.metric.LoggingMetricsConsumer\", \"parallelism.hint\": 1, 
\"whitelist\": [\"kafkaOffset\\\\..+/\", \"__complete-latency\", 
\"__process-latency\", \"__receive\\\\.population$\", 
\"__sendqueue\\\\.population$\", \"__execute-count\", \"__emit-count\", 
\"__ack-count\", \"__fail-count\", \"memory/heap\\\\.usedBytes$\", 
\"memory/nonHeap\\\\.usedBytes$\", \"GC/.+\\\\.count$\", 
\"GC/.+\\\\.timeMs$\"]}]",
"worker.childopts": "-Xmx768m _JAAS_PLACEHOLDER"
}
},
{
"kafka-env": {
"content": "\n#!/bin/bash\n\n# Set KAFKA specific environment variables 
here.\n\n# The java implementation to use.\nexport KAFKA_HEAP_OPTS=\"-Xms256M 
-Xmx256M\"\nexport KAFKA_JVM_PERFORMANCE_OPTS=\"-server -XX:+UseG1GC 
-XX:+DisableExplicitGC -Djava.awt.headless=true\"\nexport 
JAVA_HOME={{java64_home}}\nexport PATH=$PATH:$JAVA_HOME/bin\nexport 
PID_DIR={{kafka_pid_dir}}\nexport LOG_DIR={{kafka_log_dir}}\nexport 
KAFKA_KERBEROS_PARAMS={{kafka_kerberos_params}}\n# Add kafka sink to classpath 
and related depenencies\nif [ -e 
\"/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar\" ]; then\n 
export 
CLASSPATH=$CLASSPATH:/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar\n
 export CLASSPATH=$CLASSPATH:/usr/lib/ambari-metrics-kafka-sink/lib/*\nfi\nif [ 
-f /etc/kafka/conf/kafka-ranger-env.sh ]; then\n . 
/etc/kafka/conf/kafka-ranger-env.sh\nfi"
}
},
{
"kafka-broker": {
"delete.topic.enable": "true",
"log.dirs": "/kafka-log",
"offsets.topic.replication.factor": 1
}
},
{
"metron-rest-env": {
"metron_spring_profiles_active": "dev"
}
},
{
"metron-parsers-env": {
"parsers": "\"bro,snort,yaf\""
}
},
{
"elastic-site": {
"gateway_recover_after_data_nodes": 1,
"index_number_of_replicas": 0,
"index_number_of_shards": 1,
"masters_also_are_datanodes": "1",
"network_host": "[ _local_, _site_ ]",
"zen_discovery_ping_unicast_hosts": "[ node1 ]"
}
}
],
"host": "node1",
"password": "admin",
"port": 8080,
"username": "admin",
"wait_for_complete": true
}
},
"msg": "Ambari client exception occurred: No JSON object could be decoded"
}
to retry, use: --limit @/Users/nallen/Development/metron/metron-deployment/development/centos7/ansible/playbook.retry

PLAY RECAP *********************************************************************
node1 : ok=83 changed=16 unreachable=0 failed=1

[WARNING]: Module did not set no_log for password

Ansible failed to complete successfully. Any error output should be
visible above. Please fix these errors and try again.
{code}
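
The failure surfaces in get_blueprints(), where the module decodes the Ambari response with json.loads() and the body turns out not to be valid JSON. Below is a minimal diagnostic sketch of that call (using requests for illustration; the /api/v1/blueprints endpoint, the X-Requested-By header, and the admin/admin credentials mirror the module_args above) that prints the raw response before decoding, so the actual body is visible instead of only the bare ValueError.
{code:python}
# Hypothetical standalone repro of the call that fails inside
# ansible_module_ambari_cluster_state.py (get_blueprints).
import json
import requests

ambari_url = "http://node1:8080"  # host and port from module_args above

r = requests.get(
    ambari_url + "/api/v1/blueprints",
    auth=("admin", "admin"),               # credentials from module_args above
    headers={"X-Requested-By": "ambari"},  # header Ambari expects on API calls
)
print("HTTP status: %s" % r.status_code)
print("Raw body: %s" % r.text[:500])       # inspect what Ambari actually returned

try:
    blueprints = json.loads(r.content)     # the exact call that raises in the traceback
except ValueError:
    # Same failure as in the traceback: the response body is not JSON,
    # e.g. an HTML error page or an empty body.
    raise
{code}
If the status is not 200 or the body is an HTML error page, the problem likely lies with the request against this Ambari/HDP version rather than with the blueprint payload itself.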

> Ambari client exception occurred: No JSON object could be decoded
> -----------------------------------------------------------------
>
>                 Key: METRON-2162
>                 URL: https://issues.apache.org/jira/browse/METRON-2162
>             Project: Metron
>          Issue Type: Sub-task
>            Reporter: Nick Allen
>            Assignee: Nick Allen
>            Priority: Major
>



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
