[ 
https://issues.apache.org/jira/browse/AMBARI-8320?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14211383#comment-14211383
 ] 

Enrique Flores commented on AMBARI-8320:
----------------------------------------

I capitalized "properties" in the hdfs-log4j.txt comment and re-ran configs.sh (a sketch of the edit follows the grep output below):

{noformat}
[root@node-0 out2]# grep -i -n -o -P '(properties)\S+' hdfs-log4j.txt
1:properties"
2:Properties\nhadoop.root.logger=INFO,console\nhadoop.log.dir=.\nhadoop.log.file=hadoop.log\n\n\n#
[root@node-0 out2]#
[root@node-0 out2]# grep -n -o -P '(properties)\S+' hdfs-log4j.txt
1:properties"
[root@node-0 out2]#

{noformat}
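
The exact edit isn't shown above; a minimal sketch of an equivalent command, assuming the only stray lowercase match is the log4j comment line inside the content string, would be:

{noformat}
# Assumption: the second lowercase "properties" only occurs in the comment
# "...can be overridden by system properties" inside the content string.
sed -i 's/overridden by system properties/overridden by system Properties/' hdfs-log4j.txt

# The script's check should now see exactly one match (line 1, the JSON key):
grep -n properties hdfs-log4j.txt | cut -d : -f 1
{noformat}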


This time it passed: 

{noformat}
[root@node-0 out2]# bash -x /var/lib/ambari-server/resources/scripts/configs.sh set localhost DC-2 hdfs-log4j hdfs-log4j.txt
+ USERID=admin
+ PASSWD=admin
+ PORT=:8080
+ SSL_URL_PREFIX=
+ '[' set == -u ']'
+ '[' set == -p ']'
+ '[' set == -port ']'
+ '[' set == -s ']'
+ AMBARIURL=http://localhost:8080
+ CLUSTER=DC-2
+ SITE=hdfs-log4j
+ SITETAG=
+ CONFIGKEY=hdfs-log4j.txt
+ CONFIGVALUE=
+ case "$1" in
+ (( 5 == 6 ))
+ (( 5 == 5 ))
+ doConfigFileUpdate hdfs-log4j.txt
+ FILENAME=hdfs-log4j.txt
+ '[' -f hdfs-log4j.txt ']'
++ cut -d : -f 1
++ grep -n properties hdfs-log4j.txt
+ '[' 1 == 1 ']'
++ date +%s%N
+ newTag=1415910981752460319
+ newTag=version1415910981752460319
++ cat hdfs-log4j.txt
+ newProperties='"properties" : {
"content" : "\n#\n# Licensed to the Apache Software Foundation (ASF) under 
one\n# or more contributor license agreements. See the NOTICE file\n# 
distributed with this work for additional information\n# regarding copyright 
ownership. The ASF licenses this file\n# to you under the Apache License, 
Version 2.0 (the\n# \"License\"); you may not use this file except in 
compliance\n# with the License. You may obtain a copy of the License at\n#\n# 
http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable 
law or agreed to in writing,\n# software distributed under the License is 
distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF 
ANY\n# KIND, either express or implied. See the License for the\n# specific 
language governing permissions and limitations\n# under the License.\n#\n\n\n# 
Define some default values that can be overridden by system 
Properties\nhadoop.root.logger=INFO,console\nhadoop.log.dir=.\nhadoop.log.file=hadoop.log\n\n\n#
 Define the root logger to the system property 
\"hadoop.root.logger\".\nlog4j.rootLogger=${hadoop.root.logger}, 
EventCounter\n\n# Logging Threshold\nlog4j.threshhold=ALL\n\n#\n# Daily Rolling 
File 
Appender\n#\n\nlog4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}\n\n#
 Rollver at midnight\nlog4j.appender.DRFA.DatePattern=.yyyy-MM-dd\n\n# 30-day 
backup\n#log4j.appender.DRFA.MaxBackupIndex=30\nlog4j.appender.DRFA.layout=org.apache.log4j.PatternLayout\n\n#
 Pattern format: Date LogLevel LoggerName 
LogMessage\nlog4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: 
%m%n\n# Debugging Pattern 
format\n#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} 
(%F:%M(%L)) - %m%n\n\n\n#\n# console\n# Add \"console\" to rootlogger above if 
you want to use 
this\n#\n\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd
 HH:mm:ss} %p %c{2}: %m%n\n\n#\n# TaskLog Appender\n#\n\n#Default 
values\nhadoop.tasklog.taskid=null\nhadoop.tasklog.iscleanup=false\nhadoop.tasklog.noKeepSplits=4\nhadoop.tasklog.totalLogFileSize=100\nhadoop.tasklog.purgeLogSplits=true\nhadoop.tasklog.logsRetainHours=12\n\nlog4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender\nlog4j.appender.TLA.taskId=${hadoop.tasklog.taskid}\nlog4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}\nlog4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}\n\nlog4j.appender.TLA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.TLA.layout.ConversionPattern=%d{ISO8601}
 %p %c: %m%n\n\n#\n#Security audit 
appender\n#\nhadoop.security.logger=INFO,console\nhadoop.security.log.maxfilesize=256MB\nhadoop.security.log.maxbackupindex=20\nlog4j.category.SecurityLogger=${hadoop.security.logger}\nhadoop.security.log.file=SecurityAuth.audit\nlog4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}\nlog4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout\nlog4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601}
 %p %c: 
%m%n\nlog4j.appender.DRFAS.DatePattern=.yyyy-MM-dd\n\nlog4j.appender.RFAS=org.apache.log4j.RollingFileAppender\nlog4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}\nlog4j.appender.RFAS.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601}
 %p %c: 
%m%n\nlog4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}\nlog4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}\n\n#\n#
 hdfs audit 
logging\n#\nhdfs.audit.logger=INFO,console\nlog4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}\nlog4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false\nlog4j.appender.DRFAAUDIT=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log\nlog4j.appender.DRFAAUDIT.layout=org.apache.log4j.PatternLayout\nlog4j.appender.DRFAAUDIT.layout.ConversionPattern=%d{ISO8601}
 %p %c{2}: %m%n\nlog4j.appender.DRFAAUDIT.DatePattern=.yyyy-MM-dd\n\n#\n# 
mapred audit 
logging\n#\nmapred.audit.logger=INFO,console\nlog4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}\nlog4j.additivity.org.apache.hadoop.mapred.AuditLogger=false\nlog4j.appender.MRAUDIT=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log\nlog4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout\nlog4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601}
 %p %c{2}: %m%n\nlog4j.appender.MRAUDIT.DatePattern=.yyyy-MM-dd\n\n#\n# Rolling 
File 
Appender\n#\n\nlog4j.appender.RFA=org.apache.log4j.RollingFileAppender\nlog4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}\n\n#
 Logfile size and and 30-day 
backups\nlog4j.appender.RFA.MaxFileSize=256MB\nlog4j.appender.RFA.MaxBackupIndex=10\n\nlog4j.appender.RFA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RFA.layout.ConversionPattern=%d{ISO8601}
 %-5p %c{2} - %m%n\nlog4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} 
%-5p %c{2} (%F:%M(%L)) - %m%n\n\n\n# Custom Logging 
levels\n\nhadoop.metrics.log.level=INFO\n#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG\n#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG\n#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG\nlog4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}\n\n#
 Jets3t 
library\nlog4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR\n\n#\n#
 Null Appender\n# Trap security logger on the hadoop client 
side\n#\nlog4j.appender.NullAppender=org.apache.log4j.varia.NullAppender\n\n#\n#
 Event Counter Appender\n# Sends counts of logging messages at different 
severity levels to Hadoop 
Metrics.\n#\nlog4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter\n
 "
}'
+ finalJson='{ "Clusters": { "desired_config": {"type": "hdfs-log4j", 
"tag":"version1415910981752460319", "properties" : {
"content" : "\n#\n# Licensed to the Apache Software Foundation (ASF) under 
one\n# or more contributor license agreements. See the NOTICE file\n# 
distributed with this work for additional information\n# regarding copyright 
ownership. The ASF licenses this file\n# to you under the Apache License, 
Version 2.0 (the\n# \"License\"); you may not use this file except in 
compliance\n# with the License. You may obtain a copy of the License at\n#\n# 
http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable 
law or agreed to in writing,\n# software distributed under the License is 
distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF 
ANY\n# KIND, either express or implied. See the License for the\n# specific 
language governing permissions and limitations\n# under the License.\n#\n\n\n# 
Define some default values that can be overridden by system 
Properties\nhadoop.root.logger=INFO,console\nhadoop.log.dir=.\nhadoop.log.file=hadoop.log\n\n\n#
 Define the root logger to the system property 
\"hadoop.root.logger\".\nlog4j.rootLogger=${hadoop.root.logger}, 
EventCounter\n\n# Logging Threshold\nlog4j.threshhold=ALL\n\n#\n# Daily Rolling 
File 
Appender\n#\n\nlog4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}\n\n#
 Rollver at midnight\nlog4j.appender.DRFA.DatePattern=.yyyy-MM-dd\n\n# 30-day 
backup\n#log4j.appender.DRFA.MaxBackupIndex=30\nlog4j.appender.DRFA.layout=org.apache.log4j.PatternLayout\n\n#
 Pattern format: Date LogLevel LoggerName 
LogMessage\nlog4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: 
%m%n\n# Debugging Pattern 
format\n#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} 
(%F:%M(%L)) - %m%n\n\n\n#\n# console\n# Add \"console\" to rootlogger above if 
you want to use 
this\n#\n\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd
 HH:mm:ss} %p %c{2}: %m%n\n\n#\n# TaskLog Appender\n#\n\n#Default 
values\nhadoop.tasklog.taskid=null\nhadoop.tasklog.iscleanup=false\nhadoop.tasklog.noKeepSplits=4\nhadoop.tasklog.totalLogFileSize=100\nhadoop.tasklog.purgeLogSplits=true\nhadoop.tasklog.logsRetainHours=12\n\nlog4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender\nlog4j.appender.TLA.taskId=${hadoop.tasklog.taskid}\nlog4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}\nlog4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}\n\nlog4j.appender.TLA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.TLA.layout.ConversionPattern=%d{ISO8601}
 %p %c: %m%n\n\n#\n#Security audit 
appender\n#\nhadoop.security.logger=INFO,console\nhadoop.security.log.maxfilesize=256MB\nhadoop.security.log.maxbackupindex=20\nlog4j.category.SecurityLogger=${hadoop.security.logger}\nhadoop.security.log.file=SecurityAuth.audit\nlog4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}\nlog4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout\nlog4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601}
 %p %c: 
%m%n\nlog4j.appender.DRFAS.DatePattern=.yyyy-MM-dd\n\nlog4j.appender.RFAS=org.apache.log4j.RollingFileAppender\nlog4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}\nlog4j.appender.RFAS.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601}
 %p %c: 
%m%n\nlog4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}\nlog4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}\n\n#\n#
 hdfs audit 
logging\n#\nhdfs.audit.logger=INFO,console\nlog4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}\nlog4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false\nlog4j.appender.DRFAAUDIT=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log\nlog4j.appender.DRFAAUDIT.layout=org.apache.log4j.PatternLayout\nlog4j.appender.DRFAAUDIT.layout.ConversionPattern=%d{ISO8601}
 %p %c{2}: %m%n\nlog4j.appender.DRFAAUDIT.DatePattern=.yyyy-MM-dd\n\n#\n# 
mapred audit 
logging\n#\nmapred.audit.logger=INFO,console\nlog4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}\nlog4j.additivity.org.apache.hadoop.mapred.AuditLogger=false\nlog4j.appender.MRAUDIT=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log\nlog4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout\nlog4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601}
 %p %c{2}: %m%n\nlog4j.appender.MRAUDIT.DatePattern=.yyyy-MM-dd\n\n#\n# Rolling 
File 
Appender\n#\n\nlog4j.appender.RFA=org.apache.log4j.RollingFileAppender\nlog4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}\n\n#
 Logfile size and and 30-day 
backups\nlog4j.appender.RFA.MaxFileSize=256MB\nlog4j.appender.RFA.MaxBackupIndex=10\n\nlog4j.appender.RFA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RFA.layout.ConversionPattern=%d{ISO8601}
 %-5p %c{2} - %m%n\nlog4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} 
%-5p %c{2} (%F:%M(%L)) - %m%n\n\n\n# Custom Logging 
levels\n\nhadoop.metrics.log.level=INFO\n#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG\n#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG\n#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG\nlog4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}\n\n#
 Jets3t 
library\nlog4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR\n\n#\n#
 Null Appender\n# Trap security logger on the hadoop client 
side\n#\nlog4j.appender.NullAppender=org.apache.log4j.varia.NullAppender\n\n#\n#
 Event Counter Appender\n# Sends counts of logging messages at different 
severity levels to Hadoop 
Metrics.\n#\nlog4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter\n
 "
}}}}'
+ newFile=hdfs-log4j.txt
+ echo '{' '"Clusters":' '{' '"desired_config":' '{"type":' '"hdfs-log4j",' 
'"tag":"version1415910981752460319",' '"properties"' : '{' '"content"' : 
'"\n#\n#' Licensed to the Apache Software Foundation '(ASF)' under 'one\n#' or 
more contributor license agreements. See the NOTICE 'file\n#' distributed with 
this work for additional 'information\n#' regarding copyright ownership. The 
ASF licenses this 'file\n#' to you under the Apache License, Version 2.0 
'(the\n#' '\"License\");' you may not use this file except in 'compliance\n#' 
with the License. You may obtain a copy of the License 'at\n#\n#' 
'http://www.apache.org/licenses/LICENSE-2.0\n#\n#' Unless required by 
applicable law or agreed to in 'writing,\n#' software distributed under the 
License is distributed on 'an\n#' '\"AS' 'IS\"' BASIS, WITHOUT WARRANTIES OR 
CONDITIONS OF 'ANY\n#' KIND, either express or implied. See the License for 
'the\n#' specific language governing permissions and 'limitations\n#' under the 
'License.\n#\n\n\n#' Define some default values that can be overridden by 
system 
'Properties\nhadoop.root.logger=INFO,console\nhadoop.log.dir=.\nhadoop.log.file=hadoop.log\n\n\n#'
 Define the root logger to the system property 
'\"hadoop.root.logger\".\nlog4j.rootLogger=${hadoop.root.logger},' 
'EventCounter\n\n#' Logging 'Threshold\nlog4j.threshhold=ALL\n\n#\n#' Daily 
Rolling File 
'Appender\n#\n\nlog4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}\n\n#'
 Rollver at 'midnight\nlog4j.appender.DRFA.DatePattern=.yyyy-MM-dd\n\n#' 30-day 
'backup\n#log4j.appender.DRFA.MaxBackupIndex=30\nlog4j.appender.DRFA.layout=org.apache.log4j.PatternLayout\n\n#'
 Pattern format: Date LogLevel LoggerName 
'LogMessage\nlog4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601}' %p %c: 
'%m%n\n#' Debugging Pattern 
'format\n#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601}' %-5p 
'%c{2}' '(%F:%M(%L))' - '%m%n\n\n\n#\n#' 'console\n#' Add '\"console\"' to 
rootlogger above if you want to use 
'this\n#\n\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd'
 'HH:mm:ss}' %p '%c{2}:' '%m%n\n\n#\n#' TaskLog 'Appender\n#\n\n#Default' 
'values\nhadoop.tasklog.taskid=null\nhadoop.tasklog.iscleanup=false\nhadoop.tasklog.noKeepSplits=4\nhadoop.tasklog.totalLogFileSize=100\nhadoop.tasklog.purgeLogSplits=true\nhadoop.tasklog.logsRetainHours=12\n\nlog4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender\nlog4j.appender.TLA.taskId=${hadoop.tasklog.taskid}\nlog4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}\nlog4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}\n\nlog4j.appender.TLA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.TLA.layout.ConversionPattern=%d{ISO8601}'
 %p %c: '%m%n\n\n#\n#Security' audit 
'appender\n#\nhadoop.security.logger=INFO,console\nhadoop.security.log.maxfilesize=256MB\nhadoop.security.log.maxbackupindex=20\nlog4j.category.SecurityLogger=${hadoop.security.logger}\nhadoop.security.log.file=SecurityAuth.audit\nlog4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}\nlog4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout\nlog4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601}'
 %p %c: 
'%m%n\nlog4j.appender.DRFAS.DatePattern=.yyyy-MM-dd\n\nlog4j.appender.RFAS=org.apache.log4j.RollingFileAppender\nlog4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}\nlog4j.appender.RFAS.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601}'
 %p %c: 
'%m%n\nlog4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}\nlog4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}\n\n#\n#'
 hdfs audit 
'logging\n#\nhdfs.audit.logger=INFO,console\nlog4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}\nlog4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false\nlog4j.appender.DRFAAUDIT=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log\nlog4j.appender.DRFAAUDIT.layout=org.apache.log4j.PatternLayout\nlog4j.appender.DRFAAUDIT.layout.ConversionPattern=%d{ISO8601}'
 %p '%c{2}:' '%m%n\nlog4j.appender.DRFAAUDIT.DatePattern=.yyyy-MM-dd\n\n#\n#' 
mapred audit 
'logging\n#\nmapred.audit.logger=INFO,console\nlog4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}\nlog4j.additivity.org.apache.hadoop.mapred.AuditLogger=false\nlog4j.appender.MRAUDIT=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log\nlog4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout\nlog4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601}'
 %p '%c{2}:' '%m%n\nlog4j.appender.MRAUDIT.DatePattern=.yyyy-MM-dd\n\n#\n#' 
Rolling File 
'Appender\n#\n\nlog4j.appender.RFA=org.apache.log4j.RollingFileAppender\nlog4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}\n\n#'
 Logfile size and and 30-day 
'backups\nlog4j.appender.RFA.MaxFileSize=256MB\nlog4j.appender.RFA.MaxBackupIndex=10\n\nlog4j.appender.RFA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RFA.layout.ConversionPattern=%d{ISO8601}'
 %-5p '%c{2}' - '%m%n\nlog4j.appender.RFA.layout.ConversionPattern=%d{ISO8601}' 
%-5p '%c{2}' '(%F:%M(%L))' - '%m%n\n\n\n#' Custom Logging 
'levels\n\nhadoop.metrics.log.level=INFO\n#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG\n#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG\n#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG\nlog4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}\n\n#'
 Jets3t 
'library\nlog4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR\n\n#\n#'
 Null 'Appender\n#' Trap security logger on the hadoop client 
'side\n#\nlog4j.appender.NullAppender=org.apache.log4j.varia.NullAppender\n\n#\n#'
 Event Counter 'Appender\n#' Sends counts of logging messages at different 
severity levels to Hadoop 
'Metrics.\n#\nlog4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter\n'
 '"' '}}}}'
+ echo '########## PUTting file:"hdfs-log4j.txt" into config(type:"hdfs-log4j", 
tag:version1415910981752460319) via hdfs-log4j.txt'
########## PUTting file:"hdfs-log4j.txt" into config(type:"hdfs-log4j", 
tag:version1415910981752460319) via hdfs-log4j.txt
+ curl -k -u admin:admin -X PUT -H 'X-Requested-By: ambari' 
http://localhost:8080/api/v1/clusters/DC-2 --data @hdfs-log4j.txt
+ currentSiteTag
+ currentSiteTag=
+ found=
++ grep -E 'hdfs-log4j|tag'
++ curl -k -s -u admin:admin 
'http://localhost:8080/api/v1/clusters/DC-2?fields=Clusters/desired_configs'
+ currentSite='        "tag" : "version1415841361648257984"
        "tag" : "version1415857022919386298"
        "tag" : "version1415857022662073685"
        "tag" : "version1415841362350103619"
      "hdfs-log4j" : {
        "tag" : "version1415910981752460319"
        "tag" : "version1415856995282103135"
        "tag" : "version1415841361994970577"
        "tag" : "version1415856996677034827"
        "tag" : "version1415841362748782344"
        "tag" : "version1415856999640637767"'
+ for line in '$currentSite'
+ '[' '"tag"' '!=' '{' -a '"tag"' '!=' : -a '"tag"' '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' : '!=' '{' -a : '!=' : -a : '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' '"version1415841361648257984"' '!=' '{' -a '"version1415841361648257984"' 
'!=' : -a '"version1415841361648257984"' '!=' '"tag"' ']'
+ '[' -n '' -a -z '' ']'
+ '[' '"version1415841361648257984"' == '"hdfs-log4j"' ']'
+ for line in '$currentSite'
+ '[' '"tag"' '!=' '{' -a '"tag"' '!=' : -a '"tag"' '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' : '!=' '{' -a : '!=' : -a : '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' '"version1415857022919386298"' '!=' '{' -a '"version1415857022919386298"' 
'!=' : -a '"version1415857022919386298"' '!=' '"tag"' ']'
+ '[' -n '' -a -z '' ']'
+ '[' '"version1415857022919386298"' == '"hdfs-log4j"' ']'
+ for line in '$currentSite'
+ '[' '"tag"' '!=' '{' -a '"tag"' '!=' : -a '"tag"' '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' : '!=' '{' -a : '!=' : -a : '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' '"version1415857022662073685"' '!=' '{' -a '"version1415857022662073685"' 
'!=' : -a '"version1415857022662073685"' '!=' '"tag"' ']'
+ '[' -n '' -a -z '' ']'
+ '[' '"version1415857022662073685"' == '"hdfs-log4j"' ']'
+ for line in '$currentSite'
+ '[' '"tag"' '!=' '{' -a '"tag"' '!=' : -a '"tag"' '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' : '!=' '{' -a : '!=' : -a : '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' '"version1415841362350103619"' '!=' '{' -a '"version1415841362350103619"' 
'!=' : -a '"version1415841362350103619"' '!=' '"tag"' ']'
+ '[' -n '' -a -z '' ']'
+ '[' '"version1415841362350103619"' == '"hdfs-log4j"' ']'
+ for line in '$currentSite'
+ '[' '"hdfs-log4j"' '!=' '{' -a '"hdfs-log4j"' '!=' : -a '"hdfs-log4j"' '!=' 
'"tag"' ']'
+ '[' -n '' -a -z '' ']'
+ '[' '"hdfs-log4j"' == '"hdfs-log4j"' ']'
+ found=hdfs-log4j
+ for line in '$currentSite'
+ '[' : '!=' '{' -a : '!=' : -a : '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' '{' '!=' '{' -a '{' '!=' : -a '{' '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' '"tag"' '!=' '{' -a '"tag"' '!=' : -a '"tag"' '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' : '!=' '{' -a : '!=' : -a : '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' '"version1415910981752460319"' '!=' '{' -a '"version1415910981752460319"' 
'!=' : -a '"version1415910981752460319"' '!=' '"tag"' ']'
+ '[' -n hdfs-log4j -a -z '' ']'
+ currentSiteTag='"version1415910981752460319"'
+ '[' '"version1415910981752460319"' == '"hdfs-log4j"' ']'
+ for line in '$currentSite'
+ '[' '"tag"' '!=' '{' -a '"tag"' '!=' : -a '"tag"' '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' : '!=' '{' -a : '!=' : -a : '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' '"version1415856995282103135"' '!=' '{' -a '"version1415856995282103135"' 
'!=' : -a '"version1415856995282103135"' '!=' '"tag"' ']'
+ '[' -n hdfs-log4j -a -z '"version1415910981752460319"' ']'
+ '[' '"version1415856995282103135"' == '"hdfs-log4j"' ']'
+ for line in '$currentSite'
+ '[' '"tag"' '!=' '{' -a '"tag"' '!=' : -a '"tag"' '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' : '!=' '{' -a : '!=' : -a : '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' '"version1415841361994970577"' '!=' '{' -a '"version1415841361994970577"' 
'!=' : -a '"version1415841361994970577"' '!=' '"tag"' ']'
+ '[' -n hdfs-log4j -a -z '"version1415910981752460319"' ']'
+ '[' '"version1415841361994970577"' == '"hdfs-log4j"' ']'
+ for line in '$currentSite'
+ '[' '"tag"' '!=' '{' -a '"tag"' '!=' : -a '"tag"' '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' : '!=' '{' -a : '!=' : -a : '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' '"version1415856996677034827"' '!=' '{' -a '"version1415856996677034827"' 
'!=' : -a '"version1415856996677034827"' '!=' '"tag"' ']'
+ '[' -n hdfs-log4j -a -z '"version1415910981752460319"' ']'
+ '[' '"version1415856996677034827"' == '"hdfs-log4j"' ']'
+ for line in '$currentSite'
+ '[' '"tag"' '!=' '{' -a '"tag"' '!=' : -a '"tag"' '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' : '!=' '{' -a : '!=' : -a : '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' '"version1415841362748782344"' '!=' '{' -a '"version1415841362748782344"' 
'!=' : -a '"version1415841362748782344"' '!=' '"tag"' ']'
+ '[' -n hdfs-log4j -a -z '"version1415910981752460319"' ']'
+ '[' '"version1415841362748782344"' == '"hdfs-log4j"' ']'
+ for line in '$currentSite'
+ '[' '"tag"' '!=' '{' -a '"tag"' '!=' : -a '"tag"' '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' : '!=' '{' -a : '!=' : -a : '!=' '"tag"' ']'
+ for line in '$currentSite'
+ '[' '"version1415856999640637767"' '!=' '{' -a '"version1415856999640637767"' 
'!=' : -a '"version1415856999640637767"' '!=' '"tag"' ']'
+ '[' -n hdfs-log4j -a -z '"version1415910981752460319"' ']'
+ '[' '"version1415856999640637767"' == '"hdfs-log4j"' ']'
+ '[' -z '"version1415910981752460319"' ']'
++ cut -d '"' -f 2
++ echo '"version1415910981752460319"'
+ currentSiteTag=version1415910981752460319
+ SITETAG=version1415910981752460319
+ echo '########## NEW Site:hdfs-log4j, Tag:version1415910981752460319'
########## NEW Site:hdfs-log4j, Tag:version1415910981752460319
[root@node-0 out2]#

{noformat}
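
To summarize the trace: configs.sh rewrites hdfs-log4j.txt into a desired_config envelope and PUTs it to the cluster. Stripped of the log4j content string (elided below), the request is equivalent to:

{noformat}
# Equivalent inline form of the PUT the script issues (content string elided):
curl -k -u admin:admin -X PUT -H 'X-Requested-By: ambari' \
  http://localhost:8080/api/v1/clusters/DC-2 \
  --data '{ "Clusters": { "desired_config": {
      "type": "hdfs-log4j",
      "tag": "version1415910981752460319",
      "properties": {
        "content": "..."
      } } } }'
{noformat}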



The grep check in doConfigFileUpdate passed:
{noformat}
+ doConfigFileUpdate hdfs-log4j.txt
+ FILENAME=hdfs-log4j.txt
+ '[' -f hdfs-log4j.txt ']'
++ cut -d : -f 1
++ grep -n properties hdfs-log4j.txt
+ '[' 1 == 1 ']'
++ date +%s%N
+ newTag=1415910981752460319
+ newTag=version1415910981752460319
++ cat hdfs-log4j.txt
{noformat}
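
For context, the guard that failed before appears to compare the output of "grep -n properties $FILENAME | cut -d : -f 1" against the literal string "1", so any second occurrence of the word anywhere in the file breaks it. One possible hardening (my own sketch, not necessarily the eventual fix) would be to count only the quoted JSON key:

{noformat}
# Hypothetical tightening of the check in doConfigFileUpdate (not the actual patch):
# count lines containing the quoted key "properties", so the plain word inside
# the log4j content string no longer trips the guard.
FILENAME=hdfs-log4j.txt
if [ "$(grep -c '"properties"' "$FILENAME")" -ne 1 ]; then
  echo "[ERROR] File \"$FILENAME\" should contain exactly one \"properties\" key."
  exit 1
fi
{noformat}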





> configs.sh doConfigFileUpdate fails when multiple occurrences of word 
> "properties" found 
> -----------------------------------------------------------------------------------------
>
>                 Key: AMBARI-8320
>                 URL: https://issues.apache.org/jira/browse/AMBARI-8320
>             Project: Ambari
>          Issue Type: Bug
>          Components: ambari-server
>    Affects Versions: 1.6.1
>            Reporter: Enrique Flores
>
> When running configs.sh to read in a properties file via doConfigFileUpdate,
> the update fails with the following error:
> {noformat}
> [root@node-0 out2]# bash -x /var/lib/ambari-server/resources/scripts/configs.sh set localhost DC-2 hdfs-log4j hdfs-log4j.txt
> + USERID=admin
> + PASSWD=admin
> + PORT=:8080
> + SSL_URL_PREFIX=
> + '[' set == -u ']'
> + '[' set == -p ']'
> + '[' set == -port ']'
> + '[' set == -s ']'
> + AMBARIURL=http://localhost:8080
> + CLUSTER=DC-2
> + SITE=hdfs-log4j
> + SITETAG=
> + CONFIGKEY=hdfs-log4j.txt
> + CONFIGVALUE=
> + case "$1" in
> + (( 5 == 6 ))
> + (( 5 == 5 ))
> + doConfigFileUpdate hdfs-log4j.txt
> + FILENAME=hdfs-log4j.txt
> + '[' -f hdfs-log4j.txt ']'
> ++ cut -d : -f 1
> ++ grep -n properties hdfs-log4j.txt
> + '[' 1 == '1
> 2' ']'
> + echo '[ERROR] File "hdfs-log4j.txt" should be in the following JSON format:'
> [ERROR] File "hdfs-log4j.txt" should be in the following JSON format:
> + echo '[ERROR]   "properties": {'
> [ERROR]   "properties": {
> + echo '[ERROR]     "key1": "value1",'
> [ERROR]     "key1": "value1",
> + echo '[ERROR]     "key2": "value2",'
> [ERROR]     "key2": "value2",
> + echo '[ERROR]   }'
> [ERROR]   }
> + exit 1
> [root@node-0 out2]#
> {noformat}
> In this example, there are multiple occurrences of the word "properties" in 
> the hdfs-log4j.txt properties file I'm trying to read in: 
> {noformat}
> [root@node-0 out2]# grep -n properties hdfs-log4j.txt | cut -d: -f 1
> 1
> 2
> [root@node-0 out2]#
> [root@node-0 out2]# grep -n -o -P '(properties)\S+' hdfs-log4j.txt
> 1:properties"
> 2:properties\nhadoop.root.logger=INFO,console\nhadoop.log.dir=.\nhadoop.log.file=hadoop.log\n\n\n#
> [root@node-0 out2]#
> {noformat}
> The file I'm trying to read in is formatted properly, but configs.sh fails
> because the grep test in doConfigFileUpdate does not pass.



