Repository: ambari
Updated Branches:
  refs/heads/trunk caf2e6f55 -> 127ce93e9


AMBARI-5147 log4j configs to be applied after Ambari upgrade (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/127ce93e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/127ce93e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/127ce93e

Branch: refs/heads/trunk
Commit: 127ce93e9bcbc00b64092ffd4b015dcbe657ef25
Parents: caf2e6f
Author: Dmitry Sen <[email protected]>
Authored: Thu Mar 20 19:23:37 2014 +0200
Committer: Dmitry Sen <[email protected]>
Committed: Thu Mar 20 19:23:37 2014 +0200

----------------------------------------------------------------------
 .../server/upgrade/SchemaUpgradeHelper.java     |   3 -
 .../ambari/server/upgrade/StackUpgradeUtil.java |   4 +-
 .../server/upgrade/UpgradeCatalog150.java       | 110 ++++++++++-
 .../server/upgrade/UpgradeCatalog150Test.java   |  45 ++++-
 .../services/HDFS/configuration/hdfs-log4j.xml  | 195 +++++++++++++++++++
 .../stacks/HDP/1.3.4/services/HDFS/metainfo.xml |   1 +
 6 files changed, 350 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/127ce93e/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
index b26091a..6065d95 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
@@ -26,8 +26,6 @@ import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.ControllerModule;
 import org.apache.ambari.server.orm.DBAccessor;
-import org.apache.ambari.server.orm.dao.MetainfoDAO;
-import org.apache.ambari.server.orm.entities.MetainfoEntity;
 import org.apache.ambari.server.utils.VersionUtils;
 import org.apache.commons.io.FileUtils;
 import org.slf4j.Logger;
@@ -72,7 +70,6 @@ public class SchemaUpgradeHelper {
   }
 
   public String readSourceVersion() {
-    String sourceVersion = null;
 
     ResultSet resultSet = null;
     try {

http://git-wip-us.apache.org/repos/asf/ambari/blob/127ce93e/ambari-server/src/main/java/org/apache/ambari/server/upgrade/StackUpgradeUtil.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/StackUpgradeUtil.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/StackUpgradeUtil.java
index 55697dc..5f96326 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/StackUpgradeUtil.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/StackUpgradeUtil.java
@@ -151,7 +151,9 @@ public class StackUpgradeUtil {
   /**
    * @param stackName
    * @param stackVersion
-   * @param localRepo
+   * @param repoUrl
+   * @param repoUrlOs
+   * @throws Exception
    */
   public void updateLocalRepo(String stackName, String stackVersion,
       String repoUrl, String repoUrlOs) throws Exception {

http://git-wip-us.apache.org/repos/asf/ambari/blob/127ce93e/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
index 3c1ad47..f397b54 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
@@ -6,6 +6,7 @@ import com.google.inject.Injector;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.ClusterDAO;
@@ -19,6 +20,7 @@ import org.apache.ambari.server.orm.dao.KeyValueDAO;
 import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO;
 import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
 import org.apache.ambari.server.orm.entities.ClusterConfigEntityPK;
+import org.apache.ambari.server.orm.entities.ClusterConfigMappingEntity;
 import org.apache.ambari.server.orm.entities.ClusterEntity;
 import org.apache.ambari.server.orm.entities.ClusterServiceEntity;
 import org.apache.ambari.server.orm.entities.ClusterServiceEntityPK;
@@ -32,6 +34,8 @@ import org.apache.ambari.server.orm.entities.KeyValueEntity;
 import 
org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
 import 
org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.apache.ambari.server.state.HostComponentAdminState;
+import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.State;
 import org.eclipse.persistence.jpa.JpaEntityManager;
 import org.slf4j.Logger;
@@ -48,6 +52,8 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Collection;
+import java.util.HashMap;
 
 public class UpgradeCatalog150 extends AbstractUpgradeCatalog {
   private static final Logger LOG = 
LoggerFactory.getLogger(UpgradeCatalog150.class);
@@ -459,8 +465,15 @@ public class UpgradeCatalog150 extends 
AbstractUpgradeCatalog {
         addHistoryServer();
       }
     });
-    
-      
+
+    // Add default log4j configs if they are absent
+    executeInTransaction(new Runnable() {
+      @Override
+      public void run() {
+        addMissingLog4jConfigs();
+      }
+    });
+
     // ========================================================================
     // Finally update schema version
     updateMetaInfoVersion(getTargetVersion());
@@ -478,7 +491,7 @@ public class UpgradeCatalog150 extends 
AbstractUpgradeCatalog {
       }
     });
   }
-  
+
   protected void addHistoryServer() {
     ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
     ClusterServiceDAO clusterServiceDAO = 
injector.getInstance(ClusterServiceDAO.class);
@@ -567,6 +580,97 @@ public class UpgradeCatalog150 extends 
AbstractUpgradeCatalog {
     hostDAO.merge(hostEntity);
   }
 
+  protected void addMissingLog4jConfigs() {
+
+    final String log4jConfigTypeContains = "log4j";
+    final String defaultVersionTag = "version1";
+    final String defaultUser = "admin";
+
+    LOG.debug("Adding missing configs into Ambari DB.");
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+    ClusterServiceDAO clusterServiceDAO = 
injector.getInstance(ClusterServiceDAO.class);
+
+    AmbariMetaInfo ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
+    Gson gson = injector.getInstance(Gson.class);
+
+    List <ClusterEntity> clusterEntities = clusterDAO.findAll();
+    for (final ClusterEntity clusterEntity : clusterEntities) {
+      Long clusterId = clusterEntity.getClusterId();
+      String desiredStackVersion = clusterEntity.getDesiredStackVersion();
+
+      Map<String, String> clusterInfo =
+        gson.<Map<String, String>>fromJson(desiredStackVersion, Map.class);
+
+      String stackName = clusterInfo.get("stackName");
+      String stackVersion = clusterInfo.get("stackVersion");
+
+      List<ClusterServiceEntity> clusterServiceEntities = 
clusterServiceDAO.findAll();
+      for (final ClusterServiceEntity clusterServiceEntity : 
clusterServiceEntities) {
+        String serviceName = clusterServiceEntity.getServiceName();
+        ServiceInfo serviceInfo = null;
+        try {
+          serviceInfo = ambariMetaInfo.getService(stackName, stackVersion, 
serviceName);
+        } catch (AmbariException e) {
+          LOG.error("Service " + serviceName + " not found for " + stackName + 
stackVersion);
+          continue;
+        }
+        List<String> configTypes = serviceInfo.getConfigDependencies();
+        if (configTypes != null) {
+          for (String configType : configTypes) {
+            if (configType.contains(log4jConfigTypeContains)) {
+              ClusterConfigEntityPK configEntityPK = new 
ClusterConfigEntityPK();
+              configEntityPK.setClusterId(clusterId);
+              configEntityPK.setType(configType);
+              configEntityPK.setTag(defaultVersionTag);
+              ClusterConfigEntity configEntity = 
clusterDAO.findConfig(configEntityPK);
+
+              if (configEntity == null) {
+                String filename = configType + ".xml";
+                Map<String, String> properties = new HashMap<String, String>();
+                for (PropertyInfo propertyInfo : serviceInfo.getProperties()) {
+                  if (filename.equals(propertyInfo.getFilename())) {
+                    properties.put(propertyInfo.getName(), 
propertyInfo.getValue());
+                  }
+                }
+                if (!properties.isEmpty()) {
+                  String configData = gson.toJson(properties);
+                  configEntity = new ClusterConfigEntity();
+                  configEntity.setClusterId(clusterId);
+                  configEntity.setType(configType);
+                  configEntity.setTag(defaultVersionTag);
+                  configEntity.setData(configData);
+                  configEntity.setTimestamp(System.currentTimeMillis());
+                  configEntity.setClusterEntity(clusterEntity);
+                  LOG.debug("Creating new " + configType + " config...");
+                  clusterDAO.createConfig(configEntity);
+
+                  Collection<ClusterConfigMappingEntity> entities =
+                    clusterEntity.getConfigMappingEntities();
+
+                  ClusterConfigMappingEntity clusterConfigMappingEntity =
+                    new ClusterConfigMappingEntity();
+                  clusterConfigMappingEntity.setClusterEntity(clusterEntity);
+                  clusterConfigMappingEntity.setClusterId(clusterId);
+                  clusterConfigMappingEntity.setType(configType);
+                  clusterConfigMappingEntity.setCreateTimestamp(
+                    Long.valueOf(System.currentTimeMillis()));
+                  clusterConfigMappingEntity.setSelected(1);
+                  clusterConfigMappingEntity.setUser(defaultUser);
+                  clusterConfigMappingEntity.setVersion(configEntity.getTag());
+                  entities.add(clusterConfigMappingEntity);
+                  clusterDAO.merge(clusterEntity);
+                }
+              }
+            }
+
+          }
+
+        }
+      }
+    }
+    LOG.debug("Missing configs have been successfully added into Ambari DB.");
+  }
+
   protected void processDecommissionedDatanodes() {
     KeyValueDAO keyValueDAO = injector.getInstance(KeyValueDAO.class);
     ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/127ce93e/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog150Test.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog150Test.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog150Test.java
index af2b247..a5eb8a5 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog150Test.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog150Test.java
@@ -32,6 +32,8 @@ import 
org.apache.ambari.server.orm.dao.HostComponentDesiredStateDAO;
 import org.apache.ambari.server.orm.dao.HostDAO;
 import org.apache.ambari.server.orm.dao.KeyValueDAO;
 import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
+import org.apache.ambari.server.orm.entities.ClusterConfigEntityPK;
+import org.apache.ambari.server.orm.entities.ClusterConfigMappingEntity;
 import org.apache.ambari.server.orm.entities.ClusterEntity;
 import org.apache.ambari.server.orm.entities.ClusterServiceEntity;
 import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
@@ -73,6 +75,7 @@ public class UpgradeCatalog150Test {
     ClusterEntity clusterEntity = new ClusterEntity();
     clusterEntity.setClusterId(1L);
     clusterEntity.setClusterName(CLUSTER_NAME);
+    clusterEntity.setDesiredStackVersion(DESIRED_STACK_VERSION);
     clusterDAO.create(clusterEntity);
     return clusterEntity;
   }
@@ -167,7 +170,7 @@ public class UpgradeCatalog150Test {
 
     }
   }
-  
+
   @Test
   public void testAddHistoryServer() throws AmbariException {
     final ClusterEntity clusterEntity = createCluster();
@@ -250,4 +253,44 @@ public class UpgradeCatalog150Test {
     Assert.assertNotNull(keyValueEntity);
     Assert.assertEquals("1394147791230", keyValueEntity.getValue());
   }
+
+  @Test
+  public void testAddMissingLog4jConfigs() throws Exception {
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+
+    ClusterEntity clusterEntity = createCluster();
+    ClusterServiceEntity clusterServiceEntityMR = addService(clusterEntity, 
"HDFS");
+
+    Long clusterId = clusterEntity.getClusterId();
+
+    ClusterConfigEntityPK configEntityPK = new ClusterConfigEntityPK();
+    configEntityPK.setClusterId(clusterId);
+    configEntityPK.setType("hdfs-log4j");
+    configEntityPK.setTag("version1");
+    ClusterConfigEntity configEntity = clusterDAO.findConfig(configEntityPK);
+    Assert.assertNull(configEntity);
+
+    for (ClusterConfigMappingEntity ccme : 
clusterEntity.getConfigMappingEntities()) {
+      if ("hdfs-log4j".equals(ccme.getType())) {
+        Assert.fail();
+      }
+    }
+
+    UpgradeCatalog150 upgradeCatalog150 = 
injector.getInstance(UpgradeCatalog150.class);
+    upgradeCatalog150.addMissingLog4jConfigs();
+
+    configEntity = clusterDAO.findConfig(configEntityPK);
+    Assert.assertNotNull(configEntity);
+
+    //Get updated cluster
+    clusterEntity = clusterDAO.findById(1L);
+
+    boolean failFlag = true;
+    for (ClusterConfigMappingEntity ccme : 
clusterEntity.getConfigMappingEntities()) {
+      if ("hdfs-log4j".equals(ccme.getType())) {
+        failFlag = false;
+      }
+    }
+    Assert.assertFalse(failFlag);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/127ce93e/ambari-server/src/test/resources/stacks/HDP/1.3.4/services/HDFS/configuration/hdfs-log4j.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/resources/stacks/HDP/1.3.4/services/HDFS/configuration/hdfs-log4j.xml
 
b/ambari-server/src/test/resources/stacks/HDP/1.3.4/services/HDFS/configuration/hdfs-log4j.xml
new file mode 100644
index 0000000..9e9f884
--- /dev/null
+++ 
b/ambari-server/src/test/resources/stacks/HDP/1.3.4/services/HDFS/configuration/hdfs-log4j.xml
@@ -0,0 +1,195 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+
+  <property>
+    <name>content</name>
+    <value>
+# Copyright 2011 The Apache Software Foundation
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=INFO,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshhold=ALL
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} 
(%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p 
%c{2}: %m%n
+
+#
+# TaskLog Appender
+#
+
+#Default values
+hadoop.tasklog.taskid=null
+hadoop.tasklog.iscleanup=false
+hadoop.tasklog.noKeepSplits=4
+hadoop.tasklog.totalLogFileSize=100
+hadoop.tasklog.purgeLogSplits=true
+hadoop.tasklog.logsRetainHours=12
+
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
+log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+#Security audit appender
+#
+hadoop.security.logger=INFO,console
+hadoop.security.log.maxfilesize=256MB
+hadoop.security.log.maxbackupindex=20
+log4j.category.SecurityLogger=${hadoop.security.logger}
+hadoop.security.log.file=SecurityAuth.audit
+log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
+log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
+log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
+
+log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
+log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
+log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
+log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+log4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}
+log4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}
+
+#
+# hdfs audit logging
+#
+hdfs.audit.logger=INFO,console
+log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}
+log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false
+log4j.appender.DRFAAUDIT=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log
+log4j.appender.DRFAAUDIT.layout=org.apache.log4j.PatternLayout
+log4j.appender.DRFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
+log4j.appender.DRFAAUDIT.DatePattern=.yyyy-MM-dd
+
+#
+# mapred audit logging
+#
+mapred.audit.logger=INFO,console
+log4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}
+log4j.additivity.org.apache.hadoop.mapred.AuditLogger=false
+log4j.appender.MRAUDIT=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log
+log4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout
+log4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
+log4j.appender.MRAUDIT.DatePattern=.yyyy-MM-dd
+
+#
+# Rolling File Appender
+#
+
+log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Logfile size and 30-day backups
+log4j.appender.RFA.MaxFileSize=256MB
+log4j.appender.RFA.MaxBackupIndex=10
+
+log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
+log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) 
- %m%n
+
+
+# Custom Logging levels
+
+hadoop.metrics.log.level=INFO
+#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
+#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
+#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+log4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}
+
+# Jets3t library
+log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
+
+#
+# Null Appender
+# Trap security logger on the hadoop client side
+#
+log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop 
Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
+
+    </value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/127ce93e/ambari-server/src/test/resources/stacks/HDP/1.3.4/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/resources/stacks/HDP/1.3.4/services/HDFS/metainfo.xml 
b/ambari-server/src/test/resources/stacks/HDP/1.3.4/services/HDFS/metainfo.xml
index 009acae..d29d2fc 100644
--- 
a/ambari-server/src/test/resources/stacks/HDP/1.3.4/services/HDFS/metainfo.xml
+++ 
b/ambari-server/src/test/resources/stacks/HDP/1.3.4/services/HDFS/metainfo.xml
@@ -140,6 +140,7 @@
         <config-type>global</config-type>
         <config-type>hdfs-site</config-type>
         <config-type>hadoop-policy</config-type>
+        <config-type>hdfs-log4j</config-type>
       </configuration-dependencies>
     </service>
   </services>

Reply via email to