http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertNoticeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertNoticeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertNoticeResourceProvider.java
index 1dc21b0..6d32868 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertNoticeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertNoticeResourceProvider.java
@@ -65,8 +65,8 @@ public class AlertNoticeResourceProvider extends AbstractControllerResourceProvi
   public static final String ALERT_NOTICE_HISTORY_ID = "AlertNotice/history_id";
   public static final String ALERT_NOTICE_CLUSTER_NAME = "AlertNotice/cluster_name";
 
-  private static final Set<String> PK_PROPERTY_IDS = new HashSet<String>(
-      Arrays.asList(ALERT_NOTICE_ID));
+  private static final Set<String> PK_PROPERTY_IDS = new HashSet<>(
+    Arrays.asList(ALERT_NOTICE_ID));
 
   /**
    * Used for querying alert history.
@@ -77,13 +77,13 @@ public class AlertNoticeResourceProvider extends AbstractControllerResourceProvi
   /**
    * The property ids for an alert history resource.
    */
-  private static final Set<String> PROPERTY_IDS = new HashSet<String>();
+  private static final Set<String> PROPERTY_IDS = new HashSet<>();
 
   /**
    * The key property ids for an alert history resource.
    */
   private static final Map<Resource.Type, String> KEY_PROPERTY_IDS =
-      new HashMap<Resource.Type, String>();
+    new HashMap<>();
 
   static {
     // properties
@@ -175,7 +175,7 @@ public class AlertNoticeResourceProvider extends AbstractControllerResourceProvi
     }
 
     Set<String> requestPropertyIds = getRequestPropertyIds(request, predicate);
-    Set<Resource> results = new LinkedHashSet<Resource>();
+    Set<Resource> results = new LinkedHashSet<>();
 
     AlertNoticeRequest noticeRequest = new AlertNoticeRequest();
     noticeRequest.Predicate  = predicate;

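For reference, every hunk in this patch applies the same mechanical cleanup: explicit
generic type arguments on constructor calls are replaced with the Java 7 diamond
operator, letting the compiler infer the arguments from the declared type. A minimal
stand-alone sketch of the pattern (the class and field names below are illustrative,
not taken from the patch):

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class DiamondExample {
      // Before: type arguments repeated on the right-hand side.
      private static final Set<String> OLD_STYLE = new HashSet<String>(
          Arrays.asList("AlertNotice/id"));

      // After: the diamond operator infers <String> from the declared type.
      private static final Set<String> NEW_STYLE = new HashSet<>(
          Arrays.asList("AlertNotice/id"));
    }

Both fields hold sets of the same type; the diamond form only removes the redundant
type arguments, so behavior is unchanged.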
http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertResourceProvider.java
index 4cf41b4..31a1000 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertResourceProvider.java
@@ -84,8 +84,8 @@ public class AlertResourceProvider extends ReadOnlyResourceProvider implements
   protected static final String ALERT_REPEAT_TOLERANCE_REMAINING = "Alert/repeat_tolerance_remaining";
   protected static final String ALERT_FIRMNESS = "Alert/firmness";
 
-  private static Set<String> pkPropertyIds = new HashSet<String>(
-      Arrays.asList(ALERT_ID, ALERT_DEFINITION_NAME));
+  private static Set<String> pkPropertyIds = new HashSet<>(
+    Arrays.asList(ALERT_ID, ALERT_DEFINITION_NAME));
 
   @Inject
   private static AlertsDAO alertsDAO;
@@ -99,12 +99,12 @@ public class AlertResourceProvider extends ReadOnlyResourceProvider implements
   /**
    * The property ids for an alert defintion resource.
    */
-  private static final Set<String> PROPERTY_IDS = new HashSet<String>();
+  private static final Set<String> PROPERTY_IDS = new HashSet<>();
 
   /**
    * The key property ids for an alert definition resource.
    */
-  private static final Map<Resource.Type, String> KEY_PROPERTY_IDS = new HashMap<Resource.Type, String>();
+  private static final Map<Resource.Type, String> KEY_PROPERTY_IDS = new HashMap<>();
 
   static {
     // properties
@@ -170,7 +170,7 @@ public class AlertResourceProvider extends ReadOnlyResourceProvider implements
     Set<String> requestPropertyIds = getRequestPropertyIds(request, predicate);
 
     // use a collection which preserves order since JPA sorts the results
-    Set<Resource> results = new LinkedHashSet<Resource>();
+    Set<Resource> results = new LinkedHashSet<>();
 
     for (Map<String, Object> propertyMap : getPropertyMaps(predicate)) {
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertSummaryPropertyProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertSummaryPropertyProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertSummaryPropertyProvider.java
index 5c6bc85..6227f39 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertSummaryPropertyProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertSummaryPropertyProvider.java
@@ -80,7 +80,7 @@ public class AlertSummaryPropertyProvider extends BaseProvider implements Proper
    */
   AlertSummaryPropertyProvider(Resource.Type type,
       String clusterPropertyId, String typeIdPropertyId) {
-    super(ImmutableSet.<String> of(ALERTS_SUMMARY, ALERTS_SUMMARY_HOSTS));
+    super(ImmutableSet.of(ALERTS_SUMMARY, ALERTS_SUMMARY_HOSTS));
     m_resourceType = type;
     m_clusterPropertyId = clusterPropertyId;
     m_typeIdPropertyId = typeIdPropertyId;
@@ -99,9 +99,9 @@ public class AlertSummaryPropertyProvider extends BaseProvider implements Proper
       //   (1) Cluster level alert-status counts
       //   (2) Per host alert-status counts
       // These can be determined in 1 SQL call per cluster, and results used multiple times.
-      Map<Long, Map<String, AlertSummaryDTO>> perHostSummaryMap = new HashMap<Long, Map<String, AlertSummaryDTO>>();
-      Map<Long, AlertHostSummaryDTO> hostsSummaryMap = new HashMap<Long, AlertHostSummaryDTO>();
-      Map<String, Cluster> resourcesClusterMap = new HashMap<String, Cluster>();
+      Map<Long, Map<String, AlertSummaryDTO>> perHostSummaryMap = new HashMap<>();
+      Map<Long, AlertHostSummaryDTO> hostsSummaryMap = new HashMap<>();
+      Map<String, Cluster> resourcesClusterMap = new HashMap<>();
       for (Resource res : resources) {
        String clusterName = (String) res.getPropertyValue(m_clusterPropertyId);
        if (clusterName == null || resourcesClusterMap.containsKey(clusterName)) {
@@ -193,7 +193,7 @@ public class AlertSummaryPropertyProvider extends BaseProvider implements Proper
 
     // all alerts in the cluster, in summary count form
     if (null != summary) {
-      Map<String, Integer> map = new HashMap<String, Integer>();
+      Map<String, Integer> map = new HashMap<>();
       map.put(AlertState.OK.name(), Integer.valueOf(summary.getOkCount()));
       map.put(AlertState.WARNING.name(), Integer.valueOf(summary.getWarningCount()));
       map.put(AlertState.CRITICAL.name(), Integer.valueOf(summary.getCriticalCount()));
@@ -204,7 +204,7 @@ public class AlertSummaryPropertyProvider extends BaseProvider implements Proper
 
     // the summary of hosts with warning or critical alerts
     if (null != hostSummary) {
-      Map<AlertState, Integer> map = new HashMap<AlertState, Integer>();
+      Map<AlertState, Integer> map = new HashMap<>();
       map.put(AlertState.OK, Integer.valueOf(hostSummary.getOkCount()));
       map.put(AlertState.WARNING, Integer.valueOf(hostSummary.getWarningCount()));
       map.put(AlertState.CRITICAL, Integer.valueOf(hostSummary.getCriticalCount()));
@@ -216,7 +216,7 @@ public class AlertSummaryPropertyProvider extends BaseProvider implements Proper
 
   @Override
   public Set<String> checkPropertyIds(Set<String> propertyIds) {
-    Set<String> rejects = new HashSet<String>();
+    Set<String> rejects = new HashSet<>();
 
     for (String id : propertyIds) {
       if (!id.startsWith(ALERTS_SUMMARY)) {

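The AlertSummaryPropertyProvider hunk above also drops an explicit type witness on a
Guava static factory call (ImmutableSet.<String> of(...) becomes ImmutableSet.of(...)).
A small sketch of that sub-pattern, assuming Guava is on the classpath; the string
values are placeholders rather than the provider's real property ids:

    import com.google.common.collect.ImmutableSet;
    import java.util.Set;

    public class TypeWitnessExample {
      // Before: an explicit <String> type witness on the factory method.
      private static final Set<String> WITH_WITNESS =
          ImmutableSet.<String>of("alerts_summary", "alerts_summary_hosts");

      // After: the witness is dropped; String is inferred from the arguments.
      private static final Set<String> INFERRED =
          ImmutableSet.of("alerts_summary", "alerts_summary_hosts");
    }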
http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProvider.java
index 0b7f1db..ceb767d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProvider.java
@@ -78,18 +78,18 @@ public class AlertTargetResourceProvider extends
   public static final String ALERT_TARGET_GLOBAL = "AlertTarget/global";
   public static final String ALERT_TARGET_ENABLED = "AlertTarget/enabled";
 
-  private static final Set<String> PK_PROPERTY_IDS = new HashSet<String>(
-      Arrays.asList(ALERT_TARGET_ID, ALERT_TARGET_NAME));
+  private static final Set<String> PK_PROPERTY_IDS = new HashSet<>(
+    Arrays.asList(ALERT_TARGET_ID, ALERT_TARGET_NAME));
 
   /**
    * The property ids for an alert target resource.
    */
-  private static final Set<String> PROPERTY_IDS = new HashSet<String>();
+  private static final Set<String> PROPERTY_IDS = new HashSet<>();
 
   /**
    * The key property ids for an alert target resource.
    */
-  private static final Map<Resource.Type, String> KEY_PROPERTY_IDS = new HashMap<Resource.Type, String>();
+  private static final Map<Resource.Type, String> KEY_PROPERTY_IDS = new HashMap<>();
 
   static {
     // properties
@@ -156,7 +156,7 @@ public class AlertTargetResourceProvider extends
       throws SystemException, UnsupportedPropertyException,
       NoSuchResourceException, NoSuchParentResourceException {
 
-    Set<Resource> results = new HashSet<Resource>();
+    Set<Resource> results = new HashSet<>();
     Set<String> requestPropertyIds = getRequestPropertyIds(request, predicate);
 
     if( null == predicate ){
@@ -218,7 +218,7 @@ public class AlertTargetResourceProvider extends
     Set<Resource> resources = getResources(new RequestImpl(null, null, null,
         null), predicate);
 
-    Set<Long> targetIds = new HashSet<Long>();
+    Set<Long> targetIds = new HashSet<>();
 
     for (final Resource resource : resources) {
       Long id = (Long) resource.getPropertyValue(ALERT_TARGET_ID);
@@ -310,7 +310,7 @@ public class AlertTargetResourceProvider extends
       // set the states that this alert target cares about
       final Set<AlertState> alertStateSet;
       if (null != alertStates) {
-        alertStateSet = new HashSet<AlertState>(alertStates.size());
+        alertStateSet = new HashSet<>(alertStates.size());
         for (String state : alertStates) {
           alertStateSet.add(AlertState.valueOf(state));
         }
@@ -332,8 +332,8 @@ public class AlertTargetResourceProvider extends
       if (requestMap.containsKey(ALERT_TARGET_GROUPS)) {
         Collection<Long> groupIds = (Collection<Long>) requestMap.get(ALERT_TARGET_GROUPS);
         if( !groupIds.isEmpty() ){
-          Set<AlertGroupEntity> groups = new HashSet<AlertGroupEntity>();
-          List<Long> ids = new ArrayList<Long>(groupIds);
+          Set<AlertGroupEntity> groups = new HashSet<>();
+          List<Long> ids = new ArrayList<>(groupIds);
           groups.addAll(s_dao.findGroupsById(ids));
           entity.setAlertGroups(groups);
         }
@@ -419,7 +419,7 @@ public class AlertTargetResourceProvider extends
       if (alertStates.isEmpty()) {
         alertStateSet = EnumSet.allOf(AlertState.class);
       } else {
-        alertStateSet = new HashSet<AlertState>(alertStates.size());
+        alertStateSet = new HashSet<>(alertStates.size());
         for (String state : alertStates) {
           alertStateSet.add(AlertState.valueOf(state));
         }
@@ -430,8 +430,8 @@ public class AlertTargetResourceProvider extends
 
     // if groups were supplied, replace existing
     if (null != groupIds) {
-      Set<AlertGroupEntity> groups = new HashSet<AlertGroupEntity>();
-      List<Long> ids = new ArrayList<Long>(groupIds);
+      Set<AlertGroupEntity> groups = new HashSet<>();
+      List<Long> ids = new ArrayList<>(groupIds);
 
       if (ids.size() > 0) {
         groups.addAll(s_dao.findGroupsById(ids));
@@ -439,7 +439,7 @@ public class AlertTargetResourceProvider extends
 
       entity.setAlertGroups(groups);
     } else if (entity.isGlobal()){
-      Set<AlertGroupEntity> groups = new HashSet<AlertGroupEntity>(s_dao.findAllGroups());
+      Set<AlertGroupEntity> groups = new HashSet<>(s_dao.findAllGroups());
       entity.setAlertGroups(groups);
     }
 
@@ -486,8 +486,8 @@ public class AlertTargetResourceProvider extends
 
     if (BaseProvider.isPropertyRequested(ALERT_TARGET_GROUPS, requestedIds)) {
       Set<AlertGroupEntity> groupEntities = entity.getAlertGroups();
-      List<AlertGroup> groups = new ArrayList<AlertGroup>(
-          groupEntities.size());
+      List<AlertGroup> groups = new ArrayList<>(
+        groupEntities.size());
 
       for (AlertGroupEntity groupEntity : groupEntities) {
         AlertGroup group = new AlertGroup();
@@ -515,8 +515,8 @@ public class AlertTargetResourceProvider extends
    *         {@code null} if none.
    */
   private Map<String, Object> extractProperties(Map<String, Object> requestMap) {
-    Map<String, Object> normalizedMap = new HashMap<String, Object>(
-        requestMap.size());
+    Map<String, Object> normalizedMap = new HashMap<>(
+      requestMap.size());
 
     for (Entry<String, Object> entry : requestMap.entrySet()) {
       String key = entry.getKey();

http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProvider.java
index 0d101ae..0ffceca 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProvider.java
@@ -60,7 +60,7 @@ public class AmbariPrivilegeResourceProvider extends PrivilegeResourceProvider<O
   /**
    * The property ids for an Ambari privilege resource.
    */
-  private static Set<String> propertyIds = new HashSet<String>();
+  private static Set<String> propertyIds = new HashSet<>();
   static {
     propertyIds.add(PRIVILEGE_ID_PROPERTY_ID);
     propertyIds.add(PERMISSION_NAME_PROPERTY_ID);
@@ -78,7 +78,7 @@ public class AmbariPrivilegeResourceProvider extends PrivilegeResourceProvider<O
   /**
    * The key property ids for a privilege resource.
    */
-  private static Map<Resource.Type, String> keyPropertyIds = new HashMap<Resource.Type, String>();
+  private static Map<Resource.Type, String> keyPropertyIds = new HashMap<>();
   static {
     keyPropertyIds.put(Resource.Type.AmbariPrivilege, PRIVILEGE_ID_PROPERTY_ID);
   }
@@ -122,7 +122,7 @@ public class AmbariPrivilegeResourceProvider extends PrivilegeResourceProvider<O
 
   @Override
   public Map<Long, Object> getResourceEntities(Map<String, Object> properties) {
-    Map<Long, Object> resourceEntities = new HashMap<Long, Object>();
+    Map<Long, Object> resourceEntities = new HashMap<>();
 
     resourceEntities.put(ResourceEntity.AMBARI_RESOURCE_ID, null);
     // add cluster entities

http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AppCookieManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AppCookieManager.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AppCookieManager.java
index 2c21086..846c6ba 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AppCookieManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AppCookieManager.java
@@ -56,7 +56,7 @@ public class AppCookieManager {
 
   private static final EmptyJaasCredentials EMPTY_JAAS_CREDENTIALS = new EmptyJaasCredentials();
 
-  private Map<String, String> endpointCookieMap = new ConcurrentHashMap<String, String>();
+  private Map<String, String> endpointCookieMap = new ConcurrentHashMap<>();
   private static Log LOG = LogFactory.getLog(AppCookieManager.class);
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java
index e47e3c0..a6a731c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java
@@ -75,36 +75,36 @@ public class ArtifactResourceProvider extends AbstractResourceProvider {
   /**
    * primary key fields
    */
-  private static Set<String> pkPropertyIds = new HashSet<String>();
+  private static Set<String> pkPropertyIds = new HashSet<>();
 
   /**
    * map of resource type to fk field
    */
   private static Map<Resource.Type, String> keyPropertyIds =
-      new HashMap<Resource.Type, String>();
+    new HashMap<>();
 
   /**
    * resource properties
    */
-  private static Set<String> propertyIds = new HashSet<String>();
+  private static Set<String> propertyIds = new HashSet<>();
 
   /**
    * map of resource type to type registration
    */
   private static final Map<Resource.Type, TypeRegistration> typeRegistrations =
-      new HashMap<Resource.Type, TypeRegistration>();
+    new HashMap<>();
 
   /**
    * map of foreign key field to type registration
    */
   private static final Map<String, TypeRegistration> typeRegistrationsByFK =
-      new HashMap<String, TypeRegistration>();
+    new HashMap<>();
 
   /**
    * map of short foreign key field to type registration
    */
   private static final Map<String, TypeRegistration> typeRegistrationsByShortFK =
-      new HashMap<String, TypeRegistration>();
+    new HashMap<>();
 
   /**
    * serializer used to convert json to map
@@ -203,7 +203,7 @@ public class ArtifactResourceProvider extends AbstractResourceProvider {
              NoSuchParentResourceException {
 
     Set<Map<String, Object>> requestProps = getPropertyMaps(predicate);
-    Set<Resource> resources = new LinkedHashSet<Resource>();
+    Set<Resource> resources = new LinkedHashSet<>();
 
     for (Map<String, Object> props : requestProps) {
       resources.addAll(getResources(getGetCommand(request, predicate, props)));
@@ -301,7 +301,7 @@ public class ArtifactResourceProvider extends AbstractResourceProvider {
         String name = (String) properties.get(ARTIFACT_NAME_PROPERTY);
         validateParent(properties);
 
-        Set<Resource> matchingResources = new HashSet<Resource>();
+        Set<Resource> matchingResources = new HashSet<>();
         TreeMap<String, String> foreignKeys = createForeignKeyMap(properties);
         Set<String> requestPropertyIds = getRequestPropertyIds(request, predicate);
         if (name != null) {
@@ -341,7 +341,7 @@ public class ArtifactResourceProvider extends AbstractResourceProvider {
       @Override
       public Void invoke() throws AmbariException {
         Map<String, Object> entityUpdateProperties =
-            new HashMap<String, Object>(request.getProperties().iterator().next());
+          new HashMap<>(request.getProperties().iterator().next());
 
         // ensure name is set.  It won't be in case of query
         entityUpdateProperties.put(ARTIFACT_NAME_PROPERTY,
@@ -367,7 +367,7 @@ public class ArtifactResourceProvider extends AbstractResourceProvider {
       @Override
       public Void invoke() throws AmbariException {
         // flatten out key properties as is expected by createForeignKeyMap()
-        Map<String, Object> keyProperties = new HashMap<String, Object>();
+        Map<String, Object> keyProperties = new HashMap<>();
         for (Map.Entry<String, Object> entry : resource.getPropertiesMap().get("Artifacts").entrySet()) {
           keyProperties.put(String.format("Artifacts/%s", entry.getKey()), entry.getValue());
         }
@@ -419,7 +419,7 @@ public class ArtifactResourceProvider extends AbstractResourceProvider {
   private Resource.Type getRequestType(Map<String, Object> properties) throws AmbariException {
     Set<String> requestFKs = getRequestForeignKeys(properties).keySet();
     for (TypeRegistration registration : typeRegistrations.values()) {
-      Collection<String> typeFKs = new HashSet<String>(registration.getForeignKeyInfo().values());
+      Collection<String> typeFKs = new HashSet<>(registration.getForeignKeyInfo().values());
       typeFKs.add(registration.getFKPropertyName());
       if (requestFKs.equals(typeFKs)) {
         return registration.getType();
@@ -439,7 +439,7 @@ public class ArtifactResourceProvider extends AbstractResourceProvider {
    * @return map of foreign key to value for the provided request properties
    */
   private Map<String, String> getRequestForeignKeys(Map<String, Object> properties) {
-    Map<String, String> requestFKs = new HashMap<String, String>();
+    Map<String, String> requestFKs = new HashMap<>();
     for (String property : properties.keySet()) {
       if (! property.equals(ARTIFACT_NAME_PROPERTY) && ! property.startsWith(ARTIFACT_DATA_PROPERTY)) {
         requestFKs.put(property, String.valueOf(properties.get(property)));
@@ -495,7 +495,7 @@ public class ArtifactResourceProvider extends AbstractResourceProvider {
    * @throws AmbariException an unexpected exception occurred
    */
   private TreeMap<String, String> createForeignKeyMap(Map<String, Object> properties) throws AmbariException {
-    TreeMap<String, String> foreignKeys = new TreeMap<String, String>();
+    TreeMap<String, String> foreignKeys = new TreeMap<>();
     for (String keyProperty : keyPropertyIds.values()) {
       if (! keyProperty.equals(ARTIFACT_NAME_PROPERTY)) {
         String origValue = (String) properties.get(keyProperty);

http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java
index 07279ac..db1942b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java
@@ -49,7 +49,7 @@ public abstract class BaseClusterRequest implements TopologyRequest {
   /**
    * host group info map
    */
-  protected final Map<String, HostGroupInfo> hostGroupInfoMap = new HashMap<String, HostGroupInfo>();
+  protected final Map<String, HostGroupInfo> hostGroupInfoMap = new HashMap<>();
 
   protected ProvisionAction provisionAction;
 
@@ -135,7 +135,7 @@ public abstract class BaseClusterRequest implements TopologyRequest {
           String.format("The specified host query is invalid: %s", e.getMessage()));
     }
 
-    Set<String> propertyIds = new HashSet<String>();
+    Set<String> propertyIds = new HashSet<>();
     for (Token token : tokens) {
       if (token.getType() == Token.TYPE.PROPERTY_OPERAND) {
         propertyIds.add(token.getValue());

http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseProvider.java
index 9024a7e..a0263a1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseProvider.java
@@ -90,11 +90,11 @@ public abstract class BaseProvider {
    * @param propertyIds  the properties associated with this provider
    */
   public BaseProvider(Set<String> propertyIds) {
-    this.propertyIds = new HashSet<String>(propertyIds);
+    this.propertyIds = new HashSet<>(propertyIds);
     categoryIds = PropertyHelper.getCategories(propertyIds);
-    combinedIds = new HashSet<String>(propertyIds);
+    combinedIds = new HashSet<>(propertyIds);
     combinedIds.addAll(categoryIds);
-    patterns = new HashMap<String, Pattern>();
+    patterns = new HashMap<>();
 
     // convert the argumented metric as it's defined in the JSON file to regex
     for (String id : combinedIds) {
@@ -121,7 +121,7 @@ public abstract class BaseProvider {
       return base;
     }
 
-    Set<String> unsupported = new HashSet<String>();
+    Set<String> unsupported = new HashSet<>();
 
     for (String propertyId : base)
     {
@@ -135,13 +135,13 @@ public abstract class BaseProvider {
 
   public Set<String> checkPropertyIds(Set<String> propertyIds) {
     if (!this.propertyIds.containsAll(propertyIds)) {
-      Set<String> unsupportedPropertyIds = new HashSet<String>(propertyIds);
+      Set<String> unsupportedPropertyIds = new HashSet<>(propertyIds);
       unsupportedPropertyIds.removeAll(combinedIds);
 
       // If the property id is not in the set of known property ids we may still allow it if
       // its parent category is a known property. This allows for Map type properties where
       // we want to treat property as a category and the entries as individual properties.
-      Set<String> categoryProperties = new HashSet<String>();
+      Set<String> categoryProperties = new HashSet<>();
       for (String unsupportedPropertyId : unsupportedPropertyIds) {
         if (checkCategory(unsupportedPropertyId) || checkRegExp(unsupportedPropertyId)) {
           categoryProperties.add(unsupportedPropertyId);
@@ -167,18 +167,18 @@ public abstract class BaseProvider {
 
     // if no properties are specified, then return them all
     if (propertyIds == null || propertyIds.isEmpty()) {
-      return new HashSet<String>(this.propertyIds);
+      return new HashSet<>(this.propertyIds);
     }
 
-    propertyIds = new HashSet<String>(propertyIds);
+    propertyIds = new HashSet<>(propertyIds);
 
     if (predicate != null) {
       propertyIds.addAll(PredicateHelper.getPropertyIds(predicate));
     }
 
     if (!combinedIds.containsAll(propertyIds)) {
-      Set<String> keepers = new HashSet<String>();
-      Set<String> unsupportedPropertyIds = new HashSet<String>(propertyIds);
+      Set<String> keepers = new HashSet<>();
+      Set<String> unsupportedPropertyIds = new HashSet<>(propertyIds);
       unsupportedPropertyIds.removeAll(combinedIds);
 
       for (String unsupportedPropertyId : unsupportedPropertyIds) {
@@ -249,7 +249,7 @@ public abstract class BaseProvider {
    */
   protected List<String> getRegexGroups(String regExpKey, String id) {
     Pattern pattern = patterns.get(regExpKey);
-    List<String> regexGroups = new ArrayList<String>();
+    List<String> regexGroups = new ArrayList<>();
 
     if (pattern != null) {
       Matcher matcher = pattern.matcher(id);

http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index 25f2001..e5927d6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -85,31 +85,31 @@ public class BlueprintConfigurationProcessor {
    * Single host topology updaters
    */
   protected static Map<String, Map<String, PropertyUpdater>> singleHostTopologyUpdaters =
-      new HashMap<String, Map<String, PropertyUpdater>>();
+    new HashMap<>();
 
   /**
    * Multi host topology updaters
    */
   private static Map<String, Map<String, PropertyUpdater>> multiHostTopologyUpdaters =
-      new HashMap<String, Map<String, PropertyUpdater>>();
+    new HashMap<>();
 
   /**
    * Database host topology updaters
    */
   private static Map<String, Map<String, PropertyUpdater>> dbHostTopologyUpdaters =
-      new HashMap<String, Map<String, PropertyUpdater>>();
+    new HashMap<>();
 
   /**
    * Updaters for properties which need 'm' appended
    */
   private static Map<String, Map<String, PropertyUpdater>> mPropertyUpdaters =
-      new HashMap<String, Map<String, PropertyUpdater>>();
+    new HashMap<>();
 
   /**
    * Non topology related updaters
    */
   private static Map<String, Map<String, PropertyUpdater>> nonTopologyUpdaters =
-      new HashMap<String, Map<String, PropertyUpdater>>();
+    new HashMap<>();
 
   /**
    * Updaters that preserve the original property value, functions
@@ -118,13 +118,13 @@ public class BlueprintConfigurationProcessor {
    * cluster creation
    */
   private Map<String, Map<String, PropertyUpdater>> removePropertyUpdaters =
-    new HashMap<String, Map<String, PropertyUpdater>>();
+    new HashMap<>();
 
   /**
    * Collection of all updaters
    */
   private static Collection<Map<String, Map<String, PropertyUpdater>>> allUpdaters =
-      new ArrayList<Map<String, Map<String, PropertyUpdater>>>();
+    new ArrayList<>();
 
   /**
    * Compiled regex for hostgroup token with port information.
@@ -148,7 +148,7 @@ public class BlueprintConfigurationProcessor {
    *   expected hostname information is not found.
    */
   private static Set<String> configPropertiesWithHASupport =
-    new HashSet<String>(Arrays.asList("fs.defaultFS", "hbase.rootdir", "instance.volumes", "policymgr_external_url", "xasecure.audit.destination.hdfs.dir"));
+    new HashSet<>(Arrays.asList("fs.defaultFS", "hbase.rootdir", "instance.volumes", "policymgr_external_url", "xasecure.audit.destination.hdfs.dir"));
 
   /**
    * Statically-defined list of filters to apply on property exports.
@@ -228,8 +228,8 @@ public class BlueprintConfigurationProcessor {
       singleHostTopologyUpdaters.put("oozie-env", oozieEnvUpdaters);
       singleHostTopologyUpdaters.put("oozie-site", oozieSiteUpdaters);
     } else {
-      Map<String, PropertyUpdater> oozieEnvOriginalValueMap = new HashMap<String, PropertyUpdater>();
-      Map<String, PropertyUpdater> oozieSiteOriginalValueMap = new HashMap<String, PropertyUpdater>();
+      Map<String, PropertyUpdater> oozieEnvOriginalValueMap = new HashMap<>();
+      Map<String, PropertyUpdater> oozieSiteOriginalValueMap = new HashMap<>();
       // register updaters for Oozie properties that may point to an external DB
       oozieEnvOriginalValueMap.put("oozie_existing_mysql_host", new OriginalValuePropertyUpdater());
       oozieEnvOriginalValueMap.put("oozie_existing_oracle_host", new OriginalValuePropertyUpdater());
@@ -242,7 +242,7 @@ public class BlueprintConfigurationProcessor {
       removePropertyUpdaters.put("oozie-site", oozieSiteOriginalValueMap);
     }
 
-    Map<String, PropertyUpdater> hiveEnvOriginalValueMap = new HashMap<String, PropertyUpdater>();
+    Map<String, PropertyUpdater> hiveEnvOriginalValueMap = new HashMap<>();
     // register updaters for Hive properties that may point to an external DB
     hiveEnvOriginalValueMap.put("hive_existing_oracle_host", new 
OriginalValuePropertyUpdater());
     hiveEnvOriginalValueMap.put("hive_existing_mssql_server_2_host", new 
OriginalValuePropertyUpdater());
@@ -278,7 +278,7 @@ public class BlueprintConfigurationProcessor {
   }
 
   public Collection<String> getRequiredHostGroups() {
-    Collection<String> requiredHostGroups = new HashSet<String>();
+    Collection<String> requiredHostGroups = new HashSet<>();
 
     for (Map<String, Map<String, PropertyUpdater>> updaterMap : createCollectionOfUpdaters()) {
       for (Map.Entry<String, Map<String, PropertyUpdater>> entry : updaterMap.entrySet()) {
@@ -317,7 +317,7 @@ public class BlueprintConfigurationProcessor {
    * @return Set of config type names that were updated by this update call
    */
   public Set<String> doUpdateForClusterCreate() throws ConfigurationTopologyException {
-      Set<String> configTypesUpdated = new HashSet<String>();
+      Set<String> configTypesUpdated = new HashSet<>();
     Configuration clusterConfig = clusterTopology.getConfiguration();
     Map<String, HostGroupInfo> groupInfoMap = clusterTopology.getHostGroupInfo();
 
@@ -477,7 +477,7 @@ public class BlueprintConfigurationProcessor {
       doOozieServerHAUpdate();
     }
 
-    Collection<Configuration> allConfigs = new ArrayList<Configuration>();
+    Collection<Configuration> allConfigs = new ArrayList<>();
     allConfigs.add(clusterTopology.getConfiguration());
     for (HostGroupInfo groupInfo : clusterTopology.getHostGroupInfo().values()) {
       Configuration hgConfiguration = groupInfo.getConfiguration();
@@ -689,7 +689,7 @@ public class BlueprintConfigurationProcessor {
    */
   private Collection<Map<String, Map<String, PropertyUpdater>>> addNameNodeHAUpdaters(Collection<Map<String, Map<String, PropertyUpdater>>> updaters) {
     Collection<Map<String, Map<String, PropertyUpdater>>> highAvailabilityUpdaters =
-      new LinkedList<Map<String, Map<String, PropertyUpdater>>>();
+      new LinkedList<>();
 
     // always add the statically-defined list of updaters to the list to use
     // in processing cluster configuration
@@ -716,7 +716,7 @@ public class BlueprintConfigurationProcessor {
    */
   private Collection<Map<String, Map<String, PropertyUpdater>>> addYarnResourceManagerHAUpdaters(Collection<Map<String, Map<String, PropertyUpdater>>> updaters) {
     Collection<Map<String, Map<String, PropertyUpdater>>> highAvailabilityUpdaters =
-      new LinkedList<Map<String, Map<String, PropertyUpdater>>>();
+      new LinkedList<>();
 
     // always add the statically-defined list of updaters to the list to use
     // in processing cluster configuration
@@ -742,7 +742,7 @@ public class BlueprintConfigurationProcessor {
    */
   private Collection<Map<String, Map<String, PropertyUpdater>>> addOozieServerHAUpdaters(Collection<Map<String, Map<String, PropertyUpdater>>> updaters) {
     Collection<Map<String, Map<String, PropertyUpdater>>> highAvailabilityUpdaters =
-      new LinkedList<Map<String, Map<String, PropertyUpdater>>>();
+      new LinkedList<>();
 
     // always add the statically-defined list of updaters to the list to use
     // in processing cluster configuration
@@ -841,8 +841,8 @@ public class BlueprintConfigurationProcessor {
    * @return a Map of registered PropertyUpdaters for handling HA properties in hdfs-site
    */
   private Map<String, Map<String, PropertyUpdater>> createMapOfNameNodeHAUpdaters() {
-    Map<String, Map<String, PropertyUpdater>> highAvailabilityUpdaters = new HashMap<String, Map<String, PropertyUpdater>>();
-    Map<String, PropertyUpdater> hdfsSiteUpdatersForAvailability = new HashMap<String, PropertyUpdater>();
+    Map<String, Map<String, PropertyUpdater>> highAvailabilityUpdaters = new HashMap<>();
+    Map<String, PropertyUpdater> hdfsSiteUpdatersForAvailability = new HashMap<>();
     highAvailabilityUpdaters.put("hdfs-site", hdfsSiteUpdatersForAvailability);
 
     //todo: Do we need to call this for HG configurations?
@@ -872,8 +872,8 @@ public class BlueprintConfigurationProcessor {
    * @return a Map of registered PropertyUpdaters for handling HA properties in yarn-site
    */
   private Map<String, Map<String, PropertyUpdater>> createMapOfYarnResourceManagerHAUpdaters() {
-    Map<String, Map<String, PropertyUpdater>> highAvailabilityUpdaters = new HashMap<String, Map<String, PropertyUpdater>>();
-    Map<String, PropertyUpdater> yarnSiteUpdatersForAvailability = new HashMap<String, PropertyUpdater>();
+    Map<String, Map<String, PropertyUpdater>> highAvailabilityUpdaters = new HashMap<>();
+    Map<String, PropertyUpdater> yarnSiteUpdatersForAvailability = new HashMap<>();
     highAvailabilityUpdaters.put("yarn-site", yarnSiteUpdatersForAvailability);
 
     Map<String, String> yarnSiteConfig = clusterTopology.getConfiguration().getFullProperties().get("yarn-site");
@@ -899,8 +899,8 @@ public class BlueprintConfigurationProcessor {
    * @return a Map of registered PropertyUpdaters for handling HA properties in oozie-site
    */
   private Map<String, Map<String, PropertyUpdater>> createMapOfOozieServerHAUpdaters() {
-    Map<String, Map<String, PropertyUpdater>> highAvailabilityUpdaters = new HashMap<String, Map<String, PropertyUpdater>>();
-    Map<String, PropertyUpdater> oozieSiteUpdatersForAvailability = new HashMap<String, PropertyUpdater>();
+    Map<String, Map<String, PropertyUpdater>> highAvailabilityUpdaters = new HashMap<>();
+    Map<String, PropertyUpdater> oozieSiteUpdatersForAvailability = new HashMap<>();
     highAvailabilityUpdaters.put("oozie-site", oozieSiteUpdatersForAvailability);
 
     // register a multi-host property updater for this Oozie property.
@@ -1174,7 +1174,7 @@ public class BlueprintConfigurationProcessor {
                   groupInfo.getHostGroupName() + "%");
             }
           }
-          Collection<String> addedGroups = new HashSet<String>();
+          Collection<String> addedGroups = new HashSet<>();
           String[] toks = propValue.split(",");
           boolean inBrackets = propValue.startsWith("[");
 
@@ -1216,7 +1216,7 @@ public class BlueprintConfigurationProcessor {
   //todo: replace this with parseHostGroupToken which would return a hostgroup or null
   private static Collection<String> getHostStrings(String val, ClusterTopology topology) {
 
-    Collection<String> hosts = new LinkedHashSet<String>();
+    Collection<String> hosts = new LinkedHashSet<>();
     Matcher m = HOSTGROUP_PORT_REGEX.matcher(val);
     while (m.find()) {
       String groupName = m.group(1);
@@ -1248,7 +1248,7 @@ public class BlueprintConfigurationProcessor {
    *         elements in this property
    */
   private static String[] splitAndTrimStrings(String propertyName) {
-    List<String> namesWithoutWhitespace = new LinkedList<String>();
+    List<String> namesWithoutWhitespace = new LinkedList<>();
     for (String service : propertyName.split(",")) {
       namesWithoutWhitespace.add(service.trim());
     }
@@ -1480,7 +1480,7 @@ public class BlueprintConfigurationProcessor {
         Collection<String> matchingGroups = topology.getHostGroupsForComponent(component);
         int matchingGroupCount = matchingGroups.size();
         if (matchingGroupCount != 0) {
-          return new HashSet<String>(matchingGroups);
+          return new HashSet<>(matchingGroups);
         } else {
           Cardinality cardinality = topology.getBlueprint().getStack().getCardinality(component);
           // if no matching host groups are found for a component whose configuration
@@ -1892,7 +1892,7 @@ public class BlueprintConfigurationProcessor {
      * @return list of hosts that have the given components
      */
     private Collection<String> getHostStringsFromLocalhost(String origValue, ClusterTopology topology) {
-      Set<String> hostStrings = new HashSet<String>();
+      Set<String> hostStrings = new HashSet<>();
       if(origValue.contains("localhost")) {
         Matcher localhostMatcher = LOCALHOST_PORT_REGEX.matcher(origValue);
         String port = null;
@@ -1934,7 +1934,7 @@ public class BlueprintConfigurationProcessor {
     private String removePorts(Collection<String> hostStrings) {
       String port = null;
       if(!usePortForEachHost && !hostStrings.isEmpty()) {
-        Set<String> temp = new HashSet<String>();
+        Set<String> temp = new HashSet<>();
 
         // extract port
         Iterator<String> i = hostStrings.iterator();
@@ -1969,7 +1969,7 @@ public class BlueprintConfigurationProcessor {
                                                     Map<String, Map<String, String>> properties,
                                                     ClusterTopology topology) {
 
-      Collection<String> requiredHostGroups = new HashSet<String>();
+      Collection<String> requiredHostGroups = new HashSet<>();
 
       // add all host groups specified in host group tokens
       Matcher m = HOSTGROUP_PORT_REGEX.matcher(origValue);
@@ -2216,7 +2216,7 @@ public class BlueprintConfigurationProcessor {
   private static class TempletonHivePropertyUpdater implements PropertyUpdater {
 
     private Map<String, PropertyUpdater> mapOfKeysToUpdaters =
-      new HashMap<String, PropertyUpdater>();
+      new HashMap<>();
 
     TempletonHivePropertyUpdater() {
       // the only known property that requires hostname substitution is hive.metastore.uris,
@@ -2281,7 +2281,7 @@ public class BlueprintConfigurationProcessor {
         return Collections.emptySet();
       }
 
-      Collection<String> requiredGroups = new HashSet<String>();
+      Collection<String> requiredGroups = new HashSet<>();
       // split out the key/value pairs
       String[] keyValuePairs = origValue.split(",");
       for (String keyValuePair : keyValuePairs) {
@@ -2328,58 +2328,58 @@ public class BlueprintConfigurationProcessor {
     allUpdaters.add(mPropertyUpdaters);
     allUpdaters.add(nonTopologyUpdaters);
 
-    Map<String, PropertyUpdater> amsSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> hdfsSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> mapredSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> coreSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> hbaseSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> yarnSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> hiveSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> hiveSiteNonTopologyMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> hiveEnvOriginalValueMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> oozieSiteOriginalValueMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> oozieSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> stormSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> stormSiteNonTopologyMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> accumuloSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> falconStartupPropertiesMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> kafkaBrokerMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> kafkaBrokerNonTopologyMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> atlasPropsMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> mapredEnvMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> mHadoopEnvMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> shHadoopEnvMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> hbaseEnvMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> hiveEnvMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> hiveInteractiveEnvMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> hiveInteractiveSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> oozieEnvMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> oozieEnvHeapSizeMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> multiWebhcatSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> multiHbaseSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> multiStormSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> multiCoreSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> multiHdfsSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> multiHiveSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> multiKafkaBrokerMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> multiSliderClientMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> multiYarnSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> multiOozieSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> multiAccumuloSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> multiRangerKmsSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> dbHiveSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> rangerAdminPropsMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> rangerEnvPropsMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> rangerYarnAuditPropsMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> rangerHdfsAuditPropsMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> rangerHbaseAuditPropsMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> rangerHiveAuditPropsMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> rangerKnoxAuditPropsMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> rangerKafkaAuditPropsMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> rangerStormAuditPropsMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> rangerAtlasAuditPropsMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> hawqSiteMap = new HashMap<String, PropertyUpdater>();
-    Map<String, PropertyUpdater> zookeeperEnvMap = new HashMap<String, PropertyUpdater>();
+    Map<String, PropertyUpdater> amsSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> hdfsSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> mapredSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> coreSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> hbaseSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> yarnSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> hiveSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> hiveSiteNonTopologyMap = new HashMap<>();
+    Map<String, PropertyUpdater> hiveEnvOriginalValueMap = new HashMap<>();
+    Map<String, PropertyUpdater> oozieSiteOriginalValueMap = new HashMap<>();
+    Map<String, PropertyUpdater> oozieSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> stormSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> stormSiteNonTopologyMap = new HashMap<>();
+    Map<String, PropertyUpdater> accumuloSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> falconStartupPropertiesMap = new HashMap<>();
+    Map<String, PropertyUpdater> kafkaBrokerMap = new HashMap<>();
+    Map<String, PropertyUpdater> kafkaBrokerNonTopologyMap = new HashMap<>();
+    Map<String, PropertyUpdater> atlasPropsMap = new HashMap<>();
+    Map<String, PropertyUpdater> mapredEnvMap = new HashMap<>();
+    Map<String, PropertyUpdater> mHadoopEnvMap = new HashMap<>();
+    Map<String, PropertyUpdater> shHadoopEnvMap = new HashMap<>();
+    Map<String, PropertyUpdater> hbaseEnvMap = new HashMap<>();
+    Map<String, PropertyUpdater> hiveEnvMap = new HashMap<>();
+    Map<String, PropertyUpdater> hiveInteractiveEnvMap = new HashMap<>();
+    Map<String, PropertyUpdater> hiveInteractiveSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> oozieEnvMap = new HashMap<>();
+    Map<String, PropertyUpdater> oozieEnvHeapSizeMap = new HashMap<>();
+    Map<String, PropertyUpdater> multiWebhcatSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> multiHbaseSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> multiStormSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> multiCoreSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> multiHdfsSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> multiHiveSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> multiKafkaBrokerMap = new HashMap<>();
+    Map<String, PropertyUpdater> multiSliderClientMap = new HashMap<>();
+    Map<String, PropertyUpdater> multiYarnSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> multiOozieSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> multiAccumuloSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> multiRangerKmsSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> dbHiveSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> rangerAdminPropsMap = new HashMap<>();
+    Map<String, PropertyUpdater> rangerEnvPropsMap = new HashMap<>();
+    Map<String, PropertyUpdater> rangerYarnAuditPropsMap = new HashMap<>();
+    Map<String, PropertyUpdater> rangerHdfsAuditPropsMap = new HashMap<>();
+    Map<String, PropertyUpdater> rangerHbaseAuditPropsMap = new HashMap<>();
+    Map<String, PropertyUpdater> rangerHiveAuditPropsMap = new HashMap<>();
+    Map<String, PropertyUpdater> rangerKnoxAuditPropsMap = new HashMap<>();
+    Map<String, PropertyUpdater> rangerKafkaAuditPropsMap = new HashMap<>();
+    Map<String, PropertyUpdater> rangerStormAuditPropsMap = new HashMap<>();
+    Map<String, PropertyUpdater> rangerAtlasAuditPropsMap = new HashMap<>();
+    Map<String, PropertyUpdater> hawqSiteMap = new HashMap<>();
+    Map<String, PropertyUpdater> zookeeperEnvMap = new HashMap<>();
 
     singleHostTopologyUpdaters.put("ams-site", amsSiteMap);
     singleHostTopologyUpdaters.put("hdfs-site", hdfsSiteMap);
@@ -2518,7 +2518,7 @@ public class BlueprintConfigurationProcessor {
         String atlasHookClass = "org.apache.atlas.hive.hook.HiveHook";
         String[] hiveHooks = origValue.split(",");
 
-        List<String> hiveHooksClean = new ArrayList<String>();
+        List<String> hiveHooksClean = new ArrayList<>();
         for(String hiveHook : hiveHooks) {
           if (!StringUtils.isBlank(hiveHook.trim())) {
             hiveHooksClean.add(hiveHook.trim());
@@ -2775,7 +2775,7 @@ public class BlueprintConfigurationProcessor {
    */
   void setMissingConfigurations(Configuration configuration, Set<String> configTypesUpdated) {
     // AMBARI-5206
-    final Map<String , String> userProps = new HashMap<String , String>();
+    final Map<String , String> userProps = new HashMap<>();
 
     setRetryConfiguration(configuration, configTypesUpdated);
 
@@ -3190,7 +3190,7 @@ public class BlueprintConfigurationProcessor {
      * namenode.
      */
     private final Set<String> setOfHDFSPropertyNamesNonHA =
-      Collections.unmodifiableSet( new HashSet<String>(Arrays.asList("dfs.namenode.http-address", "dfs.namenode.https-address", "dfs.namenode.rpc-address")));
+      Collections.unmodifiableSet(new HashSet<>(Arrays.asList("dfs.namenode.http-address", "dfs.namenode.https-address", "dfs.namenode.rpc-address")));
 
 
     /**
@@ -3260,7 +3260,7 @@ public class BlueprintConfigurationProcessor {
      * Set of HAWQ Property names that are only valid in a HA scenario.
      */
     private final Set<String> setOfHawqPropertyNamesNonHA =
-            Collections.unmodifiableSet( new HashSet<String>(Arrays.asList(HAWQ_SITE_HAWQ_STANDBY_ADDRESS_HOST)));
+            Collections.unmodifiableSet(new HashSet<>(Arrays.asList(HAWQ_SITE_HAWQ_STANDBY_ADDRESS_HOST)));
 
 
     /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
index 8e47a98..a836855 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
@@ -116,8 +116,8 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide
     "Configuration Maps must hold a single configuration type each";
   // Primary Key Fields
   private static Set<String> pkPropertyIds =
-      new HashSet<String>(Arrays.asList(new String[]{
-          BLUEPRINT_NAME_PROPERTY_ID}));
+    new HashSet<>(Arrays.asList(new String[]{
+      BLUEPRINT_NAME_PROPERTY_ID}));
 
   /**
    * Used to create Blueprint instances
@@ -224,7 +224,7 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide
       results = blueprintDAO.findAll();
     }
 
-    Set<Resource> resources  = new HashSet<Resource>();
+    Set<Resource> resources  = new HashSet<>();
     for (BlueprintEntity entity : results) {
       Resource resource = toResource(entity, getRequestPropertyIds(request, predicate));
       if (predicate == null || ! applyPredicate || predicate.evaluate(resource)) {
@@ -299,18 +299,18 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide
     setResourceProperty(resource, STACK_NAME_PROPERTY_ID, stackEntity.getStackName(), requestedIds);
     setResourceProperty(resource, STACK_VERSION_PROPERTY_ID, stackEntity.getStackVersion(), requestedIds);
 
-    List<Map<String, Object>> listGroupProps = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> listGroupProps = new ArrayList<>();
     Collection<HostGroupEntity> hostGroups = entity.getHostGroups();
     for (HostGroupEntity hostGroup : hostGroups) {
-      Map<String, Object> mapGroupProps = new HashMap<String, Object>();
+      Map<String, Object> mapGroupProps = new HashMap<>();
       mapGroupProps.put(HOST_GROUP_NAME_PROPERTY_ID, hostGroup.getName());
       listGroupProps.add(mapGroupProps);
       mapGroupProps.put(HOST_GROUP_CARDINALITY_PROPERTY_ID, hostGroup.getCardinality());
 
-      List<Map<String, String>> listComponentProps = new ArrayList<Map<String, String>>();
+      List<Map<String, String>> listComponentProps = new ArrayList<>();
       Collection<HostGroupComponentEntity> components = hostGroup.getComponents();
       for (HostGroupComponentEntity component : components) {
-        Map<String, String> mapComponentProps = new HashMap<String, String>();
+        Map<String, String> mapComponentProps = new HashMap<>();
         mapComponentProps.put(COMPONENT_NAME_PROPERTY_ID, component.getName());
 
         if (component.getProvisionAction() != null) {
@@ -351,10 +351,10 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide
   List<Map<String, Map<String, Object>>> populateConfigurationList(
       Collection<? extends BlueprintConfiguration> configurations) throws NoSuchResourceException {
 
-    List<Map<String, Map<String, Object>>> listConfigurations = new 
ArrayList<Map<String, Map<String, Object>>>();
+    List<Map<String, Map<String, Object>>> listConfigurations = new 
ArrayList<>();
     for (BlueprintConfiguration config : configurations) {
-      Map<String, Map<String, Object>> mapConfigurations = new HashMap<String, 
Map<String, Object>>();
-      Map<String, Object> configTypeDefinition = new HashMap<String, Object>();
+      Map<String, Map<String, Object>> mapConfigurations = new HashMap<>();
+      Map<String, Object> configTypeDefinition = new HashMap<>();
       String type = config.getType();
 
       if(config instanceof BlueprintConfigEntity) {
@@ -403,7 +403,7 @@ public class BlueprintResourceProvider extends 
AbstractControllerResourceProvide
    */
   public static List<Map<String, Object>> populateSettingList(
           Collection<? extends BlueprintSettingEntity> settings) throws 
NoSuchResourceException {
-    List<Map<String, Object>> listSettings = new ArrayList<Map<String, 
Object>>();
+    List<Map<String, Object>> listSettings = new ArrayList<>();
 
     if (settings != null) {
       for (BlueprintSettingEntity setting : settings) {
@@ -427,7 +427,7 @@ public class BlueprintResourceProvider extends 
AbstractControllerResourceProvide
   void createBlueprintConfigEntities(Collection<Map<String, String>> 
propertyMaps,
                                              BlueprintEntity blueprint) {
 
-    Collection<BlueprintConfigEntity> configurations = new 
ArrayList<BlueprintConfigEntity>();
+    Collection<BlueprintConfigEntity> configurations = new ArrayList<>();
     if (propertyMaps != null) {
       for (Map<String, String> configuration : propertyMaps) {
         BlueprintConfigEntity configEntity = new BlueprintConfigEntity();
@@ -555,8 +555,8 @@ public class BlueprintResourceProvider extends 
AbstractControllerResourceProvide
   protected static abstract class BlueprintConfigPopulationStrategy {
 
     public void applyConfiguration(Map<String, String> configuration, 
BlueprintConfiguration blueprintConfiguration) {
-      Map<String, String> configData = new HashMap<String, String>();
-      Map<String, Map<String, String>> configAttributes = new HashMap<String, 
Map<String, String>>();
+      Map<String, String> configData = new HashMap<>();
+      Map<String, Map<String, String>> configAttributes = new HashMap<>();
 
       if (configuration != null) {
         for (Map.Entry<String, String> entry : configuration.entrySet()) {

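The toResource and populateConfigurationList hunks above build nested List<Map<...>> structures, and the diamond operator still infers the full nested type argument from the declared variable. A rough, self-contained sketch of that shape; the type layout mirrors the provider, but the keys and values are invented:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class NestedGenericsExample {
      static List<Map<String, Map<String, Object>>> buildConfigList() {
        // <> infers Map<String, Map<String, Object>> as the element type here.
        List<Map<String, Map<String, Object>>> configurations = new ArrayList<>();

        Map<String, Map<String, Object>> configType = new HashMap<>();
        Map<String, Object> definition = new HashMap<>();
        definition.put("properties", new HashMap<String, String>());
        configType.put("core-site", definition);   // one configuration type per map

        configurations.add(configType);
        return configurations;
      }
    }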
http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CalculatedStatus.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CalculatedStatus.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CalculatedStatus.java
index 32dd03d..3c0164c 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CalculatedStatus.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CalculatedStatus.java
@@ -166,8 +166,8 @@ public class CalculatedStatus {
    * @return a calculated status
    */
   public static CalculatedStatus 
statusFromStageEntities(Collection<StageEntity> stages) {
-    Collection<HostRoleStatus> stageStatuses = new HashSet<HostRoleStatus>();
-    Collection<HostRoleCommandEntity> tasks = new 
HashSet<HostRoleCommandEntity>();
+    Collection<HostRoleStatus> stageStatuses = new HashSet<>();
+    Collection<HostRoleCommandEntity> tasks = new HashSet<>();
 
     for (StageEntity stage : stages) {
       // get all the tasks for the stage
@@ -202,8 +202,8 @@ public class CalculatedStatus {
    */
   public static CalculatedStatus statusFromStages(Collection<Stage> stages) {
 
-    Collection<HostRoleStatus> stageStatuses = new HashSet<HostRoleStatus>();
-    Collection<HostRoleCommand> tasks = new HashSet<HostRoleCommand>();
+    Collection<HostRoleStatus> stageStatuses = new HashSet<>();
+    Collection<HostRoleCommand> tasks = new HashSet<>();
 
     for (Stage stage : stages) {
       // get all the tasks for the stage
@@ -236,7 +236,7 @@ public class CalculatedStatus {
    * @return a map of counts of tasks keyed by the task status
    */
   public static Map<HostRoleStatus, Integer> 
calculateStatusCounts(Collection<HostRoleStatus> hostRoleStatuses) {
-    Map<HostRoleStatus, Integer> counters = new HashMap<HostRoleStatus, 
Integer>();
+    Map<HostRoleStatus, Integer> counters = new HashMap<>();
     // initialize
     for (HostRoleStatus hostRoleStatus : HostRoleStatus.values()) {
       counters.put(hostRoleStatus, 0);
@@ -309,7 +309,7 @@ public class CalculatedStatus {
 
     Map<StatusType,Map<HostRoleStatus, Integer>> counters = new HashMap<>();
     for (StatusType statusType : StatusType.values()) {
-      Map <HostRoleStatus, Integer> statusMap = new HashMap<HostRoleStatus, 
Integer>();
+      Map <HostRoleStatus, Integer> statusMap = new HashMap<>();
       counters.put(statusType,statusMap);
       // initialize
       for (HostRoleStatus hostRoleStatus : HostRoleStatus.values()) {
@@ -391,7 +391,7 @@ public class CalculatedStatus {
    * @return a map of counts of tasks keyed by the task status
    */
   public static Map<HostRoleStatus, Integer> 
calculateTaskEntityStatusCounts(Collection<HostRoleCommandEntity> tasks) {
-    Collection<HostRoleStatus> hostRoleStatuses = new 
LinkedList<HostRoleStatus>();
+    Collection<HostRoleStatus> hostRoleStatuses = new LinkedList<>();
 
     for (HostRoleCommandEntity hostRoleCommand : tasks) {
       hostRoleStatuses.add(hostRoleCommand.getStatus());
@@ -408,7 +408,7 @@ public class CalculatedStatus {
   public static Map<HostRoleStatus, Integer> calculateTaskStatusCounts(
       Map<Long, HostRoleCommandStatusSummaryDTO> stageDto, Set<Long> stageIds) 
{
 
-    List<HostRoleStatus> status = new ArrayList<HostRoleStatus>();
+    List<HostRoleStatus> status = new ArrayList<>();
 
     for (Long stageId : stageIds) {
       if (!stageDto.containsKey(stageId)) {
@@ -485,7 +485,7 @@ public class CalculatedStatus {
    * @return a map of counts of tasks keyed by the task status
    */
   private static Map<HostRoleStatus, Integer> 
calculateTaskStatusCounts(Collection<HostRoleCommand> tasks) {
-    Collection<HostRoleStatus> hostRoleStatuses = new 
LinkedList<HostRoleStatus>();
+    Collection<HostRoleStatus> hostRoleStatuses = new LinkedList<>();
 
     for (HostRoleCommand hostRoleCommand : tasks) {
       hostRoleStatuses.add(hostRoleCommand.getStatus());

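calculateStatusCounts above pre-seeds a counter for every HostRoleStatus constant and then tallies the statuses it is handed. A small stand-alone sketch of the same idiom, with an invented Status enum in place of Ambari's HostRoleStatus:

    import java.util.Collection;
    import java.util.HashMap;
    import java.util.Map;

    class StatusCountExample {
      enum Status { PENDING, IN_PROGRESS, COMPLETED, FAILED }

      static Map<Status, Integer> countStatuses(Collection<Status> observed) {
        Map<Status, Integer> counters = new HashMap<>();
        for (Status s : Status.values()) {
          counters.put(s, 0);                   // every bucket starts at zero
        }
        for (Status s : observed) {
          counters.put(s, counters.get(s) + 1); // tally each observed status
        }
        return counters;
      }
    }

An EnumMap keyed on the enum would serve equally well; the sketch keeps the plain HashMap that the provider uses.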
http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
index b690e3a..e98c062 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
@@ -126,10 +126,10 @@ public class ClientConfigResourceProvider extends 
AbstractControllerResourceProv
   private final Gson gson;
 
   private static Set<String> pkPropertyIds =
-          new HashSet<String>(Arrays.asList(new String[]{
-                  COMPONENT_CLUSTER_NAME_PROPERTY_ID,
-                  COMPONENT_SERVICE_NAME_PROPERTY_ID,
-                  COMPONENT_COMPONENT_NAME_PROPERTY_ID}));
+    new HashSet<>(Arrays.asList(new String[]{
+      COMPONENT_CLUSTER_NAME_PROPERTY_ID,
+      COMPONENT_SERVICE_NAME_PROPERTY_ID,
+      COMPONENT_COMPONENT_NAME_PROPERTY_ID}));
 
   private MaintenanceStateHelper maintenanceStateHelper;
   private static final Logger LOG = 
LoggerFactory.getLogger(ClientConfigResourceProvider.class);
@@ -168,9 +168,9 @@ public class ClientConfigResourceProvider extends 
AbstractControllerResourceProv
   public Set<Resource> getResources(Request request, Predicate predicate)
           throws SystemException, UnsupportedPropertyException, 
NoSuchResourceException, NoSuchParentResourceException {
 
-    Set<Resource> resources = new HashSet<Resource>();
+    Set<Resource> resources = new HashSet<>();
 
-    final Set<ServiceComponentHostRequest> requests = new 
HashSet<ServiceComponentHostRequest>();
+    final Set<ServiceComponentHostRequest> requests = new HashSet<>();
 
     for (Map<String, Object> propertyMap : getPropertyMaps(predicate)) {
       requests.add(getRequest(propertyMap));
@@ -203,7 +203,7 @@ public class ClientConfigResourceProvider extends 
AbstractControllerResourceProv
     String requestServiceName = schRequest.getServiceName();
     String requestHostName =  schRequest.getHostname();
 
-    Map<String,List<ServiceComponentHostResponse>> serviceToComponentMap = new 
HashMap<String,List<ServiceComponentHostResponse>>();
+    Map<String,List<ServiceComponentHostResponse>> serviceToComponentMap = new 
HashMap<>();
 
     // sch response for the service components that have configFiles defined 
in the stack definition of the service
     List <ServiceComponentHostResponse> schWithConfigFiles = new ArrayList<>();
@@ -265,10 +265,10 @@ public class ClientConfigResourceProvider extends 
AbstractControllerResourceProv
         String commandScriptAbsolute = packageFolderAbsolute + File.separator 
+ commandScript;
 
 
-        Map<String, Map<String, String>> configurations = new TreeMap<String, 
Map<String, String>>();
-        Map<String, Long> configVersions = new TreeMap<String, Long>();
+        Map<String, Map<String, String>> configurations = new TreeMap<>();
+        Map<String, Long> configVersions = new TreeMap<>();
         Map<String, Map<PropertyType, Set<String>>> configPropertiesTypes = 
new TreeMap<>();
-        Map<String, Map<String, Map<String, String>>> configurationAttributes 
= new TreeMap<String, Map<String, Map<String, String>>>();
+        Map<String, Map<String, Map<String, String>>> configurationAttributes 
= new TreeMap<>();
 
         Map<String, DesiredConfig> desiredClusterConfigs = 
cluster.getDesiredConfigs();
 
@@ -280,15 +280,14 @@ public class ClientConfigResourceProvider extends 
AbstractControllerResourceProv
           Config clusterConfig = cluster.getConfig(configType, 
desiredConfig.getTag());
 
           if (clusterConfig != null) {
-            Map<String, String> props = new HashMap<String, 
String>(clusterConfig.getProperties());
+            Map<String, String> props = new 
HashMap<>(clusterConfig.getProperties());
 
             // Apply global properties for this host from all config groups
             Map<String, Map<String, String>> allConfigTags = null;
             allConfigTags = configHelper
               .getEffectiveDesiredTags(cluster, schRequest.getHostname());
 
-            Map<String, Map<String, String>> configTags = new HashMap<String,
-              Map<String, String>>();
+            Map<String, Map<String, String>> configTags = new HashMap<>();
 
             for (Map.Entry<String, Map<String, String>> entry : 
allConfigTags.entrySet()) {
               if (entry.getKey().equals(clusterConfig.getType())) {
@@ -309,7 +308,7 @@ public class ClientConfigResourceProvider extends 
AbstractControllerResourceProv
             configVersions.put(clusterConfig.getType(), 
clusterConfig.getVersion());
             configPropertiesTypes.put(clusterConfig.getType(), 
clusterConfig.getPropertiesTypes());
 
-            Map<String, Map<String, String>> attrs = new TreeMap<String, 
Map<String, String>>();
+            Map<String, Map<String, String>> attrs = new TreeMap<>();
             
configHelper.cloneAttributesMap(clusterConfig.getPropertiesAttributes(), attrs);
 
             Map<String, Map<String, Map<String, String>>> attributes = 
configHelper
@@ -356,7 +355,7 @@ public class ClientConfigResourceProvider extends 
AbstractControllerResourceProv
         }
         osFamily = clusters.getHost(hostName).getOsFamily();
 
-        TreeMap<String, String> hostLevelParams = new TreeMap<String, 
String>();
+        TreeMap<String, String> hostLevelParams = new TreeMap<>();
         hostLevelParams.put(JDK_LOCATION, 
managementController.getJdkResourceUrl());
         hostLevelParams.put(JAVA_HOME, managementController.getJavaHome());
         hostLevelParams.put(JAVA_VERSION, 
String.valueOf(configs.getJavaVersion()));
@@ -382,7 +381,7 @@ public class ClientConfigResourceProvider extends 
AbstractControllerResourceProv
 
         // Build package list that is relevant for host
         List<ServiceOsSpecific.Package> packages =
-          new ArrayList<ServiceOsSpecific.Package>();
+          new ArrayList<>();
         if (anyOs != null) {
           packages.addAll(anyOs.getPackages());
         }
@@ -412,14 +411,14 @@ public class ClientConfigResourceProvider extends 
AbstractControllerResourceProv
         hostLevelParams.put(NOT_MANAGED_HDFS_PATH_LIST, 
notManagedHdfsPathList);
 
         String jsonConfigurations = null;
-        Map<String, Object> commandParams = new HashMap<String, Object>();
-        List<Map<String, String>> xmlConfigs = new LinkedList<Map<String, 
String>>();
-        List<Map<String, String>> envConfigs = new LinkedList<Map<String, 
String>>();
-        List<Map<String, String>> propertiesConfigs = new 
LinkedList<Map<String, String>>();
+        Map<String, Object> commandParams = new HashMap<>();
+        List<Map<String, String>> xmlConfigs = new LinkedList<>();
+        List<Map<String, String>> envConfigs = new LinkedList<>();
+        List<Map<String, String>> propertiesConfigs = new LinkedList<>();
 
         //Fill file-dictionary configs from metainfo
         for (ClientConfigFileDefinition clientConfigFile : clientConfigFiles) {
-          Map<String, String> fileDict = new HashMap<String, String>();
+          Map<String, String> fileDict = new HashMap<>();
           fileDict.put(clientConfigFile.getFileName(), 
clientConfigFile.getDictionaryName());
           if (clientConfigFile.getType().equals("xml")) {
             xmlConfigs.add(fileDict);
@@ -435,7 +434,7 @@ public class ClientConfigResourceProvider extends 
AbstractControllerResourceProv
         commandParams.put("properties_configs_list", propertiesConfigs);
         commandParams.put("output_file", componentName + "-configs" + 
Configuration.DEF_ARCHIVE_EXTENSION);
 
-        Map<String, Object> jsonContent = new TreeMap<String, Object>();
+        Map<String, Object> jsonContent = new TreeMap<>();
         jsonContent.put("configurations", configurations);
         jsonContent.put("configuration_attributes", configurationAttributes);
         jsonContent.put("commandParams", commandParams);
@@ -916,7 +915,7 @@ public class ClientConfigResourceProvider extends 
AbstractControllerResourceProv
   }
 
   private List<ServiceOsSpecific> getOSSpecificsByFamily(Map<String, 
ServiceOsSpecific> osSpecifics, String osFamily) {
-    List<ServiceOsSpecific> foundedOSSpecifics = new 
ArrayList<ServiceOsSpecific>();
+    List<ServiceOsSpecific> foundedOSSpecifics = new ArrayList<>();
     for (Map.Entry<String, ServiceOsSpecific> osSpecific : 
osSpecifics.entrySet()) {
       if (osSpecific.getKey().indexOf(osFamily) != -1) {
         foundedOSSpecifics.add(osSpecific.getValue());

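The hunks above collect configurations, configuration attributes, command parameters and client config file dictionaries into nested maps before serializing them (the class already holds a Gson field, per the context at the top of this diff). A compressed sketch of that payload assembly, assuming Gson is on the classpath; the keys and values below are illustrative, not the provider's exact ones:

    import java.util.HashMap;
    import java.util.LinkedList;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    import com.google.gson.Gson;

    class ClientConfigPayloadExample {
      static String buildJson() {
        Map<String, Map<String, String>> configurations = new TreeMap<>();
        Map<String, String> coreSite = new HashMap<>();
        coreSite.put("fs.defaultFS", "hdfs://example:8020");   // invented property
        configurations.put("core-site", coreSite);

        List<Map<String, String>> xmlConfigs = new LinkedList<>();
        Map<String, String> fileDict = new HashMap<>();
        fileDict.put("core-site.xml", "core-site");            // file name -> dictionary name
        xmlConfigs.add(fileDict);

        Map<String, Object> commandParams = new HashMap<>();
        commandParams.put("xml_configs_list", xmlConfigs);     // key name approximated
        commandParams.put("output_file", "HDFS_CLIENT-configs.tar.gz");

        Map<String, Object> jsonContent = new TreeMap<>();
        jsonContent.put("configurations", configurations);
        jsonContent.put("commandParams", commandParams);

        return new Gson().toJson(jsonContent);
      }
    }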
http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
index c752e80..4a8378a 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
@@ -75,19 +75,19 @@ public class ClusterControllerImpl implements 
ClusterController {
    * Map of resource providers keyed by resource type.
    */
   private final Map<Resource.Type, ExtendedResourceProviderWrapper> 
resourceProviders =
-      new HashMap<Resource.Type, ExtendedResourceProviderWrapper>();
+    new HashMap<>();
 
   /**
    * Map of property provider lists keyed by resource type.
    */
   private final Map<Resource.Type, List<PropertyProvider>> propertyProviders =
-      new HashMap<Resource.Type, List<PropertyProvider>>();
+    new HashMap<>();
 
   /**
    * Map of schemas keyed by resource type.
    */
   private final Map<Resource.Type, Schema> schemas =
-      new HashMap<Resource.Type, Schema>();
+    new HashMap<>();
 
   /**
    * Resource comparator.
@@ -198,8 +198,8 @@ public class ClusterControllerImpl implements 
ClusterController {
       // if the provider did not already sort the set, then sort it based
       // on the comparator
       if (!providerAlreadySorted) {
-        TreeSet<Resource> sortedResources = new TreeSet<Resource>(
-            resourceComparator);
+        TreeSet<Resource> sortedResources = new TreeSet<>(
+          resourceComparator);
 
         sortedResources.addAll(providerResources);
         resources = sortedResources;
@@ -252,7 +252,7 @@ public class ClusterControllerImpl implements 
ClusterController {
   private void checkSortRequestProperties(SortRequest sortRequest, Type type,
                                           ResourceProvider provider) throws 
UnsupportedPropertyException {
     Set<String> requestPropertyIds = provider.checkPropertyIds(
-      new HashSet<String>(sortRequest.getPropertyIds()));
+      new HashSet<>(sortRequest.getPropertyIds()));
 
     if (requestPropertyIds.size() > 0) {
       List<PropertyProvider> propertyProviders = ensurePropertyProviders(type);
@@ -504,7 +504,7 @@ public class ClusterControllerImpl implements 
ClusterController {
 
     ResourceProvider provider = ensureResourceProvider(type);
 
-    Set<String>  keyPropertyIds = new 
HashSet<String>(provider.getKeyPropertyIds().values());
+    Set<String>  keyPropertyIds = new 
HashSet<>(provider.getKeyPropertyIds().values());
     Request      readRequest    = 
PropertyHelper.getReadRequest(keyPropertyIds);
 
     Iterable<Resource> resources = getResourceIterable(type, readRequest, 
predicate);
@@ -544,7 +544,7 @@ public class ClusterControllerImpl implements 
ClusterController {
    * @return true if the given provider can service the request
    */
   private boolean providesRequestProperties(PropertyProvider provider, Request 
request, Predicate predicate) {
-    Set<String> requestPropertyIds = new 
HashSet<String>(request.getPropertyIds());
+    Set<String> requestPropertyIds = new HashSet<>(request.getPropertyIds());
 
     if (requestPropertyIds.size() == 0) {
       return true;
@@ -582,7 +582,7 @@ public class ClusterControllerImpl implements 
ClusterController {
    */
   private LinkedList<Resource> getEvaluatedResources(ResourceIterable
                                               resourceIterable) {
-    LinkedList<Resource> resources = new LinkedList<Resource>();
+    LinkedList<Resource> resources = new LinkedList<>();
     if (resourceIterable != null) {
       for (Resource resource : resourceIterable) {
         resources.add(resource);
@@ -608,7 +608,7 @@ public class ClusterControllerImpl implements 
ClusterController {
 
     int currentOffset = 0;
     Resource previous      = null;
-    Set<Resource> pageResources = new LinkedHashSet<Resource>();
+    Set<Resource> pageResources = new LinkedHashSet<>();
     LinkedList<Resource> filteredResources =
       getEvaluatedResources(new ResourceIterable(resources, predicate, 
evaluator));
     Iterator<Resource> iterator = filteredResources.iterator();
@@ -649,7 +649,7 @@ public class ClusterControllerImpl implements 
ClusterController {
 
     int                currentOffset = resources.size() - 1;
     Resource           next          = null;
-    List<Resource>     pageResources = new LinkedList<Resource>();
+    List<Resource>     pageResources = new LinkedList<>();
     LinkedList<Resource> filteredResources =
       getEvaluatedResources(new ResourceIterable(resources, predicate, 
evaluator));
     Iterator<Resource> iterator = filteredResources.descendingIterator();

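In the getResources hunk above, results are re-sorted with the resource comparator only when the provider did not already return them in order. A minimal sketch of that decision, with String standing in for Resource:

    import java.util.Comparator;
    import java.util.Set;
    import java.util.TreeSet;

    class SortIfNeededExample {
      static Set<String> sortIfNeeded(Set<String> providerResources,
                                      boolean providerAlreadySorted,
                                      Comparator<String> resourceComparator) {
        if (providerAlreadySorted) {
          return providerResources;              // trust the provider's ordering
        }
        TreeSet<String> sorted = new TreeSet<>(resourceComparator);
        sorted.addAll(providerResources);        // otherwise sort with the comparator
        return sorted;
      }
    }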
http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterKerberosDescriptorResourceProvider.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterKerberosDescriptorResourceProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterKerberosDescriptorResourceProvider.java
index 385f11e..59bd96a 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterKerberosDescriptorResourceProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterKerberosDescriptorResourceProvider.java
@@ -90,18 +90,18 @@ public class ClusterKerberosDescriptorResourceProvider 
extends ReadOnlyResourceP
 
   static {
     Set<String> set;
-    set = new HashSet<String>();
+    set = new HashSet<>();
     set.add(CLUSTER_KERBEROS_DESCRIPTOR_CLUSTER_NAME_PROPERTY_ID);
     set.add(CLUSTER_KERBEROS_DESCRIPTOR_TYPE_PROPERTY_ID);
     PK_PROPERTY_IDS = Collections.unmodifiableSet(set);
 
-    set = new HashSet<String>();
+    set = new HashSet<>();
     set.add(CLUSTER_KERBEROS_DESCRIPTOR_CLUSTER_NAME_PROPERTY_ID);
     set.add(CLUSTER_KERBEROS_DESCRIPTOR_TYPE_PROPERTY_ID);
     set.add(CLUSTER_KERBEROS_DESCRIPTOR_DESCRIPTOR_PROPERTY_ID);
     PROPERTY_IDS = Collections.unmodifiableSet(set);
 
-    HashMap<Type, String> map = new HashMap<Type, String>();
+    HashMap<Type, String> map = new HashMap<>();
     map.put(Type.Cluster, 
CLUSTER_KERBEROS_DESCRIPTOR_CLUSTER_NAME_PROPERTY_ID);
     map.put(Type.ClusterKerberosDescriptor, 
CLUSTER_KERBEROS_DESCRIPTOR_TYPE_PROPERTY_ID);
     KEY_PROPERTY_IDS = Collections.unmodifiableMap(map);
@@ -122,7 +122,7 @@ public class ClusterKerberosDescriptorResourceProvider 
extends ReadOnlyResourceP
     AuthorizationHelper.verifyAuthorization(ResourceType.CLUSTER, null, 
REQUIRED_GET_AUTHORIZATIONS);
 
     Set<String> requestedIds = getRequestPropertyIds(request, predicate);
-    Set<Resource> resources = new HashSet<Resource>();
+    Set<Resource> resources = new HashSet<>();
 
     AmbariManagementController managementController = 
getManagementController();
     Clusters clusters = managementController.getClusters();

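The static initializer above fills mutable collections once and then publishes them through Collections.unmodifiableSet and unmodifiableMap. A self-contained sketch of that idiom; the property names are placeholders, and a String key stands in for the provider's Resource.Type-keyed map:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    class StaticInitExample {
      static final Set<String> PK_PROPERTY_IDS;
      static final Map<String, String> KEY_PROPERTY_IDS;

      static {
        Set<String> set = new HashSet<>();
        set.add("KerberosDescriptor/cluster_name");   // placeholder property ids
        set.add("KerberosDescriptor/type");
        PK_PROPERTY_IDS = Collections.unmodifiableSet(set);

        Map<String, String> map = new HashMap<>();
        map.put("Cluster", "KerberosDescriptor/cluster_name");
        KEY_PROPERTY_IDS = Collections.unmodifiableMap(map);
      }
    }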
http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterPrivilegeResourceProvider.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterPrivilegeResourceProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterPrivilegeResourceProvider.java
index d91b88c..60cf783 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterPrivilegeResourceProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterPrivilegeResourceProvider.java
@@ -56,7 +56,7 @@ public class ClusterPrivilegeResourceProvider extends 
PrivilegeResourceProvider<
   /**
    * The property ids for a privilege resource.
    */
-  private static Set<String> propertyIds = new HashSet<String>();
+  private static Set<String> propertyIds = new HashSet<>();
   static {
     propertyIds.add(PRIVILEGE_CLUSTER_NAME_PROPERTY_ID);
     propertyIds.add(PRIVILEGE_ID_PROPERTY_ID);
@@ -70,7 +70,7 @@ public class ClusterPrivilegeResourceProvider extends 
PrivilegeResourceProvider<
   /**
    * The key property ids for a privilege resource.
    */
-  private static Map<Resource.Type, String> keyPropertyIds = new 
HashMap<Resource.Type, String>();
+  private static Map<Resource.Type, String> keyPropertyIds = new HashMap<>();
   static {
     keyPropertyIds.put(Resource.Type.Cluster, 
PRIVILEGE_CLUSTER_NAME_PROPERTY_ID);
     keyPropertyIds.put(Resource.Type.ClusterPrivilege, 
PRIVILEGE_ID_PROPERTY_ID);
@@ -120,7 +120,7 @@ public class ClusterPrivilegeResourceProvider extends 
PrivilegeResourceProvider<
     String clusterName = (String) 
properties.get(PRIVILEGE_CLUSTER_NAME_PROPERTY_ID);
 
     if (clusterName == null) {
-      Map<Long, ClusterEntity> resourceEntities = new HashMap<Long, 
ClusterEntity>();
+      Map<Long, ClusterEntity> resourceEntities = new HashMap<>();
 
       List<ClusterEntity> clusterEntities = clusterDAO.findAll();
 

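When no cluster name is supplied, the hunk above starts a lookup map of ClusterEntity instances, presumably keyed by each cluster's resource id (the loop that fills it falls outside this hunk). A hypothetical sketch of that construction, with a minimal stand-in for the JPA entity:

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class ResourceIndexExample {
      static class ClusterEntity {                       // stand-in, not the real entity
        private final long resourceId;
        ClusterEntity(long resourceId) { this.resourceId = resourceId; }
        long getResourceId() { return resourceId; }
      }

      static Map<Long, ClusterEntity> indexByResourceId(List<ClusterEntity> clusters) {
        Map<Long, ClusterEntity> resourceEntities = new HashMap<>();
        for (ClusterEntity entity : clusters) {
          resourceEntities.put(entity.getResourceId(), entity);
        }
        return resourceEntities;
      }
    }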
http://git-wip-us.apache.org/repos/asf/ambari/blob/edbb5492/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
index 577659d..ae17de4 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
@@ -116,12 +116,12 @@ public class ClusterResourceProvider extends 
AbstractControllerResourceProvider
    * The cluster primary key properties.
    */
   private static Set<String> pkPropertyIds =
-      new HashSet<String>(Arrays.asList(new String[]{CLUSTER_ID_PROPERTY_ID}));
+    new HashSet<>(Arrays.asList(new String[]{CLUSTER_ID_PROPERTY_ID}));
 
   /**
    * The key property ids for a cluster resource.
    */
-  private static Map<Resource.Type, String> keyPropertyIds = new 
HashMap<Resource.Type, String>();
+  private static Map<Resource.Type, String> keyPropertyIds = new HashMap<>();
   static {
     keyPropertyIds.put(Resource.Type.Cluster, CLUSTER_NAME_PROPERTY_ID);
   }
@@ -129,7 +129,7 @@ public class ClusterResourceProvider extends 
AbstractControllerResourceProvider
   /**
    * The property ids for a cluster resource.
    */
-  private static Set<String> propertyIds = new HashSet<String>();
+  private static Set<String> propertyIds = new HashSet<>();
 
   /**
    * Used to serialize to/from json.
@@ -237,7 +237,7 @@ public class ClusterResourceProvider extends 
AbstractControllerResourceProvider
   public Set<Resource> getResources(Request request, Predicate predicate)
       throws SystemException, UnsupportedPropertyException, 
NoSuchResourceException, NoSuchParentResourceException {
 
-    final Set<ClusterRequest> requests = new HashSet<ClusterRequest>();
+    final Set<ClusterRequest> requests = new HashSet<>();
 
     if (predicate == null) {
       requests.add(getRequest(Collections.<String, Object>emptyMap()));
@@ -257,7 +257,7 @@ public class ClusterResourceProvider extends 
AbstractControllerResourceProvider
       }
     });
 
-    Set<Resource> resources = new HashSet<Resource>();
+    Set<Resource> resources = new HashSet<>();
     if (LOG.isDebugEnabled()) {
       LOG.debug("Found clusters matching getClusters request"
           + ", clusterResponseCount=" + responses.size());
@@ -297,7 +297,7 @@ public class ClusterResourceProvider extends 
AbstractControllerResourceProvider
   protected RequestStatus updateResourcesAuthorized(final Request request, 
Predicate predicate)
       throws SystemException, UnsupportedPropertyException, 
NoSuchResourceException, NoSuchParentResourceException {
 
-    final Set<ClusterRequest>   requests = new HashSet<ClusterRequest>();
+    final Set<ClusterRequest>   requests = new HashSet<>();
     RequestStatusResponse       response;
 
     for (Map<String, Object> requestPropertyMap : request.getProperties()) {
@@ -321,7 +321,7 @@ public class ClusterResourceProvider extends 
AbstractControllerResourceProvider
       if (updateResults != null) {
         Map<String, Collection<ServiceConfigVersionResponse>> 
serviceConfigVersions = updateResults.getDesiredServiceConfigVersions();
         if (serviceConfigVersions != null) {
-          associatedResources = new HashSet<Resource>();
+          associatedResources = new HashSet<>();
           for (Collection<ServiceConfigVersionResponse> scvCollection : 
serviceConfigVersions.values()) {
             for (ServiceConfigVersionResponse serviceConfigVersionResponse : 
scvCollection) {
               Resource resource = new 
ResourceImpl(Resource.Type.ServiceConfigVersion);
@@ -447,7 +447,7 @@ public class ClusterResourceProvider extends 
AbstractControllerResourceProvider
    * @return the map of session attributes
    */
   private Map<String, Object> getSessionAttributes(Map<String, Object> 
properties) {
-    Map<String, Object> sessionAttributes = new HashMap<String, Object>();
+    Map<String, Object> sessionAttributes = new HashMap<>();
 
     for (Map.Entry<String, Object> entry : properties.entrySet()) {
 

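ClusterResourceProvider follows the same getResources shape as several providers in this patch: each property map extracted from the predicate becomes one typed request object, and the responses are later converted back into resources. A simplified sketch of the request-gathering half, with an invented ClusterRequest stand-in:

    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    class RequestGatheringExample {
      static class ClusterRequest {                      // stand-in for the real request type
        final String clusterName;
        ClusterRequest(String clusterName) { this.clusterName = clusterName; }
      }

      static Set<ClusterRequest> toRequests(Set<Map<String, Object>> propertyMaps) {
        Set<ClusterRequest> requests = new HashSet<>();
        for (Map<String, Object> propertyMap : propertyMaps) {
          requests.add(new ClusterRequest((String) propertyMap.get("Clusters/cluster_name")));
        }
        return requests;
      }
    }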