http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/params.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
----------------------------------------------------------------------
diff --cc 
ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
index 8430ced,519dfbf..49f73d7
--- 
a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
@@@ -193,8 -193,7 +193,7 @@@ if has_metric_collector
      pass
  
  # Create current Hadoop Clients  Libs
 -stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 +stack_version_unformatted = str(config['clusterLevelParams']['stack_version'])
  io_compression_codecs = 
default("/configurations/core-site/io.compression.codecs", None)
- lzo_enabled = io_compression_codecs is not None and 
"com.hadoop.compression.lzo" in io_compression_codecs.lower()
- lzo_packages = get_lzo_packages(stack_version_unformatted)
+ lzo_enabled = should_install_lzo()
  hadoop_lib_home = stack_root + '/' + stack_version + '/hadoop/lib'

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py
----------------------------------------------------------------------
diff --cc 
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py
index fcf0507,dc3279f..c2bf847
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py
@@@ -26,8 -26,4 +26,4 @@@ else
    from resource_management.libraries.script.script import Script
  
    _config = Script.get_config()
 -  stack_version_unformatted = str(_config['hostLevelParams']['stack_version'])
 +  stack_version_unformatted = 
str(_config['clusterLevelParams']['stack_version'])
- 
-   # The logic for LZO also exists in OOZIE's params.py
-   io_compression_codecs = 
default("/configurations/core-site/io.compression.codecs", None)
-   lzo_enabled = io_compression_codecs is not None and 
"com.hadoop.compression.lzo" in io_compression_codecs.lower()

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/install_params.py
----------------------------------------------------------------------
diff --cc 
ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/install_params.py
index fcf0507,dc3279f..c2bf847
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/install_params.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/install_params.py
@@@ -26,8 -26,4 +26,4 @@@ else
    from resource_management.libraries.script.script import Script
  
    _config = Script.get_config()
 -  stack_version_unformatted = str(_config['hostLevelParams']['stack_version'])
 +  stack_version_unformatted = 
str(_config['clusterLevelParams']['stack_version'])
- 
-   # The logic for LZO also exists in OOZIE's params.py
-   io_compression_codecs = 
default("/configurations/core-site/io.compression.codecs", None)
-   lzo_enabled = io_compression_codecs is not None and 
"com.hadoop.compression.lzo" in io_compression_codecs.lower()

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/tez.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
----------------------------------------------------------------------
diff --cc 
ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
index 87ca569,26c79e6..9aba60f
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
@@@ -580,9 -580,9 +580,8 @@@ public class AmbariCustomCommandExecuti
      ServiceComponentDesiredStateDAO componentDAO = 
injector.getInstance(ServiceComponentDesiredStateDAO.class);
      RepositoryVersionHelper repoVersionHelper = 
injector.getInstance(RepositoryVersionHelper.class);
  
 -    CommandRepository commandRepo = repoHelper.getCommandRepository(cluster, 
componentRM, host);
 -    Assert.assertEquals(2, commandRepo.getRepositories().size());
 +    CommandRepository commandRepo = 
ambariMetaInfo.getCommandRepository(cluster, componentRM, host);
  
-     Assert.assertEquals(0, commandRepo.getRepositories().size());
  
      RepositoryInfo ri = new RepositoryInfo();
    ri.setBaseUrl("http://foo");
@@@ -608,10 -608,10 +607,10 @@@
  
      componentEntity.setDesiredRepositoryVersion(repositoryVersion);
      componentEntity.addVersion(componentVersionEntity);
-     componentEntity = componentDAO.merge(componentEntity);
+     componentDAO.merge(componentEntity);
  
      // !!! make sure the override is set
 -    commandRepo = repoHelper.getCommandRepository(cluster, componentRM, host);
 +    commandRepo = ambariMetaInfo.getCommandRepository(cluster, componentRM, 
host);
  
      Assert.assertEquals(1, commandRepo.getRepositories().size());
      CommandRepository.Repository repo = 
commandRepo.getRepositories().iterator().next();

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/test/java/org/apache/ambari/server/controller/utilities/KerberosIdentityCleanerTest.java
----------------------------------------------------------------------
diff --cc 
ambari-server/src/test/java/org/apache/ambari/server/controller/utilities/KerberosIdentityCleanerTest.java
index d62e34c,ff0f687..e7b5deb
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/controller/utilities/KerberosIdentityCleanerTest.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/controller/utilities/KerberosIdentityCleanerTest.java
@@@ -163,7 -163,7 +163,7 @@@ public class KerberosIdentityCleanerTes
    }
  
    private void uninstallComponent(String service, String component, String 
host) throws KerberosMissingAdminCredentialsException {
-     kerberosIdentityCleaner.componentRemoved(new 
ServiceComponentUninstalledEvent(CLUSTER_ID, "any", "any", service, component, 
host, false, false));
 -    kerberosIdentityCleaner.componentRemoved(new 
ServiceComponentUninstalledEvent(CLUSTER_ID, "any", "any", service, component, 
host, false, -1l));
++    kerberosIdentityCleaner.componentRemoved(new 
ServiceComponentUninstalledEvent(CLUSTER_ID, "any", "any", service, component, 
host, false, false, -1l));
    }
  
    private void uninstallService(String service, List<Component> components) 
throws KerberosMissingAdminCredentialsException {

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListenerTest.java
----------------------------------------------------------------------
diff --cc 
ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListenerTest.java
index 1986f64,108159c..166076d
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListenerTest.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListenerTest.java
@@@ -515,7 -514,7 +515,7 @@@ public class HostVersionOutOfSyncListen
  
          ServiceComponentUninstalledEvent event = new 
ServiceComponentUninstalledEvent(
              c1.getClusterId(), clusterStackId.getStackName(), 
clusterStackId.getStackVersion(),
-             "HDFS", "DATANODE", sch.getHostName(), false, false);
 -            "HDFS", "DATANODE", sch.getHostName(), false, -1l);
++            "HDFS", "DATANODE", sch.getHostName(), false, false, -1l);
  
          m_eventPublisher.publish(event);
        }

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAOTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertReceivedListenerTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
----------------------------------------------------------------------
diff --cc 
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
index 52c3f62,22e8ccc..0155dd3
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
@@@ -683,6 -701,14 +701,14 @@@ public class UpgradeCatalog260Test 
      expect(cluster.getConfig(eq("ranger-kms-audit"), 
anyString())).andReturn(newConfig).once();
      expect(cluster.addDesiredConfig("ambari-upgrade", 
Collections.singleton(newConfig), "Updated ranger-kms-audit during Ambari 
Upgrade from 2.5.2 to 2.6.0.")).andReturn(response).once();
  
+     //HIVE
+     
expect(cluster.getDesiredConfigByType("hive-site")).andReturn(hsiConfig).anyTimes();
+     
expect(cluster.getDesiredConfigByType("hive-interactive-site")).andReturn(hsiConfig).anyTimes();
+     
expect(cluster.getConfigsByType("hive-interactive-site")).andReturn(Collections.singletonMap("version1",
 hsiConfig)).anyTimes();
+     
expect(cluster.getServiceByConfigType("hive-interactive-site")).andReturn("HIVE").anyTimes();
+     expect(cluster.getConfig(eq("hive-interactive-site"), 
anyString())).andReturn(newHsiConfig).anyTimes();
 -  
++
+ 
      final Clusters clusters = injector.getInstance(Clusters.class);
      expect(clusters.getCluster(2L)).andReturn(cluster).anyTimes();
  
@@@ -781,77 -845,143 +845,143 @@@
    }
  
    @Test
+   public void testUpdateHiveConfigs() throws Exception {
+ 
+     Map<String, String> oldProperties = new HashMap<String, String>() {
+       {
+         put("hive.llap.zk.sm.keytab.file", 
"/etc/security/keytabs/hive.llap.zk.sm.keytab");
+         put("hive.llap.daemon.keytab.file", 
"/etc/security/keytabs/hive.service.keytab");
+         put("hive.llap.task.keytab.file", 
"/etc/security/keytabs/hive.llap.task.keytab");
+       }
+     };
+     Map<String, String> newProperties = new HashMap<String, String>() {
+       {
+         put("hive.llap.zk.sm.keytab.file", 
"/etc/security/keytabs/hive.service.keytab");
+         put("hive.llap.daemon.keytab.file", 
"/etc/security/keytabs/hive.service.keytab");
+         put("hive.llap.task.keytab.file", 
"/etc/security/keytabs/hive.service.keytab");
+       }
+     };
+ 
+     EasyMockSupport easyMockSupport = new EasyMockSupport();
+ 
+     Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
+     final Cluster cluster = easyMockSupport.createNiceMock(Cluster.class);
+     Config mockHsiConfigs = easyMockSupport.createNiceMock(Config.class);
+ 
+     expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+       put("normal", cluster);
+     }}).once();
+     
expect(cluster.getDesiredConfigByType("hive-interactive-site")).andReturn(mockHsiConfigs).atLeastOnce();
+     
expect(mockHsiConfigs.getProperties()).andReturn(oldProperties).anyTimes();
+ 
+     Injector injector = easyMockSupport.createNiceMock(Injector.class);
+     expect(injector.getInstance(Gson.class)).andReturn(null).anyTimes();
+     
expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null).anyTimes();
+ 
+     replay(injector, clusters, mockHsiConfigs, cluster);
+ 
+     AmbariManagementControllerImpl controller = 
createMockBuilder(AmbariManagementControllerImpl.class)
+             .addMockedMethod("createConfiguration")
+             .addMockedMethod("getClusters", new Class[] { })
+             .addMockedMethod("createConfig")
+             .withConstructor(createNiceMock(ActionManager.class), clusters, 
injector)
+             .createNiceMock();
+ 
+     Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
+     Capture<Map> propertiesCapture = EasyMock.newCapture();
+ 
+     
expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
+     expect(controller.getClusters()).andReturn(clusters).anyTimes();
+     expect(controller.createConfig(anyObject(Cluster.class), 
anyObject(StackId.class), anyString(), capture(propertiesCapture), anyString(),
+             
anyObject(Map.class))).andReturn(createNiceMock(Config.class)).once();
+     replay(controller, injector2);
+ 
+     // This tests the update of HSI config 'hive.llap.daemon.keytab.file'.
+     UpgradeCatalog260  upgradeCatalog260 = new UpgradeCatalog260(injector2);
+     // Set 'isYarnKerberosDescUpdated' value to true, implying kerberos 
descriptor was updated.
+     
upgradeCatalog260.updateYarnKerberosDescUpdatedList("hive.llap.zk.sm.keytab.file");
+     
upgradeCatalog260.updateYarnKerberosDescUpdatedList("hive.llap.task.keytab.file");
+ 
+     upgradeCatalog260.updateHiveConfigs();
+ 
+     easyMockSupport.verifyAll();
+ 
+     Map<String, String> updatedProperties = propertiesCapture.getValue();
+     assertTrue(Maps.difference(newProperties, updatedProperties).areEqual());
+   }
+ 
+   @Test
 -   public void testHDFSWidgetUpdate() throws Exception {
 -         final Clusters clusters = createNiceMock(Clusters.class);
 -         final Cluster cluster = createNiceMock(Cluster.class);
 -         final AmbariManagementController controller = 
createNiceMock(AmbariManagementController.class);
 -         final Gson gson = new Gson();
 -         final WidgetDAO widgetDAO = createNiceMock(WidgetDAO.class);
 -         final AmbariMetaInfo metaInfo = createNiceMock(AmbariMetaInfo.class);
 -         WidgetEntity widgetEntity = createNiceMock(WidgetEntity.class);
 -         StackId stackId = new StackId("HDP", "2.0.0");
 -         StackInfo stackInfo = createNiceMock(StackInfo.class);
 -         ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
 -         Service service  = createNiceMock(Service.class);
 -
 -           String widgetStr = "{\n" +
 -             "  \"layouts\": [\n" +
 -             "      {\n" +
 -             "      \"layout_name\": \"default_hdfs_heatmap\",\n" +
 -             "      \"display_name\": \"Standard HDFS HeatMaps\",\n" +
 -             "      \"section_name\": \"HDFS_HEATMAPS\",\n" +
 -             "      \"widgetLayoutInfo\": [\n" +
 -             "        {\n" +
 -             "          \"widget_name\": \"HDFS Bytes Read\",\n" +
 -             "          \"metrics\": [],\n" +
 -             "          \"values\": []\n" +
 -             "        }\n" +
 -             "      ]\n" +
 -             "    }\n" +
 -             "  ]\n" +
 -             "}";
 -
 -           File dataDirectory = temporaryFolder.newFolder();
 -         File file = new File(dataDirectory, "hdfs_widget.json");
 -         FileUtils.writeStringToFile(file, widgetStr);
 -
 -           final Injector mockInjector = Guice.createInjector(new 
AbstractModule() {
 -       @Override
 -       protected void configure() {
 -                 
bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
 -                 
bind(AmbariManagementController.class).toInstance(controller);
 -                 bind(Clusters.class).toInstance(clusters);
 -                 
bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
 -                 
bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
 -                 bind(Gson.class).toInstance(gson);
 -                 bind(WidgetDAO.class).toInstance(widgetDAO);
 -                 
bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
 -                 bind(AmbariMetaInfo.class).toInstance(metaInfo);
 -               }
 -     });
 -         expect(controller.getClusters()).andReturn(clusters).anyTimes();
 -         expect(clusters.getClusters()).andReturn(new HashMap<String, 
Cluster>() {{
 -             put("normal", cluster);
 -           }}).anyTimes();
 -         
expect(cluster.getServices()).andReturn(Collections.singletonMap("HDFS", 
service)).anyTimes();
 -         expect(cluster.getClusterId()).andReturn(1L).anyTimes();
 -         expect(service.getDesiredStackId()).andReturn(stackId).anyTimes();
 -         expect(stackInfo.getService("HDFS")).andReturn(serviceInfo);
 -         
expect(cluster.getDesiredStackVersion()).andReturn(stackId).anyTimes();
 -         expect(metaInfo.getStack("HDP", 
"2.0.0")).andReturn(stackInfo).anyTimes();
 -         
expect(serviceInfo.getWidgetsDescriptorFile()).andReturn(file).anyTimes();
 -
 -           expect(widgetDAO.findByName(1L, "HDFS Bytes Read", "ambari", 
"HDFS_HEATMAPS"))
 -             .andReturn(Collections.singletonList(widgetEntity));
 -         expect(widgetDAO.merge(widgetEntity)).andReturn(null);
 -         expect(widgetEntity.getWidgetName()).andReturn("HDFS Bytes 
Read").anyTimes();
 -
 -           replay(clusters, cluster, controller, widgetDAO, metaInfo, 
widgetEntity, stackInfo, serviceInfo, service);
 -
 -           
mockInjector.getInstance(UpgradeCatalog260.class).updateHDFSWidgetDefinition();
 -
 -           verify(clusters, cluster, controller, widgetDAO, widgetEntity, 
stackInfo, serviceInfo);
 -       }
 +  public void testHDFSWidgetUpdate() throws Exception {
 +    final Clusters clusters = createNiceMock(Clusters.class);
 +    final Cluster cluster = createNiceMock(Cluster.class);
 +    final AmbariManagementController controller = 
createNiceMock(AmbariManagementController.class);
 +    final Gson gson = new Gson();
 +    final WidgetDAO widgetDAO = createNiceMock(WidgetDAO.class);
 +    final AmbariMetaInfo metaInfo = createNiceMock(AmbariMetaInfo.class);
 +    WidgetEntity widgetEntity = createNiceMock(WidgetEntity.class);
 +    StackId stackId = new StackId("HDP", "2.0.0");
 +    StackInfo stackInfo = createNiceMock(StackInfo.class);
 +    ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
 +    Service service  = createNiceMock(Service.class);
 +
 +    String widgetStr = "{\n" +
 +        "  \"layouts\": [\n" +
 +        "      {\n" +
 +        "      \"layout_name\": \"default_hdfs_heatmap\",\n" +
 +        "      \"display_name\": \"Standard HDFS HeatMaps\",\n" +
 +        "      \"section_name\": \"HDFS_HEATMAPS\",\n" +
 +        "      \"widgetLayoutInfo\": [\n" +
 +        "        {\n" +
 +        "          \"widget_name\": \"HDFS Bytes Read\",\n" +
 +        "          \"metrics\": [],\n" +
 +        "          \"values\": []\n" +
 +        "        }\n" +
 +        "      ]\n" +
 +        "    }\n" +
 +        "  ]\n" +
 +        "}";
 +
 +    File dataDirectory = temporaryFolder.newFolder();
 +    File file = new File(dataDirectory, "hdfs_widget.json");
 +    FileUtils.writeStringToFile(file, widgetStr);
 +
 +    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
 +      @Override
 +      protected void configure() {
 +        
bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
 +        bind(AmbariManagementController.class).toInstance(controller);
 +        bind(Clusters.class).toInstance(clusters);
 +        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
 +        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
 +        bind(Gson.class).toInstance(gson);
 +        bind(WidgetDAO.class).toInstance(widgetDAO);
 +        
bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
 +        bind(AmbariMetaInfo.class).toInstance(metaInfo);
 +      }
 +    });
 +    expect(controller.getClusters()).andReturn(clusters).anyTimes();
 +    expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
 +      put("normal", cluster);
 +    }}).anyTimes();
 +    expect(cluster.getServices()).andReturn(Collections.singletonMap("HDFS", 
service)).anyTimes();
 +    expect(cluster.getClusterId()).andReturn(1L).anyTimes();
 +    expect(service.getDesiredStackId()).andReturn(stackId).anyTimes();
 +    expect(stackInfo.getService("HDFS")).andReturn(serviceInfo);
 +    expect(cluster.getDesiredStackVersion()).andReturn(stackId).anyTimes();
 +    expect(metaInfo.getStack("HDP", "2.0.0")).andReturn(stackInfo).anyTimes();
 +    expect(serviceInfo.getWidgetsDescriptorFile()).andReturn(file).anyTimes();
 +
 +    expect(widgetDAO.findByName(1L, "HDFS Bytes Read", "ambari", 
"HDFS_HEATMAPS"))
 +        .andReturn(Collections.singletonList(widgetEntity));
 +    expect(widgetDAO.merge(widgetEntity)).andReturn(null);
 +    expect(widgetEntity.getWidgetName()).andReturn("HDFS Bytes 
Read").anyTimes();
 +
 +    replay(clusters, cluster, controller, widgetDAO, metaInfo, widgetEntity, 
stackInfo, serviceInfo, service);
 +
 +    
mockInjector.getInstance(UpgradeCatalog260.class).updateHDFSWidgetDefinition();
 +
 +    verify(clusters, cluster, controller, widgetDAO, widgetEntity, stackInfo, 
serviceInfo);
 +  }
  
    private Injector getInjector() {
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
----------------------------------------------------------------------
diff --cc 
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
index fc59e65,747f99b..36c753e
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
@@@ -130,15 -141,15 +141,17 @@@ public class UpgradeCatalog300Test 
      Method showHcatDeletedUserMessage = 
UpgradeCatalog300.class.getDeclaredMethod("showHcatDeletedUserMessage");
      Method setStatusOfStagesAndRequests = 
UpgradeCatalog300.class.getDeclaredMethod("setStatusOfStagesAndRequests");
      Method updateLogSearchConfigs = 
UpgradeCatalog300.class.getDeclaredMethod("updateLogSearchConfigs");
+     Method updateKerberosConfigurations = 
UpgradeCatalog300.class.getDeclaredMethod("updateKerberosConfigurations");
 +    Method updateHostComponentLastStateTable = 
UpgradeCatalog300.class.getDeclaredMethod("updateHostComponentLastStateTable");
  
-    UpgradeCatalog300 upgradeCatalog300 = 
createMockBuilder(UpgradeCatalog300.class)
-             .addMockedMethod(showHcatDeletedUserMessage)
-             .addMockedMethod(addNewConfigurationsFromXml)
-             .addMockedMethod(setStatusOfStagesAndRequests)
-             .addMockedMethod(updateLogSearchConfigs)
-             .addMockedMethod(updateHostComponentLastStateTable)
-             .createMock();
+     UpgradeCatalog300 upgradeCatalog300 = 
createMockBuilder(UpgradeCatalog300.class)
+         .addMockedMethod(showHcatDeletedUserMessage)
+         .addMockedMethod(addNewConfigurationsFromXml)
+         .addMockedMethod(setStatusOfStagesAndRequests)
+         .addMockedMethod(updateLogSearchConfigs)
+         .addMockedMethod(updateKerberosConfigurations)
++        .addMockedMethod(updateHostComponentLastStateTable)
+         .createMock();
  
  
      upgradeCatalog300.addNewConfigurationsFromXml();
@@@ -146,9 -157,11 +159,12 @@@
      upgradeCatalog300.setStatusOfStagesAndRequests();
  
      upgradeCatalog300.updateLogSearchConfigs();
 +    upgradeCatalog300.updateHostComponentLastStateTable();
      expectLastCall().once();
  
+     upgradeCatalog300.updateKerberosConfigurations();
+     expectLastCall().once();
+ 
      replay(upgradeCatalog300);
  
      upgradeCatalog300.executeDMLUpdates();
@@@ -171,12 -184,21 +187,24 @@@
      Capture<DBAccessor.DBColumnInfo> hrcOpsDisplayNameColumn = newCapture();
      dbAccessor.addColumn(eq(UpgradeCatalog300.HOST_ROLE_COMMAND_TABLE), 
capture(hrcOpsDisplayNameColumn));
  
 +    Capture<DBAccessor.DBColumnInfo> lastValidColumn = newCapture();
 +    dbAccessor.addColumn(eq(UpgradeCatalog300.COMPONENT_LAST_STATE_COLUMN), 
capture(lastValidColumn));
 +
-     dbAccessor.dropColumn(COMPONENT_DESIRED_STATE_TABLE, 
SECURITY_STATE_COLUMN); expectLastCall().once();
-     dbAccessor.dropColumn(COMPONENT_STATE_TABLE, SECURITY_STATE_COLUMN); 
expectLastCall().once();
-     dbAccessor.dropColumn(SERVICE_DESIRED_STATE_TABLE, 
SECURITY_STATE_COLUMN); expectLastCall().once();
+     dbAccessor.dropColumn(COMPONENT_DESIRED_STATE_TABLE, 
SECURITY_STATE_COLUMN);
+     expectLastCall().once();
+     dbAccessor.dropColumn(COMPONENT_STATE_TABLE, SECURITY_STATE_COLUMN);
+     expectLastCall().once();
+     dbAccessor.dropColumn(SERVICE_DESIRED_STATE_TABLE, SECURITY_STATE_COLUMN);
+     expectLastCall().once();
+ 
+     // Ambari configuration table addition...
+     Capture<List<DBAccessor.DBColumnInfo>> ambariConfigurationTableColumns = 
newCapture();
+ 
+     dbAccessor.createTable(eq(AMBARI_CONFIGURATION_TABLE), 
capture(ambariConfigurationTableColumns));
+     expectLastCall().once();
+     dbAccessor.addPKConstraint(AMBARI_CONFIGURATION_TABLE, 
"PK_ambari_configuration", AMBARI_CONFIGURATION_CATEGORY_NAME_COLUMN, 
AMBARI_CONFIGURATION_PROPERTY_NAME_COLUMN);
+     expectLastCall().once();
+     // Ambari configuration table addition...
  
      replay(dbAccessor, configuration);
  
@@@ -189,11 -211,35 +217,40 @@@
      Assert.assertEquals(null, capturedOpsDisplayNameColumn.getDefaultValue());
      Assert.assertEquals(String.class, capturedOpsDisplayNameColumn.getType());
  
+     // Ambari configuration table addition...
+     Assert.assertTrue(ambariConfigurationTableColumns.hasCaptured());
+     List<DBAccessor.DBColumnInfo> columns = 
ambariConfigurationTableColumns.getValue();
+     Assert.assertEquals(3, columns.size());
+ 
+     for (DBAccessor.DBColumnInfo column : columns) {
+       String columnName = column.getName();
+ 
+       if (AMBARI_CONFIGURATION_CATEGORY_NAME_COLUMN.equals(columnName)) {
+         Assert.assertEquals(String.class, column.getType());
+         Assert.assertEquals(Integer.valueOf(100), column.getLength());
+         Assert.assertEquals(null, column.getDefaultValue());
+         Assert.assertFalse(column.isNullable());
+       } else if 
(AMBARI_CONFIGURATION_PROPERTY_NAME_COLUMN.equals(columnName)) {
+         Assert.assertEquals(String.class, column.getType());
+         Assert.assertEquals(Integer.valueOf(100), column.getLength());
+         Assert.assertEquals(null, column.getDefaultValue());
+         Assert.assertFalse(column.isNullable());
+       } else if 
(AMBARI_CONFIGURATION_PROPERTY_VALUE_COLUMN.equals(columnName)) {
+         Assert.assertEquals(String.class, column.getType());
+         Assert.assertEquals(Integer.valueOf(255), column.getLength());
+         Assert.assertEquals(null, column.getDefaultValue());
+         Assert.assertTrue(column.isNullable());
+       } else {
+         Assert.fail("Unexpected column name: " + columnName);
+       }
+     }
+     // Ambari configuration table addition...
+ 
 +    DBAccessor.DBColumnInfo capturedLastValidColumn = 
lastValidColumn.getValue();
 +    Assert.assertEquals(UpgradeCatalog300.HRC_OPS_DISPLAY_NAME_COLUMN, 
capturedLastValidColumn.getName());
 +    Assert.assertEquals(null, capturedLastValidColumn.getDefaultValue());
 +    Assert.assertEquals(String.class, capturedLastValidColumn.getType());
 +
      verify(dbAccessor);
    }
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py
----------------------------------------------------------------------
diff --cc ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py
index 0c855fc,17d5e77..1d822eb
--- a/ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py
+++ b/ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py
@@@ -164,9 -164,9 +164,9 @@@ class TestRUExecuteTasks(RMFTestCase)
      call_mock.side_effect = fake_call   # echo the command
  
      # Ensure that the json file was actually read.
 -    stack_name = default("/hostLevelParams/stack_name", None)
 +    stack_name = default("/clusterLevelParams/stack_name", None)
      stack_version = default("/hostLevelParams/stack_version", None)
-     service_package_folder = default('/roleParams/service_package_folder', 
None)
+     service_package_folder = default('/commandParams/service_package_folder', 
None)
  
      self.assertEqual(stack_name, "HDP")
      self.assertEqual(stack_version, '2.2')

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-web/app/controllers/global/update_controller.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-web/app/controllers/main/dashboard/config_history_controller.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-web/app/mappers/components_state_mapper.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --cc ambari-web/app/messages.js
index 4a352ed,b294877..fe6f3ac
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@@ -358,8 -358,8 +358,9 @@@ Em.I18n.translations = 
    'common.repositoryType': 'Repository Type',
    'common.rolling.downgrade': 'Rolling Downgrade',
    'common.express.downgrade': 'Express Downgrade',
 +  'common.minute.ago': 'less than a minute ago',
    'common.views': 'Views',
+   'common.critical.error': 'Critical',
  
    'models.alert_instance.tiggered.verbose': "Occurred on {0} <br> Checked on 
{1}",
    'models.alert_definition.triggered.verbose': "Occurred on {0}",

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-web/app/views/main/dashboard/config_history_view.js
----------------------------------------------------------------------
diff --cc ambari-web/app/views/main/dashboard/config_history_view.js
index ae479dd,4376067..aa2bbb6
--- a/ambari-web/app/views/main/dashboard/config_history_view.js
+++ b/ambari-web/app/views/main/dashboard/config_history_view.js
@@@ -67,15 -68,10 +67,9 @@@ App.MainConfigHistoryView = App.TableVi
     * stop polling after leaving config history page
     */
    willDestroyElement: function () {
 -    this.set('controller.isPolling', false);
 -    clearTimeout(this.get('controller.timeoutRef'));
 +    this.get('controller').unsubscribeOfUpdates();
    },
  
-   updateFilter: function (iColumn, value, type) {
-     if (!this.get('isInitialRendering')) {
-       this._super(iColumn, value, type);
-     }
-   },
- 
    sortView: sort.serverWrapperView,
    versionSort: sort.fieldView.extend({
      column: 1,

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/ambari-web/test/views/main/dashboard/config_history_view_test.js
----------------------------------------------------------------------
diff --cc ambari-web/test/views/main/dashboard/config_history_view_test.js
index 090868e,a104e25..070c532
--- a/ambari-web/test/views/main/dashboard/config_history_view_test.js
+++ b/ambari-web/test/views/main/dashboard/config_history_view_test.js
@@@ -286,46 -135,10 +131,17 @@@ describe('App.MainConfigHistoryView', f
      });
    });
  
-   describe('#updateFilter()', function () {
-     var cases = [
-       {
-         isInitialRendering: false,
-         updateFilterCalled: true,
-         title: 'updateFilter should be called'
-       },
-       {
-         isInitialRendering: true,
-         updateFilterCalled: false,
-         title: 'updateFilter should not be called'
-       }
-     ];
-     beforeEach(function () {
-       sinon.stub(view, 'saveFilterConditions', Em.K);
-       view.set('filteringComplete', true);
-     });
-     afterEach(function () {
-       view.saveFilterConditions.restore();
-     });
-     cases.forEach(function (item) {
-       it(item.title, function () {
-         view.set('isInitialRendering', item.isInitialRendering);
-         view.updateFilter(1, 'value', 'string');
-         expect(view.get('saveFilterConditions').calledWith(1, 'value', 
'string')).to.equal(item.updateFilterCalled);
-       });
-     });
-   });
- 
    describe('#willDestroyElement()', function() {
 -    it('controller.isPolling is false', function() {
 +    beforeEach(function () {
 +      sinon.stub(view.get('controller'), 'unsubscribeOfUpdates');
 +    });
 +    afterEach(function () {
 +      view.get('controller').unsubscribeOfUpdates.restore();
 +    });
 +
 +    it('unsubscribeOfUpdates should be called', function() {
        view.willDestroyElement();
 -      expect(view.get('controller.isPolling')).to.be.false;
 +      
expect(view.get('controller').unsubscribeOfUpdates.calledOnce).to.be.true;
      });
    });
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/366f6ca9/pom.xml
----------------------------------------------------------------------

Reply via email to