Merge branch 'branch-2.6' into branch-feature-AMBARI-21450

Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/00712125
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/00712125
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/00712125

Branch: refs/heads/branch-2.6
Commit: 00712125d7ac3f188a4eb3174b9955551e202b9b
Parents: abd6459 7efb9de
Author: Jonathan Hurley <jhur...@hortonworks.com>
Authored: Fri Aug 4 10:40:05 2017 -0400
Committer: Jonathan Hurley <jhur...@hortonworks.com>
Committed: Fri Aug 4 10:40:05 2017 -0400

----------------------------------------------------------------------
 .../libraries/functions/version.py              |   6 +
 .../ambari-metrics/datasource.js                |  35 +-
 .../MetricCollectorHAController.java            |  42 +-
 .../checks/DatabaseConsistencyCheckHelper.java  | 115 ++++
 .../AmbariManagementControllerImpl.java         |   2 +-
 .../ClusterStackVersionResourceProvider.java    |  50 +-
 .../controller/utilities/DatabaseChecker.java   | 296 ----------
 .../state/configgroup/ConfigGroupImpl.java      |   6 +-
 .../HDF/grafana-nifi-hosts.json                 |  17 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |   2 +-
 .../2.1.0.2.0/package/scripts/params_windows.py |   2 +-
 .../HDFS/2.1.0.2.0/package/scripts/utils.py     |  16 +
 .../0.4.0/package/scripts/setup_ranger_xml.py   |   6 +
 .../src/main/resources/scripts/stack_advisor.py |   7 +
 .../0.8/services/HDFS/package/scripts/params.py |   2 +-
 .../4.0/properties/stack_features.json          |  31 +-
 .../package/alerts/alert_checkpoint_time.py     | 255 +++++++++
 .../alerts/alert_datanode_unmounted_data_dir.py | 177 ++++++
 .../package/alerts/alert_ha_namenode_health.py  | 243 ++++++++
 .../package/alerts/alert_metrics_deviation.py   | 470 ++++++++++++++++
 .../package/alerts/alert_upgrade_finalized.py   | 179 ++++++
 .../services/HDFS/package/files/checkWebUI.py   |  86 +++
 .../services/HDFS/package/scripts/__init__.py   |  20 +
 .../scripts/balancer-emulator/hdfs-command.py   |  45 ++
 .../services/HDFS/package/scripts/datanode.py   | 174 ++++++
 .../HDFS/package/scripts/datanode_upgrade.py    | 156 +++++
 .../4.2.5/services/HDFS/package/scripts/hdfs.py | 173 ++++++
 .../HDFS/package/scripts/hdfs_client.py         | 123 ++++
 .../HDFS/package/scripts/hdfs_datanode.py       |  84 +++
 .../HDFS/package/scripts/hdfs_namenode.py       | 562 +++++++++++++++++++
 .../HDFS/package/scripts/hdfs_nfsgateway.py     |  75 +++
 .../HDFS/package/scripts/hdfs_rebalance.py      | 130 +++++
 .../HDFS/package/scripts/hdfs_snamenode.py      |  64 +++
 .../HDFS/package/scripts/install_params.py      |  38 ++
 .../HDFS/package/scripts/journalnode.py         | 198 +++++++
 .../HDFS/package/scripts/journalnode_upgrade.py | 152 +++++
 .../services/HDFS/package/scripts/namenode.py   | 420 ++++++++++++++
 .../HDFS/package/scripts/namenode_ha_state.py   | 219 ++++++++
 .../HDFS/package/scripts/namenode_upgrade.py    | 322 +++++++++++
 .../services/HDFS/package/scripts/nfsgateway.py | 147 +++++
 .../services/HDFS/package/scripts/params.py     |  30 +
 .../HDFS/package/scripts/params_linux.py        | 527 +++++++++++++++++
 .../HDFS/package/scripts/params_windows.py      |  76 +++
 .../HDFS/package/scripts/service_check.py       | 153 +++++
 .../HDFS/package/scripts/setup_ranger_hdfs.py   | 121 ++++
 .../services/HDFS/package/scripts/snamenode.py  | 152 +++++
 .../HDFS/package/scripts/status_params.py       |  58 ++
 .../services/HDFS/package/scripts/utils.py      | 385 +++++++++++++
 .../services/HDFS/package/scripts/zkfc_slave.py | 200 +++++++
 .../package/templates/exclude_hosts_list.j2     |  21 +
 .../HDFS/package/templates/hdfs.conf.j2         |  35 ++
 .../services/HDFS/package/templates/slaves.j2   |  21 +
 .../4.2.5/upgrades/config-upgrade.xml           |  59 ++
 .../upgrades/nonrolling-upgrade-to-hdp-2.6.xml  |  21 +
 .../BigInsights/4.2/upgrades/config-upgrade.xml |  71 ++-
 .../upgrades/nonrolling-upgrade-to-hdp-2.6.xml  |  29 +-
 .../DatabaseConsistencyCheckHelperTest.java     |  77 +++
 .../AmbariManagementControllerTest.java         |   2 +-
 ...ClusterStackVersionResourceProviderTest.java |  11 +-
 .../utilities/DatabaseCheckerTest.java          | 105 ----
 .../app/controllers/wizard/step1_controller.js  |   2 +
 .../app/mixins/common/configs/configs_saver.js  |  12 +-
 .../mixins/common/configs/enhanced_configs.js   |  12 +-
 ambari-web/app/routes/installer.js              |   4 +-
 ambari-web/app/templates/wizard/step1.hbs       | 342 +++++------
 .../app/views/main/alert_definitions_view.js    |   5 +-
 ambari-web/app/views/wizard/step1_view.js       |   2 +-
 .../test/controllers/wizard/step1_test.js       |   2 +
 .../mixins/common/configs/configs_saver_test.js |  19 +
 .../common/configs/enhanced_configs_test.js     |  10 +
 .../views/main/alert_definitions_view_test.js   |  68 +++
 ambari-web/test/views/wizard/step1_view_test.js |   2 +-
 72 files changed, 7156 insertions(+), 625 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/00712125/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/00712125/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/00712125/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
----------------------------------------------------------------------
diff --cc 
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
index f07ecad,b489d7f..ed509b7
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
@@@ -706,4 -992,144 +707,47 @@@ public class ClusterStackVersionResourc
    }
  
  
 -  /**
 -   * Ensures that the stack tools and stack features are set on
 -   * {@link ConfigHelper#CLUSTER_ENV} for the stack of the repository being
 -   * distributed. This step ensures that the new repository can be distributed
 -   * with the correct tools.
 -   * <p/>
 -   * If the cluster's current stack name matches that of the new stack or the
 -   * new stack's tools are already added in the configuration, then this 
method
 -   * will not change anything.
 -   *
 -   * @param stackId
 -   *          the stack of the repository being distributed (not {@code 
null}).
 -   * @param cluster
 -   *          the cluster the new stack/repo is being distributed for (not
 -   *          {@code null}).
 -   * @throws AmbariException
 -   */
 -  private void bootstrapStackTools(StackId stackId, Cluster cluster) throws 
AmbariException {
 -    // if the stack name is the same as the cluster's current stack name, then
 -    // there's no work to do
 -    if (StringUtils.equals(stackId.getStackName(),
 -        cluster.getCurrentStackVersion().getStackName())) {
 -      return;
 -    }
 -
 -    ConfigHelper configHelper = configHelperProvider.get();
 -
 -    // get the stack tools/features for the stack being distributed
 -    Map<String, Map<String, String>> defaultStackConfigurationsByType = 
configHelper.getDefaultProperties(
 -        stackId, cluster);
 -
 -    Map<String, String> clusterEnvDefaults = 
defaultStackConfigurationsByType.get(
 -        ConfigHelper.CLUSTER_ENV);
 -
 -    Config clusterEnv = 
cluster.getDesiredConfigByType(ConfigHelper.CLUSTER_ENV);
 -    Map<String, String> clusterEnvProperties = clusterEnv.getProperties();
 -
 -    // the 3 properties we need to check and update
 -    Set<String> properties = 
Sets.newHashSet(ConfigHelper.CLUSTER_ENV_STACK_ROOT_PROPERTY,
 -        ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY,
 -        ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY);
 -
 -    // any updates are stored here and merged into the existing config type
 -    Map<String, String> updatedProperties = new HashMap<>();
 -
 -    for (String property : properties) {
 -      // determine if the property exists in the stack being distributed (it
 -      // kind of has to, but we'll be safe if it's not found)
 -      String newStackDefaultJson = clusterEnvDefaults.get(property);
 -      if (StringUtils.isBlank(newStackDefaultJson)) {
 -        continue;
 -      }
 -
 -      String existingPropertyJson = clusterEnvProperties.get(property);
 -
 -      // if the stack tools/features property doesn't exist, then just set the
 -      // one from the new stack
 -      if (StringUtils.isBlank(existingPropertyJson)) {
 -        updatedProperties.put(property, newStackDefaultJson);
 -        continue;
 -      }
 -
 -      // now is the hard part - we need to check to see if the new stack tools
 -      // exists alongside the current tools and if it doesn't, then add the 
new
 -      // tools in
 -      final Map<String, Object> existingJson;
 -      final Map<String, ?> newStackJsonAsObject;
 -      if (StringUtils.equals(property, 
ConfigHelper.CLUSTER_ENV_STACK_ROOT_PROPERTY)) {
 -        existingJson = gson.<Map<String, Object>> 
fromJson(existingPropertyJson, Map.class);
 -        newStackJsonAsObject = gson.<Map<String, String>> 
fromJson(newStackDefaultJson, Map.class);
 -      } else {
 -        existingJson = gson.<Map<String, Object>> 
fromJson(existingPropertyJson,
 -            Map.class);
 -
 -        newStackJsonAsObject = gson.<Map<String, Map<Object, Object>>> 
fromJson(newStackDefaultJson,
 -            Map.class);
 -      }
 -
 -      if (existingJson.keySet().contains(stackId.getStackName())) {
 -        continue;
 -      }
 -
 -      existingJson.put(stackId.getStackName(), 
newStackJsonAsObject.get(stackId.getStackName()));
 -
 -      String newJson = gson.toJson(existingJson);
 -      updatedProperties.put(property, newJson);
 -    }
 -
 -    if (!updatedProperties.isEmpty()) {
 -      AmbariManagementController amc = getManagementController();
 -      String serviceNote = String.format(
 -          "Adding stack tools for %s while distributing a new repository", 
stackId.toString());
 -
 -      configHelper.updateConfigType(cluster, amc, clusterEnv.getType(), 
updatedProperties, null,
 -          amc.getAuthName(), serviceNote);
 -    }
 -  }
+ 
+   /**
+    * Checking a single host is enough to determine the architecture,
+    * because all hosts should have the same architecture.
+    * @param hosts List<Host>
+    * @return osFamily, or null if hosts is empty or the architecture is x86_64
+    */
+   private String getPowerPCOsFamily(List<Host> hosts) {
+     if (hosts.isEmpty()){
+       return null;
+     } else {
+       Host host = hosts.get(0);
+       String osFamily = host.getHostAttributes().get("os_family");
+       if (null != osFamily && osFamily.endsWith("-ppc")){
+         return osFamily;
+       } else {
+         return null;
+       }
+     }
+   }
+ 
+   /**
+    * Use the os type with a "-ppc" suffix for PowerPC,
+    * in order to keep it consistent with the os information
+    * stored in the Hosts table.
+    * No change is applied if the os is x86_64.
+    */
+   private String getOsTypeForRepo(OperatingSystemEntity operatingSystem, 
String osFamily) {
+     if (null != osFamily){
+       String osType = operatingSystem.getOsType();
+       int pos = osFamily.indexOf("-ppc");
+       if (pos > 0){
+         String os = osType.substring(0, pos);
+         String majorVersion = osType.substring(os.length());
+         return String.format("%s-ppc%s", os, majorVersion);
+       } else {
+         return operatingSystem.getOsType();
+       }
+     } else {
+       return operatingSystem.getOsType();
+     }
+   }
  }

http://git-wip-us.apache.org/repos/asf/ambari/blob/00712125/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/upgrades/nonrolling-upgrade-to-hdp-2.6.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/00712125/ambari-server/src/main/resources/stacks/BigInsights/4.2/upgrades/nonrolling-upgrade-to-hdp-2.6.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/00712125/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/00712125/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
----------------------------------------------------------------------
diff --cc 
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
index 32a5358,68596e0..41c83e4
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
@@@ -226,13 -233,9 +226,13 @@@ public class ClusterStackVersionResourc
            MaintenanceState.OFF).anyTimes();
        expect(host.getAllHostVersions()).andReturn(
            Collections.<HostVersionEntity>emptyList()).anyTimes();
- 
+       expect(host.getHostAttributes()).andReturn(new HashMap<String, 
String>()).anyTimes();
        replay(host);
        hostsForCluster.put(hostname, host);
 +
 +      if (!StringUtils.equals(hostWithoutVersionableComponents, hostname)) {
 +        hostsNeedingInstallCommands.add(host);
 +      }
      }
  
      final ServiceComponentHost schDatanode = 
createMock(ServiceComponentHost.class);
@@@ -641,14 -657,9 +641,14 @@@
            MaintenanceState.OFF).anyTimes();
        expect(host.getAllHostVersions()).andReturn(
            Collections.<HostVersionEntity>emptyList()).anyTimes();
- 
+       expect(host.getHostAttributes()).andReturn(new HashMap<String, 
String>()).anyTimes();
        replay(host);
        hostsForCluster.put(hostname, host);
 +
 +
 +      if (!StringUtils.equals(hostWithoutVersionableComponents, hostname)) {
 +        hostsNeedingInstallCommands.add(host);
 +      }
      }
  
      Service hdfsService = createNiceMock(Service.class);
@@@ -882,13 -888,9 +882,13 @@@
            MaintenanceState.OFF).anyTimes();
        expect(host.getAllHostVersions()).andReturn(
            Collections.<HostVersionEntity>emptyList()).anyTimes();
- 
+       expect(host.getHostAttributes()).andReturn(new HashMap<String, 
String>()).anyTimes();
        replay(host);
        hostsForCluster.put(hostname, host);
 +
 +      if (!StringUtils.equals(hostWithoutVersionableComponents, hostname)) {
 +        hostsNeedingInstallCommands.add(host);
 +      }
      }
  
      Service hdfsService = createNiceMock(Service.class);
@@@ -1130,9 -1433,10 +1130,9 @@@
        expect(host.getOsFamily()).andReturn("redhat6").anyTimes();
        expect(host.getMaintenanceState(EasyMock.anyLong())).andReturn(
            MaintenanceState.OFF).anyTimes();
 -      expect(host.getAllHostVersions()).andReturn(
 -          Collections.<HostVersionEntity>emptyList()).anyTimes();
 +      expect(host.getAllHostVersions()).andReturn(hostVersions).anyTimes();
- 
+       expect(host.getHostAttributes()).andReturn(new HashMap<String, 
String>()).anyTimes();
 -      replay(host);
 +      replay(host, hostVersion);
        hostsForCluster.put(hostname, host);
      }
  

Reply via email to