This is an automated email from the ASF dual-hosted git repository.

aonishuk pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 0608999  AMBARI-24022. AutoStart Is not working for some of the components in the cluster  (aonishuk)
0608999 is described below

commit 06089994a73a7b07b266887788546b1153494d10
Author: Andrew Onishuk <[email protected]>
AuthorDate: Mon Jun 4 15:37:35 2018 +0300

    AMBARI-24022. AutoStart Is not working for some of the components in the cluster  (aonishuk)
---
 .../org/apache/ambari/server/agent/ExecutionCommand.java     |  1 +
 .../server/controller/AmbariManagementControllerImpl.java    | 12 ++++++++++++
 .../ACCUMULO/1.6.1.2.2.0/package/scripts/params.py           |  2 +-
 .../0.1.0/package/scripts/command_commons.py                 |  2 +-
 .../AMBARI_METRICS/0.1.0/package/scripts/params.py           |  2 +-
 .../AMBARI_METRICS/0.1.0/package/scripts/params_linux.py     |  2 +-
 .../ATLAS/0.1.0.2.3/package/scripts/params.py                |  2 +-
 .../common-services/DRUID/0.10.1/package/scripts/params.py   |  2 +-
 .../FALCON/0.5.0.2.1/package/scripts/params_linux.py         |  2 +-
 .../common-services/HAWQ/2.0.0/package/scripts/params.py     |  2 +-
 .../HBASE/0.96.0.2.0/package/scripts/params_linux.py         |  2 +-
 .../HDFS/2.1.0.2.0/package/scripts/params_linux.py           |  2 +-
 .../HIVE/0.12.0.2.0/package/scripts/params_linux.py          |  2 +-
 .../common-services/KAFKA/0.8.1/package/scripts/params.py    |  2 +-
 .../KNOX/0.5.0.2.2/package/scripts/params_linux.py           |  2 +-
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py               |  2 +-
 .../OOZIE/4.0.0.2.0/package/scripts/params_linux.py          |  2 +-
 .../PIG/0.12.0.2.0/package/scripts/params_linux.py           |  2 +-
 .../common-services/PXF/3.0.0/package/scripts/params.py      |  2 +-
 .../RANGER_KMS/0.5.0.2.3/package/scripts/params.py           |  2 +-
 .../SLIDER/0.60.0.2.2/package/scripts/params_linux.py        |  2 +-
 .../common-services/SPARK/1.2.1/package/scripts/params.py    |  2 +-
 .../common-services/SPARK2/2.0.0/package/scripts/params.py   |  2 +-
 .../STORM/0.9.1/package/scripts/params_linux.py              |  2 +-
 .../TEZ/0.4.0.2.1/package/scripts/params_linux.py            |  2 +-
 .../YARN/2.1.0.2.0/package/scripts/params_linux.py           |  2 +-
 .../common-services/ZEPPELIN/0.6.0/package/scripts/params.py |  2 +-
 .../common-services/ZEPPELIN/0.7.0/package/scripts/params.py |  2 +-
 .../resources/stack-hooks/after-INSTALL/scripts/params.py    |  2 +-
 .../main/resources/stack-hooks/before-ANY/scripts/params.py  |  2 +-
 .../resources/stack-hooks/before-START/scripts/params.py     |  2 +-
 .../HDP/2.3.ECS/services/ECS/package/scripts/params.py       |  2 +-
 32 files changed, 43 insertions(+), 30 deletions(-)
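
The change has two halves: AmbariManagementControllerImpl starts publishing a
dfs_type key in the cluster-level parameters of every execution command (the
first stack service with a non-null service type supplies the value), and the
service scripts and stack hooks listed above switch their lookup from
/commandParams/dfs_type to /clusterLevelParams/dfs_type. Reading the
cluster-level section makes the value available to commands that carry no
per-request command parameters, which appears to be what broke auto-start.
Below is a minimal Python sketch of how such a slash-separated lookup with a
fallback behaves; it is an illustration only, not Ambari's resource_management
implementation, and the command payload shown is hypothetical.

# Illustration only: resolve a "/section/key" path in a nested dict,
# returning a fallback when any segment is missing.
def default(path, fallback, command_json):
    node = command_json
    for part in path.strip("/").split("/"):
        if not isinstance(node, dict) or part not in node:
            return fallback
        node = node[part]
    return node

# Hypothetical auto-start command: cluster-level params are present,
# but nothing was put under commandParams for this command.
command_json = {
    "clusterLevelParams": {"dfs_type": "HCFS"},
    "commandParams": {},
}

print(default("/commandParams/dfs_type", "", command_json))       # "" (old lookup)
print(default("/clusterLevelParams/dfs_type", "", command_json))  # "HCFS" (new lookup)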

diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
index 592c893..7d9964c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
@@ -558,6 +558,7 @@ public class ExecutionCommand extends AgentCommand {
     String AGENT_STACK_RETRY_ON_UNAVAILABILITY = "agent_stack_retry_on_unavailability";
     String AGENT_STACK_RETRY_COUNT = "agent_stack_retry_count";
     String LOG_OUTPUT = "log_output";
+    String DFS_TYPE = "dfs_type";
 
     /**
      * A boolean indicating whether configuration tags should be refreshed
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index da5523b..5a738e1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -31,6 +31,7 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_T
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CUSTOM_FOLDER;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DFS_TYPE;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.GPL_LICENSE_ACCEPTED;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.GROUP_LIST;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
@@ -5766,6 +5767,17 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     String notManagedHdfsPathList = gson.toJson(notManagedHdfsPathSet);
     clusterLevelParams.put(NOT_MANAGED_HDFS_PATH_LIST, notManagedHdfsPathList);
 
+    Map<String, ServiceInfo> serviceInfos = ambariMetaInfo.getServices(stackId.getStackName(), stackId.getStackVersion());
+    for (ServiceInfo serviceInfoInstance : serviceInfos.values()) {
+      if (serviceInfoInstance.getServiceType() != null) {
+        LOG.debug("Adding {} to command parameters for {}", serviceInfoInstance.getServiceType(),
+            serviceInfoInstance.getName());
+
+        clusterLevelParams.put(DFS_TYPE, serviceInfoInstance.getServiceType());
+        break;
+      }
+    }
+
     return clusterLevelParams;
   }
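
Note the shape of the loop above: it publishes at most one dfs_type per
cluster, taking the service type of the first stack service that declares one
and then breaking out of the scan. A short Python stand-in for that selection
rule (the real code is the Java above; the service names and types in the
example are hypothetical):

# First non-null service type wins; mirrors the break in the Java loop.
def pick_dfs_type(service_types):
    for service_type in service_types.values():
        if service_type is not None:
            return service_type
    return None  # no service declares a type, so dfs_type is never put

print(pick_dfs_type({"ZOOKEEPER": None, "ECS": "HCFS", "YARN": None}))  # HCFS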
 
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index 256be1f..d87d9c2 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -198,7 +198,7 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 # dfs.namenode.https-address
 import functools
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA_SOLR/0.1.0/package/scripts/command_commons.py b/ambari-server/src/main/resources/common-services/AMBARI_INFRA_SOLR/0.1.0/package/scripts/command_commons.py
index 91fff50..5d3e897 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_INFRA_SOLR/0.1.0/package/scripts/command_commons.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA_SOLR/0.1.0/package/scripts/command_commons.py
@@ -109,7 +109,7 @@ if solr_hdfs_path:
   hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', None)
   hdfs_user_keytab = params.config['configurations']['hadoop-env']['hdfs_user_keytab']
 
-  dfs_type = default("/commandParams/dfs_type", "")
+  dfs_type = default("/clusterLevelParams/dfs_type", "")
 
   hdfs_site = params.config['configurations']['hdfs-site']
   default_fs = params.config['configurations']['core-site']['fs.defaultFS']
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index 458f45a..8aee8d4 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -398,7 +398,7 @@ min_hadoop_sink_version = default("/configurations/ams-env/min_ambari_metrics_ha
 
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 import functools
 #create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
index 20604cf..e05b22c 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
@@ -61,7 +61,7 @@ hbase_conf_dir = "/etc/ams-hbase/conf"
 limits_conf_dir = "/etc/security/limits.d"
 sudo = AMBARI_SUDO_BINARY
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 hbase_regionserver_shutdown_timeout = expect('/configurations/ams-hbase-env/hbase_regionserver_shutdown_timeout', int,
                                              30)
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
index 8ce94be..12d7559 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
@@ -325,7 +325,7 @@ if stack_supports_atlas_ranger_plugin and enable_ranger_atlas:
   hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
   hdfs_site = config['configurations']['hdfs-site']
   default_fs = config['configurations']['core-site']['fs.defaultFS']
-  dfs_type = default("/commandParams/dfs_type", "")
+  dfs_type = default("/clusterLevelParams/dfs_type", "")
 
   import functools
   from resource_management.libraries.resources.hdfs_resource import HdfsResource
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
index 5fe262a..9958123 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
@@ -130,7 +130,7 @@ hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name',
                                                                              hostname)
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 hdfs_tmp_dir = config['configurations']['hadoop-env']['hdfs_tmp_dir']
 
 # Kerberos
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
index 858e6ab..ce67ebf 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
@@ -180,7 +180,7 @@ if falcon_atlas_support:
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 bdb_jar_name = "je-5.0.73.jar"
 bdb_resource_name = format("{jdk_location}/{bdb_jar_name}")
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
index fb33e68..5e6b314 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
@@ -74,7 +74,7 @@ if dfs_nameservice is None:
  dfs_nameservice = default('/configurations/hdfs-site/dfs.nameservices', None)
 
 hawq_global_rm_type = default('/configurations/hawq-site/hawq_global_rm_type', None)
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 # HDFSResource partial function
 HdfsResource = functools.partial(HdfsResource,
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index f60cb5b..0552275 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -261,7 +261,7 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 import functools
 #create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 16bfc8e..1c1987f 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -379,7 +379,7 @@ else:
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 import functools
 #create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index b13fa06..5ce2d0c 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -621,7 +621,7 @@ security_param = "true" if security_enabled else "false"
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 import functools
 #create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
index 722fe7c..17c678a 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
@@ -335,7 +335,7 @@ default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_nameno
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 import functools
 #create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
index 21ff2c6..ee3e8c3 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
@@ -513,7 +513,7 @@ hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
 default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 import functools
 #create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
index 29ca4ba..bf85ba3 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -75,7 +75,7 @@ log4j_props = config['configurations']['mahout-log4j']['content']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 import functools
 #create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index 1f7df7f..3b002cf 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -351,7 +351,7 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 
 ########################################################
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
index c99c606..80fe145 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
@@ -81,7 +81,7 @@ log4j_props = config['configurations']['pig-log4j']['content']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 import functools
 #create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
index e451aa1..6bb73b7 100644
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
@@ -78,7 +78,7 @@ hdfs_user_keytab = default('configurations/hadoop-env/hdfs_user_keytab', None)
 hdfs_principal_name = default('configurations/hadoop-env/hdfs_principal_name', None)
 hbase_user_keytab = default('configurations/hbase-env/hbase_user_keytab', None)
 hbase_principal_name = default('configurations/hbase-env/hbase_principal_name', None)
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 # HDFSResource partial function
 HdfsResource = functools.partial(HdfsResource,
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
index eac71dd..a9ddb5d 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
@@ -314,7 +314,7 @@ default_fs = default("/configurations/core-site/fs.defaultFS", None)
 hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 import functools
 # create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
index 49839bb..35ff7a2 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
@@ -61,7 +61,7 @@ hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 import functools
 #create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
index fc97f03..24bee8d 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
@@ -210,7 +210,7 @@ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore"
 ats_host = set(default("/clusterHostInfo/app_timeline_server_hosts", []))
 has_ats = len(ats_host) > 0
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 # livy related config
 
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
index 7c929fb..2f38a4b 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
@@ -195,7 +195,7 @@ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore"
 ats_host = set(default("/clusterHostInfo/app_timeline_server_hosts", []))
 has_ats = len(ats_host) > 0
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 # livy related config
 
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py
index 260e74d..71ab037 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py
@@ -409,7 +409,7 @@ hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 import functools
 #create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index 76e6eeb..dd70a9f 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -93,7 +93,7 @@ tez_env_sh_template = config['configurations']['tez-env']['content']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 import functools
 #create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 6be8149..3fbb1e4 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -409,7 +409,7 @@ is_webhdfs_enabled = hdfs_site['dfs.webhdfs.enabled']
 # Path to file that contains list of HDFS resources to be skipped during processing
 hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore"
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 
 import functools
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/params.py
index ef1ba32..e0f878c 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/params.py
@@ -239,7 +239,7 @@ hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 # create partial functions with common arguments for every HdfsResource call
 # to create hdfs directory we need to call params.HdfsResource in code
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
index a960c09..529a1f6 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
@@ -253,7 +253,7 @@ hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 # create partial functions with common arguments for every HdfsResource call
 # to create hdfs directory we need to call params.HdfsResource in code
diff --git a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
index 6fc8f1e..a30949d 100644
--- a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
@@ -33,7 +33,7 @@ from string import lower
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 is_parallel_execution_enabled = int(default("/agentConfigParams/agent/parallel_execution", 0)) == 1
 host_sys_prepped = default("/ambariLevelParams/host_sys_prepped", False)
diff --git a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
index 6f6cd39..36aae1d 100644
--- a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
@@ -48,7 +48,7 @@ stack_root = Script.get_stack_root()
 
 architecture = get_architecture()
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 jdk_name = default("/ambariLevelParams/jdk_name", None)
diff --git a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
index 04a5604..e3fa476 100644
--- a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
@@ -58,7 +58,7 @@ stack_version_unformatted = config['clusterLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
 major_stack_version = get_major_version(stack_version_formatted)
 
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 hadoop_conf_dir = "/etc/hadoop/conf"
 component_list = default("/localComponents", [])
 
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
index 6fbec4a..13ce4d4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
@@ -63,7 +63,7 @@ hdfs_tmp_dir = config['configurations']['hadoop-env']['hdfs_tmp_dir']
 hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', None)
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
 
 ambari_libs_dir = "/var/lib/ambari-agent/lib"
 

-- 
To stop receiving notification emails like this one, please contact
[email protected].