Repository: ambari
Updated Branches:
  refs/heads/trunk 33a573cdb -> ed28ff483


AMBARI-18438. Add granular flags for sysprepped clusters to copy tarballs, 
Oozie share lib, fast jar, and create users (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ed28ff48
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ed28ff48
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ed28ff48

Branch: refs/heads/trunk
Commit: ed28ff483f8afb994932a62ea177f06b31fae36f
Parents: 33a573c
Author: Alejandro Fernandez <afernan...@hortonworks.com>
Authored: Wed Sep 21 13:57:35 2016 -0700
Committer: Alejandro Fernandez <afernan...@hortonworks.com>
Committed: Thu Sep 22 15:54:38 2016 -0700

----------------------------------------------------------------------
 .../libraries/functions/copy_tarball.py         | 20 ++++++---
 .../libraries/functions/setup_atlas_hook.py     |  1 +
 .../1.6.1.2.2.0/package/scripts/params.py       |  2 -
 .../0.1.0/package/scripts/params.py             |  1 -
 .../FLUME/1.4.0.2.0/package/scripts/params.py   |  1 -
 .../HBASE/0.96.0.2.0/package/scripts/params.py  |  1 -
 .../HDFS/2.1.0.2.0/package/scripts/params.py    |  1 -
 .../HIVE/0.12.0.2.0/package/scripts/hive.py     | 10 ++---
 .../package/scripts/hive_interactive.py         |  2 +-
 .../0.12.0.2.0/package/scripts/hive_server.py   |  4 +-
 .../package/scripts/hive_server_interactive.py  |  4 +-
 .../HIVE/0.12.0.2.0/package/scripts/params.py   |  3 +-
 .../KAFKA/0.8.1/package/scripts/params.py       |  1 -
 .../1.10.3-10/package/scripts/params.py         |  1 -
 .../KNOX/0.5.0.2.2/package/scripts/params.py    |  1 -
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py  |  1 -
 .../4.0.0.2.0/package/scripts/oozie_service.py  |  9 ++--
 .../OOZIE/4.0.0.2.0/package/scripts/params.py   |  9 +++-
 .../PIG/0.12.0.2.0/package/scripts/params.py    |  3 +-
 .../0.12.0.2.0/package/scripts/service_check.py |  2 +-
 .../RANGER/0.4.0/package/scripts/params.py      |  1 -
 .../SLIDER/0.60.0.2.2/package/scripts/params.py |  4 +-
 .../0.60.0.2.2/package/scripts/params_linux.py  |  2 -
 .../0.60.0.2.2/package/scripts/service_check.py |  4 +-
 .../1.2.1/package/scripts/job_history_server.py |  2 +-
 .../SPARK/1.2.1/package/scripts/params.py       |  4 +-
 .../1.2.1/package/scripts/spark_service.py      |  4 +-
 .../2.0.0/package/scripts/job_history_server.py |  2 +-
 .../SPARK2/2.0.0/package/scripts/params.py      |  4 +-
 .../2.0.0/package/scripts/spark_service.py      |  4 +-
 .../SQOOP/1.4.4.2.0/package/scripts/params.py   |  1 -
 .../STORM/0.9.1/package/scripts/params.py       |  1 -
 .../TEZ/0.4.0.2.1/package/scripts/params.py     |  4 +-
 .../0.4.0.2.1/package/scripts/pre_upgrade.py    |  2 +-
 .../0.4.0.2.1/package/scripts/service_check.py  |  2 +-
 .../2.1.0.2.0/package/scripts/historyserver.py  | 12 +++---
 .../YARN/2.1.0.2.0/package/scripts/params.py    |  3 +-
 .../HDP/2.0.6/configuration/cluster-env.xml     | 45 ++++++++++++++++++++
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  1 +
 .../before-ANY/scripts/shared_initialization.py | 20 +++++----
 .../2.0.6/hooks/before-START/scripts/params.py  |  4 +-
 .../scripts/shared_initialization.py            |  4 +-
 .../stacks/2.0.6/HIVE/test_hive_server.py       |  8 ++--
 .../stacks/2.0.6/YARN/test_historyserver.py     |  4 +-
 .../stacks/2.2/PIG/test_pig_service_check.py    |  2 +-
 45 files changed, 141 insertions(+), 80 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index 2626990..0355685 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -18,7 +18,7 @@ limitations under the License.
 
 """
 
-__all__ = ["copy_to_hdfs", ]
+__all__ = ["copy_to_hdfs", "get_sysprep_skip_copy_tarballs_hdfs"]
 
 import os
 import uuid
@@ -65,6 +65,16 @@ TARBALL_MAP = {
 }
 
 
+def get_sysprep_skip_copy_tarballs_hdfs():
+  import params
+  host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
+  # By default, copy the tarballs to HDFS. If the cluster is sysprepped, then 
set based on the config.
+  sysprep_skip_copy_tarballs_hdfs = False
+  if host_sys_prepped:
+    sysprep_skip_copy_tarballs_hdfs = 
default("/cluster-env/sysprep_skip_copy_tarballs_hdfs", False)
+  return sysprep_skip_copy_tarballs_hdfs
+
 def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, 
custom_source_file=None, custom_dest_file=None):
   """
   For a given tarball name, get the source and destination paths to use.
@@ -189,7 +199,7 @@ def _get_single_version_from_stack_select():
 
 
 def copy_to_hdfs(name, user_group, owner, file_mode=0444, 
custom_source_file=None, custom_dest_file=None, force_execute=False,
-                 use_upgrading_version_during_upgrade=True, 
replace_existing_files=False, host_sys_prepped=False):
+                 use_upgrading_version_during_upgrade=True, 
replace_existing_files=False, skip=False):
   """
   :param name: Tarball name, e.g., tez, hive, pig, sqoop.
   :param user_group: Group to own the directory.
@@ -199,7 +209,7 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, 
custom_source_file=Non
   :param custom_dest_file: Override the destination file path
   :param force_execute: If true, will execute the HDFS commands immediately, 
otherwise, will defer to the calling function.
   :param use_upgrading_version_during_upgrade: If true, will use the version 
going to during upgrade. Otherwise, use the CURRENT (source) version.
-  :param host_sys_prepped: If true, tarballs will not be copied as the cluster 
deployment uses prepped VMs.
+  :param skip: If true, tarballs will not be copied as the cluster deployment 
uses prepped VMs.
   :return: Will return True if successful, otherwise, False.
   """
   import params
@@ -212,8 +222,8 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, 
custom_source_file=Non
     Logger.error("Could not copy tarball {0} due to a missing or incorrect 
parameter.".format(str(name)))
     return False
 
-  if host_sys_prepped:
-    Logger.warning("Skipping copying {0} to {1} for {2} as its a sys_prepped 
host.".format(str(source_file), str(dest_file), str(name)))
+  if skip:
+    Logger.warning("Skipping copying {0} to {1} for {2} as it is a sys prepped 
host.".format(str(source_file), str(dest_file), str(name)))
     return True
 
   Logger.info("Source file: {0} , Dest file in HDFS: {1}".format(source_file, 
dest_file))

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
index 591c064..a1d2f95 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
@@ -177,5 +177,6 @@ def setup_atlas_jar_symlinks(hook_name, jar_source_dir):
 def install_atlas_hook_packages(atlas_plugin_package, 
atlas_ubuntu_plugin_package, host_sys_prepped,
                                 agent_stack_retry_on_unavailability, 
agent_stack_retry_count):
   if not host_sys_prepped:
+    # This will install packages like atlas-metadata-${service}-plugin needed 
for Falcon and Hive.
     Package(atlas_ubuntu_plugin_package if OSCheck.is_ubuntu_family() else 
atlas_plugin_package,
             retry_on_repo_unavailability=agent_stack_retry_on_unavailability, 
retry_count=agent_stack_retry_count)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index 4c111f1..a8a7327 100644
--- 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -172,8 +172,6 @@ if security_enabled:
 else:
   kinit_cmd = ""
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
-
 #for create_hdfs_directory
 hostname = status_params.hostname
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index 6934924..f04f5c2 100644
--- 
a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -190,7 +190,6 @@ java64_home = config['hostLevelParams']['java_home']
 java_version = expect("/hostLevelParams/java_version", int)
 
 metrics_collector_heapsize = 
default('/configurations/ams-env/metrics_collector_heapsize', "512")
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 metrics_report_interval = 
default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
 metrics_collection_period = 
default("/configurations/ams-site/timeline.metrics.sink.collection.period", 10)
 skip_disk_metrics_patterns = 
default("/configurations/ams-env/timeline.metrics.skip.disk.metrics.patterns", 
None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
index d3a9294..9c79909 100644
--- 
a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
@@ -33,7 +33,6 @@ config = Script.get_config()
 stack_root = Script.get_stack_root()
 
 stack_name = default("/hostLevelParams/stack_name", None)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 # New Cluster Stack Version that is defined during the RESTART of a Stack 
Upgrade
 version = default("/commandParams/version", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
index f3208ce..e0607f3 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
@@ -25,5 +25,4 @@ if OSCheck.is_windows_family():
 else:
   from params_linux import *
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 retryAble = default("/commandParams/command_retry_enabled", False)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
index 7514918..25231f9 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
@@ -24,6 +24,5 @@ if OSCheck.is_windows_family():
 else:
   from params_linux import *
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 nfsgateway_heapsize = 
config['configurations']['hadoop-env']['nfsgateway_heapsize']
 retryAble = default("/commandParams/command_retry_enabled", False)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index 3249dd4..ec64200 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -137,8 +137,8 @@ def hive(name=None):
     # *********************************
     #  if copy tarball to HDFS feature  supported copy mapreduce.tar.gz and 
tez.tar.gz to HDFS
     if params.stack_version_formatted_major and 
check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, 
params.stack_version_formatted_major):
-      copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, 
host_sys_prepped=params.host_sys_prepped)
-      copy_to_hdfs("tez", params.user_group, params.hdfs_user, 
host_sys_prepped=params.host_sys_prepped)
+      copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)
+      copy_to_hdfs("tez", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)
 
     # Always copy pig.tar.gz and hive.tar.gz using the appropriate mode.
     # This can use a different source and dest location to account
@@ -148,14 +148,14 @@ def hive(name=None):
                  file_mode=params.tarballs_mode,
                  custom_source_file=params.pig_tar_source,
                  custom_dest_file=params.pig_tar_dest_file,
-                 host_sys_prepped=params.host_sys_prepped)
+                 skip=params.sysprep_skip_copy_tarballs_hdfs)
     copy_to_hdfs("hive",
                  params.user_group,
                  params.hdfs_user,
                  file_mode=params.tarballs_mode,
                  custom_source_file=params.hive_tar_source,
                  custom_dest_file=params.hive_tar_dest_file,
-                 host_sys_prepped=params.host_sys_prepped)
+                 skip=params.sysprep_skip_copy_tarballs_hdfs)
 
     wildcard_tarballs = ["sqoop", "hadoop_streaming"]
     for tarball_name in wildcard_tarballs:
@@ -176,7 +176,7 @@ def hive(name=None):
                      file_mode=params.tarballs_mode,
                      custom_source_file=source_file,
                      custom_dest_file=dest_file,
-                     host_sys_prepped=params.host_sys_prepped)
+                     skip=params.sysprep_skip_copy_tarballs_hdfs)
     # ******* End Copy Tarballs *******
     # *********************************
     

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
index 6511d0e..888b920 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
@@ -82,7 +82,7 @@ def hive_interactive(name=None):
                  params.user_group,
                  params.hdfs_user,
                  file_mode=params.tarballs_mode,
-                 host_sys_prepped=params.host_sys_prepped)
+                 skip=params.sysprep_skip_copy_tarballs_hdfs)
 
     if resource_created:
       params.HdfsResource(None, action="execute")

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index 7c886b2..614b2a9 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -126,13 +126,13 @@ class HiveServerDefault(HiveServer):
         "mapreduce",
         params.user_group,
         params.hdfs_user,
-        host_sys_prepped=params.host_sys_prepped)
+        skip=params.sysprep_skip_copy_tarballs_hdfs)
 
       resource_created = copy_to_hdfs(
         "tez",
         params.user_group,
         params.hdfs_user,
-        host_sys_prepped=params.host_sys_prepped) or resource_created
+        skip=params.sysprep_skip_copy_tarballs_hdfs) or resource_created
 
       if resource_created:
         params.HdfsResource(None, action="execute")

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
index 0251d27..0e00c3a 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
@@ -94,13 +94,13 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
           "hive2",
           params.user_group,
           params.hdfs_user,
-          host_sys_prepped=params.host_sys_prepped)
+          skip=params.sysprep_skip_copy_tarballs_hdfs)
 
         resource_created = copy_to_hdfs(
           "tez_hive2",
           params.user_group,
           params.hdfs_user,
-          host_sys_prepped=params.host_sys_prepped) or resource_created
+          skip=params.sysprep_skip_copy_tarballs_hdfs) or resource_created
 
         if resource_created:
           params.HdfsResource(None, action="execute")

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
index f10a3f3..895ec81 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
@@ -19,11 +19,12 @@ limitations under the License.
 """
 from ambari_commons import OSCheck
 from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.copy_tarball import 
get_sysprep_skip_copy_tarballs_hdfs
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
 retryAble = default("/commandParams/command_retry_enabled", False)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
index f631ac9..a18c295 100644
--- 
a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
@@ -48,7 +48,6 @@ version = default("/commandParams/version", None)
 # Version that is CURRENT.
 current_version = default("/hostLevelParams/current_version", None)
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
index fe367b6..57f062a 100644
--- 
a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
@@ -44,7 +44,6 @@ kadm5_acl_path = kadm5_acl_dir + '/' + kadm5_acl_file
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 configurations = None
 keytab_details = None

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
index 14e021d..ad1a1dc 100644
--- 
a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
@@ -26,5 +26,4 @@ if OSCheck.is_windows_family():
 else:
   from params_linux import *
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 retryAble = default("/commandParams/command_retry_enabled", False)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
index 8ef3ba8..91911f3 100644
--- 
a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -33,7 +33,6 @@ tmp_dir = Script.get_tmp_dir()
 stack_root = Script.get_stack_root()
 
 stack_name = default("/hostLevelParams/stack_name", None)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
 
b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
index 4740131..5c97727 100644
--- 
a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
+++ 
b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
@@ -35,6 +35,8 @@ from resource_management.libraries.providers.hdfs_resource 
import WebHDFSUtil
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 
+from resource_management.core import Logger
+
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
 def oozie_service(action='start', upgrade_type=None):
   import params
@@ -118,10 +120,9 @@ def oozie_service(action = 'start', upgrade_type=None):
         Execute(kinit_if_needed,
                 user = params.oozie_user,
         )
-      
-      
-      if params.host_sys_prepped:
-        print "Skipping creation of oozie sharelib as host is sys prepped"
+
+      if params.sysprep_skip_copy_oozie_share_lib_to_hdfs:
+        Logger.info("Skipping creation of oozie sharelib as host is sys 
prepped")
         # Copy current hive-site to hdfs:/user/oozie/share/lib/spark/
         params.HdfsResource(format("{hdfs_share_dir}/lib/spark/hive-site.xml"),
                             action="create_on_execute",

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
index 28d654e..e5d674a 100644
--- 
a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
@@ -20,6 +20,7 @@ limitations under the License.
 from ambari_commons import OSCheck
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.expect import expect
+from resource_management.libraries.functions.copy_tarball import 
get_sysprep_skip_copy_tarballs_hdfs
 
 if OSCheck.is_windows_family():
   from params_windows import *
@@ -29,4 +30,10 @@ else:
 java_home = config['hostLevelParams']['java_home']
 java_version = expect("/hostLevelParams/java_version", int)
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
\ No newline at end of file
+
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
+# By default, copy the tarballs to HDFS. If the cluster is sysprepped, then 
set based on the config.
+sysprep_skip_copy_oozie_share_lib_to_hdfs = False
+if host_sys_prepped:
+  sysprep_skip_copy_oozie_share_lib_to_hdfs = 
default("/cluster-env/sysprep_skip_copy_oozie_share_lib_to_hdfs", False)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
index 36541e7..3aebda0 100644
--- 
a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
@@ -20,11 +20,12 @@ Ambari Agent
 """
 from ambari_commons import OSCheck
 from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.copy_tarball import 
get_sysprep_skip_copy_tarballs_hdfs
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
index e029092..ba4082e 100644
--- 
a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
@@ -110,7 +110,7 @@ class PigServiceCheckLinux(PigServiceCheck):
       resource_created = copy_to_hdfs(
         "tez", params.user_group,
         params.hdfs_user,
-        host_sys_prepped=params.host_sys_prepped)
+        skip=params.sysprep_skip_copy_tarballs_hdfs)
       if resource_created:
         params.HdfsResource(None, action="execute")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
index e63627d..5b0dcea 100644
--- 
a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
@@ -46,7 +46,6 @@ stack_root = Script.get_stack_root()
 
 stack_name = default("/hostLevelParams/stack_name", None)
 version = default("/commandParams/version", None)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
index 842e5ed..c03329c 100644
--- 
a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
@@ -23,14 +23,13 @@ from resource_management.libraries.functions import 
conf_select
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.copy_tarball import 
get_sysprep_skip_copy_tarballs_hdfs
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
-
 # server configurations
 config = Script.get_config()
 
@@ -38,6 +37,7 @@ stack_name = default("/hostLevelParams/stack_name", None)
 
 # New Cluster Stack Version that is defined during the RESTART of a Stack 
Upgrade
 version = default("/commandParams/version", None)
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
 
 #hadoop params
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
index 6bf41b4..5bbf3b4 100644
--- 
a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
@@ -50,8 +50,6 @@ slider_tar_gz = format('{slider_lib_dir}/slider.tar.gz')
 user_group = config['configurations']['cluster-env']['user_group']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
-
 
 kinit_path_local = 
get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', 
None))
 security_enabled = config['configurations']['cluster-env']['security_enabled']

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
index 468dfb1..8582b5e 100644
--- 
a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
@@ -41,9 +41,9 @@ class SliderServiceCheck(Script):
   def service_check(self, env):
     import params
     env.set_params(params)
-    
+
     if params.stack_version_formatted and 
check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, 
params.stack_version_formatted):
-      copy_to_hdfs("slider", params.user_group, params.hdfs_user, 
host_sys_prepped=params.host_sys_prepped)
+      copy_to_hdfs("slider", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)
     
     smokeuser_kinit_cmd = format(
       "{kinit_path_local} -kt {smokeuser_keytab} {smokeuser_principal};") if 
params.security_enabled else ""

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
index 2825f35..154f430 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
@@ -86,7 +86,7 @@ class JobHistoryServer(Script):
           "tez",
           params.user_group,
           params.hdfs_user,
-          host_sys_prepped=params.host_sys_prepped)
+          skip=params.sysprep_skip_copy_tarballs_hdfs)
         if resource_created:
           params.HdfsResource(None, action="execute")
           

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
index 4e525db..3a1c482 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
@@ -26,6 +26,7 @@ from resource_management.libraries.functions.stack_features 
import check_stack_f
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions import conf_select, stack_select
 from resource_management.libraries.functions.get_stack_version import 
get_stack_version
+from resource_management.libraries.functions.copy_tarball import 
get_sysprep_skip_copy_tarballs_hdfs
 from resource_management.libraries.functions.version import 
format_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.default import default
@@ -59,7 +60,8 @@ stack_version_unformatted = 
config['hostLevelParams']['stack_version']
 if upgrade_direction == Direction.DOWNGRADE:
   stack_version_unformatted = 
config['commandParams']['original_stack'].split("-")[1]
 stack_version_formatted = format_stack_version(stack_version_unformatted)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
 
 # New Cluster Stack Version that is defined during the RESTART of a Stack 
Upgrade
 version = default("/commandParams/version", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
index 82a010a..31a296a 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
@@ -40,7 +40,7 @@ def spark_service(name, upgrade_type=None, action=None):
 
     if name == 'jobhistoryserver' and effective_version and 
check_stack_feature(StackFeature.SPARK_16PLUS, effective_version):
       # copy spark-hdp-assembly.jar to hdfs
-      copy_to_hdfs("spark", params.user_group, params.hdfs_user, 
host_sys_prepped=params.host_sys_prepped)
+      copy_to_hdfs("spark", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)
       # create spark history directory
       params.HdfsResource(params.spark_history_dir,
                           type="directory",
@@ -59,7 +59,7 @@ def spark_service(name, upgrade_type=None, action=None):
     # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not 
have a dependency on Tez, so it does not
     # need to copy the tarball, otherwise, copy it.
     if params.stack_version_formatted and 
check_stack_feature(StackFeature.TEZ_FOR_SPARK, params.stack_version_formatted):
-      resource_created = copy_to_hdfs("tez", params.user_group, 
params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+      resource_created = copy_to_hdfs("tez", params.user_group, 
params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
       if resource_created:
         params.HdfsResource(None, action="execute")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
 
b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
index 3a0cff5..154c83d 100755
--- 
a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
@@ -86,7 +86,7 @@ class JobHistoryServer(Script):
           "tez",
           params.user_group,
           params.hdfs_user,
-          host_sys_prepped=params.host_sys_prepped)
+          skip=params.sysprep_skip_copy_tarballs_hdfs)
         if resource_created:
           params.HdfsResource(None, action="execute")
           

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
index e461186..e49756d 100755
--- 
a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
@@ -25,6 +25,7 @@ from resource_management.libraries.functions.stack_features 
import check_stack_f
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions import conf_select, stack_select
 from resource_management.libraries.functions.version import 
format_stack_version
+from resource_management.libraries.functions.copy_tarball import 
get_sysprep_skip_copy_tarballs_hdfs
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
@@ -49,7 +50,8 @@ stack_name = status_params.stack_name
 stack_root = Script.get_stack_root()
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
 
 # New Cluster Stack Version that is defined during the RESTART of a Stack 
Upgrade
 version = default("/commandParams/version", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
 
b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
index 590b299..1cbca8b 100755
--- 
a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
@@ -60,7 +60,7 @@ def spark_service(name, upgrade_type=None, action=None):
       source_dir=params.spark_home+"/jars"
       tmp_archive_file=get_tarball_paths("spark2")[1]
       make_tarfile(tmp_archive_file, source_dir)
-      copy_to_hdfs("spark2", params.user_group, params.hdfs_user, 
host_sys_prepped=params.host_sys_prepped)
+      copy_to_hdfs("spark2", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)
       # create spark history directory
       params.HdfsResource(params.spark_history_dir,
                           type="directory",
@@ -79,7 +79,7 @@ def spark_service(name, upgrade_type=None, action=None):
     # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not 
have a dependency on Tez, so it does not
     # need to copy the tarball, otherwise, copy it.
     if params.stack_version_formatted and 
check_stack_feature(StackFeature.TEZ_FOR_SPARK, params.stack_version_formatted):
-      resource_created = copy_to_hdfs("tez", params.user_group, 
params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+      resource_created = copy_to_hdfs("tez", params.user_group, 
params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
       if resource_created:
         params.HdfsResource(None, action="execute")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
index ec71506..61573ee 100644
--- 
a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
@@ -24,5 +24,4 @@ if OSCheck.is_windows_family():
 else:
   from params_linux import *
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py
index f10a3f3..5d53de8 100644
--- 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py
@@ -25,5 +25,4 @@ if OSCheck.is_windows_family():
 else:
   from params_linux import *
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 retryAble = default("/commandParams/command_retry_enabled", False)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
index 1e591f4..c01dc62 100644
--- 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
@@ -19,11 +19,11 @@ limitations under the License.
 """
 from ambari_commons import OSCheck
 from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.copy_tarball import 
get_sysprep_skip_copy_tarballs_hdfs
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
-
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py
 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py
index 04d8be1..d182efe 100644
--- 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py
+++ 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py
@@ -48,7 +48,7 @@ class TezPreUpgrade(Script):
         params.user_group,
         params.hdfs_user,
         use_upgrading_version_during_upgrade=False,
-        host_sys_prepped=params.host_sys_prepped)
+        skip=params.sysprep_skip_copy_tarballs_hdfs)
       if resource_created:
         params.HdfsResource(None, action="execute")
       else:

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
index af429f6..c903d35 100644
--- 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
@@ -69,7 +69,7 @@ class TezServiceCheckLinux(TezServiceCheck):
     )
 
     if params.stack_version_formatted and 
check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.stack_version_formatted):
-      copy_to_hdfs("tez", params.user_group, params.hdfs_user, 
host_sys_prepped=params.host_sys_prepped)
+      copy_to_hdfs("tez", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)
 
     params.HdfsResource(None, action = "execute")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
index d5bdc6b..8f5d380 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
@@ -80,9 +80,9 @@ class HistoryServerDefault(HistoryServer):
       conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select("hadoop-mapreduce-historyserver", params.version)
       # MC Hammer said, "Can't touch this"
-      copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, 
host_sys_prepped=params.host_sys_prepped)
-      copy_to_hdfs("tez", params.user_group, params.hdfs_user, 
host_sys_prepped=params.host_sys_prepped)
-      copy_to_hdfs("slider", params.user_group, params.hdfs_user, 
host_sys_prepped=params.host_sys_prepped)
+      copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)
+      copy_to_hdfs("tez", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)
+      copy_to_hdfs("slider", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)
       params.HdfsResource(None, action="execute")
 
   def start(self, env, upgrade_type=None):
@@ -96,17 +96,17 @@ class HistoryServerDefault(HistoryServer):
         "mapreduce",
         params.user_group,
         params.hdfs_user,
-        host_sys_prepped=params.host_sys_prepped)
+        skip=params.sysprep_skip_copy_tarballs_hdfs)
       resource_created = copy_to_hdfs(
         "tez",
         params.user_group,
         params.hdfs_user,
-        host_sys_prepped=params.host_sys_prepped) or resource_created
+        skip=params.sysprep_skip_copy_tarballs_hdfs) or resource_created
       resource_created = copy_to_hdfs(
         "slider",
         params.user_group,
         params.hdfs_user,
-        host_sys_prepped=params.host_sys_prepped) or resource_created
+        skip=params.sysprep_skip_copy_tarballs_hdfs) or resource_created
       if resource_created:
         params.HdfsResource(None, action="execute")
     else:

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
index 073e84f..d0ad6f6 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
@@ -21,11 +21,12 @@ Ambari Agent
 """
 from ambari_commons import OSCheck
 from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.copy_tarball import 
get_sysprep_skip_copy_tarballs_hdfs
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
 retryAble = default("/commandParams/command_retry_enabled", False)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
index 81cb175..0d313cc 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
@@ -81,6 +81,51 @@
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
+    <name>sysprep_skip_create_users_and_groups</name>
+    <display-name>Whether to skip creating users and groups in a sysprepped 
cluster</display-name>
+    <value>false</value>
+    <property-type>ADDITIONAL_USER_PROPERTY</property-type>
+    <description>Whether to skip creating users and groups in a sysprepped 
cluster</description>
+    <value-attributes>
+      <overridable>true</overridable>
+      <type>boolean</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>sysprep_skip_copy_fast_jar_hdfs</name>
+    <display-name>Whether to skip copying the fast-hdfs-resource.jar on a sysprepped cluster</display-name>
+    <value>false</value>
+    <description>Whether to skip copying fast-hdfs-resource.jar to /var/lib/ambari-agent/lib/ on a sysprepped cluster, during both fresh install and stack upgrade. This jar is required if tarballs are going to be copied to HDFS, so set to false in that case.</description>
+    <value-attributes>
+      <overridable>true</overridable>
+      <type>boolean</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>sysprep_skip_copy_tarballs_hdfs</name>
+    <display-name>Whether to skip copying the tarballs to HDFS on a sysprepped 
cluster</display-name>
+    <value>false</value>
+    <description>Whether to skip copying the tarballs to HDFS on a sysprepped 
cluster, during both fresh install and stack upgrade</description>
+    <value-attributes>
+      <overridable>true</overridable>
+      <type>boolean</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>sysprep_skip_copy_oozie_share_lib_to_hdfs</name>
+    <display-name>Whether to skip copying the Oozie share lib to HDFS on a 
sysprepped cluster</display-name>
+    <value>false</value>
+    <description>Whether to skip copying the Oozie share lib to HDFS on a 
sysprepped cluster, during both fresh install and stack upgrade</description>
+    <value-attributes>
+      <overridable>true</overridable>
+      <type>boolean</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
     <name>smokeuser</name>
     <display-name>Smoke User</display-name>
     <value>ambari-qa</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index 8b52ca1..f19c321 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -200,6 +200,7 @@ proxyuser_group = 
default("/configurations/hadoop-env/proxyuser_group","users")
 ranger_group = config['configurations']['ranger-env']['ranger_group']
 dfs_cluster_administrators_group = 
config['configurations']['hdfs-site']["dfs.cluster.administrators"]
 
+sysprep_skip_create_users_and_groups = 
default("/configurations/cluster-env/sysprep_skip_create_users_and_groups", 
False)
 ignore_groupsusers_create = 
default("/configurations/cluster-env/ignore_groupsusers_create", False)
 fetch_nonlocal_groups = 
config['configurations']['cluster-env']["fetch_nonlocal_groups"]
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
index 1a7d21a..320872e 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
@@ -30,7 +30,12 @@ def setup_users():
   Creates users before cluster installation
   """
   import params
-  should_create_users_and_groups = not params.host_sys_prepped and not 
params.ignore_groupsusers_create
+
+  should_create_users_and_groups = False
+  if params.host_sys_prepped:
+    should_create_users_and_groups = not 
params.sysprep_skip_create_users_and_groups
+  else:
+    should_create_users_and_groups = not params.ignore_groupsusers_create
 
   if should_create_users_and_groups:
     for group in params.group_list:
@@ -60,19 +65,16 @@ def setup_users():
                create_parents = True,
                cd_access="a",
     )
-    if not params.host_sys_prepped and params.override_uid == "true":
+    if params.override_uid == "true":
       set_uid(params.hbase_user, params.hbase_user_dirs)
     else:
-      Logger.info('Skipping setting uid for hbase user as host is sys 
prepped')      
-      pass
+      Logger.info('Skipping setting uid for hbase user as host is sys prepped')
 
-  if not params.host_sys_prepped:
+  if should_create_users_and_groups:
     if params.has_namenode:
-      if should_create_users_and_groups:
-        create_dfs_cluster_admins()
+      create_dfs_cluster_admins()
     if params.has_tez and params.stack_version_formatted != "" and 
compare_versions(params.stack_version_formatted, '2.3') >= 0:
-      if should_create_users_and_groups:
-        create_tez_am_view_acls()
+      create_tez_am_view_acls()
   else:
     Logger.info('Skipping setting dfs cluster admin and tez view acls as host 
is sys prepped')
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 45eab2f..c678a72 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -33,7 +33,9 @@ from resource_management.libraries.resources.hdfs_resource 
import HdfsResource
 
 config = Script.get_config()
 
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+# Whether to skip copying fast-hdfs-resource.jar to /var/lib/ambari-agent/lib/
+# This is required if tarballs are going to be copied to HDFS, so set to False
+sysprep_skip_copy_fast_jar_hdfs = 
default("/configurations/cluster-env/sysprep_skip_copy_fast_jar_hdfs", False)
 
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
index 8f845d2..2182fd1 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
@@ -61,8 +61,8 @@ def setup_hadoop():
     else:
       tc_owner = params.hdfs_user
       
-    # if WebHDFS is not enabled we need this jar to create hadoop folders.
-    if params.host_sys_prepped:
+    # if WebHDFS is not enabled we need this jar to create hadoop folders and 
copy tarballs to HDFS.
+    if params.sysprep_skip_copy_fast_jar_hdfs:
       print "Skipping copying of fast-hdfs-resource.jar as host is sys prepped"
     elif params.dfs_type == 'HCFS' or not 
WebHDFSUtil.is_webhdfs_available(params.is_webhdfs_enabled, params.default_fs):
       # for source-code of jar goto contrib/fast-hdfs-resource

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py 
b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 3e5dc7e..b3e2494 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -947,8 +947,8 @@ From source with checksum 
150f554beae04f76f814f59549dead8b"""
     self.assertResourceCalled('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 
'set', 'hive-server2', version), sudo=True,)
 
-    copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs", 
host_sys_prepped=False)
-    copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs", 
host_sys_prepped=False)
+    copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs", 
skip=False)
+    copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs", skip=False)
     self.assertEquals(2, copy_to_hdfs_mock.call_count)
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
@@ -990,8 +990,8 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
     self.assertResourceCalled('Execute',
 
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', version), sudo=True,)
-    copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs", host_sys_prepped=False)
-    copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs", host_sys_prepped=False)
+    copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs", skip=False)
+    copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs", skip=False)
     self.assertEquals(2, copy_to_hdfs_mock.call_count)
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py 
b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 9ce5530..643f946 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -873,8 +873,8 @@ class TestHistoryServer(RMFTestCase):
                        mocks_dict = mocks_dict)
 
    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-mapreduce-historyserver', version), sudo=True)
-    self.assertTrue(call("tez", "hadoop", "hdfs", host_sys_prepped=False) in copy_to_hdfs_mock.call_args_list)
-    self.assertTrue(call("slider", "hadoop", "hdfs", host_sys_prepped=False) in copy_to_hdfs_mock.call_args_list)
+    self.assertTrue(call("tez", "hadoop", "hdfs", skip=False) in copy_to_hdfs_mock.call_args_list)
+    self.assertTrue(call("slider", "hadoop", "hdfs", skip=False) in copy_to_hdfs_mock.call_args_list)
 
     # From call to conf_select.get_hadoop_conf_dir()
     self.assert_call_to_get_hadoop_conf_dir()

http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
index 3abc601..d3508e1 100644
--- a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
@@ -155,7 +155,7 @@ class TestPigServiceCheck(RMFTestCase):
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
 
-    copy_to_hdfs_mock.assert_called_with("tez", "hadoop", "hdfs", host_sys_prepped=False)
+    copy_to_hdfs_mock.assert_called_with("tez", "hadoop", "hdfs", skip=False)
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,

Reply via email to