http://git-wip-us.apache.org/repos/asf/ambari/blob/d8003b39/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive.py
deleted file mode 100644
index b860c6e..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive.py
+++ /dev/null
@@ -1,562 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import os
-import glob
-from urlparse import urlparse
-
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import copy_tarball
-from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.core.resources.service import ServiceConfig
-from resource_management.core.resources.system import File, Execute, Directory
-from resource_management.core.source import StaticFile, Template, DownloadSource, InlineTemplate
-from resource_management.core.shell import as_user
-from resource_management.libraries.functions.is_empty import is_empty
-from resource_management.libraries.resources.xml_config import XmlConfig
-from resource_management.libraries.functions.format import format
-from resource_management.core.exceptions import Fail
-from resource_management.core.shell import as_sudo
-from resource_management.core.shell import quote_bash_args
-from resource_management.core.logger import Logger
-from resource_management.core import utils
-from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster, setup_atlas_hook
-from resource_management.libraries.functions.security_commons import update_credential_provider_path
-from resource_management.libraries.functions.lzo_utils import install_lzo_if_needed
-from ambari_commons.constants import SERVICE
-
-from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
-from ambari_commons import OSConst
-
-@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
-def hive(name=None):
-  import params
-
-  XmlConfig("hive-site.xml",
-            conf_dir = params.hive_conf_dir,
-            configurations = params.config['configurations']['hive-site'],
-            owner=params.hive_user,
-            configuration_attributes=params.config['configuration_attributes']['hive-site']
-  )
-
-  if name in ["hiveserver2","metastore"]:
-    # Manually overriding service logon user & password set by the installation package
-    service_name = params.service_map[name]
-    ServiceConfig(service_name,
-                  action="change_user",
-                  username = params.hive_user,
-                  password = Script.get_password(params.hive_user))
-    Execute(format("cmd /c hadoop fs -mkdir -p {hive_warehouse_dir}"), logoutput=True, user=params.hadoop_user)
-
-  if name == 'metastore':
-    if params.init_metastore_schema:
-      check_schema_created_cmd = format('cmd /c "{hive_bin}\\hive.cmd --service schematool -info '
-                                        '-dbType {hive_metastore_db_type} '
-                                        '-userName {hive_metastore_user_name} '
-                                        '-passWord {hive_metastore_user_passwd!p}'
-                                        '&set EXITCODE=%ERRORLEVEL%&exit /B %EXITCODE%"', #cmd "feature", propagate the process exit code manually
-                                        hive_bin=params.hive_bin,
-                                        hive_metastore_db_type=params.hive_metastore_db_type,
-                                        hive_metastore_user_name=params.hive_metastore_user_name,
-                                        hive_metastore_user_passwd=params.hive_metastore_user_passwd)
-      try:
-        Execute(check_schema_created_cmd)
-      except Fail:
-        create_schema_cmd = format('cmd /c {hive_bin}\\hive.cmd --service schematool -initSchema '
-                                   '-dbType {hive_metastore_db_type} '
-                                   '-userName {hive_metastore_user_name} '
-                                   '-passWord {hive_metastore_user_passwd!p}',
-                                   hive_bin=params.hive_bin,
-                                   hive_metastore_db_type=params.hive_metastore_db_type,
-                                   hive_metastore_user_name=params.hive_metastore_user_name,
-                                   hive_metastore_user_passwd=params.hive_metastore_user_passwd)
-        Execute(create_schema_cmd,
-                user = params.hive_user,
-                logoutput=True
-        )
-
-  if name == "hiveserver2":
-    if params.hive_execution_engine == "tez":
-      # Init the tez app dir in hadoop
-      script_file = __file__.replace('/', os.sep)
-      cmd_file = os.path.normpath(os.path.join(os.path.dirname(script_file), "..", "files", "hiveTezSetup.cmd"))
-
-      Execute("cmd /c " + cmd_file, logoutput=True, user=params.hadoop_user)
-
-
-@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
-def hive(name=None):
-  import params
-
-  # LZO if needed should be installed manually
-  # install_lzo_if_needed()
-
-  hive_client_conf_path = format("{stack_root}/current/{component_directory}/conf")
-  # Permissions 644 for conf dir (client) files, and 600 for conf.server
-  mode_identified = 0644 if params.hive_config_dir == hive_client_conf_path else 0600
-  if name == 'hiveserver2':
-    # copy tarball to HDFS feature not supported
-    if not (params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major)):
-      params.HdfsResource(params.webhcat_apps_dir,
-                            type="directory",
-                            action="create_on_execute",
-                            owner=params.webhcat_user,
-                            mode=0755
-                          )
-    
-    # Create webhcat dirs.
-    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-      params.HdfsResource(params.hcat_hdfs_user_dir,
-                           type="directory",
-                           action="create_on_execute",
-                           owner=params.hcat_user,
-                           mode=params.hcat_hdfs_user_mode
-      )
-
-    params.HdfsResource(params.webhcat_hdfs_user_dir,
-                         type="directory",
-                         action="create_on_execute",
-                         owner=params.webhcat_user,
-                         mode=params.webhcat_hdfs_user_mode
-    )
-
-    # ****** Begin Copy Tarballs ******
-    # *********************************
-    #  if copy tarball to HDFS feature  supported copy mapreduce.tar.gz and tez.tar.gz to HDFS
-    if params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major):
-      copy_tarball.copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
-      copy_tarball.copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
-
-    # Always copy pig.tar.gz and hive.tar.gz using the appropriate mode.
-    # This can use a different source and dest location to account
-    copy_tarball.copy_to_hdfs("pig",
-                 params.user_group,
-                 params.hdfs_user,
-                 file_mode=params.tarballs_mode,
-                 custom_source_file=params.pig_tar_source,
-                 custom_dest_file=params.pig_tar_dest_file,
-                 skip=params.sysprep_skip_copy_tarballs_hdfs)
-    copy_tarball.copy_to_hdfs("hive",
-                 params.user_group,
-                 params.hdfs_user,
-                 file_mode=params.tarballs_mode,
-                 custom_source_file=params.hive_tar_source,
-                 custom_dest_file=params.hive_tar_dest_file,
-                 skip=params.sysprep_skip_copy_tarballs_hdfs)
-
-    wildcard_tarballs = ["sqoop", "hadoop_streaming"]
-    for tarball_name in wildcard_tarballs:
-      source_file_pattern = eval("params." + tarball_name + "_tar_source")
-      dest_dir = eval("params." + tarball_name + "_tar_dest_dir")
-
-      if source_file_pattern is None or dest_dir is None:
-        continue
-
-      source_files = glob.glob(source_file_pattern) if "*" in source_file_pattern else [source_file_pattern]
-      for source_file in source_files:
-        src_filename = os.path.basename(source_file)
-        dest_file = os.path.join(dest_dir, src_filename)
-
-        copy_tarball.copy_to_hdfs(tarball_name,
-                     params.user_group,
-                     params.hdfs_user,
-                     file_mode=params.tarballs_mode,
-                     custom_source_file=source_file,
-                     custom_dest_file=dest_file,
-                     skip=params.sysprep_skip_copy_tarballs_hdfs)
-    # ******* End Copy Tarballs *******
-    # *********************************
-    
-    # if warehouse directory is in DFS
-    if not params.whs_dir_protocol or params.whs_dir_protocol == urlparse(params.default_fs).scheme:
-      # Create Hive Metastore Warehouse Dir
-      params.HdfsResource(params.hive_apps_whs_dir,
-                           type="directory",
-                            action="create_on_execute",
-                            owner=params.hive_user,
-                            group=params.user_group,
-                            mode=params.hive_apps_whs_mode
-      )
-    else:
-      Logger.info(format("Not creating warehouse directory '{hive_apps_whs_dir}', as the location is not in DFS."))
-
-    # Create Hive User Dir
-    params.HdfsResource(params.hive_hdfs_user_dir,
-                         type="directory",
-                          action="create_on_execute",
-                          owner=params.hive_user,
-                          mode=params.hive_hdfs_user_mode
-    )
-    
-    if not is_empty(params.hive_exec_scratchdir) and not urlparse(params.hive_exec_scratchdir).path.startswith("/tmp"):
-      params.HdfsResource(params.hive_exec_scratchdir,
-                           type="directory",
-                           action="create_on_execute",
-                           owner=params.hive_user,
-                           group=params.hdfs_user,
-                           mode=0777) # Hive expects this dir to be writeable by everyone as it is used as a temp dir
-    if params.hive_repl_cmrootdir is not None:
-      params.HdfsResource(params.hive_repl_cmrootdir,
-                          type = "directory",
-                          action = "create_on_execute",
-                          owner = params.hive_user,
-                          group=params.user_group,
-                          mode = 01777)
-    if params.hive_repl_rootdir is not None:
-      params.HdfsResource(params.hive_repl_rootdir,
-                          type = "directory",
-                          action = "create_on_execute",
-                          owner = params.hive_user,
-                          group=params.user_group,
-                          mode = 0700)
-
-    params.HdfsResource(None, action="execute")
-
-  Directory(params.hive_etc_dir_prefix,
-            mode=0755
-  )
-
-  # We should change configurations for client as well as for server.
-  # The reason is that stale-configs are service-level, not component.
-  Logger.info("Directories to fill with configs: %s" % str(params.hive_conf_dirs_list))
-  for conf_dir in params.hive_conf_dirs_list:
-    fill_conf_dir(conf_dir)
-
-  params.hive_site_config = update_credential_provider_path(params.hive_site_config,
-                                                     'hive-site',
-                                                     os.path.join(params.hive_conf_dir, 'hive-site.jceks'),
-                                                     params.hive_user,
-                                                     params.user_group
-                                                     )
-  XmlConfig("hive-site.xml",
-            conf_dir=params.hive_config_dir,
-            configurations=params.hive_site_config,
-            configuration_attributes=params.config['configuration_attributes']['hive-site'],
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=mode_identified)
-
-  # Generate atlas-application.properties.xml file
-  if params.enable_atlas_hook:
-    atlas_hook_filepath = os.path.join(params.hive_config_dir, params.atlas_hook_filename)
-    setup_atlas_hook(SERVICE.HIVE, params.hive_atlas_application_properties, atlas_hook_filepath, params.hive_user, params.user_group)
-
-  if name == 'hiveserver2':
-    XmlConfig("hiveserver2-site.xml",
-              conf_dir=params.hive_server_conf_dir,
-              configurations=params.config['configurations']['hiveserver2-site'],
-              configuration_attributes=params.config['configuration_attributes']['hiveserver2-site'],
-              owner=params.hive_user,
-              group=params.user_group,
-              mode=0600)
-
-  if params.hive_metastore_site_supported and name == 'metastore':
-    XmlConfig("hivemetastore-site.xml",
-              conf_dir=params.hive_server_conf_dir,
-              configurations=params.config['configurations']['hivemetastore-site'],
-              configuration_attributes=params.config['configuration_attributes']['hivemetastore-site'],
-              owner=params.hive_user,
-              group=params.user_group,
-              mode=0600)
-
-  File(format("{hive_config_dir}/hive-env.sh"),
-       owner=params.hive_user,
-       group=params.user_group,
-       mode=mode_identified,
-       content=InlineTemplate(params.hive_env_sh_template)
-  )
-
-  # On some OS this folder could be not exists, so we will create it before pushing there files
-  Directory(params.limits_conf_dir,
-            create_parents = True,
-            owner='root',
-            group='root'
-            )
-
-  File(os.path.join(params.limits_conf_dir, 'hive.conf'),
-       owner='root',
-       group='root',
-       mode=0644,
-       content=Template("hive.conf.j2")
-       )
-  if params.security_enabled:
-    File(os.path.join(params.hive_config_dir, 'zkmigrator_jaas.conf'),
-         owner=params.hive_user,
-         group=params.user_group,
-         content=Template("zkmigrator_jaas.conf.j2")
-         )
-
-
-  if name == 'metastore' or name == 'hiveserver2':
-    if params.hive_jdbc_target is not None and not os.path.exists(params.hive_jdbc_target):
-      jdbc_connector(params.hive_jdbc_target, params.hive_previous_jdbc_jar)
-    if params.hive2_jdbc_target is not None and not os.path.exists(params.hive2_jdbc_target):
-      jdbc_connector(params.hive2_jdbc_target, params.hive2_previous_jdbc_jar)
-
-  File(format("/usr/lib/ambari-agent/{check_db_connection_jar_name}"),
-       content = DownloadSource(format("{jdk_location}{check_db_connection_jar_name}")),
-       mode = 0644,
-  )
-
-  if name == 'metastore':
-    File(os.path.join(params.hive_server_conf_dir, "hadoop-metrics2-hivemetastore.properties"),
-         owner=params.hive_user,
-         group=params.user_group,
-         mode=0600,
-         content=Template("hadoop-metrics2-hivemetastore.properties.j2")
-    )
-
-    File(params.start_metastore_path,
-         mode=0755,
-         content=StaticFile('startMetastore.sh')
-    )
-
-    if not is_empty(params.hive_exec_scratchdir):
-       dirPathStr = urlparse(params.hive_exec_scratchdir).path
-       pathComponents = dirPathStr.split("/")
-       if dirPathStr.startswith("/tmp") and len(pathComponents) > 2:   
-         Directory (params.hive_exec_scratchdir, 
-                           owner = params.hive_user,
-                           create_parents = True,
-                           mode=0777)
-
-    if params.hive_repl_cmrootdir is not None:
-      params.HdfsResource(params.hive_repl_cmrootdir,
-                        type = "directory",
-                        action = "create_on_execute",
-                        owner = params.hive_user,
-                        group=params.user_group,
-                        mode = 01777)
-    if params.hive_repl_rootdir is not None:
-      params.HdfsResource(params.hive_repl_rootdir,
-                          type = "directory",
-                          action = "create_on_execute",
-                          owner = params.hive_user,
-                          group=params.user_group,
-                          mode = 0700)
-    if params.hive_repl_cmrootdir is not None or params.hive_repl_rootdir is not None:
-      params.HdfsResource(None, action="execute")
-
-  elif name == 'hiveserver2':
-    File(params.start_hiveserver2_path,
-         mode=0755,
-         content=Template(format('{start_hiveserver2_script}'))
-    )
-
-    File(os.path.join(params.hive_server_conf_dir, "hadoop-metrics2-hiveserver2.properties"),
-         owner=params.hive_user,
-         group=params.user_group,
-         mode=0600,
-         content=Template("hadoop-metrics2-hiveserver2.properties.j2")
-    )
-
-  if name != "client":
-    Directory(params.hive_pid_dir,
-              create_parents = True,
-              cd_access='a',
-              owner=params.hive_user,
-              group=params.user_group,
-              mode=0755)
-    Directory(params.hive_log_dir,
-              create_parents = True,
-              cd_access='a',
-              owner=params.hive_user,
-              group=params.user_group,
-              mode=0755)
-    Directory(params.hive_var_lib,
-              create_parents = True,
-              cd_access='a',
-              owner=params.hive_user,
-              group=params.user_group,
-              mode=0755)
-
-def create_metastore_schema():
-  import params
-
-  create_schema_cmd = format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
-                             "{hive_schematool_bin}/schematool -initSchema "
-                             "-dbType {hive_metastore_db_type} "
-                             "-userName {hive_metastore_user_name} "
-                             "-passWord {hive_metastore_user_passwd!p} -verbose")
-
-  check_schema_created_cmd = as_user(format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
-                                    "{hive_schematool_bin}/schematool -info "
-                                    "-dbType {hive_metastore_db_type} "
-                                    "-userName {hive_metastore_user_name} "
-                                    "-passWord {hive_metastore_user_passwd!p} -verbose"), params.hive_user)
-
-  # HACK: in cases with quoted passwords and as_user (which does the quoting as well) !p won't work for hiding passwords.
-  # Fixing it with the hack below:
-  quoted_hive_metastore_user_passwd = quote_bash_args(quote_bash_args(params.hive_metastore_user_passwd))
-  if quoted_hive_metastore_user_passwd[0] == "'" and quoted_hive_metastore_user_passwd[-1] == "'" \
-      or quoted_hive_metastore_user_passwd[0] == '"' and quoted_hive_metastore_user_passwd[-1] == '"':
-    quoted_hive_metastore_user_passwd = quoted_hive_metastore_user_passwd[1:-1]
-  Logger.sensitive_strings[repr(check_schema_created_cmd)] = repr(check_schema_created_cmd.replace(
-      format("-passWord {quoted_hive_metastore_user_passwd}"), "-passWord " + utils.PASSWORDS_HIDE_STRING))
-
-  Execute(create_schema_cmd,
-          not_if = check_schema_created_cmd,
-          user = params.hive_user
-  )
-
-"""
-Writes configuration files required by Hive.
-"""
-def fill_conf_dir(component_conf_dir):
-  import params
-  hive_client_conf_path = os.path.realpath(format("{stack_root}/current/{component_directory}/conf"))
-  component_conf_dir = os.path.realpath(component_conf_dir)
-  mode_identified_for_file = 0644 if component_conf_dir == hive_client_conf_path else 0600
-  mode_identified_for_dir = 0755 if component_conf_dir == hive_client_conf_path else 0700
-
-  Directory(component_conf_dir,
-            owner=params.hive_user,
-            group=params.user_group,
-            create_parents = True,
-            mode=mode_identified_for_dir
-  )
-
-
-  if 'mapred-site' in params.config['configurations']:
-    XmlConfig("mapred-site.xml",
-              conf_dir=component_conf_dir,
-              configurations=params.config['configurations']['mapred-site'],
-              configuration_attributes=params.config['configuration_attributes']['mapred-site'],
-              owner=params.hive_user,
-              group=params.user_group,
-              mode=mode_identified_for_file)
-
-  File(format("{component_conf_dir}/hive-default.xml.template"),
-       owner=params.hive_user,
-       group=params.user_group,
-       mode=mode_identified_for_file
-  )
-
-  File(format("{component_conf_dir}/hive-env.sh.template"),
-       owner=params.hive_user,
-       group=params.user_group,
-       mode=mode_identified_for_file
-  )
-
-  # Create hive-log4j.properties and hive-exec-log4j.properties
-  # in /etc/hive/conf and not in /etc/hive2/conf
-  if params.log4j_version == '1':
-    log4j_exec_filename = 'hive-exec-log4j.properties'
-    if (params.log4j_exec_props != None):
-      File(format("{component_conf_dir}/{log4j_exec_filename}"),
-           mode=mode_identified_for_file,
-           group=params.user_group,
-           owner=params.hive_user,
-           content=InlineTemplate(params.log4j_exec_props)
-      )
-    elif (os.path.exists("{component_conf_dir}/{log4j_exec_filename}.template")):
-      File(format("{component_conf_dir}/{log4j_exec_filename}"),
-           mode=mode_identified_for_file,
-           group=params.user_group,
-           owner=params.hive_user,
-           content=StaticFile(format("{component_conf_dir}/{log4j_exec_filename}.template"))
-      )
-
-    log4j_filename = 'hive-log4j.properties'
-    if (params.log4j_props != None):
-      File(format("{component_conf_dir}/{log4j_filename}"),
-           mode=mode_identified_for_file,
-           group=params.user_group,
-           owner=params.hive_user,
-           content=InlineTemplate(params.log4j_props)
-      )
-    elif (os.path.exists("{component_conf_dir}/{log4j_filename}.template")):
-      File(format("{component_conf_dir}/{log4j_filename}"),
-           mode=mode_identified_for_file,
-           group=params.user_group,
-           owner=params.hive_user,
-           content=StaticFile(format("{component_conf_dir}/{log4j_filename}.template"))
-      )
-
-  if params.parquet_logging_properties is not None:
-    File(format("{component_conf_dir}/parquet-logging.properties"),
-      mode = mode_identified_for_file,
-      group = params.user_group,
-      owner = params.hive_user,
-      content = params.parquet_logging_properties)
-
-
-def jdbc_connector(target, hive_previous_jdbc_jar):
-  """
-  Shared by Hive Batch, Hive Metastore, and Hive Interactive
-  :param target: Target of jdbc jar name, which could be for any of the components above.
-  """
-  import params
-
-  if not params.jdbc_jar_name:
-    return
-
-  if params.hive_jdbc_driver in params.hive_jdbc_drivers_list and params.hive_use_existing_db:
-    environment = {
-      "no_proxy": format("{ambari_server_hostname}")
-    }
-
-    if hive_previous_jdbc_jar and os.path.isfile(hive_previous_jdbc_jar):
-      File(hive_previous_jdbc_jar, action='delete')
-
-    # TODO: should be removed after ranger_hive_plugin will not provide jdbc
-    if params.prepackaged_jdbc_name != params.jdbc_jar_name:
-      Execute(('rm', '-f', params.prepackaged_ojdbc_symlink),
-              path=["/bin", "/usr/bin/"],
-              sudo = True)
-    
-    File(params.downloaded_custom_connector,
-         content = DownloadSource(params.driver_curl_source))
-
-    # maybe it will be more correcvly to use db type
-    if params.sqla_db_used:
-      untar_sqla_type2_driver = ('tar', '-xvf', params.downloaded_custom_connector, '-C', params.tmp_dir)
-
-      Execute(untar_sqla_type2_driver, sudo = True)
-
-      Execute(format("yes | {sudo} cp {jars_path_in_archive} {hive_lib}"))
-
-      Directory(params.jdbc_libs_dir,
-                create_parents = True)
-
-      Execute(format("yes | {sudo} cp {libs_path_in_archive} {jdbc_libs_dir}"))
-
-      Execute(format("{sudo} chown -R {hive_user}:{user_group} {hive_lib}/*"))
-
-    else:
-      Execute(('cp', '--remove-destination', params.downloaded_custom_connector, target),
-            #creates=target, TODO: uncomment after ranger_hive_plugin will not provide jdbc
-            path=["/bin", "/usr/bin/"],
-            sudo = True)
-
-  else:
-    #for default hive db (Mysql)
-    Execute(('cp', '--remove-destination', format('/usr/share/java/{jdbc_jar_name}'), target),
-            #creates=target, TODO: uncomment after ranger_hive_plugin will not provide jdbc
-            path=["/bin", "/usr/bin/"],
-            sudo=True
-    )
-  pass
-
-  File(target,
-       mode = 0644,
-  )

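For context on the deleted hive.py above: it defines two functions named hive(), one per OS family, and relies on the @OsFamilyFuncImpl decorator to pick the right one at call time. A simplified, self-contained sketch of that dispatch idea follows; it is illustrative only, not the ambari_commons implementation, and the return strings are made up.

# Simplified per-OS-family dispatch in the spirit of @OsFamilyFuncImpl.
# Illustration only; ambari_commons resolves implementations differently.
import platform

_IMPLS = {}

def os_family_impl(os_family):
  """Register a function as the implementation of its name for one OS family."""
  def register(func):
    _IMPLS[(func.__name__, os_family)] = func
    return func
  return register

def dispatch(name, *args, **kwargs):
  """Call the implementation registered for the current OS family."""
  family = "winsrv" if platform.system() == "Windows" else "default"
  impl = _IMPLS.get((name, family)) or _IMPLS[(name, "default")]
  return impl(*args, **kwargs)

@os_family_impl("winsrv")
def hive(name=None):
  return "windows setup for %s" % name

@os_family_impl("default")
def hive(name=None):
  return "linux setup for %s" % name

print(dispatch("hive", name="hiveserver2"))  # -> "linux setup for hiveserver2" on Linux
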
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8003b39/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_client.py
deleted file mode 100644
index d604330..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_client.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import sys
-from resource_management import *
-from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions.stack_features import check_stack_feature
-from hive import hive
-from ambari_commons.os_family_impl import OsFamilyImpl
-from ambari_commons import OSConst
-from resource_management.core.exceptions import ClientComponentHasNoStatus
-
-class HiveClient(Script):
-  def install(self, env):
-    import params
-    self.install_packages(env)
-    self.configure(env)
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hive(name='client')
-
-
-@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
-class HiveClientWindows(HiveClient):
-  pass
-
-
-@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
-class HiveClientDefault(HiveClient):
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    Logger.info("Executing Hive client Stack Upgrade pre-restart")
-
-    import params
-    env.set_params(params)
-    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      stack_select.select_packages(params.version)
-
-
-if __name__ == "__main__":
-  HiveClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/d8003b39/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_interactive.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_interactive.py
deleted file mode 100644
index 73e6da4..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_interactive.py
+++ /dev/null
@@ -1,360 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-# Python Imports
-import os
-import glob
-from urlparse import urlparse
-
-# Resource Management and Common Imports
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
-from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions.version import compare_versions
-from resource_management.core.resources.service import ServiceConfig
-from resource_management.core.resources.system import File, Execute, Directory
-from resource_management.core.source import StaticFile, Template, DownloadSource, InlineTemplate
-from resource_management.core.shell import as_user
-from resource_management.libraries.functions.is_empty import is_empty
-from resource_management.libraries.resources.xml_config import XmlConfig
-from resource_management.libraries.functions.format import format
-from resource_management.core.exceptions import Fail
-from resource_management.core.shell import as_sudo
-from resource_management.core.shell import quote_bash_args
-from resource_management.core.logger import Logger
-from resource_management.core import utils
-from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster, setup_atlas_hook
-from resource_management.libraries.functions.security_commons import update_credential_provider_path
-from ambari_commons.constants import SERVICE
-
-from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
-from ambari_commons import OSConst
-from hive import fill_conf_dir, jdbc_connector
-
-
-@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
-def hive_interactive(name=None):
-  pass
-
-"""
-Sets up the configs, jdbc connection and tarball copy to HDFS for Hive Server Interactive.
-"""
-@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
-def hive_interactive(name=None):
-  import params
-  MB_TO_BYTES = 1048576
-
-  # if warehouse directory is in DFS
-  if not params.whs_dir_protocol or params.whs_dir_protocol == urlparse(params.default_fs).scheme:
-    # Create Hive Metastore Warehouse Dir
-    params.HdfsResource(params.hive_apps_whs_dir,
-                        type="directory",
-                        action="create_on_execute",
-                        owner=params.hive_user,
-                        group=params.user_group,
-                        mode=params.hive_apps_whs_mode
-                        )
-  else:
-    Logger.info(format("Not creating warehouse directory '{hive_apps_whs_dir}', as the location is not in DFS."))
-
-  # Create Hive User Dir
-  params.HdfsResource(params.hive_hdfs_user_dir,
-                      type="directory",
-                      action="create_on_execute",
-                      owner=params.hive_user,
-                      mode=params.hive_hdfs_user_mode
-                      )
-
-  # list of properties that should be excluded from the config
-  # this approach is a compromise against adding a dedicated config
-  # type for hive_server_interactive or needed config groups on a
-  # per component basis
-  exclude_list = ['hive.enforce.bucketing',
-                  'hive.enforce.sorting']
-
-  # List of configs to be excluded from hive2 client, but present in Hive2 server.
-  exclude_list_for_hive2_client = ['javax.jdo.option.ConnectionPassword',
-                                   'hadoop.security.credential.provider.path']
-
-  # Copy Tarballs in HDFS.
-  if params.stack_version_formatted_major and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version_formatted_major):
-    resource_created = copy_to_hdfs("tez_hive2",
-                 params.user_group,
-                 params.hdfs_user,
-                 file_mode=params.tarballs_mode,
-                 skip=params.sysprep_skip_copy_tarballs_hdfs)
-
-    if resource_created:
-      params.HdfsResource(None, action="execute")
-
-  Directory(params.hive_interactive_etc_dir_prefix,
-            mode=0755
-            )
-
-  Logger.info("Directories to fill with configs: %s" % str(params.hive_conf_dirs_list))
-  for conf_dir in params.hive_conf_dirs_list:
-    fill_conf_dir(conf_dir)
-
-  '''
-  As hive2/hive-site.xml only contains the new + the changed props compared to hive/hive-site.xml,
-  we need to merge hive/hive-site.xml and hive2/hive-site.xml and store it in hive2/hive-site.xml.
-  '''
-  merged_hive_interactive_site = {}
-  merged_hive_interactive_site.update(params.config['configurations']['hive-site'])
-  merged_hive_interactive_site.update(params.config['configurations']['hive-interactive-site'])
-  for item in exclude_list:
-    if item in merged_hive_interactive_site.keys():
-      del merged_hive_interactive_site[item]
-
-  '''
-  Config 'hive.llap.io.memory.size' calculated value in stack_advisor is in MB as of now. We need to
-  convert it to bytes before we write it down to config file.
-  '''
-  if 'hive.llap.io.memory.size' in merged_hive_interactive_site.keys():
-    hive_llap_io_mem_size_in_mb = merged_hive_interactive_site.get("hive.llap.io.memory.size")
-    hive_llap_io_mem_size_in_bytes = long(hive_llap_io_mem_size_in_mb) * MB_TO_BYTES
-    merged_hive_interactive_site['hive.llap.io.memory.size'] = hive_llap_io_mem_size_in_bytes
-    Logger.info("Converted 'hive.llap.io.memory.size' value from '{0} MB' to '{1} Bytes' before writing "
-                "it to config file.".format(hive_llap_io_mem_size_in_mb, hive_llap_io_mem_size_in_bytes))
-
-  '''
-  Hive2 doesn't have support for Atlas, we need to remove the Hook 'org.apache.atlas.hive.hook.HiveHook',
-  which would have come in config 'hive.exec.post.hooks' during the site merge logic, if Atlas is installed.
-  '''
-  # Generate atlas-application.properties.xml file
-  if params.enable_atlas_hook and params.stack_supports_atlas_hook_for_hive_interactive:
-    Logger.info("Setup for Atlas Hive2 Hook started.")
-
-    atlas_hook_filepath = os.path.join(params.hive_server_interactive_conf_dir, params.atlas_hook_filename)
-    setup_atlas_hook(SERVICE.HIVE, params.hive_atlas_application_properties, atlas_hook_filepath, params.hive_user, params.user_group)
-
-    Logger.info("Setup for Atlas Hive2 Hook done.")
-  else:
-    # Required for HDP 2.5 stacks
-    Logger.info("Skipping setup for Atlas Hook, as it is disabled/ not supported.")
-    remove_atlas_hook_if_exists(merged_hive_interactive_site)
-
-  '''
-  As tez_hive2/tez-site.xml only contains the new + the changed props compared to tez/tez-site.xml,
-  we need to merge tez/tez-site.xml and tez_hive2/tez-site.xml and store it in tez_hive2/tez-site.xml.
-  '''
-  merged_tez_interactive_site = {}
-  if 'tez-site' in params.config['configurations']:
-    merged_tez_interactive_site.update(params.config['configurations']['tez-site'])
-    Logger.info("Retrieved 'tez/tez-site' for merging with 'tez_hive2/tez-interactive-site'.")
-  else:
-    Logger.error("Tez's 'tez-site' couldn't be retrieved from passed-in configurations.")
-
-  merged_tez_interactive_site.update(params.config['configurations']['tez-interactive-site'])
-  XmlConfig("tez-site.xml",
-            conf_dir = params.tez_interactive_config_dir,
-            configurations = merged_tez_interactive_site,
-            configuration_attributes=params.config['configuration_attributes']['tez-interactive-site'],
-            owner = params.tez_interactive_user,
-            group = params.user_group,
-            mode = 0664)
-
-  '''
-  Merge properties from hiveserver2-interactive-site into hiveserver2-site
-  '''
-  merged_hiveserver2_interactive_site = {}
-  if 'hiveserver2-site' in params.config['configurations']:
-    merged_hiveserver2_interactive_site.update(params.config['configurations']['hiveserver2-site'])
-    Logger.info("Retrieved 'hiveserver2-site' for merging with 'hiveserver2-interactive-site'.")
-  else:
-    Logger.error("'hiveserver2-site' couldn't be retrieved from passed-in configurations.")
-  merged_hiveserver2_interactive_site.update(params.config['configurations']['hiveserver2-interactive-site'])
-
-
-  # Create config files under /etc/hive2/conf and /etc/hive2/conf/conf.server:
-  #   hive-site.xml
-  #   hive-env.sh
-  #   llap-daemon-log4j2.properties
-  #   llap-cli-log4j2.properties
-  #   hive-log4j2.properties
-  #   hive-exec-log4j2.properties
-  #   beeline-log4j2.properties
-
-  hive2_conf_dirs_list = params.hive_conf_dirs_list
-  hive2_client_conf_path = format("{stack_root}/current/{component_directory}/conf")
-
-  # Making copy of 'merged_hive_interactive_site' in 'merged_hive_interactive_site_copy', and deleting 'javax.jdo.option.ConnectionPassword'
-  # config from there, as Hive2 client shouldn't have that config.
-  merged_hive_interactive_site_copy = merged_hive_interactive_site.copy()
-  for item in exclude_list_for_hive2_client:
-    if item in merged_hive_interactive_site.keys():
-      del merged_hive_interactive_site_copy[item]
-
-  for conf_dir in hive2_conf_dirs_list:
-      mode_identified = 0644 if conf_dir == hive2_client_conf_path else 0600
-      if conf_dir == hive2_client_conf_path:
-        XmlConfig("hive-site.xml",
-                  conf_dir=conf_dir,
-                  configurations=merged_hive_interactive_site_copy,
-                  configuration_attributes=params.config['configuration_attributes']['hive-interactive-site'],
-                  owner=params.hive_user,
-                  group=params.user_group,
-                  mode=0644)
-      else:
-        merged_hive_interactive_site = update_credential_provider_path(merged_hive_interactive_site,
-                                                                  'hive-site',
-                                                                  os.path.join(conf_dir, 'hive-site.jceks'),
-                                                                  params.hive_user,
-                                                                  params.user_group
-        )
-        XmlConfig("hive-site.xml",
-                  conf_dir=conf_dir,
-                  configurations=merged_hive_interactive_site,
-                  configuration_attributes=params.config['configuration_attributes']['hive-interactive-site'],
-                  owner=params.hive_user,
-                  group=params.user_group,
-                  mode=0600)
-      XmlConfig("hiveserver2-site.xml",
-                conf_dir=conf_dir,
-                configurations=merged_hiveserver2_interactive_site,
-                configuration_attributes=params.config['configuration_attributes']['hiveserver2-interactive-site'],
-                owner=params.hive_user,
-                group=params.user_group,
-                mode=mode_identified)
-
-      hive_server_interactive_conf_dir = conf_dir
-
-      File(format("{hive_server_interactive_conf_dir}/hive-env.sh"),
-           owner=params.hive_user,
-           group=params.user_group,
-           mode=mode_identified,
-           content=InlineTemplate(params.hive_interactive_env_sh_template))
-
-      llap_daemon_log4j_filename = 'llap-daemon-log4j2.properties'
-      File(format("{hive_server_interactive_conf_dir}/{llap_daemon_log4j_filename}"),
-           mode=mode_identified,
-           group=params.user_group,
-           owner=params.hive_user,
-           content=InlineTemplate(params.llap_daemon_log4j))
-
-      llap_cli_log4j2_filename = 'llap-cli-log4j2.properties'
-      File(format("{hive_server_interactive_conf_dir}/{llap_cli_log4j2_filename}"),
-           mode=mode_identified,
-           group=params.user_group,
-           owner=params.hive_user,
-           content=InlineTemplate(params.llap_cli_log4j2))
-
-      hive_log4j2_filename = 'hive-log4j2.properties'
-      File(format("{hive_server_interactive_conf_dir}/{hive_log4j2_filename}"),
-         mode=mode_identified,
-         group=params.user_group,
-         owner=params.hive_user,
-         content=InlineTemplate(params.hive_log4j2))
-
-      hive_exec_log4j2_filename = 'hive-exec-log4j2.properties'
-      File(format("{hive_server_interactive_conf_dir}/{hive_exec_log4j2_filename}"),
-         mode=mode_identified,
-         group=params.user_group,
-         owner=params.hive_user,
-         content=InlineTemplate(params.hive_exec_log4j2))
-
-      beeline_log4j2_filename = 'beeline-log4j2.properties'
-      File(format("{hive_server_interactive_conf_dir}/{beeline_log4j2_filename}"),
-         mode=mode_identified,
-         group=params.user_group,
-         owner=params.hive_user,
-         content=InlineTemplate(params.beeline_log4j2))
-
-      File(os.path.join(hive_server_interactive_conf_dir, "hadoop-metrics2-hiveserver2.properties"),
-           owner=params.hive_user,
-           group=params.user_group,
-           mode=mode_identified,
-           content=Template("hadoop-metrics2-hiveserver2.properties.j2")
-           )
-
-      File(format("{hive_server_interactive_conf_dir}/hadoop-metrics2-llapdaemon.properties"),
-           owner=params.hive_user,
-           group=params.user_group,
-           mode=mode_identified,
-           content=Template("hadoop-metrics2-llapdaemon.j2"))
-
-      File(format("{hive_server_interactive_conf_dir}/hadoop-metrics2-llaptaskscheduler.properties"),
-           owner=params.hive_user,
-           group=params.user_group,
-           mode=mode_identified,
-           content=Template("hadoop-metrics2-llaptaskscheduler.j2"))
-
-
-  # On some OS this folder could be not exists, so we will create it before pushing there files
-  Directory(params.limits_conf_dir,
-            create_parents = True,
-            owner='root',
-            group='root')
-
-  File(os.path.join(params.limits_conf_dir, 'hive.conf'),
-       owner='root',
-       group='root',
-       mode=0644,
-       content=Template("hive.conf.j2"))
-
-  if not os.path.exists(params.target_hive_interactive):
-    jdbc_connector(params.target_hive_interactive, params.hive_intaractive_previous_jdbc_jar)
-
-  File(format("/usr/lib/ambari-agent/{check_db_connection_jar_name}"),
-       content = DownloadSource(format("{jdk_location}{check_db_connection_jar_name}")),
-       mode = 0644)
-  File(params.start_hiveserver2_interactive_path,
-       mode=0755,
-       content=Template(format('{start_hiveserver2_interactive_script}')))
-
-  Directory(params.hive_pid_dir,
-            create_parents=True,
-            cd_access='a',
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0755)
-  Directory(params.hive_log_dir,
-            create_parents=True,
-            cd_access='a',
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0755)
-  Directory(params.hive_interactive_var_lib,
-            create_parents=True,
-            cd_access='a',
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0755)
-
-"""
-Remove 'org.apache.atlas.hive.hook.HiveHook' value from Hive2/hive-site.xml config 'hive.exec.post.hooks', if exists.
-"""
-def remove_atlas_hook_if_exists(merged_hive_interactive_site):
-  if 'hive.exec.post.hooks' in merged_hive_interactive_site.keys():
-    existing_hive_exec_post_hooks = merged_hive_interactive_site.get('hive.exec.post.hooks')
-    if existing_hive_exec_post_hooks:
-      hook_splits = existing_hive_exec_post_hooks.split(",")
-      updated_hook_splits = [hook for hook in hook_splits if not hook.strip() == 'org.apache.atlas.hive.hook.HiveHook']
-      updated_hooks_str = ",".join((str(hook)).strip() for hook in updated_hook_splits)
-      if updated_hooks_str != existing_hive_exec_post_hooks:
-        merged_hive_interactive_site['hive.exec.post.hooks'] = updated_hooks_str
-        Logger.info("Updated Hive2/hive-site.xml 'hive.exec.post.hooks' value from : '{0}' to : '{1}'"
-                    .format(existing_hive_exec_post_hooks, updated_hooks_str))
-      else:
-        Logger.info("No change done to Hive2/hive-site.xml 'hive.exec.post.hooks' value.")
-  else:
-      Logger.debug("'hive.exec.post.hooks' doesn't exist in Hive2/hive-site.xml")

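The merge logic in hive_interactive.py above is plain dictionary work: overlay hive-interactive-site on hive-site, drop the excluded keys, then convert hive.llap.io.memory.size from MB to bytes. A minimal sketch of those steps follows; the property values are made up for illustration.

# Minimal sketch of the merge/exclude/convert steps from hive_interactive();
# the property values below are invented.
MB_TO_BYTES = 1048576

hive_site = {'hive.enforce.bucketing': 'true', 'hive.execution.engine': 'mr'}
hive_interactive_site = {'hive.execution.engine': 'tez', 'hive.llap.io.memory.size': '4096'}
exclude_list = ['hive.enforce.bucketing', 'hive.enforce.sorting']

merged = {}
merged.update(hive_site)
merged.update(hive_interactive_site)   # interactive values win on conflict

for key in exclude_list:
  merged.pop(key, None)                # drop props HiveServer2 Interactive must not see

if 'hive.llap.io.memory.size' in merged:
  # stack_advisor supplies this value in MB; the config file expects bytes
  merged['hive.llap.io.memory.size'] = int(merged['hive.llap.io.memory.size']) * MB_TO_BYTES

print(merged)   # {'hive.execution.engine': 'tez', 'hive.llap.io.memory.size': 4294967296}
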
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8003b39/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_metastore.py
deleted file mode 100644
index dc8efa3..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_metastore.py
+++ /dev/null
@@ -1,203 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import os
-
-from resource_management.core.logger import Logger
-from resource_management.core.resources.system import Execute, Directory
-from resource_management.libraries.script import Script
-from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions.constants import Direction
-from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions import upgrade_summary
-from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions.security_commons import cached_kinit_executor
-from resource_management.core.resources.system import File
-from setup_ranger_hive import setup_ranger_hive_metastore_service
-
-from hive import create_metastore_schema, hive, jdbc_connector
-from hive_service import hive_service
-from ambari_commons.os_family_impl import OsFamilyImpl
-from ambari_commons import OSConst
-
-# the legacy conf.server location in previous stack versions
-LEGACY_HIVE_SERVER_CONF = "/etc/hive/conf.server"
-
-class HiveMetastore(Script):
-  def install(self, env):
-    import params
-    self.install_packages(env)
-
-
-  def start(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-
-    # writing configurations on start required for securtity
-    self.configure(env)
-    if params.init_metastore_schema:
-      create_metastore_schema()
-
-    hive_service('metastore', action='start', upgrade_type=upgrade_type)
-
-    # below function call is used for cluster depolyed in cloud env to create ranger hive service in ranger admin.
-    setup_ranger_hive_metastore_service()
-
-  def stop(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-    hive_service('metastore', action='stop', upgrade_type=upgrade_type)
-
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hive(name = 'metastore')
-
-
-@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
-class HiveMetastoreWindows(HiveMetastore):
-  def status(self, env):
-    import status_params
-    from resource_management.libraries.functions import check_windows_service_status
-    check_windows_service_status(status_params.hive_metastore_win_service_name)
-
-
-@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
-class HiveMetastoreDefault(HiveMetastore):
-  def status(self, env):
-    import status_params
-    from resource_management.libraries.functions import check_process_status
-
-    env.set_params(status_params)
-    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
-    # Recursively check all existing gmetad pid files
-    check_process_status(pid_file)
-
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    Logger.info("Executing Metastore Stack Upgrade pre-restart")
-    import params
-
-    env.set_params(params)
-
-    is_upgrade = params.upgrade_direction == Direction.UPGRADE
-
-    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      stack_select.select_packages(params.version)
-
-    if is_upgrade and params.stack_version_formatted_major and \
-            check_stack_feature(StackFeature.HIVE_METASTORE_UPGRADE_SCHEMA, params.stack_version_formatted_major):
-      self.upgrade_schema(env)
-
-  def upgrade_schema(self, env):
-    """
-    Executes the schema upgrade binary.  This is its own function because it could
-    be called as a standalone task from the upgrade pack, but is safe to run it for each
-    metastore instance. The schema upgrade on an already upgraded metastore is a NOOP.
-
-    The metastore schema upgrade requires a database driver library for most
-    databases. During an upgrade, it's possible that the library is not present,
-    so this will also attempt to copy/download the appropriate driver.
-
-    This function will also ensure that configurations are written out to disk before running
-    since the new configs will most likely not yet exist on an upgrade.
-
-    Should not be invoked for a DOWNGRADE; Metastore only supports schema upgrades.
-    """
-    Logger.info("Upgrading Hive Metastore Schema")
-    import status_params
-    import params
-    env.set_params(params)
-
-    # ensure that configurations are written out before trying to upgrade the schema
-    # since the schematool needs configs and doesn't know how to use the hive conf override
-    self.configure(env)
-
-    if params.security_enabled:
-      cached_kinit_executor(status_params.kinit_path_local,
-        status_params.hive_user,
-        params.hive_metastore_keytab_path,
-        params.hive_metastore_principal,
-        status_params.hostname,
-        status_params.tmp_dir)
-      
-    # ensure that the JDBC drive is present for the schema tool; if it's not
-    # present, then download it first
-    if params.hive_jdbc_driver in params.hive_jdbc_drivers_list:
-      target_directory = format("{stack_root}/{version}/hive/lib")
-
-      # download it if it does not exist
-      if not os.path.exists(params.source_jdbc_file):
-        jdbc_connector(params.hive_jdbc_target, params.hive_previous_jdbc_jar)
-
-      target_directory_and_filename = os.path.join(target_directory, os.path.basename(params.source_jdbc_file))
-
-      if params.sqla_db_used:
-        target_native_libs_directory = format("{target_directory}/native/lib64")
-
-        Execute(format("yes | {sudo} cp {jars_in_hive_lib} {target_directory}"))
-
-        Directory(target_native_libs_directory, create_parents = True)
-
-        Execute(format("yes | {sudo} cp {libs_in_hive_lib} {target_native_libs_directory}"))
-
-        Execute(format("{sudo} chown -R {hive_user}:{user_group} {hive_lib}/*"))
-      else:
-        # copy the JDBC driver from the older metastore location to the new location only
-        # if it does not already exist
-        if not os.path.exists(target_directory_and_filename):
-          Execute(('cp', params.source_jdbc_file, target_directory),
-            path=["/bin", "/usr/bin/"], sudo = True)
-
-      File(target_directory_and_filename, mode = 0644)
-
-    # build the schema tool command
-    binary = format("{hive_schematool_ver_bin}/schematool")
-
-    # the conf.server directory changed locations between stack versions
-    # since the configurations have not been written out yet during an upgrade
-    # we need to choose the original legacy location
-    schematool_hive_server_conf_dir = params.hive_server_conf_dir
-
-    upgrade_from_version = upgrade_summary.get_source_version("HIVE",
-      default_version = params.version_for_stack_feature_checks)
-
-    if not (check_stack_feature(StackFeature.CONFIG_VERSIONING, upgrade_from_version)):
-      schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF
-
-    env_dict = {
-      'HIVE_CONF_DIR': schematool_hive_server_conf_dir
-    }
-
-    command = format("{binary} -dbType {hive_metastore_db_type} -upgradeSchema")
-    Execute(command, user=params.hive_user, tries=1, environment=env_dict, logoutput=True)
-    
-  def get_log_folder(self):
-    import params
-    return params.hive_log_dir
-
-  def get_user(self):
-    import params
-    return params.hive_user
-
-
-if __name__ == "__main__":
-  HiveMetastore().execute()

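The upgrade_schema() method in hive_metastore.py above essentially points HIVE_CONF_DIR at the appropriate conf.server directory and runs schematool with -upgradeSchema. A rough sketch of the resulting invocation follows; the paths and database type are placeholders, not real cluster values.

# Rough sketch of the schematool call assembled by upgrade_schema();
# the bin path, conf dir and db type below are placeholders.
import os
import subprocess

hive_schematool_ver_bin = "/usr/iop/current/hive-server2/bin"   # placeholder
schematool_conf_dir = "/etc/hive/conf.server"                   # legacy location when CONFIG_VERSIONING is absent
hive_metastore_db_type = "mysql"                                # placeholder

command = "%s/schematool -dbType %s -upgradeSchema" % (hive_schematool_ver_bin, hive_metastore_db_type)
env = dict(os.environ, HIVE_CONF_DIR=schematool_conf_dir)

# Equivalent in spirit to Execute(command, user=hive_user, environment=env_dict, logoutput=True)
subprocess.call(command.split(), env=env)
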
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8003b39/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_server.py
deleted file mode 100644
index 943feec..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HIVE/package/scripts/hive_server.py
+++ /dev/null
@@ -1,161 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
-from resource_management.libraries.functions.check_process_status import check_process_status
-from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions.security_commons import build_expectations, \
-  cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
-  FILE_TYPE_XML
-from ambari_commons import OSCheck, OSConst
-if OSCheck.is_windows_family():
-  from resource_management.libraries.functions.windows_service_utils import check_windows_service_status
-from setup_ranger_hive import setup_ranger_hive
-from ambari_commons.os_family_impl import OsFamilyImpl
-from resource_management.core.logger import Logger
-
-import hive_server_upgrade
-from hive import hive
-from hive_service import hive_service
-from resource_management.core.resources.zkmigrator import ZkMigrator
-
-
-class HiveServer(Script):
-  def install(self, env):
-    self.install_packages(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hive(name='hiveserver2')
-
-
-@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
-class HiveServerWindows(HiveServer):
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    hive_service('hiveserver2', action='start')
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-    hive_service('hiveserver2', action='stop')
-
-  def status(self, env):
-    import status_params
-    check_windows_service_status(status_params.hive_server_win_service_name)
-
-
-@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
-class HiveServerDefault(HiveServer):
-  def start(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-
-    setup_ranger_hive(upgrade_type=upgrade_type)
-    hive_service('hiveserver2', action = 'start', upgrade_type=upgrade_type)
-
-
-  def stop(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-
-    # always de-register the old hive instance so that ZK can route clients
-    # to the newly created hive server
-    try:
-      if upgrade_type is not None:
-        hive_server_upgrade.deregister()
-    except Exception as exception:
-      Logger.exception(str(exception))
-
-    # even during rolling upgrades, Hive Server will be stopped - this is because Ambari will
-    # not support the "port-change/deregister" workflow as it would impact Hive clients
-    # which do not use ZK discovery.
-    hive_service( 'hiveserver2', action = 'stop' )
-
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    pid_file = format("{hive_pid_dir}/{hive_pid}")
-
-    # Recursively check all existing gmetad pid files
-    check_process_status(pid_file)
-
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    Logger.info("Executing Hive Server Stack Upgrade pre-restart")
-    import params
-    env.set_params(params)
-
-    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      stack_select.select_packages(params.version)
-
-      # Copy mapreduce.tar.gz and tez.tar.gz to HDFS
-      resource_created = copy_to_hdfs(
-        "mapreduce",
-        params.user_group,
-        params.hdfs_user,
-        skip=params.sysprep_skip_copy_tarballs_hdfs)
-
-      resource_created = copy_to_hdfs(
-        "tez",
-        params.user_group,
-        params.hdfs_user,
-        skip=params.sysprep_skip_copy_tarballs_hdfs) or resource_created
-
-      if resource_created:
-        params.HdfsResource(None, action="execute")
-
-  def _base_node(self, path):
-    if not path.startswith('/'):
-      path = '/' + path
-    try:
-      return '/' + path.split('/')[1]
-    except IndexError:
-      return path
-
-  def disable_security(self, env):
-    import params
-    zkmigrator = ZkMigrator(params.hive_zookeeper_quorum, params.java_exec, params.java64_home, params.jaas_file, params.hive_user)
-    if params.hive_cluster_token_zkstore:
-      zkmigrator.set_acls(self._base_node(params.hive_cluster_token_zkstore), 'world:anyone:crdwa')
-    if params.hive_zk_namespace:
-      zkmigrator.set_acls(
-        params.hive_zk_namespace if params.hive_zk_namespace.startswith('/') else '/' + params.hive_zk_namespace,
-        'world:anyone:crdwa')
-
-  def get_log_folder(self):
-    import params
-    return params.hive_log_dir
-  
-  def get_user(self):
-    import params
-    return params.hive_user
-
-if __name__ == "__main__":
-  HiveServer().execute()

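For reference, the _base_node() helper in hive_server.py above keeps only the first component of a ZooKeeper path so the ACL is applied at the namespace root. A standalone usage example follows; the znode paths are made up.

# _base_node() from HiveServerDefault, shown standalone; example paths are invented.
def _base_node(path):
  if not path.startswith('/'):
    path = '/' + path
  try:
    return '/' + path.split('/')[1]
  except IndexError:
    return path

print(_base_node('hiveserver2-hive2/tokens'))   # -> /hiveserver2-hive2
print(_base_node('/hive/cluster/delegation'))   # -> /hive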