This is an automated email from the ASF dual-hosted git repository.

jialiang pushed a commit to branch zccc
in repository https://gitbox.apache.org/repos/asf/ambari.git

commit 969b74c4b506ee1b6e5970a90fad4b82cc8e34c4
Author: jialiang <[email protected]>
AuthorDate: Sat Aug 31 09:01:57 2024 +0800

    fix kerberos
    remove unused service
    fix hbase rest api service
---
 .../src/main/python/ambari_commons/shell.py        |   9 +-
 .../libraries/functions/curl_krb_request.py        |   2 +-
 .../resources/addon-services/DINKY/alerts.json     |  32 ---
 .../configuration/dinky-application-server.xml     | 141 -------------
 .../DINKY/configuration/dinky-env.xml              |  74 -------
 .../resources/addon-services/DINKY/metainfo.xml    |  99 ---------
 .../DINKY/package/scripts/dinky_env.py             |  70 -------
 .../DINKY/package/scripts/dinky_server.py          |  89 --------
 .../addon-services/DINKY/package/scripts/params.py | 108 ----------
 .../DINKY/package/scripts/service_check.py         |  32 ---
 .../package/templates/application-mysql.yml.j2     |  24 ---
 .../package/templates/application-pgsql.yml.j2     |  23 ---
 .../DINKY/package/templates/application.yml.j2     | 153 --------------
 .../DINKY/package/templates/auto.sh.j2             | 148 --------------
 .../DINKY/quicklinks/quicklinks.json               |  26 ---
 .../addon-services/DINKY/service_advisor.py        | 196 ------------------
 .../configuration/elasticsearch-env.xml            | 102 ----------
 .../configuration/elasticsearch-jvm.xml            | 114 -----------
 .../configuration/elasticsearch-site.xml           | 134 ------------
 .../addon-services/ELASTICSEARCH/metainfo.xml      |  98 ---------
 .../package/scripts/ElasticSearchService.py        | 224 ---------------------
 .../package/scripts/ElasticSearchServiceCheck.py   |  34 ----
 .../ELASTICSEARCH/package/scripts/Utils.py         |  71 -------
 .../ELASTICSEARCH/package/scripts/params.py        |  35 ----
 .../ELASTICSEARCH/quicklinks/quicklinks.json       |  43 ----
 .../BIGTOP/3.2.0/services/HBASE/metainfo.xml       |   8 +-
 .../HBASE/package/scripts/hbase_rest_gateway.py    |  92 +++++++++
 .../3.2.0/services/HBASE/role_command_order.json   |   2 +-
 .../KERBEROS/package/scripts/service_check.py      |   2 +-
 .../services/KERBEROS/properties/krb5_conf.j2      |   2 +-
 30 files changed, 105 insertions(+), 2082 deletions(-)

diff --git a/ambari-common/src/main/python/ambari_commons/shell.py 
b/ambari-common/src/main/python/ambari_commons/shell.py
index ac8f1106ef..445ebef508 100644
--- a/ambari-common/src/main/python/ambari_commons/shell.py
+++ b/ambari-common/src/main/python/ambari_commons/shell.py
@@ -812,8 +812,8 @@ class shellRunnerLinux(shellRunner):
     try:
       if self._threadLocal is not None:
         os.setuid(self._threadLocal.uid)
-    except Exception:
-      _logger.warn("can not switch user for running command.")
+    except Exception as e:
+      _logger.warn(f"Unable to switch user for running command. Error details: 
{e}")
 
   # Run any command
   def run(self, script, user=None):
@@ -825,8 +825,9 @@ class shellRunnerLinux(shellRunner):
       else:
         user = os.getuid()
       self._threadLocal.uid = user
-    except Exception:
-      _logger.warn("can not switch user for RUN_COMMAND.")
+    except Exception as e:
+      _logger.warn(f"Unable to switch user for RUN_COMMAND. Error details: 
{e}")
+
 
     cmd = script
 
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py
index cc234a3284..7b84182a4d 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py
@@ -98,7 +98,7 @@ def curl_krb_request(tmp_dir, keytab, principal, url, 
cache_file_prefix,
   # when executing curl. Use a hash of the combination of the principal and 
keytab file
   # to generate a (relatively) unique cache filename so that we can use it as 
needed. Scope
   # this file by user in order to prevent sharing of cache files by multiple 
users.
-  ccache_file_name = HASH_ALGORITHM("{0}|{1}".format(principal, 
keytab)).hexdigest()
+  ccache_file_name = HASH_ALGORITHM("{0}|{1}".format(principal, 
keytab).encode('utf-8')).hexdigest()
 
   curl_krb_cache_path = os.path.join(tmp_dir, "curl_krb_cache")
   if not os.path.exists(curl_krb_cache_path):
diff --git a/ambari-server/src/main/resources/addon-services/DINKY/alerts.json 
b/ambari-server/src/main/resources/addon-services/DINKY/alerts.json
deleted file mode 100644
index 8fdd15e418..0000000000
--- a/ambari-server/src/main/resources/addon-services/DINKY/alerts.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
-  "DINKY": {
-    "service": [],
-    "DINKY_SERVER": [
-      {
-        "name": "dinky_server_port_check",
-        "label": "dinky_server_port_check",
-        "description": "dinky_server_port_check.",
-        "interval": 10,
-        "scope": "ANY",
-        "source": {
-          "type": "PORT",
-          "uri": "{{dinky-application-server/server.port}}",
-          "default_port": 8888,
-          "reporting": {
-            "ok": {
-              "text": "TCP OK - {0:.3f}s response on port {1}"
-            },
-            "warning": {
-              "text": "TCP OK - {0:.3f}s response on port {1}",
-              "value": 1.5
-            },
-            "critical": {
-              "text": "Connection failed: {0} to {1}:{2}",
-              "value": 5.0
-            }
-          }
-        }
-      }
-    ],
-  }
-}
\ No newline at end of file
diff --git 
a/ambari-server/src/main/resources/addon-services/DINKY/configuration/dinky-application-server.xml
 
b/ambari-server/src/main/resources/addon-services/DINKY/configuration/dinky-application-server.xml
deleted file mode 100644
index 79d78049bd..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/DINKY/configuration/dinky-application-server.xml
+++ /dev/null
@@ -1,141 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-# -*- coding: utf-8 -*-
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
--->
-<configuration>
-    <!--   server port  -->
-    <property require-input="true">
-        <name>server.port</name>
-        <value>8888</value>
-        <property-type>
-            <type>PORT</type>
-            <default-port>8888</default-port>
-        </property-type>
-        <on-ambari-upgrade add="true"/>
-    </property>
-
-    <!--   flink big version    -->
-    <property require-input="true">
-        <name>flink.big.version</name>
-        <value>1.17</value>
-        <description>
-            en: The version of flink big version such as: 1.18, 1.17, 1.16, 
1.15, 1.14
-            Note: The version of flink big version must be consistent with the 
version of flink big version installed on
-            the cluster.
-            |
-            zh: Flink大版本号,如:1.18, 1.17, 1.16, 1.15, 1.14
-            注意:Flink大版本号必须与集群上安装的Flink大版本号保持一致。
-        </description>
-        <value-attributes>
-            <type>value-list</type>
-            <entries>
-                <entry>
-                    <value>1.14</value>
-                </entry>
-                <entry>
-                    <value>1.15</value>
-                </entry>
-                <entry>
-                    <value>1.16</value>
-                </entry>
-                <entry>
-                    <value>1.17</value>
-                </entry>
-            </entries>
-        </value-attributes>
-        <on-ambari-upgrade add="true"/>
-    </property>
-
-    <!--   数据库类型    -->
-    <property require-input="true">
-        <name>spring.profiles.active</name>
-        <value>mysql</value>
-        <description>
-            en: The type of database to use. Valid values are mysql, 
postgresql , you must input the mysql/postgresql
-            information.| zh: 数据库类型,可选值为mysql, 
postgresql,必须输入mysql/postgresql的信息。
-        </description>
-        <value-attributes>
-            <type>value-list</type>
-            <entries>
-                <entry>
-                    <value>mysql</value>
-                    <description>
-                        en: mysql database | zh: mysql数据库
-                    </description>
-                </entry>
-                <entry>
-                    <value>pgsql</value>
-                    <description>
-                        en: postgresql database | zh: postgresql数据库
-                    </description>
-                </entry>
-            </entries>
-        </value-attributes>
-        <on-ambari-upgrade add="true"/>
-    </property>
-
-    <!--   数据库主机  -->
-    <property require-input="true">
-        <name>spring.datasource.database.host</name>
-        <value>127.0.0.1</value>
-        <description>
-            en: DataBase host ip/hostname | zh: 数据库主机ip/主机名
-        </description>
-        <on-ambari-upgrade add="true"/>
-    </property>
-
-    <!--   数据库端口  -->
-    <property require-input="true">
-        <name>spring.datasource.database.port</name>
-        <value>3306</value>
-        <description>
-            en: DataBase port | zh: 数据库端口
-        </description>
-        <property-type>PORT</property-type>
-        <on-ambari-upgrade add="true"/>
-    </property>
-
-    <!--   数据库名称  -->
-    <property require-input="true">
-        <name>spring.datasource.database.name</name>
-        <value>dinky</value>
-        <description>
-            en: DataBase Name | zh: 数据库名称
-        </description>
-        <on-ambari-upgrade add="true"/>
-    </property>
-
-    <!--   数据库用户名  -->
-    <property require-input="true">
-        <name>spring.datasource.database.username</name>
-        <value>dinky</value>
-        <description>
-            en: DataBase UserName | zh: 数据库用户名
-        </description>
-        <on-ambari-upgrade add="true"/>
-    </property>
-
-    <!--   数据库密码  -->
-    <property require-input="true">
-        <name>spring.datasource.database.password</name>
-        <value>dinky1234!2</value>
-        <description>
-            en: DataBase Password | zh: 数据库密码
-        </description>
-        <property-type>PASSWORD</property-type>
-        <on-ambari-upgrade add="true"/>
-    </property>
-</configuration>
\ No newline at end of file
diff --git 
a/ambari-server/src/main/resources/addon-services/DINKY/configuration/dinky-env.xml
 
b/ambari-server/src/main/resources/addon-services/DINKY/configuration/dinky-env.xml
deleted file mode 100755
index 4279e8d9bc..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/DINKY/configuration/dinky-env.xml
+++ /dev/null
@@ -1,74 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_adding_forbidden="true">
-    <property>
-        <name>dinky_user</name>
-        <display-name>Dinky User</display-name>
-        <value>dinky</value>
-        <property-type>USER</property-type>
-        <value-attributes>
-            <type>user</type>
-            <overridable>false</overridable>
-            <user-groups>
-                <property>
-                    <type>cluster-env</type>
-                    <name>user_group</name>
-                </property>
-                <property>
-                    <type>dinky-env</type>
-                    <name>dinky_group</name>
-                </property>
-            </user-groups>
-        </value-attributes>
-        <on-ambari-upgrade add="true"/>
-    </property>
-    <property>
-        <name>dinky_group</name>
-        <display-name>Dinky Group</display-name>
-        <value>hadoop</value>
-        <property-type>GROUP</property-type>
-        <description>dinky group</description>
-        <value-attributes>
-            <type>user</type>
-        </value-attributes>
-        <on-ambari-upgrade add="true"/>
-    </property>
-    <property>
-        <name>dinky_log_dir</name>
-        <display-name>Dinky Log directory</display-name>
-        <value>/var/log/dinky</value>
-        <description>Dinky Log Dir</description>
-        <value-attributes>
-            <type>directory</type>
-        </value-attributes>
-        <on-ambari-upgrade add="true"/>
-    </property>
-    <property>
-        <name>dinky_pid_dir</name>
-        <display-name>Dinky PID directory</display-name>
-        <value>/var/run/dinky</value>
-        <value-attributes>
-            <type>directory</type>
-        </value-attributes>
-        <on-ambari-upgrade add="true"/>
-    </property>
-</configuration>
\ No newline at end of file
diff --git a/ambari-server/src/main/resources/addon-services/DINKY/metainfo.xml 
b/ambari-server/src/main/resources/addon-services/DINKY/metainfo.xml
deleted file mode 100644
index 3167565f2e..0000000000
--- a/ambari-server/src/main/resources/addon-services/DINKY/metainfo.xml
+++ /dev/null
@@ -1,99 +0,0 @@
-<?xml version="1.0"?>
-<!--Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>DINKY</name>
-      <displayName>Dinky</displayName>
-      <comment>Dinky is a distributed multi-tenant Thrift JDBC/ODBC server for 
large-scale data management, processing, and analytics, built on top of Apache 
Spark and designed to support more engines (i.e., Flink). </comment>
-      <version>1.0.0</version>
-      <components>
-        <component>
-          <name>DINKY_SERVER</name>
-          <displayName>Dinky Server</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <versionAdvertised>true</versionAdvertised>
-          <commandScript>
-            <script>scripts/dinky_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>env</type>
-              <fileName>dinky-application-server.xml</fileName>
-              <dictionaryName>dinky-defaults</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>dinky-env.xml</fileName>
-              <dictionaryName>dinky-env</dictionaryName>
-            </configFile>
-          </configFiles>
-          <logs>
-            <log>
-              <logId>dinky_server</logId>
-              <primary>true</primary>
-            </log>
-          </logs>
-        </component>
-      </components>
-
-      <configuration-dependencies>
-        <config-type>dinky-application-server</config-type>
-        <config-type>dinky-env</config-type>
-      </configuration-dependencies>
-
-      <quickLinksConfigurations>
-        <quickLinksConfiguration>
-          <fileName>quicklinks.json</fileName>
-          <default>true</default>
-        </quickLinksConfiguration>
-      </quickLinksConfigurations>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-
-      <osSpecifics>
-        <osSpecific>
-          
<osFamily>redhat9,redhat8,redhat7,amazonlinux2,redhat6,suse11,suse12</osFamily>
-          <packages>
-            <package>
-              <name>dinky_${stack_version}</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          
<osFamily>debian7,debian9,ubuntu12,ubuntu14,ubuntu16,ubuntu18</osFamily>
-          <packages>
-            <package>
-              <name>dinky_${stack_version}</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-    </service>
-  </services>
-</metainfo>
diff --git 
a/ambari-server/src/main/resources/addon-services/DINKY/package/scripts/dinky_env.py
 
b/ambari-server/src/main/resources/addon-services/DINKY/package/scripts/dinky_env.py
deleted file mode 100644
index 6e1ece7055..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/DINKY/package/scripts/dinky_env.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import  os
-
-def dinky_env():
-    import params  # import 导入params.py文件
-    # User(params.dinky_user, action="create", groups=params.dinky_group
-    #      , ignore_already_exists=True  # 忽略已经存在
-    #      )  # 创建用户
-
-    Directory([params.dinky_log_dir, params.dinky_pid_dir],
-              owner=params.dinky_user,
-              group=params.dinky_group,
-              mode=0o775,
-              create_parents = True
-              )
-
-
-    ## 重写 启动脚本 auto.sh 文件
-    File(os.path.join(params.dinky_bin_dir, params.start_script_name),
-         mode=0o755,
-         content=Template(params.start_script_template_file),
-         owner=params.dinky_user,
-         group=params.dinky_group
-         )
-
-    #content=Template("hbase.conf.j2")
-    # 重写 application.yml 文件
-    File(os.path.join(params.dinky_conf_dir, 
params.dinky_application_main_config_file),
-         mode=0o755,
-         content=Template(params.dinky_application_main_config_template_file),
-         owner=params.dinky_user,
-         group=params.dinky_group
-         )
-
-    ## 根据 mysql 还是 pgsql 重写 application-xxx.yml 文件
-    if params.dinky_database_config['dinky_database_type'] == "mysql":
-        ## 重写 application-mysql.yml 文件
-        File(os.path.join(params.dinky_conf_dir , 
params.dinky_application_mysql_config_file),
-             mode=0o755,
-             
content=Template(params.dinky_application_mysql_config_template_file),
-             owner=params.dinky_user,
-             group=params.dinky_group
-             )
-    else:
-        ## 重写 application-pgsql.yml 文件
-        File(os.path.join(params.dinky_conf_dir , 
params.dinky_application_pgsql_config_file),
-             mode=0o755,
-             
content=Template(params.dinky_application_pgsql_config_template_file),
-             owner=params.dinky_user,
-             group=params.dinky_group
-             )
\ No newline at end of file
diff --git 
a/ambari-server/src/main/resources/addon-services/DINKY/package/scripts/dinky_server.py
 
b/ambari-server/src/main/resources/addon-services/DINKY/package/scripts/dinky_server.py
deleted file mode 100644
index 63a8c93d2b..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/DINKY/package/scripts/dinky_server.py
+++ /dev/null
@@ -1,89 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import time
-import  os
-from resource_management import *
-from resource_management.libraries.functions.format import format
-from dinky_env import dinky_env
-
-
-class DinkyService(Script):
-
-    # 安装 dinky
-    def install(self, env):
-        import params
-        env.set_params(params)
-        self.install_packages(env)
-        #Execute(('chmod', '-R', '777', params.dinky_home))
-        #Execute(('chown', '-R', params.dinky_user + ":" + params.dinky_group, 
params.dinky_home))
-
-
-    def initialize(self, env):
-        import params
-        env.set_params(params)
-        dinky_setup_marker = os.path.join(params.dinky_conf_dir, "dinky_setup")
-        if not os.path.exists(dinky_setup_marker):
-            try :
-                Execute(params.init_sql, user=params.dinky_user)
-                Logger.info(format('dinky init finished, cmd: 
{params.init_sql}'))
-
-                File(dinky_setup_marker,
-                     owner = params.dinky_user,
-                     group = params.dinky_group,
-                     mode = 0o640)
-            except Exception as e:
-                Logger.exception("There was an exception when  ALTER SYSTEM 
ADD FOLLOWER: " + str(e))
-
-
-    def configure(self, env):
-        import params
-        params.pika_slave = True
-        env.set_params(params)
-        self.initialize(env)
-        dinky_env()
-
-    def start(self, env):
-        import params
-        env.set_params(params)
-        self.configure(env)
-
-        no_op_test = format("ls {params.dinky_pid_file} >/dev/null 2>&1 ")
-
-        start_cmd = format("sh {params.start_script_path}  start 
{params.dinky_flink_big_version}")
-        Execute(start_cmd, user=params.dinky_user, not_if=no_op_test)
-
-    def stop(self, env):
-        import params
-        env.set_params(params)
-        stop_cmd = format("sh {params.start_script_path} stop ")
-        Execute(stop_cmd, user=params.dinky_user)
-        time.sleep(5)
-
-    def status(self, env):
-        import params
-        env.set_params(params)
-        check_process_status(params.dinky_pid_file)
-
-    def restart(self, env):
-        self.stop(env)
-        self.start(env)
-
-
-if __name__ == "__main__":
-    DinkyService().execute()
diff --git 
a/ambari-server/src/main/resources/addon-services/DINKY/package/scripts/params.py
 
b/ambari-server/src/main/resources/addon-services/DINKY/package/scripts/params.py
deleted file mode 100644
index a4fa11c2cd..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/DINKY/package/scripts/params.py
+++ /dev/null
@@ -1,108 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import sys
-import os
-from resource_management import *
-from resource_management.core.logger import Logger
-from resource_management.libraries.functions.format import format
-from resource_management.libraries.script.script import Script
-
-
-
-config = Script.get_config()
-
-stack_root = Script.get_stack_root()
-
-component_directory = "dinky-server"
-dinky_home = format("{stack_root}/current/{component_directory}")
-dinky_conf_dir = format("{dinky_home}/config")
-
-
-dinky_user = config['configurations']['dinky-env']['dinky_user']
-dinky_group = config['configurations']['dinky-env']['dinky_group']
-dinky_pid_dir = config['configurations']['dinky-env']['dinky_pid_dir']
-dinky_pid_filename = format("dinky-{dinky_user}.pid")
-dinky_pid_file = os.path.join(dinky_pid_dir, dinky_pid_filename)
-
-dinky_log_dir = config['configurations']['dinky-env']['dinky_log_dir']
-
-
-
-dinky_bin_dir = dinky_home
-dinky_lib_dir = os.path.join(dinky_home, "lib")
-dinky_lib_jars = format("{dinky_lib_dir}/*")
-dinky_extends_flink_dir = os.path.join(dinky_home, "extends")
-dinky_extends_flink_jars = format("{dinky_extends_flink_dir}/*")
-
-dinky_init_mysql_sqlfile = os.path.join(dinky_home, "sql/dinky-mysql.sql")
-dinky_init_pgsqll_sqlfile = os.path.join(dinky_home, "sql/dinky-pg.sql")
-dinky_application_main_config_file = "application.yml"
-dinky_application_main_config_template_file = 
format("{dinky_application_main_config_file}.j2")
-dinky_application_mysql_config_file = "application-mysql.yml"
-dinky_application_mysql_config_template_file = 
format("{dinky_application_mysql_config_file}.j2")
-dinky_application_pgsql_config_file = "application-pgsql.yml"
-dinky_application_pgsql_config_template_file = 
format("{dinky_application_pgsql_config_file}.j2")
-
-start_script_name = "auto.sh"
-start_script_path = os.path.join(dinky_home, start_script_name)
-start_script_template_file = format("{start_script_name}.j2")
-
-
-
-dinky_env_map = {}
-
-dinky_env_map.update(config['configurations']['dinky-application-server'])
-dinky_flink_big_version = dinky_env_map['flink.big.version']
-
-dinky_server_port = dinky_env_map['server.port']
-
-dinky_database_config = {'dinky_database_type': 
dinky_env_map['spring.profiles.active'],
-                         'dinky_database_username': 
dinky_env_map['spring.datasource.database.username'],
-                         'dinky_database_password': 
dinky_env_map['spring.datasource.database.password']}
-
-
-dinky_database_password = dinky_env_map['spring.datasource.database.password']
-dinky_database_name = dinky_env_map['spring.datasource.database.name']
-if 'mysql' == dinky_database_config['dinky_database_type']:
-
-    dinky_database_config['dinky_database_driver'] = 'com.mysql.jdbc.Driver'
-    dinky_database_config['dinky_database_url'] = 'jdbc:mysql://' + 
dinky_env_map['spring.datasource.database.host'] \
-                                                  + ':' + 
dinky_env_map['spring.datasource.database.port'] \
-                                                  + '/' + 
dinky_env_map['spring.datasource.database.name'] \
-                                                  + 
'?useUnicode=true&characterEncoding=UTF-8'
-    database_host = dinky_env_map['spring.datasource.database.host']
-    database_port = dinky_env_map['spring.datasource.database.port']
-    dinky_init_sql_path = dinky_init_mysql_sqlfile
-    sql_client = "mysql"
-    #mysql -h hostname -P port -u username -p'password' dinky_database_name < 
/path/to/file.sql
-
-    init_sql = format("{sql_client} -h {database_host} -P {database_port}  -u 
{dinky_user} -p'{dinky_database_password}'   {dinky_database_name}  < 
{dinky_init_sql_path}")
-else:
-
-    dinky_database_config['dinky_database_driver'] = 'org.postgresql.Driver'
-    dinky_database_config['dinky_database_url'] = 'jdbc:postgresql://' + 
dinky_env_map[
-        'spring.datasource.database.host'] \
-                                                  + ':' + 
dinky_env_map['spring.datasource.database.port'] \
-                                                  + '/' + 
dinky_env_map['spring.datasource.database.name'] \
-                                                  + '?stringtype=unspecified'
-    database_host = dinky_env_map['spring.datasource.database.host']
-    database_port = dinky_env_map['spring.datasource.database.port']
-    dinky_init_sql_path = dinky_init_pgsqll_sqlfile
-    sql_client = "psql"
-    init_sql = format("PGPASSWORD={dinky_database_password} {sql_client} -h 
{database_host} -p {database_port}  -U {dinky_user}  -d {dinky_database_name}  
< {dinky_init_sql_path}")
diff --git 
a/ambari-server/src/main/resources/addon-services/DINKY/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/addon-services/DINKY/package/scripts/service_check.py
deleted file mode 100644
index 9c9d23bff0..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/DINKY/package/scripts/service_check.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-
-from resource_management import *
-
-class ServiceCheck(Script):
-    def service_check(self, env):
-        import params
-        env.set_params(params)
-
-        Execute(format("sh {dinky_bin_dir}/{start_script_name} status"))
-
-if __name__ == "__main__":
-    ServiceCheck().execute()
\ No newline at end of file
diff --git 
a/ambari-server/src/main/resources/addon-services/DINKY/package/templates/application-mysql.yml.j2
 
b/ambari-server/src/main/resources/addon-services/DINKY/package/templates/application-mysql.yml.j2
deleted file mode 100644
index 73864dfc80..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/DINKY/package/templates/application-mysql.yml.j2
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-spring:
-  datasource:
-    driver-class-name: {{ dinky_database_config['dinky_database_driver'] }}
-    url: {{ dinky_database_config['dinky_database_url'] }}
-    username: {{ dinky_database_config['dinky_database_username'] }}
-    password: {{ dinky_database_config['dinky_database_password'] }}
\ No newline at end of file
diff --git 
a/ambari-server/src/main/resources/addon-services/DINKY/package/templates/application-pgsql.yml.j2
 
b/ambari-server/src/main/resources/addon-services/DINKY/package/templates/application-pgsql.yml.j2
deleted file mode 100644
index 3a158c08ec..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/DINKY/package/templates/application-pgsql.yml.j2
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-spring:
-  datasource:
-    driver-class-name: {{ dinky_database_config['dinky_database_driver'] }}
-    url: {{ dinky_database_config['dinky_database_url'] }}
-    username: {{ dinky_database_config['dinky_database_username'] }}
-    password: {{ dinky_database_config['dinky_database_password'] }}
\ No newline at end of file
diff --git 
a/ambari-server/src/main/resources/addon-services/DINKY/package/templates/application.yml.j2
 
b/ambari-server/src/main/resources/addon-services/DINKY/package/templates/application.yml.j2
deleted file mode 100644
index 2efa79ea48..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/DINKY/package/templates/application.yml.j2
+++ /dev/null
@@ -1,153 +0,0 @@
-#################################################################################################################
-################################################# Common Config 
#################################################
-#################################################################################################################
-# Dinky application port
-server:
-  port: {{ dinky_server_port }}
-
-spring:
-  # Dinky application name
-  application:
-    name: Dinky
-  profiles:
-    # The h2 database is used by default. If you need to use other databases, 
please set the configuration active to: mysql, currently supports [mysql, 
pgsql, h2]
-    # If you use mysql database, please configure mysql database connection 
information in application-mysql.yml
-    # If you use pgsql database, please configure pgsql database connection 
information in application-pgsql.yml
-    # If you use the h2 database, please configure the h2 database connection 
information in application-h2.yml,
-    # note: the h2 database is only for experience use, and the related data 
that has been created cannot be migrated, please use it with caution
-    active: {{ dinky_database_config['dinky_database_type'] }} #[mysql,pgsql]
-    include: jmx
-
-  # mvc config
-  mvc:
-    pathmatch:
-      # Path matching strategy, default ant_path_matcher, support 
ant_path_matcher and path_pattern_parser
-      matching-strategy: ant_path_matcher
-    format:
-      date: yyyy-MM-dd HH:mm:ss # date format
-      time: HH:mm:ss # time format
-      date-time: yyyy-MM-dd HH:mm:ss # date-time format
-
-  # json format global configuration
-  jackson:
-    time-zone: GMT+8 # Time zone, default is GMT+8
-    date-format: yyyy-MM-dd HH:mm:ss # Date format, the default is yyyy-MM-dd 
HH:mm:ss
-
-  # circular references allowed
-  main:
-    allow-circular-references: true
-
-  # file upload config of servlet , the default is 500MB
-  servlet:
-    multipart:
-      enabled: true
-      max-file-size: 524288000
-      max-request-size: 524288000
-
-
-  # By default, memory cache metadata information is used,
-  # dinky supports redis cache, if necessary, please change simple to redis, 
and open the following redis connection configuration
-  # Sub-configuration items can be opened or customized as needed
-  cache:
-    type: simple
-    # If type is configured as redis, this item can be configured as needed, 
note: Pay attention to the indentation of this configuration item
-    #    redis:
-    #      # Whether to cache empty values, save the default
-    #      cache-null-values: false
-    #      # Cache expiration time, default 24 hours
-    #      time-to-live: 86400
-
-    ########################################################## Redis配置 
##########################################################
-    # If sa-token needs to rely on redis, please open the redis configuration 
and depend on pom.xml and dinky-admin/pom.xml, and configure redis connection 
information in application.yml
-    # note: pay attention to the indentation of this configuration item
-#  redis:
-#     host: localhost
-#     port: 6379
-#     password:
-#     database: 10
-#     jedis:
-#       pool:
-#         # The maximum number of connections in the connection pool (use a 
negative value to indicate no limit)
-#         max-active: 50
-#         # The maximum blocking waiting time of the connection pool (use a 
negative value to indicate no limit)
-#         max-wait: 3000
-#         # The maximum number of idle connections in the connection pool
-#         max-idle: 20
-#         # The minimum number of idle connections in the connection pool
-#         min-idle: 5
-#     # Connection timeout (milliseconds)
-#     timeout: 5000
-
-#################################################################################################################
-################################################# Mybatis Config 
################################################
-######### Please note: that the following configurations are not recommended 
to be modified #####################
-#################################################################################################################
-mybatis-plus:
-  mapper-locations: classpath:/mapper/*Mapper.xml
-  # Entity scanning, multiple packages are separated by commas or semicolons
-  typeAliasesPackage: org.dinky.model
-  global-config:
-    db-config:
-      id-type: auto
-      # Logic delete configuration : 0: false(Not deleted), 1: true(deleted)
-      logic-delete-field: is_delete
-      logic-delete-value: 1
-      logic-not-delete-value: 0
-    banner: false
-  configuration:
-    ##### mybatis-plus prints complete sql (only for development environment)
-    #log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
-    log-impl: org.apache.ibatis.logging.nologging.NoLoggingImpl
-  type-handlers-package: org.dinky.data.typehandler
-#################################################################################################################
-################################################# SMS Config 
####################################################
-#################################################################################################################
-sms:
-  is-print: false
-
-
-
-#################################################################################################################
-################################################# Sa-Token Config 
###############################################
-#################################################################################################################
-# Sa-Token basic configuration
-sa-token:
-  # The validity period of the token, the unit is 10 hours by default, -1 
means it will never expire
-  timeout: 36000
-  # The temporary validity period of the token (the token will be considered 
as expired if there is no operation within the specified time)
-  # unit: second , if you do not need to set a temporary token, you can set it 
to -1
-  active-timeout: 1800
-  # Whether to allow the same account to log in concurrently (when true, allow 
login together, when false, new login squeezes out old login)
-  is-concurrent: false
-  # When multiple people log in to the same account, whether to share a token 
(if true, all logins share a token, and if false, create a new token for each 
login)
-  is-share: true
-  # token style
-  token-style: uuid
-  # Whether to output the operation log
-  is-log: false
-  # Whether to print banner
-  is-print: false
-  # The secret key
-  jwt-secret-key: 0DA4198858E84F1AADDF846340587A85
-  # is write header
-  is-write-header: true
-  # is read header
-  is-read-header: true
-  token-name: token
-
-#################################################################################################################
-################################################# knife4j Config 
################################################
-#################################################################################################################
-knife4j:
-  enable: true
-  setting:
-    language: en
-
-
-
-#################################################################################################################
-################################################# Crypto Config 
#################################################
-#################################################################################################################
-crypto:
-  enabled: false
-  encryption-password:
diff --git 
a/ambari-server/src/main/resources/addon-services/DINKY/package/templates/auto.sh.j2
 
b/ambari-server/src/main/resources/addon-services/DINKY/package/templates/auto.sh.j2
deleted file mode 100644
index 4dd401c36d..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/DINKY/package/templates/auto.sh.j2
+++ /dev/null
@@ -1,148 +0,0 @@
-#!/bin/bash
-
-FLINK_VERSION={{ dinky_flink_big_version }}
-
-JAR_NAME="dinky-admin"
-
-PID_FILE={{ dinky_pid_filename }}
-
-# JMX path
-APP_HOME="$(cd `dirname $0`; pwd)"
-
-# Use FLINK_HOME:
-CLASS_PATH="${APP_HOME}:${APP_HOME}/lib/*:${APP_HOME}/config:${APP_HOME}/extends/*:${APP_HOME}/plugins/*:${APP_HOME}/customJar/*:${APP_HOME}/plugins/flink${FLINK_VERSION}/dinky/*:${APP_HOME}/plugins/flink${FLINK_VERSION}/*:${APP_HOME}/extends/flink${FLINK_VERSION}/dinky/*:${APP_HOME}/extends/flink${FLINK_VERSION}/*"
-
-
-JMX="-javaagent:$APP_HOME/lib/jmx_prometheus_javaagent-0.20.0.jar=10087:$APP_HOME/config/jmx/jmx_exporter_config.yaml"
-
-# Check whether the pid path exists
-PID_PATH={{ dinky_pid_dir }}
-
-if [ -d "${PID_PATH}" ];then
-    echo "${PID_PATH} is already exist." >> /dev/null
-else
-    mkdir -p  "${PID_PATH}"
-fi
-
-# Check whether the pid file exists
-if [ -f "${PID_PATH}/${PID_FILE}" ];then
-    echo "${PID_PATH}/${PID_FILE} is already exist." >> /dev/null
-else
-    touch "${PID_PATH}"/${PID_FILE}
-fi
-
-tips() {
-  echo ""
-  echo "WARNING!!!......Tips, please use command: sh auto.sh 
[start|startOnPending|startWithJmx|stop|restart|restartWithJmx|status].   For 
example: sh auto.sh start  "
-  echo ""
-  exit 1
-}
-
-updatePid() {
-  pid=$(ps -ef | grep [D]inky  | awk '{print $2}' | head -1)
-  echo $pid >"${PID_PATH}"/${PID_FILE}
-}
-
-start() {
-  updatePid
-  if [ -z "$pid" ]; then
-    nohup java -Ddruid.mysql.usePingMethod=false 
-Dlog4j2.isThreadContextMapInheritable=true -Xms512M -Xmx2048M 
-XX:PermSize=512M -XX:MaxPermSize=1024M -XX:+HeapDumpOnOutOfMemoryError 
-Xverify:none -cp "${CLASS_PATH}" org.dinky.Dinky  &
-    echo $! >"${PID_PATH}"/${PID_FILE}
-    echo "FLINK VERSION : $FLINK_VERSION"
-    echo "........................................Start Dinky 
Successfully........................................"
-  else
-    echo "Dinky pid $pid is in ${PID_PATH}/${PID_FILE}, Please stop first !!!"
-  fi
-}
-
-startOnPending() {
-  updatePid
-  if [ -z "$pid" ]; then
-    java -Ddruid.mysql.usePingMethod=false -Xms512M -Xmx2048M 
-XX:PermSize=512M -XX:MaxPermSize=1024M -XX:+HeapDumpOnOutOfMemoryError 
-Xverify:none -cp "${CLASS_PATH}" org.dinky.Dinky
-    echo "FLINK VERSION : $FLINK_VERSION"
-    echo "........................................Start Dinky 
Successfully........................................"
-  else
-    echo "Dinky pid $pid is in ${PID_PATH}/${PID_FILE}, Please stop first !!!"
-  fi
-}
-
-startWithJmx() {
-  updatePid
-  if [ -z "$pid" ]; then
-    nohup java -Ddruid.mysql.usePingMethod=false -Xms512M -Xmx2048M 
-XX:PermSize=512M -XX:MaxPermSize=1024M -XX:+HeapDumpOnOutOfMemoryError 
-Xverify:none "${JMX}" -cp "${CLASS_PATH}" org.dinky.Dinky &
-#    echo $! >"${PID_PATH}"/${PID_FILE}
-    updatePid
-    echo "........................................Start Dinky with Jmx 
Successfully.....................................
-    ..."
-  else
-    echo "Dinky pid $pid is in ${PID_PATH}/${PID_FILE}, Please stop first !!!"
-  fi
-}
-
-stop() {
-  updatePid
-  pid=$(cat "${PID_PATH}"/${PID_FILE})
-  if [ -z $pid ]; then
-    echo "Dinky pid is not exist in ${PID_PATH}/${PID_FILE}"
-  else
-    kill -9 $pid
-    sleep 1
-    echo "........................................Stop Dinky 
Successfully....................................."
-    rm -f "${PID_PATH}"/${PID_FILE}
-  fi
-}
-
-status() {
-  updatePid
-  if [ -z $pid ]; then
-    echo ""
-    echo "Service ${JAR_NAME} is not running!"
-    echo ""
-    exit 1
-  else
-    echo ""
-    echo "Service ${JAR_NAME} is running. It's pid=${pid}"
-    echo ""
-  fi
-}
-
-restart() {
-  echo ""
-  stop
-  start
-  echo "........................................Restart 
Successfully........................................"
-}
-
-restartWithJmx() {
-  echo ""
-  stop
-  startWithJmx
-  echo "........................................Restart with Jmx 
Successfully........................................"
-}
-
-case "$1" in
-"start")
-  start
-  ;;
-"startOnPending")
-  startOnPending
-  ;;
-"startWithJmx")
-  startWithJmx
-  ;;
-"stop")
-  stop
-  ;;
-"status")
-  status
-  ;;
-"restart")
-  restart
-  ;;
-"restartWithJmx")
-  restartWithJmx
-  ;;
-*)
-  tips
-  ;;
-esac
diff --git 
a/ambari-server/src/main/resources/addon-services/DINKY/quicklinks/quicklinks.json
 
b/ambari-server/src/main/resources/addon-services/DINKY/quicklinks/quicklinks.json
deleted file mode 100644
index ec5e39e9fb..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/DINKY/quicklinks/quicklinks.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{
-  "name": "default",
-  "description": "default quick links configuration",
-  "configuration": {
-    "protocol":
-    {
-      "type":"http"
-    },
-
-    "links": [
-      {
-        "name": "dinky-application-ui",
-        "label": "Dinky Web UI",
-        "requires_user_name": "false",
-        "component_name": "DINKY_SERVER",
-        "url": "%@://%@:%@",
-        "port":{
-          "http_property": "server.port",
-          "http_default_port": "8888",
-          "regex": "^(\\d+)$",
-          "site": "dinky-application-ui"
-        }
-      }
-    ]
-  }
-}
\ No newline at end of file
diff --git 
a/ambari-server/src/main/resources/addon-services/DINKY/service_advisor.py 
b/ambari-server/src/main/resources/addon-services/DINKY/service_advisor.py
deleted file mode 100755
index c3f5586777..0000000000
--- a/ambari-server/src/main/resources/addon-services/DINKY/service_advisor.py
+++ /dev/null
@@ -1,196 +0,0 @@
-#!/usr/bin/env ambari-python-wrap
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-# Python imports
-from ast import Param
-import imp
-import os
-import traceback
-import re
-import socket
-import fnmatch
-
-
-from resource_management.core.logger import Logger
-
-SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
-STACKS_DIR = os.path.join(SCRIPT_DIR, '../../../../../stacks/')
-PARENT_FILE = os.path.join(STACKS_DIR, 'service_advisor.py')
-
-try:
-  if "BASE_SERVICE_ADVISOR" in os.environ:
-    PARENT_FILE = os.environ["BASE_SERVICE_ADVISOR"]
-  with open(PARENT_FILE, 'rb') as fp:
-    service_advisor = imp.load_module('service_advisor', fp, PARENT_FILE, 
('.py', 'rb', imp.PY_SOURCE))
-except Exception as e:
-  traceback.print_exc()
-  print "Failed to load parent"
-
-class DinkyServiceAdvisor(service_advisor.ServiceAdvisor):
-
-  def __init__(self, *args, **kwargs):
-    self.as_super = super(DinkyServiceAdvisor, self)
-    self.as_super.__init__(*args, **kwargs)
-
-    # Always call these methods
-    self.modifyMastersWithMultipleInstances()
-    self.modifyCardinalitiesDict()
-    self.modifyHeapSizeProperties()
-    self.modifyNotValuableComponents()
-    self.modifyComponentsNotPreferableOnServer()
-    self.modifyComponentLayoutSchemes()
-
-  def modifyMastersWithMultipleInstances(self):
-    """
-    Modify the set of masters with multiple instances.
-    Must be overriden in child class.
-    """
-    # Nothing to do
-    pass
-
-  def modifyCardinalitiesDict(self):
-    """
-    Modify the dictionary of cardinalities.
-    Must be overriden in child class.
-    """
-    # Nothing to do
-    pass
-
-  def modifyHeapSizeProperties(self):
-    """
-    Modify the dictionary of heap size properties.
-    Must be overriden in child class.
-    """
-    pass
-
-  def modifyNotValuableComponents(self):
-    """
-    Modify the set of components whose host assignment is based on other 
services.
-    Must be overriden in child class.
-    """
-    # Nothing to do
-    pass
-
-  def modifyComponentsNotPreferableOnServer(self):
-    """
-    Modify the set of components that are not preferable on the server.
-    Must be overriden in child class.
-    """
-    # Nothing to do
-    pass
-
-  def modifyComponentLayoutSchemes(self):
-    """
-    Modify layout scheme dictionaries for components.
-    The scheme dictionary basically maps the number of hosts to
-    host index where component should exist.
-    Must be overriden in child class.
-    """
-
-    # Nothing to do
-    pass
-
-
-  def getServiceComponentLayoutValidations(self, services, hosts):
-    """
-    Get a list of errors.
-    Must be overriden in child class.
-    """
-
-    return self.getServiceComponentCardinalityValidations(services, hosts, 
"KYUUBI")
-
-  def getServiceConfigurationRecommendations(self, configurations, 
clusterData, services, hosts):
-    """
-    Entry point.
-    Must be overriden in child class.
-    """
-    # Logger.info("Class: %s, Method: %s. Recommending Service 
Configurations." %
-    #            (self.__class__.__name__, inspect.stack()[0][3]))
-
-    recommender = DinkyRecommender()
-    recommender.recommendDinkyConfigurationsFromHDP33(configurations, 
clusterData, services, hosts)
-
-
-  # def getServiceConfigurationRecommendationsForSSO(self, configurations, 
clusterData, services, hosts):
-  #   """
-  #   Entry point.
-  #   Must be overriden in child class.
-  #   """
-  #   recommender = DinkyRecommender()
-  #   recommender.recommendConfigurationsForSSO(configurations, clusterData, 
services, hosts)
-
-  def getServiceConfigurationsValidationItems(self, configurations, 
recommendedDefaults, services, hosts):
-    """
-    Entry point.
-    Validate configurations for the service. Return a list of errors.
-    The code for this function should be the same for each Service Advisor.
-    """
-    # Logger.info("Class: %s, Method: %s. Validating Configurations." %
-    #            (self.__class__.__name__, inspect.stack()[0][3]))
-
-    return []
-
-  @staticmethod
-  def isKerberosEnabled(services, configurations):
-    """
-    Determines if security is enabled by testing the value of 
core-site/hadoop.security.authentication enabled.
-    If the property exists and is equal to "kerberos", then is it enabled; 
otherwise is it assumed to be
-    disabled.
-
-    :type services: dict
-    :param services: the dictionary containing the existing configuration 
values
-    :type configurations: dict
-    :param configurations: the dictionary containing the updated configuration 
values
-    :rtype: bool
-    :return: True or False
-    """
-    if configurations and "core-site" in configurations and \
-            "hadoop.security.authentication" in 
configurations["core-site"]["properties"]:
-      return 
configurations["core-site"]["properties"]["hadoop.security.authentication"].lower()
 == "kerberos"
-    elif services and "core-site" in services["configurations"] and \
-            "hadoop.security.authentication" in 
services["configurations"]["core-site"]["properties"]:
-      return 
services["configurations"]["core-site"]["properties"]["hadoop.security.authentication"].lower()
 == "kerberos"
-    else:
-      return False
-
-
-class DinkyRecommender(service_advisor.ServiceAdvisor):
-  """
-  Dinky Recommender suggests properties when adding the service for the first 
time or modifying configs via the UI.
-  """
-
-  def __init__(self, *args, **kwargs):
-    self.as_super = super(DinkyRecommender, self)
-    self.as_super.__init__(*args, **kwargs)
-
-  def recommendDinkyConfigurationsFromHDP33(self, configurations, clusterData, 
services, hosts):
-    """
-    Recommend configurations for this service based on HDP 3.3.
-    """
-
-
-
-    
- 
-    
-    
-
- 
- 
- 
diff --git 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/configuration/elasticsearch-env.xml
 
b/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/configuration/elasticsearch-env.xml
deleted file mode 100644
index 88599f61c5..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/configuration/elasticsearch-env.xml
+++ /dev/null
@@ -1,102 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-       http://www.apache.org/licenses/LICENSE-2.0
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<configuration supports_adding_forbidden="true">
-  <property name="elasticsearch.download.url">
-    <name>elasticsearch.download.url</name>
-    
<value>https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-8.11.3-linux-x86_64.tar.gz</value>
-    <display-name>Elasticsearch Download Url</display-name>
-    <description>Elasticsearch package download url, it should be a TAR 
file.</description>
-  </property>
-
-  <property>
-    <name>elasticsearch_user</name>
-    <value>elasticsearch</value>
-    <display-name>Elasticsearch User</display-name>
-    <property-type>elasticsearch</property-type>
-    <description>Elasticsearch unix user.</description>
-    <value-attributes>
-      <type>user</type>
-      <overridable>false</overridable>
-      <user-groups>
-        <property>
-          <type>cluster-env</type>
-          <name>user_group</name>
-        </property>
-        <property>
-          <type>elasticsearch-env</type>
-          <name>elasticsearch_group</name>
-        </property>
-      </user-groups>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-
-  <property>
-    <name>elasticsearch_group</name>
-    <value>elasticsearch</value>
-    <display-name>Elasticsearch Group</display-name>
-    <property-type>GROUP</property-type>
-    <description>Elasticsearch unix group.</description>
-    <value-attributes>
-      <type>user</type>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-
-  <property>
-    <name>elasticsearch.home</name>
-    <value>/opt/elasticsearch</value>
-    <display-name>Elasticsearch Home Directory</display-name>
-    <description>Elasticsearch home directory.</description>
-  </property>
-
-  <property>
-    <name>elasticsearch.data.path</name>
-    <value>/data/elasticsearch-data</value>
-    <display-name>ElasticSearch Path Data</display-name>
-    <description>Path to directory where to store the data. (separate multiple 
locations by comma)</description>
-  </property>
-
-  <property>
-    <name>elasticsearch.log.path</name>
-    <value>/var/log/elasticsearch/</value>
-    <display-name>ElasticSearch Path Log</display-name>
-    <description>Path to directory where to store the logs.</description>
-  </property>
-
-  <property>
-    <name>elasticsearch.pid.file</name>
-    <value>/var/run/elasticsearch/elasticsearch.pid</value>
-    <display-name>Elasticsearch Pid File</display-name>
-    <description>Elasticsearch pid file.</description>
-  </property>
-
-  <property>
-    <name>elasticsearch.tmp.path</name>
-    <value>/tmp</value>
-    <display-name>Elasticsearch Temp Path</display-name>
-    <description>Elasticsearch Temp Path</description>
-  </property>
-
-  <property>
-    <name>master.is.datanode</name>
-    <value>true</value>
-    <display-name>Elasticsearch masters is datanode</display-name>
-    <description>Elasticsearch masters is datanode</description>
-  </property>
-</configuration>
\ No newline at end of file
diff --git 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/configuration/elasticsearch-jvm.xml
 
b/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/configuration/elasticsearch-jvm.xml
deleted file mode 100644
index b662a47f20..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/configuration/elasticsearch-jvm.xml
+++ /dev/null
@@ -1,114 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-       http://www.apache.org/licenses/LICENSE-2.0
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<configuration>
-    <property>
-        <name>heap.min</name>
-        <value>1g</value>
-        <display-name>Minimum Heap Size</display-name>
-        <description>Minimum Heap Size</description>
-    </property>
-    <property>
-        <name>heap.max</name>
-        <value>1g</value>
-        <display-name>Maximum Heap Size</display-name>
-        <description>Maximum Heap Size</description>
-    </property>
-    <property>
-        <name>jvm.options.template</name>
-        <display-name>JVM Options Template</display-name>
-        <description>The jinja template for the Elasticsearch JVM options 
file.</description>
-        <value>
-## JVM configuration
-
-################################################################
-## IMPORTANT: JVM heap size
-################################################################
-##
-## You should always set the min and max JVM heap
-## size to the same value. For example, to set
-## the heap to 4 GB, set:
-##
-## -Xms4g
-## -Xmx4g
-##
-## See 
https://www.elastic.co/guide/en/elasticsearch/reference/current/heap-size.html
-## for more information
-##
-################################################################
-
-# Xms represents the initial size of total heap space
-# Xmx represents the maximum size of total heap space
-
--Xms{{heap.min}}
--Xmx{{heap.max}}
-
-################################################################
-## Expert settings
-################################################################
-##
-## All settings below this section are considered
-## expert settings. Don't tamper with them unless
-## you understand what you are doing
-##
-################################################################
-
-## GC configuration
-8-13:-XX:+UseConcMarkSweepGC
-8-13:-XX:CMSInitiatingOccupancyFraction=75
-8-13:-XX:+UseCMSInitiatingOccupancyOnly
-
-## G1GC Configuration
-# NOTE: G1 GC is only supported on JDK version 10 or later
-# to use G1GC, uncomment the next two lines and update the version on the
-# following three lines to your version of the JDK
-# 10-13:-XX:-UseConcMarkSweepGC
-# 10-13:-XX:-UseCMSInitiatingOccupancyOnly
-14-:-XX:+UseG1GC
-14-:-XX:G1ReservePercent=25
-14-:-XX:InitiatingHeapOccupancyPercent=30
-
-## JVM temporary directory
--Djava.io.tmpdir={{elasticsearch.tmp.path}}
-
-## heap dumps
-
-# generate a heap dump when an allocation from the Java heap fails
-# heap dumps are created in the working directory of the JVM
--XX:+HeapDumpOnOutOfMemoryError
-
-# specify an alternative path for heap dumps; ensure the directory exists and
-# has sufficient space
--XX:HeapDumpPath={{elasticsearch.log.path}}
-
-# specify an alternative path for JVM fatal error logs
--XX:ErrorFile={{elasticsearch.log.path}}/hs_err_pid%p.log
-
-## JDK 8 GC logging
-8:-XX:+PrintGCDetails
-8:-XX:+PrintGCDateStamps
-8:-XX:+PrintTenuringDistribution
-8:-XX:+PrintGCApplicationStoppedTime
-8:-Xloggc:{{elasticsearch.log.path}}/gc.log
-8:-XX:+UseGCLogFileRotation
-8:-XX:NumberOfGCLogFiles=32
-8:-XX:GCLogFileSize=64m
-
-# JDK 9+ GC logging
-9-:-Xlog:gc*,gc+age=trace,safepoint:file={{elasticsearch.log.path}}/gc.log:utctime,pid,tags:filecount=32,filesize=64m
-        </value>
-    </property>
-</configuration>
\ No newline at end of file
diff --git 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/configuration/elasticsearch-site.xml
 
b/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/configuration/elasticsearch-site.xml
deleted file mode 100644
index cdac6328d0..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/configuration/elasticsearch-site.xml
+++ /dev/null
@@ -1,134 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl" ?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-       http://www.apache.org/licenses/LICENSE-2.0
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<configuration supports_final="true">
-
-  <!-- Cluster -->
-  <property>
-    <name>cluster.name</name>
-    <value>es-cluster</value>
-    <display-name>cluster.name</display-name>
-    <description>Use a descriptive name for your cluster.</description>
-  </property>
-
-  <!-- Node -->
-  <property>
-    <name>node.attr.rack</name>
-    <value>r1</value>
-    <display-name>node.attr.rack</display-name>
-    <description>Add custom attributes to the node.</description>
-  </property>
-
-  <!-- Network -->
-  <property>
-    <name>network.host</name>
-    <value>0.0.0.0</value>
-    <display-name>network.host</display-name>
-    <description>Set the bind address to a specific IP. (IPv4 or 
IPv6)</description>
-  </property>
-
-  <property>
-    <name>http.port</name>
-    <value>9200</value>
-    <value-attributes>
-      <type>int</type>
-      <minimum>1</minimum>
-      <maximum>65535</maximum>
-    </value-attributes>
-    <display-name>http.port</display-name>
-    <description>Set a custom port for HTTP</description>
-  </property>
-
-  <property>
-    <name>http.cors.enabled</name>
-    <value>true</value>
-    <value-attributes>
-      <type>boolean</type>
-      <overridable>false</overridable>
-    </value-attributes>
-    <display-name>http.cors.enabled</display-name>
-    <description>Enable or disable cross-origin resource sharing, which 
determines whether a browser on another origin can execute requests against 
Elasticsearch. Set to true to enable Elasticsearch to process pre-flight CORS 
requests. Elasticsearch will respond to those requests with the 
Access-Control-Allow-Origin header if the Origin sent in the request is 
permitted by the http.cors.allow-origin list. Set to false (the default) to 
make Elasticsearch ignore the Origin request header, e [...]
-  </property>
-
-  <property>
-    <name>http.cors.allow-origin</name>
-    <value>*</value>
-    <display-name>http.cors.allow-origin</display-name>
-    <description>Which origins to allow. If you prepend and append a forward 
slash (/) to the value, this will be treated as a regular expression, allowing 
you to support HTTP and HTTPs. For example, using 
/https?:\/\/localhost(:[0-9]+)?/ would return the request header appropriately 
in both cases. Defaults to no origins allowed.</description>
-  </property>
-
-  <!-- Gateway -->
-  <property>
-    <name>gateway.recover_after_data_nodes</name>
-    <value>0</value>
-    <value-attributes>
-      <type>int</type>
-      <minimum>0</minimum>
-      <maximum>2592000000</maximum>
-    </value-attributes>
-    <display-name>gateway.recover_after_data_nodes</display-name>
-    <description>Recover as long as this many data nodes have joined the 
cluster.</description>
-  </property>
-
-  <property>
-    <name>gateway.recover_after_time</name>
-    <value>15m</value>
-    <display-name>gateway.recover_after_time</display-name>
-    <description>If the expected number of nodes is not achieved, the recovery 
process waits for the configured amount of time before trying to recover. 
Defaults to 5m if one of the expected_nodes settings is 
configured.</description>
-  </property>
-
-  <property>
-    <name>gateway.expected_data_nodes</name>
-    <value>0</value>
-    <value-attributes>
-      <type>int</type>
-      <minimum>0</minimum>
-      <maximum>2592000000</maximum>
-    </value-attributes>
-    <display-name>gateway.expected_data_nodes</display-name>
-    <description>Number of data nodes expected in the cluster. Recovery of 
local shards begins when the expected number of data nodes join the cluster. 
Defaults to 0.</description>
-  </property>
-
-  <!-- Various -->
-  <property>
-    <name>action.destructive_requires_name</name>
-    <value>true</value>
-    <value-attributes>
-      <type>boolean</type>
-      <overridable>false</overridable>
-    </value-attributes>
-    <display-name>action.destructive_requires_name</display-name>
-    <description>Require explicit names when deleting indices.</description>
-  </property>
-  
-  <property>
-    <name>xpack.security.enabled</name>
-    <value>false</value>
-    <value-attributes>
-      <type>boolean</type>
-      <overridable>true</overridable>
-    </value-attributes>
-  </property>
-  <property>
-    <name>xpack.security.transport.ssl.enabled</name>
-    <value>false</value>
-    <value-attributes>
-      <type>boolean</type>
-      <overridable>true</overridable>
-    </value-attributes>
-  </property>
-  
-</configuration>
\ No newline at end of file
diff --git 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/metainfo.xml 
b/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/metainfo.xml
deleted file mode 100644
index 666f749dd7..0000000000
--- a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/metainfo.xml
+++ /dev/null
@@ -1,98 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-       http://www.apache.org/licenses/LICENSE-2.0
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<metainfo>
-    <schemaVersion>2.0</schemaVersion>
-    <services>
-        <service>
-            <name>ELASTICSEARCH</name>
-            <displayName>ElasticSearch</displayName>
-            <comment>A highly scalable open-source full-text search and 
analytics engine. Including storage, searching, and analyzing big volumes of 
data quickly and in near real time. </comment>
-            <version>8.11.3</version>
-            <components>
-                <component>
-                    <name>ELASTICSEARCH_MASTER</name>
-                    <displayName>Elasticsearch Master</displayName>
-                    <category>MASTER</category>
-                    <cardinality>1+</cardinality>
-                    <versionAdvertised>true</versionAdvertised>
-                    <commandScript>
-                        <script>scripts/ElasticSearchService.py</script>
-                        <scriptType>PYTHON</scriptType>
-                        <timeout>1200</timeout>
-                    </commandScript>
-                    <logs>
-                        <log>
-                            <logId>elasticsearch_master</logId>
-                            <primary>true</primary>
-                        </log>
-                    </logs>
-                </component>
-                
-                <component>
-                    <name>ELASTICSEARCH_DATA</name>
-                    <displayName>Elasticsearch Data</displayName>
-                    <category>SLAVE</category>
-                    <cardinality>0+</cardinality>
-                    <versionAdvertised>true</versionAdvertised>
-                    <commandScript>
-                        <script>scripts/ElasticSearchService.py</script>
-                        <scriptType>PYTHON</scriptType>
-                        <timeout>1200</timeout>
-                    </commandScript>
-                    <logs>
-                        <log>
-                            <logId>elasticsearch_data</logId>
-                            <primary>true</primary>
-                        </log>
-                    </logs>
-                </component>
-            </components>
-            <osSpecifics>
-                <osSpecific>
-                    <osFamily>any</osFamily>
-                    <packages>
-                        <package>
-                            <name>elasticsearch-x.y.z</name>
-                        </package>
-                    </packages>
-                </osSpecific>
-            </osSpecifics>
-            
-            <commandScript>
-                <script>scripts/ElasticSearchServiceCheck.py</script>
-                <scriptType>PYTHON</scriptType>
-                <timeout>300</timeout>
-            </commandScript>
-
-            <configuration-dependencies>
-                <config-type>elasticsearch-env</config-type>
-                <config-type>elasticsearch-jvm</config-type>
-                <config-type>elasticsearch-site</config-type>
-            </configuration-dependencies>
-
-            <restartRequiredAfterChange>true</restartRequiredAfterChange>
-
-            <quickLinksConfigurations>
-                <quickLinksConfiguration>
-                    <fileName>quicklinks.json</fileName>
-                    <default>true</default>
-                </quickLinksConfiguration>
-            </quickLinksConfigurations>
-
-        </service>
-    </services>
-</metainfo>
\ No newline at end of file
diff --git 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/package/scripts/ElasticSearchService.py
 
b/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/package/scripts/ElasticSearchService.py
deleted file mode 100644
index 9c2db8edb8..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/package/scripts/ElasticSearchService.py
+++ /dev/null
@@ -1,224 +0,0 @@
-#! /usr/bin/env python2
-# -*- coding: utf-8 -*-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8 
ff=unix ft=python
-
-import Utils
-import grp
-import os
-import pwd
-import signal
-import socket
-import tarfile
-import tempfile
-import time
-import urllib2
-import yaml
-from resource_management import Script, User, Group, Execute
-from resource_management.core.logger import Logger
-from resource_management.libraries.functions.check_process_status import 
check_process_status
-
-
-class ElasticSearchService(Script):
-    def install(self, env):
-        import params
-        if env is not None:
-            env.set_params(params)
-        # do some clean     
-        self.__cleanPreviousInstallation()
-        # check and create group and user
-        self.__createGroupIfNotExist()
-        self.__createUserIfNotExist()
-        self.__prepareDirectory()
-        self.__extractInstallationFile(self.__downloadInstallationFile())
-        Logger.info("ElasticSearch install completed")
-        # configure
-        self.configure(env)
-
-    def start(self, env, upgrade_type=None):
-        import params
-        if env is not None:
-            env.set_params(params)
-        # configure
-        self.configure(env)
-        cmd = "%s -d -p %s" % (params.elasticSearchMainCmd, 
params.elasticSearchPidFile)
-        Logger.info("Start: %s" % cmd)
-        Execute(cmd, user=params.elasticSearchUser)
-        time.sleep(10)
-
-    def stop(self, env, upgrade_type=None):
-        import params
-        if env is not None:
-            env.set_params(params)
-        if os.path.exists(params.elasticSearchPidFile):
-            fin = open(params.elasticSearchPidFile, "r")
-            pid = int(fin.read())
-            fin.close()
-            os.kill(pid, signal.SIGTERM)
-            time.sleep(10)
-            try:
-                os.kill(pid, signal.SIGKILL)
-            except Exception as e:
-                pass
-            time.sleep(3)
-        Utils.remove(params.elasticSearchPidFile)
-
-    def status(self, env, upgrade_type=None):
-        import params
-        env.set_params(params)
-        time.sleep(5)
-        check_process_status(params.elasticSearchPidFile)
-
-    def configure(self, env, upgrade_type=None, config_dir=None):
-        import params
-        if env is not None:
-            env.set_params(params)
-        self.__createSiteConfig()
-        self.__createJvmOptionFile()
-        self.__prepareDirectory()
-        Logger.info("configure over")
-
-    def __cleanPreviousInstallation(self):
-        self.__cleanLogPath()
-        self.__cleanPidFile()
-        self.__cleanInstallationHome()
-
-    def __cleanInstallationHome(self):
-        import params
-        esHome = params.elasticSearchHome
-        esHomeRealPath = os.path.realpath(esHome)
-        Logger.info("Remove %s" % esHomeRealPath)
-        if os.path.exists(esHome):
-            os.unlink(esHome)
-        Utils.remove(esHomeRealPath)
-        Logger.info("Remove %s" % esHome)
-        Utils.remove(esHome)
-
-    def __cleanLogPath(self):
-        import params
-        Logger.info("Remove Log Path: %s" % params.elasticSearchLogPath)
-        Utils.cleanDir(params.elasticSearchLogPath)
-
-    def __cleanPidFile(self):
-        import params
-        Logger.info("Remove PID file: %s" % params.elasticSearchPidFile)
-        Utils.remove(params.elasticSearchPidFile)
-
-    def __createGroupIfNotExist(self):
-        import params
-        try:
-            grp.getgrnam(params.elasticSearchGroup)
-        except Exception:
-            Logger.info(
-                "Group: %s not existed, create it" % params.elasticSearchGroup)
-            Group(params.elasticSearchGroup)
-            Logger.info(
-                "Group: %s create successful" % params.elasticSearchGroup)
-
-    def __createUserIfNotExist(self):
-        import params
-        try:
-            pwd.getpwnam(params.elasticSearchUser)
-        except Exception:
-            Logger.info(
-                "User: %s not existed, create it" % params.elasticSearchUser)
-            User(params.elasticSearchUser,
-                 gid=params.elasticSearchGroup,
-                 groups=[params.elasticSearchGroup],
-                 ignore_failures=True
-                 )
-            Logger.info(
-                "User: %s create successful" % params.elasticSearchUser)
-
-    def __downloadInstallationFile(self):
-        import params
-        localFile = tempfile.NamedTemporaryFile(delete=False)
-        instance = urllib2.urlopen(params.elasticSearchDownloadUrl)
-        blockSize = 8192
-        while True:
-            buffer = instance.read(blockSize)
-            if not buffer:
-                break
-            localFile.write(buffer)
-        localFile.close()
-        return localFile.name
-
-    def __extractInstallationFile(self, installationFile):
-        import params
-        tar = tarfile.open(installationFile)
-        childNames = tar.getnames()
-        elasticSearchName = childNames[0]
-        elasticSearchRealPath = 
os.path.join(os.path.dirname(params.elasticSearchHome), elasticSearchName)
-        Utils.remove(elasticSearchRealPath)
-        for name in tar.getnames():
-            tar.extract(name, path=os.path.dirname(elasticSearchRealPath))
-        tar.close()
-        if os.path.exists(params.elasticSearchHome):
-            os.unlink(params.elasticSearchHome)
-        os.symlink(elasticSearchRealPath, params.elasticSearchHome)
-        Logger.info("Extract installation file: %s" % params.elasticSearchHome)
-        Utils.remove(installationFile)
-        for x in [elasticSearchRealPath, params.elasticSearchHome]:
-            Utils.chown(x, params.elasticSearchUser, params.elasticSearchGroup)
-
-    def __prepareDirectory(self):
-        import params
-        for name in [params.elasticSearchDataPath, params.elasticSearchLogPath,
-                     os.path.dirname(params.elasticSearchPidFile)]:
-            if not os.path.exists(name):
-                os.makedirs(name, mode=0o755)
-            Utils.chown(name, params.elasticSearchUser,
-                        params.elasticSearchGroup)
-
-    def __createSiteConfig(self):
-        import params
-
-        configs = {}
-        for k, v in params.elasticSearchSite.iteritems():
-            if Utils.isBooleanString(v):
-                configs[k] = Utils.toBoolean(v)
-            else:
-                configs[k] = v
-        hostname = socket.gethostname()
-        isMasterNode = hostname in params.elasticSearchMasterHosts
-        configs["node.name"] = hostname
-        if isMasterNode:
-            if params.masterIsDatanode:
-                configs["node.roles"] = ['data','master']
-                
-        else:
-            configs["node.roles"] = ['data']
-        configs["path.data"] = params.elasticSearchDataPath
-        configs["path.logs"] = params.elasticSearchLogPath
-        elastic_search_data_hosts = params.elasticSearchDataHosts if 
hasattr(params, 'elasticSearchDataHosts') else []
-        if elastic_search_data_hosts and len(elastic_search_data_hosts) > 0:
-            configs["discovery.seed_hosts"] = list(
-                set(params.elasticSearchMasterHosts + 
elastic_search_data_hosts))
-        else:
-            configs["discovery.seed_hosts"] = 
list(set(params.elasticSearchMasterHosts))
-        if params.serviceVersion and params.serviceVersion >= "7.0.0":
-            configs["cluster.initial_master_nodes"] = 
params.elasticSearchMasterHosts
-        fin = open(params.elasticSearchConfigFile, "w")
-        fin.write(yaml.safe_dump(configs, encoding='utf-8', 
allow_unicode=True, default_flow_style=False,
-                                 explicit_start=True))
-        fin.close()
-        Utils.chown(params.elasticSearchConfigFile, params.elasticSearchUser, 
params.elasticSearchGroup)
-
-    def __createJvmOptionFile(self):
-        import params
-        configs = {}
-        for k, v in params.elasticSearchJvm.iteritems():
-            configs[k] = v
-        for k, v in params.elasticSearchEnv.iteritems():
-            configs[k] = v
-        content = params.elasticSearchJvmTemplateContent
-        for k, v in configs.iteritems():
-            content = content.replace("{{%s}}" % k, v)
-        fin = open(params.elasticSearchJvmOptionsFile, "w")
-        fin.write(content)
-        fin.close()
-        Utils.chown(params.elasticSearchConfigFile, params.elasticSearchUser,
-                    params.elasticSearchGroup)
-        
-if __name__ == "__main__":
-    service = ElasticSearchService()
-    service.execute()
diff --git 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/package/scripts/ElasticSearchServiceCheck.py
 
b/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/package/scripts/ElasticSearchServiceCheck.py
deleted file mode 100644
index 3354cf9da7..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/package/scripts/ElasticSearchServiceCheck.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8 
ff=unix ft=python
-
-import time
-import sys
-import urllib2
-import json
-
-from resource_management import Script
-from resource_management.core.logger import Logger
-
-class ElasticSearchServiceCheck(Script):
-
-    def service_check(self, env):
-        import params
-        env.set_params(params)
-        time.sleep(5)
-
-        health_url = 
"http://{0}:{1}/_cluster/health?wait_for_status=green&timeout=120s".format(params.hostname,
 params.elasticSearchHttpPort)
-        fd = urllib2.urlopen(health_url)
-        content = fd.read()
-        fd.close()
-        result = json.loads(content)
-        status = result["status"] == u"green"
-        if not status:
-            Logger.warning("Elasticsearch service check failed")
-            sys.exit(1)
-        else:
-            Logger.info("Elasticsearch service check successful")
-            sys.exit(0)
-
-if __name__ == "__main__":
-    ElasticSearchServiceCheck().execute()
diff --git 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/package/scripts/Utils.py
 
b/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/package/scripts/Utils.py
deleted file mode 100644
index cc4d2513f8..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/package/scripts/Utils.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#! /usr/bin/env python2
-# -*- coding: utf-8 -*-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8 
ff=unix ft=python
-
-import os
-import shutil
-import pwd
-import grp
-import unicodedata
-
-def chown(path, user, group):
-    userMeta = pwd.getpwnam(user)
-    uid = userMeta.pw_uid
-    groupMeta = grp.getgrnam(group)
-    gid = groupMeta.gr_gid
-    if os.path.isdir(path):
-        os.chown(path, uid, gid)
-        for root, dirnames, filenames in os.walk(path):
-            for dirname in dirnames:
-                os.chown(os.path.join(root, dirname), uid, gid)
-            for filename in filenames:
-                os.chown(os.path.join(root, filename), uid, gid)
-    elif os.path.isfile(path):
-        os.chown(path, uid, gid)
-
-def remove(path):
-    if os.path.exists(path):
-        if os.path.islink(path) or os.path.isfile(path):
-            os.unlink(path)
-        elif os.path.isdir(path):
-            shutil.rmtree(path)
-
-def cleanDir(path):
-    if os.path.exists(path):
-        for x in os.listdir(path):
-            remove(os.path.join(path, x))
-        
-def isBooleanString(s):
-    return s == "true" or s == "false"
-
-def toBoolean(s):
-    if s == "true":
-        return True
-    else:
-        return False
-
-def isNumberic(s):
-    try:
-        float(s)
-        return True
-    except ValueError:
-        pass
- 
-    try:
-        unicodedata.numeric(s)
-        return True
-    except (TypeError, ValueError):
-        pass
-
-    return False
-
-def toNumber(s):
-    try:
-        return int(s)
-    except Exception:
-        pass
-    try:
-        return float(s)
-    except Exception:
-        pass
-    return unicodedata.numeric(s.decode("utf-8"))
\ No newline at end of file
diff --git 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/package/scripts/params.py
 
b/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/package/scripts/params.py
deleted file mode 100644
index b35eb875f2..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/package/scripts/params.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#! /usr/bin/env python2
-# -*- coding: utf-8 -*-
-# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8 
ff=unix ft=python
-
-import os
-
-from resource_management import Script
-
-config = Script.get_config()
-elasticSearchMasterHosts = 
config["clusterHostInfo"]["elasticsearch_master_hosts"]
-elasticSearchDataHosts = config["clusterHostInfo"]["elasticsearch_data_hosts"]
-javaHome = config["ambariLevelParams"]["java_home"]
-hostname = config['agentLevelParams']['hostname']
-
-serviceVersion = config['serviceLevelParams']['version']
-elasticSearchEnv = config["configurations"]["elasticsearch-env"]
-elasticSearchSite = config["configurations"]["elasticsearch-site"]
-elasticSearchJvm = config["configurations"]["elasticsearch-jvm"]
-elasticSearchDownloadUrl = elasticSearchEnv["elasticsearch.download.url"]
-elasticSearchUser = elasticSearchEnv["elasticsearch_user"]
-elasticSearchGroup = elasticSearchEnv["elasticsearch_group"]
-elasticSearchHome = elasticSearchEnv["elasticsearch.home"]
-elasticSearchDataPath = elasticSearchEnv["elasticsearch.data.path"]
-elasticSearchLogPath = elasticSearchEnv["elasticsearch.log.path"]
-elasticSearchPidFile = elasticSearchEnv["elasticsearch.pid.file"]
-elasticSearchTmpDir = elasticSearchEnv["elasticsearch.tmp.path"]
-masterIsDatanode = elasticSearchEnv['master.is.datanode']
-elasticSearchConfigDir = os.path.join(elasticSearchHome, "config")
-elasticSearchConfigFile = os.path.join(elasticSearchConfigDir,
-                                       "elasticsearch.yml")
-elasticSearchJvmOptionsFile = os.path.join(elasticSearchConfigDir, 
"jvm.options")
-elasticSearchJvmTemplateContent = elasticSearchJvm["jvm.options.template"]
-elasticSearchBinDir = os.path.join(elasticSearchHome, "bin")
-elasticSearchMainCmd = os.path.join(elasticSearchBinDir, "elasticsearch")
-elasticSearchHttpPort = elasticSearchSite["http.port"]
diff --git 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/quicklinks/quicklinks.json
 
b/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/quicklinks/quicklinks.json
deleted file mode 100644
index fef3082a5d..0000000000
--- 
a/ambari-server/src/main/resources/addon-services/ELASTICSEARCH/quicklinks/quicklinks.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
-  "name": "default",
-  "description": "default quick links configuration",
-  "configuration": {
-    "protocol": {
-      "type": "http",
-      "checks": [
-      ]
-    },
-    "links": [
-      {
-        "name": "elasticsearch_health_link",
-        "label": "ElasticSearch Health",
-        "requires_user_name": "false",
-        "component_name": "ELASTICSEARCH_MASTER",
-        "url": "%@://%@:%@/_cat/health?v",
-        "port": {
-          "http_property": "http.port",
-          "http_default_port": "9200",
-          "https_property": "http.port",
-          "https_default_port": "9200",
-          "regex": "^(\\d+)",
-          "site": "elasticsearch-site"
-        }
-      },
-      {
-        "name": "elasticsearch_indices_link",
-        "label": "ElasticSearch Indexes",
-        "requires_user_name": "false",
-        "component_name": "ELASTICSEARCH_MASTER",
-        "url": "%@://%@:%@/_cat/indices?v",
-        "port": {
-          "http_property": "http.port",
-          "http_default_port": "9200",
-          "https_property": "http.port",
-          "https_default_port": "9200",
-          "regex": "^(\\d+)",
-          "site": "elasticsearch-site"
-        }
-      }
-    ]
-  }
-}
\ No newline at end of file
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HBASE/metainfo.xml
 
b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HBASE/metainfo.xml
index 761fb7d7b1..3cbcef1fc2 100755
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HBASE/metainfo.xml
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HBASE/metainfo.xml
@@ -133,12 +133,12 @@
         </component>
 
         <component>
-          <name>HTTP_GATEWAY</name>
-          <displayName>HTTP gateway</displayName>
-          <category>MASTER</category>
+          <name>HBASE_REST_GATEWAY</name>
+          <displayName>HBase REST Gateway</displayName>
+          <category>SLAVE</category>
           <cardinality>0+</cardinality>
           <commandScript>
-            <script>scripts/http_component.py</script>
+            <script>scripts/hbase_rest_gateway.py</script>
             <scriptType>PYTHON</scriptType>
             <timeout>600</timeout>
           </commandScript>
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HBASE/package/scripts/hbase_rest_gateway.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HBASE/package/scripts/hbase_rest_gateway.py
new file mode 100755
index 0000000000..76ffdec86c
--- /dev/null
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HBASE/package/scripts/hbase_rest_gateway.py
@@ -0,0 +1,92 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import grp
+import pwd
+
+from resource_management import *
+from resource_management.libraries.resources.xml_config import XmlConfig
+
+class Http_component(Script):
+    def configure(self, env):
+        import params
+        Logger.info("Configuring HBase REST gateway")
+
+        Directory(params.pid_dir,
+                  owner=params.hbase_user,
+                  group=params.user_group,
+                  create_parents=True,
+                  mode=0o755,
+                  )
+
+
+        Directory(params.log_dir,
+                  owner=params.hbase_user,
+                  group=params.user_group,
+                  create_parents=True,
+                  mode=0o755,
+                  )
+
+
+        XmlConfig("hbase-site.xml",
+                  conf_dir = params.hbase_conf_dir,
+                  configurations = 
params.config['configurations']['hbase-site'],
+                  
configuration_attributes=params.config['configurationAttributes']['hbase-site']
+                  )
+
+
+
+    def install(self, env):
+        import params
+        self.install_packages(env)
+
+    def stop(self, env):
+        import params
+        Logger.info("Stopping HBase REST gateway")
+        command = format("{params.daemon_script} stop rest")
+        Execute(
+            command,
+            user=params.hbase_user,
+            logoutput=True)
+
+
+        File(params.hbase_rest_pid_file,
+             action = "delete",
+             owner = params.hbase_user
+             )
+
+
+    def start(self, env):
+        import params
+        self.configure(env)
+        command = format("{params.daemon_script} start rest")
+        Execute(
+            command,
+            user=params.hbase_user,
+            logoutput=True)
+        Logger.info("Started HBase REST gateway")
+
+
+    def status(self, env):
+        import params
+        check_process_status(params.hbase_rest_pid_file)
+
+
+if __name__ == "__main__":
+    Http_component().execute()
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HBASE/role_command_order.json
 
b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HBASE/role_command_order.json
index 998cd99660..7bdd53b1c3 100755
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HBASE/role_command_order.json
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HBASE/role_command_order.json
@@ -4,7 +4,7 @@
     "HBASE_REGIONSERVER-START": ["HBASE_MASTER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", 
"HBASE_REGIONSERVER-START"],
     "HBASE_MASTER-STOP": ["HBASE_REGIONSERVER-STOP"],
-    "HTTP_GATEWAY-START" : ["HBASE_MASTER-START"],
+    "HBASE_REST_GATEWAY-START" : ["HBASE_MASTER-START"],
     "HBASE_MASTER-START": ["NAMENODE-START", "DATANODE-START", 
"ZOOKEEPER_SERVER-START", "RANGER_USERSYNC-START", "RANGER_KMS_SERVER-START"]
   }
 }
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/KERBEROS/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/KERBEROS/package/scripts/service_check.py
index 1fccb4f342..4c3a068ec9 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/KERBEROS/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/KERBEROS/package/scripts/service_check.py
@@ -51,7 +51,7 @@ class KerberosServiceCheck(Script):
           os.path.isfile(params.smoke_test_keytab_file)):
       print("Performing kinit using %s" % params.smoke_test_principal)
 
-      ccache_file_name = 
HASH_ALGORITHM("{0}|{1}".format(params.smoke_test_principal, 
params.smoke_test_keytab_file)).hexdigest()
+      ccache_file_name = 
HASH_ALGORITHM("{0}|{1}".format(params.smoke_test_principal, 
params.smoke_test_keytab_file).encode('utf-8')).hexdigest()
       ccache_file_path = 
"{0}{1}kerberos_service_check_cc_{2}".format(params.tmp_dir, os.sep, 
ccache_file_name)
 
       kinit_path_local = 
functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',
 None))
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/KERBEROS/properties/krb5_conf.j2
 
b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/KERBEROS/properties/krb5_conf.j2
index d201c33c55..574147f027 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/KERBEROS/properties/krb5_conf.j2
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/KERBEROS/properties/krb5_conf.j2
@@ -44,7 +44,7 @@
 {%- if master_kdc %}
     master_kdc = {{master_kdc|trim()}}
 {%- endif -%}
-{%- if kdc_hosts > 0 -%}
+{%- if kdc_hosts|length > 0 -%}
 {%- set kdc_host_list = kdc_hosts.split(',')  -%}
 {%- if kdc_host_list and kdc_host_list|length > 0 %}
     admin_server = {{admin_server_host|default(kdc_host_list[0]|trim(), True)}}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to