Repository: ambari
Updated Branches:
  refs/heads/branch-2.4 80d8d7eab -> abfb30ea1
Revert "AMBARI-15612: Add Livy to HDP 2.5 as slave component of Spark ( Jeff Zhang via jluniya )"

This reverts commit 80d8d7eabdabb3f3b96c72ec0a953869cb4aec3b.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/abfb30ea
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/abfb30ea
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/abfb30ea

Branch: refs/heads/branch-2.4
Commit: abfb30ea16cf3bd658181810fe608d7d12ee3ef0
Parents: 80d8d7e
Author: Jayush Luniya <[email protected]>
Authored: Thu Jun 2 10:07:53 2016 -0700
Committer: Jayush Luniya <[email protected]>
Committed: Thu Jun 2 10:07:53 2016 -0700

----------------------------------------------------------------------
 .../libraries/functions/constants.py            |   1 -
 .../libraries/functions/stack_features.py       |   5 -
 .../SPARK/1.2.1/package/scripts/livy_server.py  |  68 -------
 .../SPARK/1.2.1/package/scripts/livy_service.py |  46 -----
 .../SPARK/1.2.1/package/scripts/params.py       |  43 +---
 .../SPARK/1.2.1/package/scripts/params.py.orig  | 200 -------------------
 .../1.2.1/package/scripts/service_check.py      |  12 +-
 .../SPARK/1.2.1/package/scripts/setup_livy.py   |  79 --------
 .../1.2.1/package/scripts/status_params.py      |   8 +-
 .../HDP/2.0.6/properties/stack_features.json    |   5 -
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |   7 -
 .../stacks/HDP/2.5/role_command_order.json      |   4 +-
 .../services/SPARK/configuration/livy-conf.xml  |  59 ------
 .../services/SPARK/configuration/livy-env.xml   |  92 ---------
 .../configuration/livy-log4j-properties.xml     |  41 ----
 .../configuration/livy-spark-blacklist.xml      |  40 ----
 .../stacks/HDP/2.5/services/SPARK/kerberos.json | 114 -----------
 .../stacks/HDP/2.5/services/SPARK/metainfo.xml  |  82 --------
 18 files changed, 5 insertions(+), 901 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
index 34d1a1a..eba1abf 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
@@ -55,7 +55,6 @@ class StackFeature:
   COPY_TARBALL_TO_HDFS = "copy_tarball_to_hdfs"
   SPARK_16PLUS = "spark_16plus"
   SPARK_THRIFTSERVER = "spark_thriftserver"
-  SPARK_LIVY = "spark_livy"
   STORM_KERBEROS = "storm_kerberos"
   STORM_AMS = "storm_ams"
   CREATE_KAFKA_BROKER_ID = "create_kafka_broker_id"

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
index 6804be0..9a3fa31 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
@@ -248,11 +248,6 @@ _DEFAULT_STACK_FEATURES = {
       "name": "hbase_home_directory",
       "description": "Hbase home directory in HDFS needed for HBASE backup",
       "min_version": "2.5.0.0"
-    },
-    {
-      "name": "spark_livy",
-      "description": "Livy as slave component of spark",
-      "min_version": "2.5.0.0"
     }
   ]
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
deleted file mode 100644
index 7587ab5..0000000
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions.check_process_status import check_process_status
-
-from livy_service import livy_service
-from setup_livy import setup_livy
-
-class LivyServer(Script):
-
-  def install(self, env):
-    import params
-    env.set_params(params)
-
-    self.install_packages(env)
-
-  def configure(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-
-    setup_livy(env, 'server', upgrade_type=upgrade_type, action = 'config')
-
-  def start(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-
-    self.configure(env)
-    livy_service('server', upgrade_type=upgrade_type, action='start')
-
-  def stop(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-
-    livy_service('server', upgrade_type=upgrade_type, action='stop')
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-
-    check_process_status(status_params.livy_server_pid_file)
-
-
-  def get_component_name(self):
-    return "livy-server"
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    pass
-
-if __name__ == "__main__":
-  LivyServer().execute()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_service.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_service.py
deleted file mode 100644
index a78f50c..0000000
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_service.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-from resource_management.libraries.functions import format
-from resource_management.core.resources.system import File, Execute
-import threading
-
-def livy_service(name, upgrade_type=None, action=None):
-  import params
-
-  if action == 'start':
-    livyserver_no_op_test = format(
-      'ls {livy_server_pid_file} >/dev/null 2>&1 && ps -p `cat {livy_server_pid_file}` >/dev/null 2>&1')
-    Execute(format('{livy_server_start}'),
-            user=params.livy_user,
-            environment={'JAVA_HOME': params.java_home},
-            not_if=livyserver_no_op_test)
-
-  elif action == 'stop':
-    Execute(format('{livy_server_stop}'),
-            user=params.livy_user,
-            environment={'JAVA_HOME': params.java_home}
-            )
-    File(params.livy_server_pid_file,
-         action="delete"
-         )
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
index dfbdb12..d01ba21 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
@@ -28,6 +28,7 @@ import resource_management.libraries.functions
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
+from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
@@ -40,9 +41,7 @@ from resource_management.libraries.script.script import Script
 SERVER_ROLE_DIRECTORY_MAP = {
   'SPARK_JOBHISTORYSERVER' : 'spark-historyserver',
   'SPARK_CLIENT' : 'spark-client',
-  'SPARK_THRIFTSERVER' : 'spark-thriftserver',
-  'LIVY_SERVER' : 'livy-server',
-  'LIVY_CLIENT' : 'livy-client'
+  'SPARK_THRIFTSERVER' : 'spark-thriftserver'
 }

 component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "SPARK_CLIENT")
@@ -181,44 +180,6 @@ hdfs_site = config['configurations']['hdfs-site']

 dfs_type = default("/commandParams/dfs_type", "")

-# livy related config
-
-# livy is only supported from HDP 2.5
-has_livyserver = False
-
-if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, stack_version_formatted):
-  livy_component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "LIVY_SERVER")
-  livy_conf = format("{stack_root}/current/{livy_component_directory}/conf")
-  livy_log_dir = config['configurations']['livy-env']['livy_log_dir']
-  livy_pid_dir = status_params.livy_pid_dir
-  livy_home = format("{stack_root}/current/{livy_component_directory}")
-  livy_user = status_params.livy_user
-  livy_group = status_params.livy_group
-  user_group = status_params.user_group
-  livy_hdfs_user_dir = format("/user/{livy_user}")
-  livy_server_pid_file = status_params.livy_server_pid_file
-
-  livy_server_start = format("{livy_home}/bin/livy-server start")
-  livy_server_stop = format("{livy_home}/bin/livy-server stop")
-  livy_logs_dir = format("{livy_home}/logs")
-
-  livy_env_sh = config['configurations']['livy-env']['content']
-  livy_log4j_properties = config['configurations']['livy-log4j-properties']['content']
-  livy_spark_blacklist_properties = config['configurations']['livy-spark-blacklist']['content']
-
-  livy_kerberos_keytab = config['configurations']['livy-conf']['livy.server.kerberos.keytab']
-  livy_kerberos_principal = config['configurations']['livy-conf']['livy.server.kerberos.principal']
-
-  livy_livyserver_hosts = default("/clusterHostInfo/livy_server_hosts", [])
-
-  if len(livy_livyserver_hosts) > 0:
-    livy_livyserver_host = livy_livyserver_hosts[0]
-    has_livyserver = True
-
-  livy_livyserver_port = default('configurations/livy-conf/livy.server.port',8998)
-
-
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py.orig
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py.orig b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py.orig
deleted file mode 100644
index d01ba21..0000000
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py.orig
+++ /dev/null
@@ -1,200 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-
-import status_params
-from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions import StackFeature
-from setup_spark import *
-
-import resource_management.libraries.functions
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions import format
-from resource_management.libraries.functions.get_stack_version import get_stack_version
-from resource_management.libraries.functions.version import format_stack_version
-from resource_management.libraries.functions.default import default
-from resource_management.libraries.functions import get_kinit_path
-from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
-
-from resource_management.libraries.script.script import Script
-
-# a map of the Ambari role to the component name
-# for use with <stack-root>/current/<component>
-SERVER_ROLE_DIRECTORY_MAP = {
-  'SPARK_JOBHISTORYSERVER' : 'spark-historyserver',
-  'SPARK_CLIENT' : 'spark-client',
-  'SPARK_THRIFTSERVER' : 'spark-thriftserver'
-}
-
-component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "SPARK_CLIENT")
-
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-
-stack_name = status_params.stack_name
-stack_root = Script.get_stack_root()
-stack_version_unformatted = config['hostLevelParams']['stack_version']
-stack_version_formatted = format_stack_version(stack_version_unformatted)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
-
-# New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade
-version = default("/commandParams/version", None)
-
-spark_conf = '/etc/spark/conf'
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
-
-if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
-  hadoop_home = stack_select.get_hadoop_dir("home")
-  spark_conf = format("{stack_root}/current/{component_directory}/conf")
-  spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
-  spark_daemon_memory = config['configurations']['spark-env']['spark_daemon_memory']
-  spark_pid_dir = status_params.spark_pid_dir
-  spark_home = format("{stack_root}/current/{component_directory}")
-
-spark_thrift_server_conf_file = spark_conf + "/spark-thrift-sparkconf.conf"
-java_home = config['hostLevelParams']['java_home']
-
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-user_group = config['configurations']['cluster-env']['user_group']
-
-spark_user = status_params.spark_user
-hive_user = status_params.hive_user
-spark_group = status_params.spark_group
-user_group = status_params.user_group
-spark_hdfs_user_dir = format("/user/{spark_user}")
-spark_history_dir = default('/configurations/spark-defaults/spark.history.fs.logDirectory', "hdfs:///spark-history")
-
-spark_history_server_pid_file = status_params.spark_history_server_pid_file
-spark_thrift_server_pid_file = status_params.spark_thrift_server_pid_file
-
-spark_history_server_start = format("{spark_home}/sbin/start-history-server.sh")
-spark_history_server_stop = format("{spark_home}/sbin/stop-history-server.sh")
-
-spark_thrift_server_start = format("{spark_home}/sbin/start-thriftserver.sh")
-spark_thrift_server_stop = format("{spark_home}/sbin/stop-thriftserver.sh")
-spark_logs_dir = format("{spark_home}/logs")
-spark_hadoop_lib_native = format("{stack_root}/current/hadoop-client/lib/native")
-
-spark_submit_cmd = format("{spark_home}/bin/spark-submit")
-spark_smoke_example = "org.apache.spark.examples.SparkPi"
-spark_service_check_cmd = format(
-  "{spark_submit_cmd} --class {spark_smoke_example} --master yarn-cluster --num-executors 1 --driver-memory 256m --executor-memory 256m --executor-cores 1 {spark_home}/lib/spark-examples*.jar 1")
-
-spark_jobhistoryserver_hosts = default("/clusterHostInfo/spark_jobhistoryserver_hosts", [])
-
-if len(spark_jobhistoryserver_hosts) > 0:
-  spark_history_server_host = spark_jobhistoryserver_hosts[0]
-else:
-  spark_history_server_host = "localhost"
-
-# spark-defaults params
-spark_yarn_historyServer_address = default(spark_history_server_host, "localhost")
-
-spark_history_ui_port = config['configurations']['spark-defaults']['spark.history.ui.port']
-
-spark_env_sh = config['configurations']['spark-env']['content']
-spark_log4j_properties = config['configurations']['spark-log4j-properties']['content']
-spark_metrics_properties = config['configurations']['spark-metrics-properties']['content']
-
-hive_server_host = default("/clusterHostInfo/hive_server_host", [])
-is_hive_installed = not len(hive_server_host) == 0
-
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-spark_kerberos_keytab = config['configurations']['spark-defaults']['spark.history.kerberos.keytab']
-spark_kerberos_principal = config['configurations']['spark-defaults']['spark.history.kerberos.principal']
-
-spark_thriftserver_hosts = default("/clusterHostInfo/spark_thriftserver_hosts", [])
-has_spark_thriftserver = not len(spark_thriftserver_hosts) == 0
-
-# hive-site params
-spark_hive_properties = {
-  'hive.metastore.uris': config['configurations']['hive-site']['hive.metastore.uris']
-}
-
-# security settings
-if security_enabled:
-  spark_principal = spark_kerberos_principal.replace('_HOST',spark_history_server_host.lower())
-
-  if is_hive_installed:
-    spark_hive_properties.update({
-      'hive.metastore.sasl.enabled': str(config['configurations']['hive-site']['hive.metastore.sasl.enabled']).lower(),
-      'hive.metastore.kerberos.keytab.file': config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file'],
-      'hive.server2.authentication.spnego.principal': config['configurations']['hive-site']['hive.server2.authentication.spnego.principal'],
-      'hive.server2.authentication.spnego.keytab': config['configurations']['hive-site']['hive.server2.authentication.spnego.keytab'],
-      'hive.metastore.kerberos.principal': config['configurations']['hive-site']['hive.metastore.kerberos.principal'],
-      'hive.server2.authentication.kerberos.principal': config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal'],
-      'hive.server2.authentication.kerberos.keytab': config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab'],
-      'hive.server2.authentication': config['configurations']['hive-site']['hive.server2.authentication'],
-    })
-
-  hive_kerberos_keytab = config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab']
-  hive_kerberos_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
-
-# thrift server support - available on HDP 2.3 or higher
-spark_thrift_sparkconf = None
-spark_thrift_cmd_opts_properties = ''
-spark_thrift_fairscheduler_content = None
-spark_thrift_master = "yarn-client"
-if 'nm_hosts' in config['clusterHostInfo'] and len(config['clusterHostInfo']['nm_hosts']) == 1:
-  # use local mode when there's only one nodemanager
-  spark_thrift_master = "local[4]"
-
-if has_spark_thriftserver and 'spark-thrift-sparkconf' in config['configurations']:
-  spark_thrift_sparkconf = config['configurations']['spark-thrift-sparkconf']
-  spark_thrift_cmd_opts_properties = config['configurations']['spark-env']['spark_thrift_cmd_opts']
-  if is_hive_installed:
-    # update default metastore client properties (async wait for metastore component) it is useful in case of
-    # blueprint provisioning when hive-metastore and spark-thriftserver is not on the same host.
-    spark_hive_properties.update({
-      'hive.metastore.client.socket.timeout' : config['configurations']['hive-site']['hive.metastore.client.socket.timeout']
-    })
-    spark_hive_properties.update(config['configurations']['spark-hive-site-override'])
-
-  if 'spark-thrift-fairscheduler' in config['configurations'] and 'fairscheduler_content' in config['configurations']['spark-thrift-fairscheduler']:
-    spark_thrift_fairscheduler_content = config['configurations']['spark-thrift-fairscheduler']['fairscheduler_content']
-
-default_fs = config['configurations']['core-site']['fs.defaultFS']
-hdfs_site = config['configurations']['hdfs-site']
-
-dfs_type = default("/commandParams/dfs_type", "")
-
-import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
-  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir,
-  principal_name = hdfs_principal_name,
-  hdfs_site = hdfs_site,
-  default_fs = default_fs,
-  immutable_paths = get_not_managed_resources(),
-  dfs_type = dfs_type
- )

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py
index 3a6e9b6..694f046 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py
@@ -32,22 +32,12 @@ class SparkServiceCheck(Script):
     if params.security_enabled:
       spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
       Execute(spark_kinit_cmd, user=params.spark_user)
-      if (params.has_livyserver):
-        livy_kinit_cmd = format("{kinit_path_local} -kt {livy_kerberos_keytab} {livy_kerberos_principal}; ")
-        Execute(livy_kinit_cmd, user=params.livy_user)

     Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k http://{spark_history_server_host}:{spark_history_ui_port} | grep 200"),
-            tries=5,
+            tries = 10,
             try_sleep=3,
             logoutput=True
             )
-    if params.has_livyserver and params.livy_livyserver_host != "localhost" and params.livy_livyserver_host != "0.0.0.0":
-      Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k http://{livy_livyserver_host}:{livy_livyserver_port}/sessions | grep 200"),
-              tries=5,
-              try_sleep=3,
-              logoutput=True,
-              user=params.livy_user
-              )

 if __name__ == "__main__":
   SparkServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_livy.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_livy.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_livy.py
deleted file mode 100644
index a09a8db..0000000
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_livy.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import os
-from resource_management import Directory, File, PropertiesFile, InlineTemplate, format
-
-
-def setup_livy(env, type, upgrade_type = None, action = None):
-  import params
-
-  Directory([params.livy_pid_dir, params.livy_log_dir],
-            owner=params.livy_user,
-            group=params.user_group,
-            mode=0775,
-            create_parents = True
-  )
-  if type == 'server' and action == 'config':
-    params.HdfsResource(params.livy_hdfs_user_dir,
-                        type="directory",
-                        action="create_on_execute",
-                        owner=params.livy_user,
-                        mode=0775
-    )
-    params.HdfsResource(None, action="execute")
-
-  # create livy-env.sh in etc/conf dir
-  File(os.path.join(params.livy_conf, 'livy-env.sh'),
-       owner=params.livy_user,
-       group=params.livy_group,
-       content=InlineTemplate(params.livy_env_sh),
-       mode=0644,
-  )
-
-  # create livy.conf in etc/conf dir
-  PropertiesFile(format("{livy_conf}/livy.conf"),
-                 properties = params.config['configurations']['livy-conf'],
-                 key_value_delimiter = " ",
-                 owner=params.livy_user,
-                 group=params.livy_group,
-  )
-
-  # create log4j.properties in etc/conf dir
-  File(os.path.join(params.livy_conf, 'log4j.properties'),
-       owner=params.livy_user,
-       group=params.livy_group,
-       content=params.livy_log4j_properties,
-       mode=0644,
-  )
-
-  # create spark-blacklist.properties in etc/conf dir
-  File(os.path.join(params.livy_conf, 'spark-blacklist.properties'),
-       owner=params.livy_user,
-       group=params.livy_group,
-       content=params.livy_spark_blacklist_properties,
-       mode=0644,
-  )
-
-  Directory(params.livy_logs_dir,
-            owner=params.livy_user,
-            group=params.livy_group,
-            mode=0755,
-  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/status_params.py
index cce31ff..86e7f7d 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/status_params.py
@@ -36,10 +36,4 @@ else:
   spark_pid_dir = config['configurations']['spark-env']['spark_pid_dir']

 spark_history_server_pid_file = format("{spark_pid_dir}/spark-{spark_user}-org.apache.spark.deploy.history.HistoryServer-1.pid")
 spark_thrift_server_pid_file = format("{spark_pid_dir}/spark-{hive_user}-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid")
-stack_name = default("/hostLevelParams/stack_name", None)
-
-if "livy-env" in config['configurations']:
-  livy_user = config['configurations']['livy-env']['livy_user']
-  livy_group = config['configurations']['livy-env']['livy_group']
-  livy_pid_dir = config['configurations']['livy-env']['livy_pid_dir']
-  livy_server_pid_file = format("{livy_pid_dir}/livy-{livy_user}-server.pid")
+stack_name = default("/hostLevelParams/stack_name", None)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
index 8b669e8..99a3ade 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
@@ -230,11 +230,6 @@
       "name": "hbase_home_directory",
       "description": "Hbase home directory in HDFS needed for HBASE backup",
       "min_version": "2.5.0.0"
-    },
-    {
-      "name": "spark_livy",
-      "description": "Livy as slave component of spark",
-      "min_version": "2.5.0.0"
     }
   ]
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 1dedf3d..d2e0d11 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -303,13 +303,6 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
       if not falconUser in users and falconUser is not None:
         users[falconUser] = {"propertyHosts" : "*","propertyGroups" : "*", "config" : "falcon-env", "propertyName" : "falcon_user"}

-    if "SPARK" in servicesList:
-      livyUser = None
-      if "livy-env" in services["configurations"] and "livy_user" in services["configurations"]["livy-env"]["properties"]:
-        livyUser = services["configurations"]["livy-env"]["properties"]["livy_user"]
-      if not livyUser in users and livyUser is not None:
-        users[livyUser] = {"propertyHosts" : "*","propertyGroups" : "*", "config" : "livy-env", "propertyName" : "livy_user"}
-
     putCoreSiteProperty = self.putProperty(configurations, "core-site", services)
     putCoreSitePropertyAttribute = self.putPropertyAttribute(configurations, "core-site")

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
index 1b33345..4a7c1d2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
@@ -8,8 +8,6 @@
     "HIVE_SERVER_INTERACTIVE-START": ["NODEMANAGER-START", "MYSQL_SERVER-START"],
     "HIVE_SERVER_INTERACTIVE-RESTART": ["NODEMANAGER-RESTART", "MYSQL_SERVER-RESTART"],
     "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START", "HIVE_SERVER_INTERACTIVE-START"],
-    "RANGER_ADMIN-START": ["ZOOKEEPER_SERVER-START", "LOGSEARCH_SOLR-START"],
-    "LIVY_SERVER-START" : ["NAMENODE-START", "DATANODE-START"],
-    "SPARK_SERVICE_CHECK-SERVICE_CHECK" : ["SPARK_JOBHISTORYSERVER-START", "APP_TIMELINE_SERVER-START","LIVY_SERVER-START"]
+    "RANGER_ADMIN-START": ["ZOOKEEPER_SERVER-START", "LOGSEARCH_SOLR-START"]
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-conf.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-conf.xml
deleted file mode 100644
index d0acdda..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-conf.xml
+++ /dev/null
@@ -1,59 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>livy.environment</name>
-    <value>production</value>
-    <description>
-      Specifies Livy's environment. May either be "production" or "development". In "development"
-      mode, Livy will enable debugging options, such as reporting possible routes on a 404.
-      defaults to development
-    </description>
-  </property>
-
-  <property>
-    <name>livy.server.port</name>
-    <value>8998</value>
-    <description>
-      What port to start the server on. Defaults to 8998.
-    </description>
-  </property>
-
-  <property>
-    <name>livy.server.session.timeout</name>
-    <value>3600000</value>
-    <description>
-      Time in milliseconds on how long Livy will wait before timing out an idle session.
-      Default is one hour.
-    </description>
-  </property>
-
-  <property>
-    <name>livy.impersonation.enabled</name>
-    <value>true</value>
-    <description>
-      If livy should use proxy users when submitting a job.
-    </description>
-  </property>
-
-</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-env.xml
deleted file mode 100644
index 410a1c1..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-env.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_adding_forbidden="true">
-  <property>
-    <name>livy_user</name>
-    <display-name>Livy User</display-name>
-    <value>livy</value>
-    <property-type>USER</property-type>
-    <value-attributes>
-      <type>user</type>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>livy_group</name>
-    <display-name>Livy Group</display-name>
-    <value>livy</value>
-    <property-type>GROUP</property-type>
-    <description>livy group</description>
-    <value-attributes>
-      <type>user</type>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>livy_log_dir</name>
-    <value>/var/log/livy</value>
-    <description>Livy Log Dir</description>
-    <value-attributes>
-      <type>directory</type>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>livy_pid_dir</name>
-    <value>/var/run/livy</value>
-    <value-attributes>
-      <type>directory</type>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>spark_home</name>
-    <value>/usr/hdp/current/spark-client</value>
-    <value-attributes>
-      <type>directory</type>
-    </value-attributes>
-  </property>
-
-  <!-- livy-env.sh -->
-  <property>
-    <name>content</name>
-    <description>This is the jinja template for livy-env.sh file</description>
-    <value>
-      #!/usr/bin/env bash
-
-      # - SPARK_HOME             Spark which you would like to use in livy
-      # - LIVY_LOG_DIR           Where log files are stored. (Default: ${LIVY_HOME}/logs)
-      # - LIVY_PID_DIR           Where the pid file is stored. (Default: /tmp)
-      # - LIVY_SERVER_JAVA_OPTS  Java Opts for running livy server (You can set jvm related setting here, like jvm memory/gc algorithm and etc.)
-      export SPARK_HOME=/usr/hdp/current/spark-client
-      export LIVY_LOG_DIR={{livy_log_dir}}
-      export LIVY_PID_DIR={{livy_pid_dir}}
-      export LIVY_SERVER_JAVA_OPTS="-Xmx2g"
-    </value>
-    <value-attributes>
-      <type>content</type>
-    </value-attributes>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-log4j-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-log4j-properties.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-log4j-properties.xml
deleted file mode 100644
index d84207a..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-log4j-properties.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>content</name>
-    <description>Livy-log4j-Properties</description>
-    <value>
-      # Set everything to be logged to the console
-      log4j.rootCategory=INFO, console
-      log4j.appender.console=org.apache.log4j.ConsoleAppender
-      log4j.appender.console.target=System.err
-      log4j.appender.console.layout=org.apache.log4j.PatternLayout
-      log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
-
-      log4j.logger.org.eclipse.jetty=WARN
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-spark-blacklist.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-spark-blacklist.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-spark-blacklist.xml
deleted file mode 100644
index 4a5fbfb..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-spark-blacklist.xml
+++ /dev/null
@@ -1,40 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>content</name>
-    <description>spark-blacklist.properties</description>
-    <value>
-      #
-      # Configuration override / blacklist. Defines a list of properties that users are not allowed
-      # to override when starting Spark sessions.
-      #
-      # This file takes a list of property names (one per line). Empty lines and lines starting with "#"
-      # are ignored.
-      #
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-  </property>
-</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/kerberos.json
deleted file mode 100644
index d75364b..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/kerberos.json
+++ /dev/null
@@ -1,114 +0,0 @@
-{
-  "services": [
-    {
-      "name": "SPARK",
-      "identities": [
-        {
-          "name": "/smokeuser"
-        },
-        {
-          "name": "sparkuser",
-          "principal": {
-            "value": "${spark-env/spark_user}-${cluster_name}@${realm}",
-            "type" : "user",
-            "configuration": "spark-defaults/spark.history.kerberos.principal",
-            "local_username" : "${spark-env/spark_user}"
-          },
-          "keytab": {
-            "file": "${keytab_dir}/spark.headless.keytab",
-            "owner": {
-              "name": "${spark-env/spark_user}",
-              "access": "r"
-            },
-            "group": {
-              "name": "${cluster-env/user_group}",
-              "access": ""
-            },
-            "configuration": "spark-defaults/spark.history.kerberos.keytab"
-          }
-        },
-        {
-          "name": "livyuser",
-          "principal": {
-            "value": "livy@${realm}",
-            "type" : "user",
-            "configuration": "livy-conf/livy.server.kerberos.principal",
-            "local_username": "${livy-env/livy_user}"
-          },
-          "keytab": {
-            "file": "${keytab_dir}/livy.keytab",
-            "owner": {
-              "name": "${livy-env/livy_user}",
-              "access": "r"
-            },
-            "group": {
-              "name": "${cluster-env/user_group}",
-              "access": ""
-            },
-            "configuration": "livy-conf/livy.server.kerberos.keytab"
-          }
-        },
-        {
-          "name": "/spnego",
-          "principal": {
-            "configuration": "livy-conf/livy.server.auth.kerberos.principal"
-          },
-          "keytab": {
-            "configuration": "livy-conf/livy.server.auth.kerberos.keytab"
-          }
-        }
-      ],
-      "configurations": [
-        {
-          "spark-defaults": {
-            "spark.history.kerberos.enabled": "true"
-          }
-        },
-        {
-          "livy-conf": {
-            "livy.server.auth.type": "kerberos",
-            "livy.impersonation.enabled": "true"
-          }
-        },
-        {
-          "core-site": {
-            "hadoop.proxyuser.${livy-env/livy_user}.groups": "*",
-            "hadoop.proxyuser.${livy-env/livy_user}.hosts": "*"
-          }
-        }
-      ],
-      "components": [
-        {
-          "name": "SPARK_JOBHISTORYSERVER",
-          "identities": [
-            {
-              "name": "/HDFS/NAMENODE/hdfs"
-            }
-          ]
-        },
-        {
-          "name": "SPARK_CLIENT"
-        },
-        {
-          "name": "SPARK_THRIFTSERVER",
-          "identities": [
-            {
-              "name": "/HDFS/NAMENODE/hdfs"
-            },
-            {
-              "name": "/HIVE/HIVE_SERVER/hive_server_hive"
-            }
-          ]
-        },
-        {
-          "name": "LIVY_SERVER",
-          "identities": [
-            {
-              "name": "/HDFS/NAMENODE/hdfs"
-            }
-          ]
-        }
-      ]
-    }
-  ]
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/abfb30ea/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml
index 67303b6..d1129cd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml
@@ -24,88 +24,6 @@
     <service>
       <name>SPARK</name>
       <version>1.6.x.2.5</version>
-      <components>
-        <component>
-          <name>LIVY_SERVER</name>
-          <displayName>Livy Server</displayName>
-          <category>SLAVE</category>
-          <cardinality>0+</cardinality>
-          <versionAdvertised>true</versionAdvertised>
-          <dependencies>
-            <dependency>
-              <name>SPARK/SPARK_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>YARN/YARN_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/livy_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-      </components>
-
-      <configuration-dependencies>
-        <config-type>spark-defaults</config-type>
-        <config-type>spark-env</config-type>
-        <config-type>spark-log4j-properties</config-type>
-        <config-type>spark-metrics-properties</config-type>
-        <config-type>spark-thrift-sparkconf</config-type>
-        <config-type>spark-hive-site-override</config-type>
-        <config-type>spark-thrift-fairscheduler</config-type>
-        <config-type>livy-conf</config-type>
-        <config-type>livy-env</config-type>
-        <config-type>livy-log4j-properties</config-type>
-        <config-type>livy-spark-blacklist</config-type>
-      </configuration-dependencies>
-
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
-          <packages>
-            <package>
-              <name>spark_${stack_version}</name>
-            </package>
-            <package>
-              <name>spark_${stack_version}-python</name>
-            </package>
-            <package>
-              <name>livy_${stack_version}</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osFamily>debian7,ubuntu12,ubuntu14</osFamily>
-          <packages>
-            <package>
-              <name>spark-${stack_version}</name>
-            </package>
-            <package>
-              <name>spark-${stack_version}-python</name>
-            </package>
-            <package>
-              <name>livy-${stack_version}</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
     </service>
   </services>
 </metainfo>
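For reference: a revert commit like the one recorded above is normally produced with git's built-in revert, which synthesizes the inverse patch while keeping the original commit in history. A minimal sketch of that workflow, assuming a clean checkout of the release branch (the hashes come from the record above; the exact commands the committer ran are an assumption):

    # switch to the release branch that carried the original Livy change
    git checkout branch-2.4

    # create the inverse commit of the change being backed out
    git revert 80d8d7eabdabb3f3b96c72ec0a953869cb4aec3b

    # verify the result matches the 18-file, 901-deletion diffstat shown above
    git show --stat abfb30ea16cf3bd658181810fe608d7d12ee3ef0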