This is an automated email from the ASF dual-hosted git repository.
wuzhiguo pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/bigtop-manager.git
The following commit(s) were added to refs/heads/main by this push:
new ca137792 BIGTOP-4134: Add Spark component on Bigtop-3.3.0 stack (#133)
ca137792 is described below
commit ca137792845be70ceb56a4b89153a533d1cf79f9
Author: Zhiguo Wu <[email protected]>
AuthorDate: Tue Dec 24 14:41:25 2024 +0800
BIGTOP-4134: Add Spark component on Bigtop-3.3.0 stack (#133)
---
.../stacks/bigtop/3.3.0/services/hive/order.json | 4 +
.../services/spark/configuration/fairscheduler.xml | 65 ++++++
.../3.3.0/services/spark/configuration/log4j2.xml | 103 +++++++++
.../3.3.0/services/spark/configuration/metrics.xml | 244 +++++++++++++++++++++
.../spark/configuration/spark-defaults.xml | 66 ++++++
.../services/spark/configuration/spark-env.xml | 91 ++++++++
.../spark/configuration/spark-hive-site.xml | 28 +++
.../bigtop/3.3.0/services/spark/metainfo.xml | 80 +++++++
.../stacks/bigtop/3.3.0/services/spark/order.json | 14 ++
.../bigtop/v3_3_0/spark/SparkClientScript.java | 52 +++++
.../v3_3_0/spark/SparkHistoryServerScript.java | 85 +++++++
.../stack/bigtop/v3_3_0/spark/SparkParams.java | 134 +++++++++++
.../stack/bigtop/v3_3_0/spark/SparkSetup.java | 107 +++++++++
.../v3_3_0/spark/SparkThriftServerScript.java | 85 +++++++
.../stack/core/utils/linux/LinuxFileUtils.java | 1 +
15 files changed, 1159 insertions(+)
diff --git a/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/hive/order.json b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/hive/order.json
index 19452cf0..e5b220ce 100644
--- a/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/hive/order.json
+++ b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/hive/order.json
@@ -9,6 +9,10 @@
"NAMENODE-RESTART",
"NODEMANAGER-RESTART"
],
+ "HIVE_METASTORE-STOP": [
+ "SPARK_HISTORYSERVER-STOP",
+ "SPARK_THRIFTSERVER-STOP"
+ ],
"HIVESERVER2-START": [
"NODEMANAGER-START",
"ZOOKEEPER_SERVER-START",
diff --git a/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/fairscheduler.xml b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/fairscheduler.xml
new file mode 100644
index 00000000..32463131
--- /dev/null
+++ b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/fairscheduler.xml
@@ -0,0 +1,65 @@
+<?xml version="1.0"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+-->
+
+<configuration>
+ <property>
+ <name>content</name>
+ <display-name>fairscheduler template</display-name>
+ <description>Custom fairscheduler.xml</description>
+ <value><![CDATA[
+<?xml version="1.0"?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<#noparse>
+<allocations>
+ <pool name="production">
+ <schedulingMode>FAIR</schedulingMode>
+ <weight>1</weight>
+ <minShare>2</minShare>
+ </pool>
+ <pool name="test">
+ <schedulingMode>FIFO</schedulingMode>
+ <weight>2</weight>
+ <minShare>3</minShare>
+ </pool>
+</allocations>
+</#noparse>
+]]>
+ </value>
+ <attrs>
+ <type>longtext</type>
+ </attrs>
+ </property>
+</configuration>
\ No newline at end of file
diff --git a/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/log4j2.xml b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/log4j2.xml
new file mode 100644
index 00000000..ab704a43
--- /dev/null
+++ b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/log4j2.xml
@@ -0,0 +1,103 @@
+<?xml version="1.0"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+-->
+
+<configuration>
+ <property>
+ <name>content</name>
+ <display-name>log4j2 template</display-name>
+ <description>Custom log4j2.properties</description>
+ <value><![CDATA[
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+<#noparse>
+# Set everything to be logged to the console
+rootLogger.level = info
+rootLogger.appenderRef.stdout.ref = console
+
+# In the pattern layout configuration below, we specify an explicit `%ex` conversion
+# pattern for logging Throwables. If this was omitted, then (by default) Log4J would
+# implicitly add an `%xEx` conversion pattern which logs stacktraces with additional
+# class packaging information. That extra information can sometimes add a substantial
+# performance overhead, so we disable it in our default logging config.
+# For more information, see SPARK-39361.
+appender.console.type = Console
+appender.console.name = console
+appender.console.target = SYSTEM_ERR
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n%ex
+
+# Set the default spark-shell/spark-sql log level to WARN. When running the
+# spark-shell/spark-sql, the log level for these classes is used to overwrite
+# the root logger's log level, so that the user can have different defaults
+# for the shell and regular Spark apps.
+logger.repl.name = org.apache.spark.repl.Main
+logger.repl.level = warn
+
+logger.thriftserver.name = org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver
+logger.thriftserver.level = warn
+
+# Settings to quiet third party logs that are too verbose
+logger.jetty1.name = org.sparkproject.jetty
+logger.jetty1.level = warn
+logger.jetty2.name = org.sparkproject.jetty.util.component.AbstractLifeCycle
+logger.jetty2.level = error
+logger.replexprTyper.name = org.apache.spark.repl.SparkIMain$exprTyper
+logger.replexprTyper.level = info
+logger.replSparkILoopInterpreter.name = org.apache.spark.repl.SparkILoop$SparkILoopInterpreter
+logger.replSparkILoopInterpreter.level = info
+logger.parquet1.name = org.apache.parquet
+logger.parquet1.level = error
+logger.parquet2.name = parquet
+logger.parquet2.level = error
+
+# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
+logger.RetryingHMSHandler.name = org.apache.hadoop.hive.metastore.RetryingHMSHandler
+logger.RetryingHMSHandler.level = fatal
+logger.FunctionRegistry.name = org.apache.hadoop.hive.ql.exec.FunctionRegistry
+logger.FunctionRegistry.level = error
+
+# For deploying Spark ThriftServer
+# SPARK-34128: Suppress undesirable TTransportException warnings involved in THRIFT-4805
+appender.console.filter.1.type = RegexFilter
+appender.console.filter.1.regex = .*Thrift error occurred during processing of message.*
+appender.console.filter.1.onMatch = deny
+appender.console.filter.1.onMismatch = neutral
+</#noparse>
+]]>
+ </value>
+ <attrs>
+ <type>longtext</type>
+ </attrs>
+ </property>
+</configuration>
\ No newline at end of file
diff --git a/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/metrics.xml b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/metrics.xml
new file mode 100644
index 00000000..ed692405
--- /dev/null
+++ b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/metrics.xml
@@ -0,0 +1,244 @@
+<?xml version="1.0"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+-->
+
+<configuration>
+ <property>
+ <name>content</name>
+ <display-name>metrics template</display-name>
+ <description>Custom metrics.properties</description>
+ <value><![CDATA[
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# syntax: [instance].sink|source.[name].[options]=[value]
+
+# This file configures Spark's internal metrics system. The metrics system is
+# divided into instances which correspond to internal components.
+# Each instance can be configured to report its metrics to one or more sinks.
+# Accepted values for [instance] are "master", "worker", "executor", "driver",
+# and "applications". A wildcard "*" can be used as an instance name, in
+# which case all instances will inherit the supplied property.
+#
+# Within an instance, a "source" specifies a particular set of grouped metrics.
+# there are two kinds of sources:
+# 1. Spark internal sources, like MasterSource, WorkerSource, etc, which will
+# collect a Spark component's internal state. Each instance is paired with a
+# Spark source that is added automatically.
+# 2. Common sources, like JvmSource, which will collect low level state.
+# These can be added through configuration options and are then loaded
+# using reflection.
+#
+# A "sink" specifies where metrics are delivered to. Each instance can be
+# assigned one or more sinks.
+#
+# The sink|source field specifies whether the property relates to a sink or
+# source.
+#
+# The [name] field specifies the name of source or sink.
+#
+# The [options] field is the specific property of this source or sink. The
+# source or sink is responsible for parsing this property.
+#
+# Notes:
+# 1. To add a new sink, set the "class" option to a fully qualified class
+# name (see examples below).
+# 2. Some sinks involve a polling period. The minimum allowed polling period
+# is 1 second.
+# 3. Wildcard properties can be overridden by more specific properties.
+# For example, master.sink.console.period takes precedence over
+# *.sink.console.period.
+# 4. A metrics specific configuration
+# "spark.metrics.conf=${SPARK_HOME}/conf/metrics.properties" should be
+# added to Java properties using -Dspark.metrics.conf=xxx if you want to
+# customize metrics system. You can also put the file in ${SPARK_HOME}/conf
+# and it will be loaded automatically.
+# 5. The MetricsServlet sink is added by default as a sink in the master,
+# worker and driver, and you can send HTTP requests to the "/metrics/json"
+# endpoint to get a snapshot of all the registered metrics in JSON format.
+# For master, requests to the "/metrics/master/json" and
+# "/metrics/applications/json" endpoints can be sent separately to get
+# metrics snapshots of the master instance and applications. This
+# MetricsServlet does not have to be configured.
+# 6. The metrics system can also be configured using Spark configuration
+# parameters. The relevant parameter names are formed by adding the
+# prefix "spark.metrics.conf." to the configuration entries detailed in
+# this file (see examples below).
+
+## List of available common sources and their properties.
+
+# org.apache.spark.metrics.source.JvmSource
+# Note: Currently, JvmSource is the only available common source.
+# It can be added to an instance by setting the "class" option to its
+# fully qualified class name (see examples below).
+
+## List of available sinks and their properties.
+
+# org.apache.spark.metrics.sink.ConsoleSink
+# Name: Default: Description:
+# period 10 Poll period
+# unit seconds Unit of the poll period
+
+# org.apache.spark.metrics.sink.CSVSink
+# Name: Default: Description:
+# period 10 Poll period
+# unit seconds Unit of the poll period
+# directory /tmp Where to store CSV files
+
+# org.apache.spark.metrics.sink.GangliaSink
+# Name: Default: Description:
+# host NONE Hostname or multicast group of the Ganglia server,
+# must be set
+# port NONE Port of the Ganglia server(s), must be set
+# period 10 Poll period
+# unit seconds Unit of the poll period
+# ttl 1 TTL of messages sent by Ganglia
+# dmax 0 Lifetime in seconds of metrics (0 never expired)
+# mode multicast Ganglia network mode ('unicast' or 'multicast')
+
+# org.apache.spark.metrics.sink.JmxSink
+
+# org.apache.spark.metrics.sink.MetricsServlet
+# Name: Default: Description:
+# path VARIES* Path prefix from the web server root
+# sample false Whether to show entire set of samples for histograms
+# ('false' or 'true')
+#
+# * Default path is /metrics/json for all instances except the master. The
+# master has two paths:
+# /metrics/applications/json # App information
+# /metrics/master/json # Master information
+
+# org.apache.spark.metrics.sink.PrometheusServlet
+# Name: Default: Description:
+# path VARIES* Path prefix from the web server root
+#
+# * Default path is /metrics/prometheus for all instances except the master. The
+# master has two paths:
+# /metrics/applications/prometheus # App information
+# /metrics/master/prometheus # Master information
+
+# org.apache.spark.metrics.sink.GraphiteSink
+# Name: Default: Description:
+# host NONE Hostname of the Graphite server, must be set
+# port NONE Port of the Graphite server, must be set
+# period 10 Poll period
+# unit seconds Unit of the poll period
+# prefix EMPTY STRING Prefix to prepend to every metric's name
+# protocol tcp Protocol ("tcp" or "udp") to use
+# regex NONE Optional filter to send only metrics matching this regex string
+
+# org.apache.spark.metrics.sink.StatsdSink
+# Name: Default: Description:
+# host 127.0.0.1 Hostname or IP of StatsD server
+# port 8125 Port of StatsD server
+# period 10 Poll period
+# unit seconds Units of poll period
+# prefix EMPTY STRING Prefix to prepend to metric name
+
+<#noparse>
+## Examples
+# Enable JmxSink for all instances by class name
+#*.sink.jmx.class=org.apache.spark.metrics.sink.JmxSink
+
+# Enable ConsoleSink for all instances by class name
+#*.sink.console.class=org.apache.spark.metrics.sink.ConsoleSink
+
+# Enable StatsdSink for all instances by class name
+#*.sink.statsd.class=org.apache.spark.metrics.sink.StatsdSink
+#*.sink.statsd.prefix=spark
+
+# Polling period for the ConsoleSink
+#*.sink.console.period=10
+# Unit of the polling period for the ConsoleSink
+#*.sink.console.unit=seconds
+
+# Polling period for the ConsoleSink specific for the master instance
+#master.sink.console.period=15
+# Unit of the polling period for the ConsoleSink specific for the master
+# instance
+#master.sink.console.unit=seconds
+
+# Enable CsvSink for all instances by class name
+#*.sink.csv.class=org.apache.spark.metrics.sink.CsvSink
+
+# Polling period for the CsvSink
+#*.sink.csv.period=1
+# Unit of the polling period for the CsvSink
+#*.sink.csv.unit=minutes
+
+# Polling directory for CsvSink
+#*.sink.csv.directory=/tmp/
+
+# Polling period for the CsvSink specific for the worker instance
+#worker.sink.csv.period=10
+# Unit of the polling period for the CsvSink specific for the worker instance
+#worker.sink.csv.unit=minutes
+
+# Enable Slf4jSink for all instances by class name
+#*.sink.slf4j.class=org.apache.spark.metrics.sink.Slf4jSink
+
+# Polling period for the Slf4JSink
+#*.sink.slf4j.period=1
+# Unit of the polling period for the Slf4jSink
+#*.sink.slf4j.unit=minutes
+
+# Example configuration for Graphite sink
+#*.sink.graphite.class=org.apache.spark.metrics.sink.GraphiteSink
+#*.sink.graphite.host=<graphiteEndPoint_hostName>
+#*.sink.graphite.port=<listening_port>
+#*.sink.graphite.period=10
+#*.sink.graphite.unit=seconds
+#*.sink.graphite.prefix=<optional_value>
+
+# Enable JvmSource for instance master, worker, driver and executor
+#master.source.jvm.class=org.apache.spark.metrics.source.JvmSource
+
+#worker.source.jvm.class=org.apache.spark.metrics.source.JvmSource
+
+#driver.source.jvm.class=org.apache.spark.metrics.source.JvmSource
+
+#executor.source.jvm.class=org.apache.spark.metrics.source.JvmSource
+
+# Example configuration for PrometheusServlet
+#*.sink.prometheusServlet.class=org.apache.spark.metrics.sink.PrometheusServlet
+#*.sink.prometheusServlet.path=/metrics/prometheus
+#master.sink.prometheusServlet.path=/metrics/master/prometheus
+#applications.sink.prometheusServlet.path=/metrics/applications/prometheus
+</#noparse>
+]]>
+ </value>
+ <attrs>
+ <type>longtext</type>
+ </attrs>
+ </property>
+</configuration>
\ No newline at end of file
diff --git a/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/spark-defaults.xml b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/spark-defaults.xml
new file mode 100644
index 00000000..aca92442
--- /dev/null
+++ b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/spark-defaults.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+-->
+
+<configuration>
+ <property>
+ <name>spark_history_fs_logDirectory</name>
+ <value>file:/tmp/spark-events</value>
+ </property>
+ <property>
+ <name>content</name>
+ <display-name>spark-defaults template</display-name>
+ <description>Custom spark-defaults.conf</description>
+ <value><![CDATA[
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Default system properties included when running spark-submit.
+# This is useful for setting default environmental settings.
+
+# Example:
+# spark.master spark://master:7077
+# spark.eventLog.enabled true
+# spark.eventLog.dir hdfs://namenode:8021/directory
+# spark.serializer org.apache.spark.serializer.KryoSerializer
+# spark.driver.memory 5g
+# spark.executor.extraJavaOptions -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three"
+
+spark.master yarn
+spark.history.fs.logDirectory ${spark_history_fs_logDirectory}
+]]>
+ </value>
+ <attrs>
+ <type>longtext</type>
+ </attrs>
+ </property>
+</configuration>
\ No newline at end of file
diff --git a/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/spark-env.xml b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/spark-env.xml
new file mode 100755
index 00000000..9d21863b
--- /dev/null
+++ b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/spark-env.xml
@@ -0,0 +1,91 @@
+<?xml version="1.0"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+-->
+
+<configuration>
+ <property>
+ <name>spark_log_dir</name>
+ <display-name>Spark Log directory</display-name>
+ <value>/var/log/spark</value>
+ <description>Spark Log Dir</description>
+ </property>
+ <property>
+ <name>spark_pid_dir</name>
+ <display-name>Spark PID directory</display-name>
+ <value>/var/run/spark</value>
+ </property>
+ <!-- spark-env.sh -->
+ <property>
+ <name>content</name>
+ <description>This is the freemarker template for spark-env.sh file</description>
+ <value><![CDATA[
+#!/usr/bin/env bash
+
+# This file is sourced when running various Spark programs.
+# Copy it as spark-env.sh and edit that to configure Spark for your site.
+
+# Options read in YARN client mode
+#SPARK_EXECUTOR_INSTANCES="2" #Number of workers to start (Default: 2)
+#SPARK_EXECUTOR_CORES="1" #Number of cores for the workers (Default: 1).
+#SPARK_EXECUTOR_MEMORY="1G" #Memory per Worker (e.g. 1000M, 2G) (Default: 1G)
+#SPARK_DRIVER_MEMORY="512M" #Memory for Master (e.g. 1000M, 2G) (Default: 512 Mb)
+#SPARK_YARN_APP_NAME="spark" #The name of your application (Default: Spark)
+#SPARK_YARN_QUEUE="default" #The hadoop queue to use for allocation requests (Default: default)
+#SPARK_YARN_DIST_FILES="" #Comma separated list of files to be distributed with the job.
+#SPARK_YARN_DIST_ARCHIVES="" #Comma separated list of archives to be distributed with the job.
+
+# Generic options for the daemons used in the standalone deploy mode
+
+# Alternate conf dir. (Default: ${SPARK_HOME}/conf)
+export SPARK_CONF_DIR=${spark_conf_dir}
+
+# Where log files are stored.(Default:${SPARK_HOME}/logs)
+export SPARK_LOG_DIR=${spark_log_dir}
+
+# Where the pid file is stored. (Default: /tmp)
+export SPARK_PID_DIR=${spark_pid_dir}
+
+# A string representing this instance of spark.(Default: $USER)
+SPARK_IDENT_STRING=${spark_user}
+
+# The scheduling priority for daemons. (Default: 0)
+SPARK_NICENESS=0
+
+export HADOOP_HOME=${hadoop_home}
+export HADOOP_CONF_DIR=${hadoop_conf_dir}
+
+export HIVE_HOME=${hive_home}
+export HIVE_CONF_DIR=${hive_conf_dir}
+
+<#noparse>
+export SPARK_DIST_CLASSPATH=$($HADOOP_HOME/bin/hadoop classpath)
+</#noparse>
+
+# The java implementation to use.
+export JAVA_HOME=${java_home}
+
+# Add hadoop native libraries support for better performance
+export LD_LIBRARY_PATH=${hadoop_home}/lib/native/:$LD_LIBRARY_PATH
+]]>
+ </value>
+ <value-attributes>
+ <type>longtext</type>
+ </value-attributes>
+ </property>
+</configuration>
diff --git a/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/spark-hive-site.xml b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/spark-hive-site.xml
new file mode 100644
index 00000000..57699342
--- /dev/null
+++ b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/configuration/spark-hive-site.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+-->
+
+<configuration>
+ <property>
+ <name>hive.server2.thrift.port</name>
+ <value>10015</value>
+ <display-name>Spark ThriftServer Port</display-name>
+ <description>TCP port number to listen on, default 10015.</description>
+ </property>
+</configuration>
\ No newline at end of file
diff --git a/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/metainfo.xml b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/metainfo.xml
new file mode 100644
index 00000000..63a3382e
--- /dev/null
+++ b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/metainfo.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ https://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+-->
+
+<metainfo>
+ <service>
+ <name>spark</name>
+ <display-name>Spark</display-name>
+ <desc>
+ Apache Spark is a unified analytics engine for large-scale data processing.
+ </desc>
+ <version>3.3.4-1</version>
+ <user>spark</user>
+
+ <components>
+ <component>
+ <name>spark_historyserver</name>
+ <display-name>Spark HistoryServer</display-name>
+ <category>server</category>
+ <cardinality>1+</cardinality>
+ <quick-link>
+ <display-name>Spark HistoryServer UI</display-name>
+ <http-port-property>spark.history.ui.port</http-port-property>
+ <http-port-default>18080</http-port-default>
+ <https-port-property>spark.history.ui.port</https-port-property>
+ <https-port-default>18080</https-port-default>
+ </quick-link>
+ </component>
+
+ <component>
+ <name>spark_thriftserver</name>
+ <display-name>Spark ThriftServer</display-name>
+ <category>server</category>
+ <cardinality>1+</cardinality>
+ </component>
+
+ <component>
+ <name>spark_client</name>
+ <display-name>Spark Client</display-name>
+ <category>client</category>
+ <cardinality>1+</cardinality>
+ </component>
+ </components>
+
+ <package-specifics>
+ <package-specific>
+ <architectures>
+ <arch>x86_64</arch>
+ <arch>aarch64</arch>
+ </architectures>
+ <packages>
+ <package>
+ <name>spark-3.3.4-1.tar.gz</name>
+ <checksum>SHA-256:91ff4fa884abee1156d7819d16bf3035e98c98833237cfd457b2d0b1b8ad42e5</checksum>
+ </package>
+ </packages>
+ </package-specific>
+ </package-specifics>
+
+ <required-services>
+ <service>hive</service>
+ </required-services>
+ </service>
+</metainfo>
\ No newline at end of file
diff --git a/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/order.json b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/order.json
new file mode 100644
index 00000000..a26d2b3e
--- /dev/null
+++ b/bigtop-manager-server/src/main/resources/stacks/bigtop/3.3.0/services/spark/order.json
@@ -0,0 +1,14 @@
+{
+ "SPARK_HISTORYSERVER-START": [
+ "HIVE_METASTORE-START"
+ ],
+ "SPARK_HISTORYSERVER-RESTART": [
+ "HIVE_METASTORE-RESTART"
+ ],
+ "SPARK_THRIFTSERVER-START": [
+ "HIVE_METASTORE-START"
+ ],
+ "SPARK_THRIFTSERVER-RESTART": [
+ "HIVE_METASTORE-RESTART"
+ ]
+}
\ No newline at end of file
diff --git a/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkClientScript.java b/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkClientScript.java
new file mode 100644
index 00000000..6b1df772
--- /dev/null
+++ b/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkClientScript.java
@@ -0,0 +1,52 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.bigtop.manager.stack.bigtop.v3_3_0.spark;

import org.apache.bigtop.manager.common.shell.ShellResult;
import org.apache.bigtop.manager.stack.core.spi.param.Params;
import org.apache.bigtop.manager.stack.core.spi.script.AbstractClientScript;
import org.apache.bigtop.manager.stack.core.spi.script.Script;

import com.google.auto.service.AutoService;
import lombok.extern.slf4j.Slf4j;

import java.util.Properties;

/**
 * Lifecycle script for the Spark client component: installs the Spark
 * package and renders the shared Spark configuration files.
 */
@Slf4j
@AutoService(Script.class)
public class SparkClientScript extends AbstractClientScript {

    @Override
    public String getComponentName() {
        return "spark_client";
    }

    /**
     * Installs the Spark package, stripping the top-level directory of the
     * tarball so files land directly under the service home.
     */
    @Override
    public ShellResult add(Params params) {
        Properties addOptions = new Properties();
        addOptions.setProperty(PROPERTY_KEY_SKIP_LEVELS, "1");
        return super.add(params, addOptions);
    }

    /** Delegates all configuration-file rendering to the shared {@link SparkSetup}. */
    @Override
    public ShellResult configure(Params params) {
        return SparkSetup.configure(params);
    }
}
diff --git
a/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkHistoryServerScript.java
b/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkHistoryServerScript.java
new file mode 100644
index 00000000..32beacec
--- /dev/null
+++
b/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkHistoryServerScript.java
@@ -0,0 +1,85 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.bigtop.manager.stack.bigtop.v3_3_0.spark;

import org.apache.bigtop.manager.common.shell.ShellResult;
import org.apache.bigtop.manager.stack.core.exception.StackException;
import org.apache.bigtop.manager.stack.core.spi.param.Params;
import org.apache.bigtop.manager.stack.core.spi.script.AbstractServerScript;
import org.apache.bigtop.manager.stack.core.spi.script.Script;
import org.apache.bigtop.manager.stack.core.utils.linux.LinuxOSUtils;

import com.google.auto.service.AutoService;
import lombok.extern.slf4j.Slf4j;

import java.io.IOException;
import java.text.MessageFormat;
import java.util.Properties;

/**
 * Lifecycle script for the Spark History Server: install, configure,
 * start/stop via Spark's bundled sbin scripts, and pid-file based status.
 */
@Slf4j
@AutoService(Script.class)
public class SparkHistoryServerScript extends AbstractServerScript {

    @Override
    public String getComponentName() {
        return "spark_historyserver";
    }

    /**
     * Installs the Spark package, stripping the tarball's top-level directory.
     */
    @Override
    public ShellResult add(Params params) {
        Properties addOptions = new Properties();
        addOptions.setProperty(PROPERTY_KEY_SKIP_LEVELS, "1");
        return super.add(params, addOptions);
    }

    /** Delegates configuration-file rendering to the shared {@link SparkSetup}. */
    @Override
    public ShellResult configure(Params params) {
        return SparkSetup.configure(params);
    }

    /** Re-renders configuration, then launches the history server daemon. */
    @Override
    public ShellResult start(Params params) {
        configure(params);
        SparkParams sparkParams = (SparkParams) params;
        return runAsServiceUser(
                MessageFormat.format("{0}/sbin/start-history-server.sh", sparkParams.serviceHome()), sparkParams);
    }

    /** Stops the history server daemon via Spark's stop script. */
    @Override
    public ShellResult stop(Params params) {
        SparkParams sparkParams = (SparkParams) params;
        return runAsServiceUser(
                MessageFormat.format("{0}/sbin/stop-history-server.sh", sparkParams.serviceHome()), sparkParams);
    }

    /** Reports liveness by checking the history server's pid file. */
    @Override
    public ShellResult status(Params params) {
        SparkParams sparkParams = (SparkParams) params;
        return LinuxOSUtils.checkProcess(sparkParams.getSparkHistoryServerPidFile());
    }

    // Runs a daemon control script as the Spark service user, wrapping I/O
    // failures in the stack's unchecked exception type.
    private ShellResult runAsServiceUser(String cmd, SparkParams sparkParams) {
        try {
            return LinuxOSUtils.sudoExecCmd(cmd, sparkParams.user());
        } catch (IOException e) {
            throw new StackException(e);
        }
    }
}
diff --git
a/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkParams.java
b/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkParams.java
new file mode 100644
index 00000000..dd733204
--- /dev/null
+++
b/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkParams.java
@@ -0,0 +1,134 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.bigtop.manager.stack.bigtop.v3_3_0.spark;

import org.apache.bigtop.manager.common.message.entity.payload.CommandPayload;
import org.apache.bigtop.manager.stack.bigtop.param.BigtopParams;
import org.apache.bigtop.manager.stack.core.annotations.GlobalParams;
import org.apache.bigtop.manager.stack.core.spi.param.Params;
import org.apache.bigtop.manager.stack.core.utils.LocalSettings;

import com.google.auto.service.AutoService;
import lombok.Getter;
import lombok.NoArgsConstructor;

import java.util.Map;

/**
 * Parameter holder for the Spark service on the Bigtop 3.3.0 stack.
 *
 * <p>Each {@code @GlobalParams} accessor loads one configuration type via
 * {@link LocalSettings#configurations} and, as a side effect, caches values
 * (template contents, pid/log locations) in fields that {@link SparkSetup}
 * and the component scripts read through Lombok-generated getters.
 */
@Getter
@AutoService(Params.class)
@NoArgsConstructor
public class SparkParams extends BigtopParams {

    // Defaults; both are overwritten from the "spark-env" configuration in sparkEnv().
    private String sparkLogDir = "/var/log/spark";
    private String sparkPidDir = "/var/run/spark";
    // Event-log directory from spark-defaults; may carry a URI scheme (e.g. "file:").
    private String sparkHistoryLogDir;
    // Pid-file paths follow Spark's daemon-script naming convention
    // (spark-<user>-<MainClass>-1.pid) — presumably matching the sbin scripts'
    // defaults; verify if SPARK_IDENT_STRING/instance number is ever customized.
    private String sparkHistoryServerPidFile;
    private String sparkThriftServerPidFile;

    // Raw template contents rendered to files by SparkSetup.configure().
    private String sparkEnvContent;
    private String sparkLog4j2Content;
    private String sparkMetricsContent;
    private String sparkFairSchedulerContent;
    private String sparkDefaultsContent;

    public SparkParams(CommandPayload commandPayload) {
        super(commandPayload);
        // Values exposed to every template via the shared global parameters map.
        globalParamsMap.put("spark_user", user());
        globalParamsMap.put("spark_group", group());
        globalParamsMap.put("java_home", javaHome());
        globalParamsMap.put("spark_conf_dir", confDir());
        globalParamsMap.put("hadoop_home", hadoopHome());
        globalParamsMap.put("hadoop_conf_dir", hadoopConfDir());
        globalParamsMap.put("hive_home", hiveHome());
        globalParamsMap.put("hive_conf_dir", hiveConfDir());
        // Kerberos is not wired up for this stack version yet.
        globalParamsMap.put("security_enabled", false);
    }

    /** Loads fairscheduler.xml config and caches its template content. */
    @GlobalParams
    public Map<String, Object> sparkFairScheduler() {
        Map<String, Object> sparkFairScheduler = LocalSettings.configurations(getServiceName(), "fairscheduler");
        sparkFairSchedulerContent = (String) sparkFairScheduler.get("content");
        return sparkFairScheduler;
    }

    /** Loads metrics.properties config and caches its template content. */
    @GlobalParams
    public Map<String, Object> sparkMetrics() {
        Map<String, Object> sparkMetrics = LocalSettings.configurations(getServiceName(), "metrics");
        sparkMetricsContent = (String) sparkMetrics.get("content");
        return sparkMetrics;
    }

    /**
     * Loads spark-defaults config; also captures the history server's
     * event-log directory so SparkSetup can pre-create local paths.
     */
    @GlobalParams
    public Map<String, Object> sparkDefaults() {
        Map<String, Object> sparkDefaults = LocalSettings.configurations(getServiceName(), "spark-defaults");
        // NOTE(review): NPEs if "spark_history_fs_logDirectory" is absent from
        // the stack's spark-defaults.xml — assumed always present there.
        sparkHistoryLogDir = sparkDefaults.get("spark_history_fs_logDirectory").toString();
        sparkDefaultsContent = (String) sparkDefaults.get("content");
        return sparkDefaults;
    }

    /**
     * Loads spark-env config and derives log dir, pid dir, and the two
     * daemon pid-file paths used by the server scripts' status checks.
     */
    @GlobalParams
    public Map<String, Object> sparkEnv() {
        Map<String, Object> sparkEnv = LocalSettings.configurations(getServiceName(), "spark-env");
        sparkPidDir = sparkEnv.get("spark_pid_dir").toString();
        sparkHistoryServerPidFile =
                sparkPidDir + "/spark-" + user() + "-org.apache.spark.deploy.history.HistoryServer-1.pid";
        sparkThriftServerPidFile =
                sparkPidDir + "/spark-" + user() + "-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid";
        sparkLogDir = sparkEnv.get("spark_log_dir").toString();
        sparkEnvContent = sparkEnv.get("content").toString();
        return sparkEnv;
    }

    /**
     * Builds Spark's hive-site.xml by copying the metastore URI and warehouse
     * dir from the Hive service's own hive-site configuration.
     */
    @GlobalParams
    public Map<String, Object> sparkHiveSite() {
        Map<String, Object> configurations = LocalSettings.configurations(getServiceName(), "spark-hive-site");
        Map<String, Object> hiveSite = LocalSettings.configurations("hive", "hive-site");
        configurations.put("hive.metastore.uris", hiveSite.get("hive.metastore.uris"));
        configurations.put("hive.metastore.warehouse.dir", hiveSite.get("hive.metastore.warehouse.dir"));
        return configurations;
    }

    /** Loads log4j2 config and caches its template content. */
    @GlobalParams
    public Map<String, Object> sparkLog4j2() {
        Map<String, Object> sparkLog4j2 = LocalSettings.configurations(getServiceName(), "log4j2");
        sparkLog4j2Content = (String) sparkLog4j2.get("content");
        return sparkLog4j2;
    }

    // Conventional layout: sibling services live directly under the stack home.
    public String hadoopConfDir() {
        return hadoopHome() + "/etc/hadoop";
    }

    public String hadoopHome() {
        return stackHome() + "/hadoop";
    }

    public String hiveConfDir() {
        return hiveHome() + "/conf";
    }

    public String hiveHome() {
        return stackHome() + "/hive";
    }

    @Override
    public String getServiceName() {
        return "spark";
    }
}
diff --git
a/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkSetup.java
b/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkSetup.java
new file mode 100644
index 00000000..9eb421ff
--- /dev/null
+++
b/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkSetup.java
@@ -0,0 +1,107 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.bigtop.manager.stack.bigtop.v3_3_0.spark;

import org.apache.bigtop.manager.common.constants.Constants;
import org.apache.bigtop.manager.common.shell.ShellResult;
import org.apache.bigtop.manager.stack.core.enums.ConfigType;
import org.apache.bigtop.manager.stack.core.spi.param.Params;
import org.apache.bigtop.manager.stack.core.utils.linux.LinuxFileUtils;

import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;

import java.text.MessageFormat;

import static org.apache.bigtop.manager.common.constants.Constants.PERMISSION_755;

/**
 * Shared configuration routine for all Spark components: creates runtime
 * directories and renders every configuration file into the Spark conf dir.
 */
@Slf4j
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public class SparkSetup {

    /**
     * Configures Spark on this host.
     *
     * @param params the {@link SparkParams} for the current command
     * @return a successful {@link ShellResult} (directory/template failures
     *         propagate as unchecked exceptions from {@link LinuxFileUtils})
     */
    public static ShellResult configure(Params params) {
        log.info("Configuring Spark");
        SparkParams sparkParams = (SparkParams) params;

        String confDir = sparkParams.confDir();
        String sparkUser = sparkParams.user();
        String sparkGroup = sparkParams.group();

        // Log and pid directories must exist before any daemon is started.
        LinuxFileUtils.createDirectories(sparkParams.getSparkLogDir(), sparkUser, sparkGroup, PERMISSION_755, true);
        LinuxFileUtils.createDirectories(sparkParams.getSparkPidDir(), sparkUser, sparkGroup, PERMISSION_755, true);

        // Only a local ("file:") event-log directory can be pre-created here;
        // remote schemes (e.g. hdfs:) are left to the owning service.
        String sparkHistoryLogDir = sparkParams.getSparkHistoryLogDir();
        if (sparkHistoryLogDir.startsWith("file:")) {
            // Strip just the scheme prefix. The previous split(":")[1] form
            // truncated any local path that itself contained a ':' character.
            String dir = sparkHistoryLogDir.substring("file:".length());
            LinuxFileUtils.createDirectories(dir, sparkUser, sparkGroup, PERMISSION_755, true);
        }

        renderTemplate(sparkParams, sparkParams.getSparkEnvContent(), confDir, "spark-env.sh");
        renderTemplate(sparkParams, sparkParams.getSparkDefaultsContent(), confDir, "spark-defaults.conf");
        renderTemplate(sparkParams, sparkParams.getSparkFairSchedulerContent(), confDir, "fairscheduler.xml");
        renderTemplate(sparkParams, sparkParams.getSparkLog4j2Content(), confDir, "log4j2.properties");
        renderTemplate(sparkParams, sparkParams.getSparkMetricsContent(), confDir, "metrics.properties");

        // hive-site.xml is generated from key/value pairs rather than a template.
        LinuxFileUtils.toFile(
                ConfigType.XML,
                MessageFormat.format("{0}/hive-site.xml", confDir),
                sparkUser,
                sparkGroup,
                Constants.PERMISSION_644,
                sparkParams.sparkHiveSite());

        log.info("Successfully configured Spark");
        return ShellResult.success();
    }

    // Renders one templated configuration file into the conf dir with the
    // Spark user/group ownership and 755 permissions used by this stack.
    private static void renderTemplate(SparkParams sparkParams, String content, String confDir, String fileName) {
        LinuxFileUtils.toFileByTemplate(
                content,
                MessageFormat.format("{0}/{1}", confDir, fileName),
                sparkParams.user(),
                sparkParams.group(),
                Constants.PERMISSION_755,
                sparkParams.getGlobalParamsMap());
    }
}
diff --git
a/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkThriftServerScript.java
b/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkThriftServerScript.java
new file mode 100644
index 00000000..df9e9f09
--- /dev/null
+++
b/bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/spark/SparkThriftServerScript.java
@@ -0,0 +1,85 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.bigtop.manager.stack.bigtop.v3_3_0.spark;

import org.apache.bigtop.manager.common.shell.ShellResult;
import org.apache.bigtop.manager.stack.core.exception.StackException;
import org.apache.bigtop.manager.stack.core.spi.param.Params;
import org.apache.bigtop.manager.stack.core.spi.script.AbstractServerScript;
import org.apache.bigtop.manager.stack.core.spi.script.Script;
import org.apache.bigtop.manager.stack.core.utils.linux.LinuxOSUtils;

import com.google.auto.service.AutoService;
import lombok.extern.slf4j.Slf4j;

import java.io.IOException;
import java.text.MessageFormat;
import java.util.Properties;

/**
 * Lifecycle script for the Spark Thrift Server: install, configure,
 * start/stop via Spark's bundled sbin scripts, and pid-file based status.
 */
@Slf4j
@AutoService(Script.class)
public class SparkThriftServerScript extends AbstractServerScript {

    /**
     * Installs the Spark package, stripping the tarball's top-level directory.
     */
    @Override
    public ShellResult add(Params params) {
        Properties properties = new Properties();
        properties.setProperty(PROPERTY_KEY_SKIP_LEVELS, "1");

        return super.add(params, properties);
    }

    /** Delegates configuration-file rendering to the shared {@link SparkSetup}. */
    @Override
    public ShellResult configure(Params params) {
        return SparkSetup.configure(params);
    }

    /** Re-renders configuration, then launches the thrift server daemon. */
    @Override
    public ShellResult start(Params params) {
        configure(params);
        SparkParams sparkParams = (SparkParams) params;
        String cmd = MessageFormat.format("{0}/sbin/start-thriftserver.sh", sparkParams.serviceHome());
        try {
            return LinuxOSUtils.sudoExecCmd(cmd, sparkParams.user());
        } catch (IOException e) {
            throw new StackException(e);
        }
    }

    /** Stops the thrift server daemon via Spark's stop script. */
    @Override
    public ShellResult stop(Params params) {
        SparkParams sparkParams = (SparkParams) params;
        String cmd = MessageFormat.format("{0}/sbin/stop-thriftserver.sh", sparkParams.serviceHome());
        try {
            return LinuxOSUtils.sudoExecCmd(cmd, sparkParams.user());
        } catch (IOException e) {
            throw new StackException(e);
        }
    }

    /** Reports liveness by checking the thrift server's own pid file. */
    @Override
    public ShellResult status(Params params) {
        SparkParams sparkParams = (SparkParams) params;
        // BUGFIX: previously checked the HISTORY server's pid file, so thrift
        // server status reflected the wrong daemon's liveness.
        return LinuxOSUtils.checkProcess(sparkParams.getSparkThriftServerPidFile());
    }

    @Override
    public String getComponentName() {
        return "spark_thriftserver";
    }
}
diff --git
a/bigtop-manager-stack/bigtop-manager-stack-core/src/main/java/org/apache/bigtop/manager/stack/core/utils/linux/LinuxFileUtils.java
b/bigtop-manager-stack/bigtop-manager-stack-core/src/main/java/org/apache/bigtop/manager/stack/core/utils/linux/LinuxFileUtils.java
index 87070926..39474a6a 100644
---
a/bigtop-manager-stack/bigtop-manager-stack-core/src/main/java/org/apache/bigtop/manager/stack/core/utils/linux/LinuxFileUtils.java
+++
b/bigtop-manager-stack/bigtop-manager-stack-core/src/main/java/org/apache/bigtop/manager/stack/core/utils/linux/LinuxFileUtils.java
@@ -214,6 +214,7 @@ public class LinuxFileUtils {
return;
}
+ log.info("Copy file: [{}] to [{}]", source, dest);
List<String> builderParameters = new ArrayList<>();
builderParameters.add("cp");
if (Files.exists(Path.of(source)) &&
Files.isDirectory(Paths.get(source))) {