Repository: ambari
Updated Branches:
  refs/heads/branch-2.6 f52e5f93e -> 973ec4640
AMBARI-22598 - Pig service check failed after PU with LzoCodec CNF (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/973ec464
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/973ec464
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/973ec464

Branch: refs/heads/branch-2.6
Commit: 973ec464063196e03652ce01912aef4b24bcc07f
Parents: f52e5f9
Author: Jonathan Hurley <[email protected]>
Authored: Tue Dec 5 13:30:55 2017 -0500
Committer: Jonathan Hurley <[email protected]>
Committed: Wed Dec 6 08:46:52 2017 -0500

----------------------------------------------------------------------
 .../MAHOUT/1.0.0.2.3/package/scripts/mahout.py  | 11 ++++++++++-
 .../PIG/0.12.0.2.0/package/scripts/pig.py       | 14 +++++++++++---
 .../SPARK/1.2.1/package/scripts/setup_spark.py  | 19 ++++++++++++-------
 .../SPARK2/2.0.0/package/scripts/setup_spark.py | 19 ++++++++++++-------
 4 files changed, 45 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/973ec464/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout.py
index f2c3c18..d2df928 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout.py
@@ -20,11 +20,20 @@ Ambari Agent
 
 """
 import os
-from resource_management import *
+from resource_management.core.resources import Directory
+from resource_management.core.resources import File
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions import is_empty
+from resource_management.libraries.functions import lzo_utils
+from resource_management.libraries.resources import XmlConfig
+
 
 def mahout():
   import params
 
+  # ensure that matching LZO libraries are installed for Mahout
+  lzo_utils.install_lzo_if_needed()
+
   Directory( params.mahout_conf_dir,
              create_parents = True,
              owner = params.mahout_user,

http://git-wip-us.apache.org/repos/asf/ambari/blob/973ec464/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig.py
index 83f7048..1378fdc 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig.py
@@ -19,14 +19,22 @@ Ambari Agent
 
 """
 import os
-from resource_management import *
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from resource_management.core.resources import Directory
+from resource_management.core.resources import File
+from resource_management.core.source import InlineTemplate
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions import lzo_utils
+
 
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def pig():
   import params
 
+  # ensure that matching LZO libraries are installed for Pig
+  lzo_utils.install_lzo_if_needed()
+
   Directory( params.pig_conf_dir,
              create_parents = True,
              owner = params.hdfs_user,
@@ -47,7 +55,7 @@ def pig():
              content=params.pig_properties
   )
 
-  if (params.log4j_props != None):
+  if (params.log4j_props is not None):
     File(format("{params.pig_conf_dir}/log4j.properties"),
          mode=0644,
          group=params.user_group,
@@ -70,7 +78,7 @@ def pig():
            content=params.pig_properties
   )
 
-  if (params.log4j_props != None):
+  if (params.log4j_props is not None):
     File(os.path.join(params.pig_conf_dir, "log4j.properties"),
          mode='f',
          owner=params.pig_user,

http://git-wip-us.apache.org/repos/asf/ambari/blob/973ec464/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
index 426509c..0d61511 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
@@ -18,17 +18,19 @@ limitations under the License.
 
 """
-import sys
-import fileinput
-import shutil
 import os
-from resource_management import *
-from resource_management.core.exceptions import ComponentIsNotRunning
-from resource_management.core.logger import Logger
-from resource_management.core import shell
+
+from resource_management.core.resources import Directory
+from resource_management.core.resources import File
+from resource_management.core.source import InlineTemplate
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions import lzo_utils
+from resource_management.libraries.resources import PropertiesFile
 from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.resources import HdfsResource
+from resource_management.libraries.resources import XmlConfig
+
 
 def setup_spark(env, type, upgrade_type=None, action=None, config_dir=None):
   """
@@ -41,6 +43,9 @@ def setup_spark(env, type, upgrade_type=None, action=None, config_dir=None):
 
   import params
 
+  # ensure that matching LZO libraries are installed for Spark
+  lzo_utils.install_lzo_if_needed()
+
   if config_dir is None:
     config_dir = params.spark_conf

http://git-wip-us.apache.org/repos/asf/ambari/blob/973ec464/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_spark.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_spark.py
index 82b6c63..3638953 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_spark.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_spark.py
@@ -18,21 +18,26 @@ limitations under the License.
 
 """
-import sys
-import fileinput
-import shutil
 import os
-from resource_management import *
-from resource_management.core.exceptions import ComponentIsNotRunning
-from resource_management.core.logger import Logger
-from resource_management.core import shell
+
+from resource_management.core.resources import Directory
+from resource_management.core.resources import File
+from resource_management.core.source import InlineTemplate
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions import lzo_utils
+from resource_management.libraries.resources import PropertiesFile
 from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.resources import HdfsResource
+from resource_management.libraries.resources import XmlConfig
 
 def setup_spark(env, type, upgrade_type = None, action = None):
   import params
 
+  # ensure that matching LZO libraries are installed for Spark
+  lzo_utils.install_lzo_if_needed()
+
   Directory([params.spark_pid_dir, params.spark_log_dir],
             owner=params.spark_user,
             group=params.user_group,

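----------------------------------------------------------------------

The hunks above only call lzo_utils.install_lzo_if_needed(); the helper itself
is not part of this diff. The sketch below shows the shape of the change that
each client script now shares, using only calls that appear in the hunks. The
function name configure_client and the params attribute names (conf_dir,
client_user, user_group) are placeholders for illustration, not names from
this commit.

  from resource_management.core.resources import Directory
  from resource_management.libraries.functions import lzo_utils


  def configure_client():
    import params

    # Install the stack-matched LZO packages before any client configuration
    # is written, so a service check that reads LZO-compressed data does not
    # fail with a ClassNotFoundException for the LzoCodec class.
    lzo_utils.install_lzo_if_needed()

    # Client configuration is then laid down exactly as before.
    Directory(params.conf_dir,
              create_parents = True,
              owner = params.client_user,
              group = params.user_group)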