This is an automated email from the ASF dual-hosted git repository.

jialiang pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 9d47292a59 AMBARI-26247: convert .format() to f-strings for ambari-contrib #3903
9d47292a59 is described below

commit 9d47292a599e53d58aa1b9701f598080b7170c01
Author: yaruyng <[email protected]>
AuthorDate: Thu Dec 5 14:13:19 2024 +0800

    AMBARI-26247: convert .format() to f-strings for ambari-contrib #3903
---
 contrib/agent-simulator/docker.py                  |  2 +-
 .../agent-simulator/docker_image/launcher_agent.py |  2 +-
 .../after-INSTALL/scripts/shared_initialization.py |  6 ++--
 .../before-INSTALL/scripts/repo_initialization.py  |  2 +-
 .../stacks/HDF/2.0/services/stack_advisor.py       | 38 +++++++++++-----------
 .../src/test/python/unitTests.py                   | 10 +++---
 .../8.0.5/package/scripts/microsoft_r_server.py    |  2 +-
 .../after-INSTALL/scripts/shared_initialization.py |  6 ++--
 .../before-INSTALL/scripts/repo_initialization.py  |  2 +-
 .../HIVE/package/alerts/alert_webhcat_server.py    |  2 +-
 .../services/HIVE/package/scripts/hive_server.py   |  2 +-
 .../services/HIVE/package/scripts/service_check.py | 10 +++---
 .../HIVE/package/scripts/webhcat_server.py         |  2 +-
 .../package/scripts/application_timeline_server.py |  2 +-
 .../services/YARN/package/scripts/historyserver.py |  2 +-
 .../YARN/package/scripts/mapred_service_check.py   |  2 +-
 .../YARN/package/scripts/mapreduce2_client.py      |  2 +-
 .../services/YARN/package/scripts/nodemanager.py   |  2 +-
 .../YARN/package/scripts/nodemanager_upgrade.py    |  4 +--
 .../YARN/package/scripts/resourcemanager.py        |  2 +-
 .../services/YARN/package/scripts/service_check.py |  6 ++--
 contrib/version-builder/version_builder.py         | 10 +++---
 22 files changed, 59 insertions(+), 59 deletions(-)
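
The change is mechanical: each positional str.format() call, plus a handful of "%"-style interpolations, is rewritten as the equivalent f-string. A minimal illustrative sketch of the pattern (the names host, port and prop below are made up and do not come from the patched files):

    host = "c6401.ambari.apache.org"   # hypothetical values, for illustration only
    port = 8080
    prop = "yarn.nodemanager.local-dirs"

    old_url = "http://{0}:{1}/api/v1/clusters".format(host, port)   # positional .format()
    new_url = f"http://{host}:{port}/api/v1/clusters"               # equivalent f-string

    old_msg = "Value should be set for %s" % prop                   # "%"-style interpolation
    new_msg = f"Value should be set for {prop}"                     # equivalent f-string

    assert old_url == new_url and old_msg == new_msg

f-strings evaluate the embedded expressions at runtime and require Python 3.6 or later, which a tree-wide conversion like this one presupposes.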

diff --git a/contrib/agent-simulator/docker.py b/contrib/agent-simulator/docker.py
index 858f7e53f0..30d9a180bd 100644
--- a/contrib/agent-simulator/docker.py
+++ b/contrib/agent-simulator/docker.py
@@ -35,7 +35,7 @@ class Docker:
         :return: A map, which is JSON format object.
         """
         docker_json = {}
-        docker_json["weave_ip"] = "{0}/{1}".format(self.ip, self.mask)
+        docker_json["weave_ip"] = f"{self.ip}/{self.mask}"
         docker_json["weave_domain_name"] = self.weave_domain_name
         return docker_json
 
diff --git a/contrib/agent-simulator/docker_image/launcher_agent.py b/contrib/agent-simulator/docker_image/launcher_agent.py
index ea4e609000..03fa21e562 100644
--- a/contrib/agent-simulator/docker_image/launcher_agent.py
+++ b/contrib/agent-simulator/docker_image/launcher_agent.py
@@ -69,7 +69,7 @@ def set_weave_ip(weave_ip):
         for index in range(len(all_resolution)):
             if index == 0:
                 token = all_resolution[index].split()
-                etc_hosts.write("{0} {1} {2}\n".format(weave_ip, token[1], token[2]))
+                etc_hosts.write(f"{weave_ip} {token[1]} {token[2]}\n")
             else:
                 etc_hosts.write(all_resolution[index])
 
diff --git a/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/hooks/after-INSTALL/scripts/shared_initialization.py b/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/hooks/after-INSTALL/scripts/shared_initialization.py
index 6969f3057f..0c0ac39dd4 100644
--- a/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -51,7 +51,7 @@ def setup_stack_symlinks(struct_out_file):
   json_version = load_version(struct_out_file)
 
   if not json_version:
-    Logger.info("There is no advertised version for this component stored in {0}".format(struct_out_file))
+    Logger.info(f"There is no advertised version for this component stored in {struct_out_file}")
     return
 
   # On parallel command execution this should be executed by a single process at a time.
@@ -63,7 +63,7 @@ def setup_stack_symlinks(struct_out_file):
 def setup_config():
   import params
   stackversion = params.stack_version_unformatted
-  Logger.info("FS Type: {0}".format(params.dfs_type))
+  Logger.info(f"FS Type: {params.dfs_type}")
 
   is_hadoop_conf_dir_present = False
   if hasattr(params, "hadoop_conf_dir") and params.hadoop_conf_dir is not None and os.path.exists(params.hadoop_conf_dir):
@@ -111,7 +111,7 @@ def link_configs(struct_out_file):
   json_version = load_version(struct_out_file)
 
   if not json_version:
-    Logger.info("Could not load 'version' from {0}".format(struct_out_file))
+    Logger.info(f"Could not load 'version' from {struct_out_file}")
     return
 
   # On parallel command execution this should be executed by a single process at a time.
diff --git a/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/hooks/before-INSTALL/scripts/repo_initialization.py b/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/hooks/before-INSTALL/scripts/repo_initialization.py
index 0e22228330..225fc2ea1b 100644
--- a/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/hooks/before-INSTALL/scripts/repo_initialization.py
+++ b/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/hooks/before-INSTALL/scripts/repo_initialization.py
@@ -39,7 +39,7 @@ def _alter_repo(action, repo_string, repo_template):
   if 0 == len(repo_dicts):
     Logger.info("Repository list is empty. Ambari may not be managing the 
repositories.")
   else:
-    Logger.info("Initializing {0} repositories".format(str(len(repo_dicts))))
+    Logger.info(f"Initializing {str(len(repo_dicts))} repositories")
 
   for repo in repo_dicts:
     if not 'baseUrl' in repo:
diff --git a/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/services/stack_advisor.py b/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/services/stack_advisor.py
index 70862cdc47..9886592f2b 100644
--- a/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/services/stack_advisor.py
+++ b/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/services/stack_advisor.py
@@ -54,19 +54,19 @@ class HDF20StackAdvisor(DefaultStackAdvisor):
          if "+" in cardinality:
            hostsMin = int(cardinality[:-1])
            if componentHostsCount < hostsMin:
-             message = "At least {0} {1} components should be installed in cluster.".format(hostsMin, componentDisplayName)
+             message = f"At least {hostsMin} {componentDisplayName} components should be installed in cluster."
          elif "-" in cardinality:
            nums = cardinality.split("-")
            hostsMin = int(nums[0])
            hostsMax = int(nums[1])
            if componentHostsCount > hostsMax or componentHostsCount < hostsMin:
-             message = "Between {0} and {1} {2} components should be installed in cluster.".format(hostsMin, hostsMax, componentDisplayName)
+             message = f"Between {hostsMin} and {hostsMax} {componentDisplayName} components should be installed in cluster."
          elif "ALL" == cardinality:
            if componentHostsCount != hostsCount:
-             message = "{0} component should be installed on all hosts in cluster.".format(componentDisplayName)
+             message = f"{componentDisplayName} component should be installed on all hosts in cluster."
          else:
            if componentHostsCount != int(cardinality):
-             message = "Exactly {0} {1} components should be installed in cluster.".format(int(cardinality), componentDisplayName)
+             message = f"Exactly {int(cardinality)} {componentDisplayName} components should be installed in cluster."
 
          if message is not None:
            items.append({"type": 'host-component', "level": 'ERROR', 
"message": message, "component-name": componentName})
@@ -295,7 +295,7 @@ class HDF20StackAdvisor(DefaultStackAdvisor):
         policymgr_external_url = services['configurations']['admin-properties']['properties']['policymgr_external_url']
       else:
         ranger_admin_host = ranger_admin_hosts[0]
-        policymgr_external_url = "{0}://{1}:{2}".format(protocol, ranger_admin_host, port)
+        policymgr_external_url = f"{protocol}://{ranger_admin_host}:{port}"
 
       putRangerAdminProperty('policymgr_external_url', policymgr_external_url)
 
@@ -403,10 +403,10 @@ class HDF20StackAdvisor(DefaultStackAdvisor):
       if 'infra-solr-env' in services['configurations'] and \
         ('infra_solr_znode' in services['configurations']['infra-solr-env']['properties']):
         infra_solr_znode = services['configurations']['infra-solr-env']['properties']['infra_solr_znode']
-        ranger_audit_zk_port = '{0}{1}'.format(zookeeper_host_port, infra_solr_znode)
+        ranger_audit_zk_port = f'{zookeeper_host_port}{infra_solr_znode}'
       putRangerAdminSiteProperty('ranger.audit.solr.zookeepers', ranger_audit_zk_port)
     elif zookeeper_host_port and is_solr_cloud_enabled and is_external_solr_cloud_enabled:
-      ranger_audit_zk_port = '{0}/{1}'.format(zookeeper_host_port, 'ranger_audits')
+      ranger_audit_zk_port = f'{zookeeper_host_port}/ranger_audits'
       putRangerAdminSiteProperty('ranger.audit.solr.zookeepers', ranger_audit_zk_port)
     else:
       putRangerAdminSiteProperty('ranger.audit.solr.zookeepers', 'NONE')
@@ -1002,13 +1002,13 @@ class HDF20StackAdvisor(DefaultStackAdvisor):
              userValue = convertToNumber(configurations[configName]["properties"][propertyName])
              maxValue = convertToNumber(recommendedDefaults[configName]["property_attributes"][propertyName]["maximum"])
              if userValue > maxValue:
-                validationItems.extend([{"config-name": propertyName, "item": self.getWarnItem("Value is greater than the recommended maximum of {0} ".format(maxValue))}])
+                validationItems.extend([{"config-name": propertyName, "item": self.getWarnItem(f"Value is greater than the recommended maximum of {maxValue} ")}])
            if "minimum" in recommendedDefaults[configName]["property_attributes"][propertyName] and \
                    propertyName in recommendedDefaults[configName]["properties"]:
              userValue = convertToNumber(configurations[configName]["properties"][propertyName])
              minValue = convertToNumber(recommendedDefaults[configName]["property_attributes"][propertyName]["minimum"])
              if userValue < minValue:
-                validationItems.extend([{"config-name": propertyName, "item": self.getWarnItem("Value is less than the recommended minimum of {0} ".format(minValue))}])
+                validationItems.extend([{"config-name": propertyName, "item": self.getWarnItem(f"Value is less than the recommended minimum of {minValue} ")}])
       items.extend(self.toConfigurationValidationProblems(validationItems, configName))
     pass
 
@@ -1491,7 +1491,7 @@ class HDF20StackAdvisor(DefaultStackAdvisor):
     mountPoint = getMountPointForDir(dir, mountPoints)
 
     if "/" == mountPoint and self.getPreferredMountPoints(hostInfo)[0] != 
mountPoint:
-      return self.getWarnItem("It is not recommended to use root partition for 
{0}".format(propertyName))
+      return self.getWarnItem(f"It is not recommended to use root partition 
for {propertyName}")
 
     return None
 
@@ -1509,10 +1509,10 @@ class HDF20StackAdvisor(DefaultStackAdvisor):
     mountPoint = getMountPointForDir(dir, mountPoints.keys())
 
     if not mountPoints:
-      return self.getErrorItem("No disk info found on host %s" % hostInfo["host_name"])
+      return self.getErrorItem(f"No disk info found on host {hostInfo['host_name']}")
 
     if mountPoint is None:
-      return self.getErrorItem("No mount point in directory %s. Mount points: %s" % (dir, ', '.join(mountPoints.keys())))
+      return self.getErrorItem(f"No mount point in directory {dir}. Mount points: {', '.join(mountPoints.keys())}")
 
     if mountPoints[mountPoint] < reqiuredDiskSpace:
       msg = "Ambari Metrics disk space requirements not met. \n" \
@@ -1535,22 +1535,22 @@ class HDF20StackAdvisor(DefaultStackAdvisor):
     if defaultValue is None:
       return None
     if value < defaultValue:
-      return self.getWarnItem("Value is less than the recommended default of {0}".format(defaultValue))
+      return self.getWarnItem(f"Value is less than the recommended default of {defaultValue}")
     return None
 
   def validatorEqualsPropertyItem(self, properties1, propertyName1,
                                   properties2, propertyName2,
                                   emptyAllowed=False):
     if not propertyName1 in properties1:
-      return self.getErrorItem("Value should be set for %s" % propertyName1)
+      return self.getErrorItem(f"Value should be set for {propertyName1}")
     if not propertyName2 in properties2:
-      return self.getErrorItem("Value should be set for %s" % propertyName2)
+      return self.getErrorItem(f"Value should be set for {propertyName2}")
     value1 = properties1.get(propertyName1)
     if value1 is None and not emptyAllowed:
-      return self.getErrorItem("Empty value for %s" % propertyName1)
+      return self.getErrorItem(f"Empty value for {propertyName1}")
     value2 = properties2.get(propertyName2)
     if value2 is None and not emptyAllowed:
-      return self.getErrorItem("Empty value for %s" % propertyName2)
+      return self.getErrorItem(f"Empty value for {propertyName2}")
     if value1 != value2:
       return self.getWarnItem("It is recommended to set equal values "
              "for properties {0} and {1}".format(propertyName1, propertyName2))
@@ -1560,10 +1560,10 @@ class HDF20StackAdvisor(DefaultStackAdvisor):
   def validatorEqualsToRecommendedItem(self, properties, recommendedDefaults,
                                        propertyName):
     if not propertyName in properties:
-      return self.getErrorItem("Value should be set for %s" % propertyName)
+      return self.getErrorItem(f"Value should be set for {propertyName}")
     value = properties.get(propertyName)
     if not propertyName in recommendedDefaults:
-      return self.getErrorItem("Value should be recommended for %s" % propertyName)
+      return self.getErrorItem(f"Value should be recommended for {propertyName}")
     recommendedValue = recommendedDefaults.get(propertyName)
     if value != recommendedValue:
       return self.getWarnItem("It is recommended to set value {0} "
diff --git a/contrib/management-packs/isilon-onefs-mpack/src/test/python/unitTests.py b/contrib/management-packs/isilon-onefs-mpack/src/test/python/unitTests.py
index e26708cf75..adda0f73bc 100644
--- a/contrib/management-packs/isilon-onefs-mpack/src/test/python/unitTests.py
+++ b/contrib/management-packs/isilon-onefs-mpack/src/test/python/unitTests.py
@@ -110,13 +110,13 @@ def main():
     sys.stderr.write("Failed tests:\n")
   for failed_tests in [test_errors,test_failures]:
     for err in failed_tests:
-      sys.stderr.write("{0}: {1}\n".format(err[2],err[0]))
+      sys.stderr.write(f"{err[2]}: {err[0]}\n")
       sys.stderr.write("----------------------------------------------------------------------\n")
-      sys.stderr.write("{0}\n".format(err[1]))
+      sys.stderr.write(f"{err[1]}\n")
   sys.stderr.write("----------------------------------------------------------------------\n")
-  sys.stderr.write("Total run:{0}\n".format(test_runs))
-  sys.stderr.write("Total errors:{0}\n".format(len(test_errors)))
-  sys.stderr.write("Total failures:{0}\n".format(len(test_failures)))
+  sys.stderr.write(f"Total run:{test_runs}\n")
+  sys.stderr.write(f"Total errors:{len(test_errors)}\n")
+  sys.stderr.write(f"Total failures:{len(test_failures)}\n")
 
   if tests_status:
     sys.stderr.write("OK\n")
diff --git a/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R_SERVER/8.0.5/package/scripts/microsoft_r_server.py b/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R_SERVER/8.0.5/package/scripts/microsoft_r_server.py
index ba2671d90d..fa7c30bac1 100644
--- a/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R_SERVER/8.0.5/package/scripts/microsoft_r_server.py
+++ b/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R_SERVER/8.0.5/package/scripts/microsoft_r_server.py
@@ -27,7 +27,7 @@ class MicrosoftRServer(Script):
   def install(self, env):
     Logger.info('Installing R Node Client...')
     tmp_dir = Script.tmp_dir
-    Logger.debug('Using temp dir: {0}'.format(tmp_dir))
+    Logger.debug(f'Using temp dir: {tmp_dir}')
     self.install_packages(env)
     Logger.info('Installed R Node Client')
 
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/shared_initialization.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/shared_initialization.py
index a39074afac..8517fcb93b 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -51,7 +51,7 @@ def setup_stack_symlinks(struct_out_file):
   json_version = load_version(struct_out_file)
 
   if not json_version:
-    Logger.info("There is no advertised version for this component stored in {0}".format(struct_out_file))
+    Logger.info(f"There is no advertised version for this component stored in {struct_out_file}")
     return
 
   # On parallel command execution this should be executed by a single process at a time.
@@ -63,7 +63,7 @@ def setup_stack_symlinks(struct_out_file):
 def setup_config():
   import params
   stackversion = params.stack_version_unformatted
-  Logger.info("FS Type: {0}".format(params.dfs_type))
+  Logger.info(f"FS Type: {params.dfs_type}")
 
   is_hadoop_conf_dir_present = False
   if hasattr(params, "hadoop_conf_dir") and params.hadoop_conf_dir is not None and os.path.exists(params.hadoop_conf_dir):
@@ -111,7 +111,7 @@ def link_configs(struct_out_file):
   json_version = load_version(struct_out_file)
 
   if not json_version:
-    Logger.info("Could not load 'version' from {0}".format(struct_out_file))
+    Logger.info(f"Could not load 'version' from {struct_out_file}")
     return
 
   # On parallel command execution this should be executed by a single process at a time.
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/before-INSTALL/scripts/repo_initialization.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/before-INSTALL/scripts/repo_initialization.py
index 32355da029..1094d1f237 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/before-INSTALL/scripts/repo_initialization.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/before-INSTALL/scripts/repo_initialization.py
@@ -39,7 +39,7 @@ def _alter_repo(action, repo_string, repo_template):
   if 0 == len(repo_dicts):
     Logger.info("Repository list is empty. Ambari may not be managing the 
repositories.")
   else:
-    Logger.info("Initializing {0} repositories".format(str(len(repo_dicts))))
+    Logger.info(f"Initializing {str(len(repo_dicts))} repositories")
 
   for repo in repo_dicts:
     if not 'baseUrl' in repo:
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/alerts/alert_webhcat_server.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/alerts/alert_webhcat_server.py
index 788df82fac..f113cf4735 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/alerts/alert_webhcat_server.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/alerts/alert_webhcat_server.py
@@ -121,7 +121,7 @@ def execute(configurations={}, parameters={}, host_name=None):
     smokeuser = configurations[SMOKEUSER_KEY]
 
   # webhcat always uses http, never SSL
-  query_url = "http://{0}:{1}/templeton/v1/status?user.name={2}".format(host_name, webhcat_port, smokeuser)
+  query_url = f"http://{host_name}:{webhcat_port}/templeton/v1/status?user.name={smokeuser}"
 
   # initialize
   total_time = 0
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server.py
index 4bbeeb8986..0f767b8495 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server.py
@@ -193,7 +193,7 @@ class HiveServerDefault(HiveServer):
       else:
         issues = []
         for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
+          issues.append(f"Configuration file {cf} did not pass the validation. Reason: {result_issues[cf]}")
         self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
         self.put_structured_out({"securityState": "UNSECURED"})
     else:
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/service_check.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/service_check.py
index 956b7bc64c..a6e3acf519 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/service_check.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/service_check.py
@@ -98,7 +98,7 @@ class HiveServiceCheckDefault(HiveServiceCheck):
  def check_hive_server(self, env, server_component_name, kinit_cmd, address_list, server_port):
     import params
     env.set_params(params)
-    Logger.info("Server Address List : {0}, Port : {1}".format(address_list, server_port))
+    Logger.info(f"Server Address List : {address_list}, Port : {server_port}")
 
     if not address_list:
       raise Fail("Can not find any "+server_component_name+" ,host. Please 
check configuration.")
@@ -108,7 +108,7 @@ class HiveServiceCheckDefault(HiveServiceCheck):
     start_time = time.time()
     end_time = start_time + SOCKET_WAIT_SECONDS
 
-    Logger.info("Waiting for the {0} to start...".format(server_component_name))
+    Logger.info(f"Waiting for the {server_component_name} to start...")
 
     workable_server_available = False
     i = 0
@@ -120,10 +120,10 @@ class HiveServiceCheckDefault(HiveServiceCheck):
                                transport_mode=params.hive_transport_mode, http_endpoint=params.hive_http_endpoint,
                                ssl=params.hive_ssl, ssl_keystore=params.hive_ssl_keystore_path,
                                ssl_password=params.hive_ssl_keystore_password)
-        Logger.info("Successfully connected to {0} on port {1}".format(address, server_port))
+        Logger.info(f"Successfully connected to {address} on port {server_port}")
         workable_server_available = True
       except:
-        Logger.info("Connection to {0} on port {1} failed".format(address, server_port))
+        Logger.info(f"Connection to {address} on port {server_port} failed")
         time.sleep(5)
 
       i += 1
@@ -172,7 +172,7 @@ class HiveServiceCheckDefault(HiveServiceCheck):
      exec_path =  os.environ['PATH'] + os.pathsep + params.hadoop_bin_dir + os.pathsep + upgrade_hive_bin
 
     # beeline path
-    llap_cmd = "! beeline -u '%s'" % format(";".join(beeline_url))
+    llap_cmd = f"! beeline -u '{format(';'.join(beeline_url))}'"
     # Append LLAP SQL script path
     llap_cmd += format(" --hiveconf \"hiveLlapServiceCheck={unique_id}\" -f {stack_root}/current/hive-server2-hive2/scripts/llap/sql/serviceCheckScript.sql")
     # Append grep patterns for detecting failure
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/webhcat_server.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/webhcat_server.py
index 28ffa8e7c8..0011121fe1 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/webhcat_server.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/webhcat_server.py
@@ -145,7 +145,7 @@ class WebHCatServerDefault(WebHCatServer):
       else:
         issues = []
         for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
+          issues.append(f"Configuration file {cf} did not pass the validation. Reason: {result_issues[cf]}")
         self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
         self.put_structured_out({"securityState": "UNSECURED"})
     else:
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/application_timeline_server.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/application_timeline_server.py
index d037cea029..fd7ac66061 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/application_timeline_server.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/application_timeline_server.py
@@ -137,7 +137,7 @@ class ApplicationTimelineServerDefault(ApplicationTimelineServer):
       else:
         issues = []
         for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
+          issues.append(f"Configuration file {cf} did not pass the validation. Reason: {result_issues[cf]}")
         self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
         self.put_structured_out({"securityState": "UNSECURED"})
     else:
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/historyserver.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/historyserver.py
index cbc6a74cc2..da204d0015 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/historyserver.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/historyserver.py
@@ -172,7 +172,7 @@ class HistoryServerDefault(HistoryServer):
       else:
         issues = []
         for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
+          issues.append(f"Configuration file {cf} did not pass the validation. Reason: {result_issues[cf]}")
         self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
         self.put_structured_out({"securityState": "UNSECURED"})
     else:
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/mapred_service_check.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/mapred_service_check.py
index 6991812bac..36934f9454 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/mapred_service_check.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/mapred_service_check.py
@@ -52,7 +52,7 @@ class MapReduce2ServiceCheckWindows(MapReduce2ServiceCheck):
       python_executable, validateStatusFilePath, component_type, component_address, params.hadoop_ssl_enabled)
 
     if params.security_enabled:
-      kinit_cmd = "{0} -kt {1} {2};".format(params.kinit_path_local, params.smoke_user_keytab, params.smokeuser)
+      kinit_cmd = f"{params.kinit_path_local} -kt {params.smoke_user_keytab} {params.smokeuser};"
       smoke_cmd = kinit_cmd + validateStatusCmd
     else:
       smoke_cmd = validateStatusCmd
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/mapreduce2_client.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/mapreduce2_client.py
index 1d8210ec4b..7f1827ccd1 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/mapreduce2_client.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/mapreduce2_client.py
@@ -67,7 +67,7 @@ class MapReduce2Client(Script):
     config_dir = self.get_config_dir_during_stack_upgrade(env, base_dir, conf_select_name)
 
     if config_dir:
-      Logger.info("stack_upgrade_save_new_config(): Calling conf-select on %s using version %s" % (conf_select_name, str(params.version)))
+      Logger.info(f"stack_upgrade_save_new_config(): Calling conf-select on {conf_select_name} using version {str(params.version)}")
 
       # Because this script was called from ru_execute_tasks.py which already enters an Environment with its own basedir,
       # must change it now so this function can find the Jinja Templates for the service.
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/nodemanager.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/nodemanager.py
index 0141bf6923..c3495fde3b 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/nodemanager.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/nodemanager.py
@@ -143,7 +143,7 @@ class NodemanagerDefault(Nodemanager):
       else:
         issues = []
         for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
+          issues.append(f"Configuration file {cf} did not pass the validation. Reason: {result_issues[cf]}")
         self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
         self.put_structured_out({"securityState": "UNSECURED"})
     else:
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/nodemanager_upgrade.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/nodemanager_upgrade.py
index 30abb59c38..27356381d2 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/nodemanager_upgrade.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/nodemanager_upgrade.py
@@ -68,7 +68,7 @@ def _check_nodemanager_startup():
   yarn_output = yarn_output.lower()
 
   if hostname in yarn_output or nodemanager_address in yarn_output or hostname_ip in yarn_output:
-    Logger.info('NodeManager with ID \'{0}\' has rejoined the cluster.'.format(nodemanager_address))
+    Logger.info(f'NodeManager with ID \'{nodemanager_address}\' has rejoined the cluster.')
     return
   else:
-    raise Fail('NodeManager with ID \'{0}\' was not found in the list of running NodeManagers. \'{1}\' output was:\n{2}'.format(nodemanager_address, command, yarn_output))
+    raise Fail(f'NodeManager with ID \'{nodemanager_address}\' was not found in the list of running NodeManagers. \'{command}\' output was:\n{yarn_output}')
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/resourcemanager.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/resourcemanager.py
index bf23d68705..dcb6942d72 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/resourcemanager.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/resourcemanager.py
@@ -193,7 +193,7 @@ class ResourcemanagerDefault(Resourcemanager):
       else:
         issues = []
         for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
+          issues.append(f"Configuration file {cf} did not pass the validation. Reason: {result_issues[cf]}")
         self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
         self.put_structured_out({"securityState": "UNSECURED"})
     else:
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/service_check.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/service_check.py
index c21999e4a2..0d01562f18 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/service_check.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/service_check.py
@@ -49,7 +49,7 @@ class ServiceCheckWindows(ServiceCheck):
 
     yarn_exe = os_utils.quote_path(os.path.join(params.yarn_home, "bin", "yarn.cmd"))
 
-    run_yarn_check_cmd = "cmd /C %s node -list" % yarn_exe
+    run_yarn_check_cmd = f"cmd /C {yarn_exe} node -list"
 
     component_type = 'rm'
     if params.hadoop_ssl_enabled:
@@ -62,10 +62,10 @@ class ServiceCheckWindows(ServiceCheck):
     validateStatusFileName = "validateYarnComponentStatusWindows.py"
     validateStatusFilePath = os.path.join(temp_dir, validateStatusFileName)
     python_executable = sys.executable
-    validateStatusCmd = "%s %s %s -p %s -s %s" % (python_executable, validateStatusFilePath, component_type, component_address, params.hadoop_ssl_enabled)
+    validateStatusCmd = f"{python_executable} {validateStatusFilePath} {component_type} -p {component_address} -s {params.hadoop_ssl_enabled}"
 
     if params.security_enabled:
-      kinit_cmd = "%s -kt %s %s;" % (params.kinit_path_local, params.smoke_user_keytab, params.smokeuser)
+      kinit_cmd = f"{params.kinit_path_local} -kt {params.smoke_user_keytab} {params.smokeuser};"
       smoke_cmd = kinit_cmd + ' ' + validateStatusCmd
     else:
       smoke_cmd = validateStatusCmd
diff --git a/contrib/version-builder/version_builder.py b/contrib/version-builder/version_builder.py
index a4ab02a84e..ae841ad31e 100644
--- a/contrib/version-builder/version_builder.py
+++ b/contrib/version-builder/version_builder.py
@@ -157,7 +157,7 @@ class VersionBuilder:
 
     service_element = self.findByAttributeValue(manifest_element, "./service", "id", manifest_id)
     if service_element is None:
-      raise Exception("Cannot add an available service for {0}; it's not on the manifest".format(manifest_id))
+      raise Exception(f"Cannot add an available service for {manifest_id}; it's not on the manifest")
 
     available_element = self.root_element.find("./available-services")
     if available_element is None:
@@ -191,7 +191,7 @@ class VersionBuilder:
       os_element.set('family', os_family)
 
     if self.useNewSyntax():
-      repo_element = os_element.find("./repo/[reponame='{0}']".format(repo_name))
+      repo_element = os_element.find(f"./repo/[reponame='{repo_name}']")
     else:
       repo_element = self.findByValue(os_element, "./repo/reponame", repo_name)
 
@@ -236,15 +236,15 @@ class VersionBuilder:
 
   def findByAttributeValue(self, root, element, attribute, value):
     if self.useNewSyntax():
-      return root.find("./{0}[@{1}='{2}']".format(element, attribute, value))
+      return root.find(f"./{element}[@{attribute}='{value}']")
     else:
-      for node in root.findall("{0}".format(element)):
+      for node in root.findall(f"{element}"):
         if node.attrib[attribute] == value:
           return node
       return None;
   
   def findByValue(self, root, element, value):
-    for node in root.findall("{0}".format(element)):
+    for node in root.findall(f"{element}"):
       if node.text == value:
         return node
     return None


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

