This is an automated email from the ASF dual-hosted git repository.
jialiang pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git
The following commit(s) were added to refs/heads/trunk by this push:
new a659232a88 AMBARI-26245: refactor(ambari-agent): convert .format() to
f-strings (#3902)
a659232a88 is described below
commit a659232a88645ef1a42760dcc647dc98e0e3d910
Author: yaruyng <[email protected]>
AuthorDate: Sat Nov 30 14:11:31 2024 +0800
AMBARI-26245: refactor(ambari-agent): convert .format() to f-strings (#3902)
---
ambari-agent/conf/unix/agent-multiplier.py | 56 +++++++++++-----------
ambari-agent/conf/unix/upgrade_agent_configs.py | 8 ++--
ambari-agent/conf/windows/service_wrapper.py | 8 ++--
.../src/main/python/ambari_agent/ActionQueue.py | 18 +++----
.../python/ambari_agent/AlertSchedulerHandler.py | 8 ++--
.../src/main/python/ambari_agent/AmbariAgent.py | 2 +-
.../src/main/python/ambari_agent/AmbariConfig.py | 2 +-
.../BackgroundCommandExecutionHandle.py | 2 +-
.../ambari_agent/ClusterAlertDefinitionsCache.py | 4 +-
.../src/main/python/ambari_agent/ClusterCache.py | 8 ++--
.../python/ambari_agent/ClusterTopologyCache.py | 12 ++---
.../ambari_agent/CommandHooksOrchestrator.py | 4 +-
.../python/ambari_agent/ComponentStatusExecutor.py | 12 ++---
.../ambari_agent/ComponentVersionReporter.py | 2 +-
.../python/ambari_agent/ConfigurationBuilder.py | 2 +-
.../ambari_agent/CustomServiceOrchestrator.py | 34 ++++++-------
.../src/main/python/ambari_agent/Facter.py | 38 +++++++--------
.../src/main/python/ambari_agent/FileCache.py | 29 +++++------
.../src/main/python/ambari_agent/Hardware.py | 6 +--
.../main/python/ambari_agent/HeartbeatHandlers.py | 2 +-
.../main/python/ambari_agent/HeartbeatThread.py | 20 ++++----
.../ambari_agent/HostCheckReportFileHandler.py | 10 ++--
.../src/main/python/ambari_agent/HostCleanup.py | 26 +++++-----
.../src/main/python/ambari_agent/HostInfo.py | 6 +--
.../main/python/ambari_agent/PingPortListener.py | 2 +-
.../src/main/python/ambari_agent/PythonExecutor.py | 2 +-
.../main/python/ambari_agent/RecoveryManager.py | 2 +-
.../main/python/ambari_agent/RemoteDebugUtils.py | 6 +--
ambari-agent/src/main/python/ambari_agent/Utils.py | 2 +-
.../main/python/ambari_agent/alerts/ams_alert.py | 18 +++----
.../main/python/ambari_agent/alerts/base_alert.py | 17 ++++---
.../main/python/ambari_agent/alerts/collector.py | 2 +-
.../python/ambari_agent/alerts/metric_alert.py | 10 ++--
.../main/python/ambari_agent/alerts/port_alert.py | 7 ++-
.../python/ambari_agent/alerts/script_alert.py | 4 +-
.../main/python/ambari_agent/alerts/web_alert.py | 8 ++--
.../main/python/ambari_agent/apscheduler/job.py | 2 +-
.../apscheduler/jobstores/mongodb_store.py | 2 +-
.../apscheduler/jobstores/ram_store.py | 2 +-
.../apscheduler/jobstores/redis_store.py | 2 +-
.../apscheduler/jobstores/shelve_store.py | 2 +-
.../apscheduler/jobstores/sqlalchemy_store.py | 2 +-
.../python/ambari_agent/apscheduler/scheduler.py | 12 ++---
.../python/ambari_agent/apscheduler/threadpool.py | 2 +-
.../apscheduler/triggers/cron/__init__.py | 12 ++---
.../apscheduler/triggers/cron/expressions.py | 25 +++++-----
.../apscheduler/triggers/cron/fields.py | 6 +--
.../ambari_agent/apscheduler/triggers/interval.py | 2 +-
.../ambari_agent/apscheduler/triggers/simple.py | 5 +-
.../main/python/ambari_agent/apscheduler/util.py | 18 ++++---
.../src/main/python/ambari_agent/hostname.py | 14 +++---
.../ambari_agent/listeners/AgentActionsListener.py | 2 +-
.../listeners/ServerResponsesListener.py | 4 +-
.../main/python/ambari_agent/listeners/__init__.py | 12 ++---
ambari-agent/src/main/python/ambari_agent/main.py | 16 +++----
.../src/main/python/ambari_agent/security.py | 6 +--
.../python/ambari_agent/BaseStompServerTestCase.py | 9 ++--
.../test/python/ambari_agent/TestActionQueue.py | 6 +--
.../src/test/python/ambari_agent/TestAlerts.py | 2 +-
.../src/test/python/ambari_agent/TestHostInfo.py | 2 +-
.../src/test/python/ambari_agent/TestHostname.py | 8 ++--
.../python/ambari_agent/dummy_files/test_script.py | 2 +-
.../ambari_agent/examples/ControllerTester.py | 4 +-
.../resource_management/TestDatanodeHelper.py | 6 +--
.../resource_management/TestSecurityCommons.py | 2 +-
65 files changed, 288 insertions(+), 300 deletions(-)
diff --git a/ambari-agent/conf/unix/agent-multiplier.py
b/ambari-agent/conf/unix/agent-multiplier.py
index ae1e1d0d89..a0ff62a6cb 100644
--- a/ambari-agent/conf/unix/agent-multiplier.py
+++ b/ambari-agent/conf/unix/agent-multiplier.py
@@ -87,8 +87,8 @@ class Multiplier:
print("*** Params ***")
print("Start: %d" % self.start)
print("Num: %d" % self.num)
- print("Prefix: %s" % self.prefix)
- print("Command: %s" % self.command)
+ print(f"Prefix: {self.prefix}")
+ print(f"Command: {self.command}")
# All hostnames that will be managed by Ambari Agents on this host
self.hosts = []
@@ -105,7 +105,7 @@ class Multiplier:
Parse the configuration file to set the config params.
"""
if not os.path.exists(self.CONFIG_FILE):
- print("Did not find Agent Multiplier config file: %s" %
str(self.CONFIG_FILE))
+ print(f"Did not find Agent Multiplier config file:
{str(self.CONFIG_FILE)}")
sys.exit(-1)
params = {}
@@ -141,11 +141,11 @@ class Multiplier:
errors.append("Prefix is a required field")
if not os.path.isfile(self.source_config_file):
- errors.append("Ambari Agent config file does not exist at %s" %
self.source_config_file)
+ errors.append(f"Ambari Agent config file does not exist at
{self.source_config_file}")
valid_commands = set(["start", "stop", "restart", "status"])
if self.command is None or self.command not in valid_commands:
- errors.append("Command must be one of %s" % ", ".join(valid_commands))
+ errors.append(f"Command must be one of {', '.join(valid_commands)}")
if len(errors) > 0:
print("Error:")
@@ -171,24 +171,24 @@ class Multiplier:
for dir in [host_home_dir, host_log_dir, host_config_dir, host_pid_dir,
host_prefix, host_cache_dir]:
if not os.path.isdir(dir):
- print("Creating dir %s" % (dir))
+ print(f"Creating dir {dir}")
os.makedirs(dir)
# Copy config file
host_config_file = os.path.join(host_config_dir, "ambari-agent.ini")
if not os.path.isfile(host_config_file):
- print("Copying config file %s" % str(host_config_file))
+ print(f"Copying config file {str(host_config_file)}")
shutil.copyfile(self.source_config_file, host_config_file)
# Copy version file
version_file = os.path.join(host_prefix, "version")
if not os.path.isfile(version_file):
- print("Copying version file %s" % str(version_file))
+ print(f"Copying version file {str(version_file)}")
shutil.copyfile(self.source_version_file, version_file)
# Copy cache dir content
if not os.path.isdir(os.path.join(host_cache_dir, "stacks")):
- print("Copying cache directory content %s" % str(host_cache_dir))
+ print(f"Copying cache directory content {str(host_cache_dir)}")
self.copytree(self.cache_dir, host_cache_dir)
# Create hostname.sh script to use custom FQDN for each agent.
@@ -227,7 +227,7 @@ class Multiplier:
"echo HOSTNAME"
with open(str(host_name_script), "w+") as f:
f.writelines(template.replace("HOSTNAME", host_name))
- subprocess.call("chmod +x %s" % host_name_script, shell=True)
+ subprocess.call(f"chmod +x {host_name_script}", shell=True)
def change_config(self, config_file, config_dict):
"""
@@ -238,7 +238,7 @@ class Multiplier:
# TODO, allow appending configs to [AGENT] section.
if not os.path.exists(config_file):
- print("ERROR. Did not file config file: %s" % config_file)
+ print(f"ERROR. Did not file config file: {config_file}")
return
lines = []
@@ -265,11 +265,11 @@ class Multiplier:
# TODO, if can append configs, then this is not needed.
if len(configs_found) < len(config_dict.keys()):
missing_configs = set(config_dict.keys()) - configs_found
- print("ERROR: Did not find all required configs. Missing: %s" % ",
".join(missing_configs))
+ print(f"ERROR: Did not find all required configs. Missing: {',
'.join(missing_configs)}")
sys.exit(-1)
if len(configs_changed) > 0:
- print("Making changes to file %s" % config_file)
+ print(f"Making changes to file {config_file}")
with open(config_file, "w") as f:
f.writelines(new_lines)
@@ -280,7 +280,7 @@ class Multiplier:
"""
etc_hosts = "/etc/hosts"
if not os.path.isfile(etc_hosts):
- print("ERROR. Did not find file %s" % etc_hosts)
+ print(f"ERROR. Did not find file {etc_hosts}")
return
lines = []
@@ -300,7 +300,7 @@ class Multiplier:
new_lines.append(line)
if line_changed:
- print("Making changes to %s" % etc_hosts)
+ print(f"Making changes to {etc_hosts}")
with open(etc_hosts, "w") as f:
f.writelines(new_lines)
@@ -318,42 +318,42 @@ class Multiplier:
self.cmd_status()
def cmd_start(self):
- print("Starting %d host(s)" % len(self.hosts))
+ print(f"Starting {len(self.hosts)} host(s)")
for host in self.hosts:
- cmd = "ambari-agent start --home %s" % (host.home_dir)
+ cmd = f"ambari-agent start --home {host.home_dir}"
os.environ['AMBARI_AGENT_CONF_DIR'] = os.path.join(host.home_dir,
"etc/ambari-agent/conf")
subprocess.call(cmd, shell=True, env=os.environ)
def cmd_stop(self):
- print("Stopping %d host(s)" % len(self.hosts))
+ print(f"Stopping {len(self.hosts)} host(s)")
for host in self.hosts:
- cmd = "ambari-agent stop --home %s" % (host.home_dir)
+ cmd = f"ambari-agent stop --home {host.home_dir}"
os.environ['AMBARI_AGENT_CONF_DIR'] = os.path.join(host.home_dir,
"etc/ambari-agent/conf")
subprocess.call(cmd, shell=True, env=os.environ)
def cmd_restart(self):
- print("Restarting %d host(s)" % len(self.hosts))
+ print(f"Restarting {len(self.hosts)} host(s)")
for host in self.hosts:
- cmd = "ambari-agent restart --home %s" % (host.home_dir)
+ cmd = f"ambari-agent restart --home {host.home_dir}"
os.environ['AMBARI_AGENT_CONF_DIR'] = os.path.join(host.home_dir,
"etc/ambari-agent/conf")
subprocess.call(cmd, shell=True, env=os.environ)
def cmd_status(self):
print("Summary of Agent Status:")
- print("Total agents: %d\n" % len(self.hosts))
+ print(f"Total agents: {len(self.hosts)}\n")
(running_hosts, unknown_hosts, stopped_hosts) = self.aggregate_status()
- print("Running agents: %d" % len(running_hosts))
+ print(f"Running agents: {len(running_hosts)}")
if self.verbose and len(running_hosts):
- print("(%s)\n" % (", ".join(running_hosts)))
+ print(f"({', '.join(running_hosts)})\n")
- print("Unknown agents: %d" % len(unknown_hosts))
+ print(f"Unknown agents: {len(unknown_hosts)}")
if self.verbose and len(unknown_hosts):
- print("(%s)\n" % (", ".join(unknown_hosts)))
+ print(f"({', '.join(unknown_hosts)})\n")
- print("Stopped agents: %d" % len(stopped_hosts))
+ print(f"Stopped agents: {len(stopped_hosts)}")
if self.verbose and len(stopped_hosts):
- print("(%s)\n" % (", ".join(stopped_hosts)))
+ print(f"({', '.join(stopped_hosts)})\n")
def aggregate_status(self):
"""
diff --git a/ambari-agent/conf/unix/upgrade_agent_configs.py
b/ambari-agent/conf/unix/upgrade_agent_configs.py
index 0f30b98392..f625e791b6 100644
--- a/ambari-agent/conf/unix/upgrade_agent_configs.py
+++ b/ambari-agent/conf/unix/upgrade_agent_configs.py
@@ -33,8 +33,8 @@ CONFIG_FILE = '/etc/ambari-agent/conf/ambari-agent.ini'
if os.path.isfile(CONFIG_FILE_BACKUP):
if os.path.isfile(CONFIG_FILE):
- print("Upgrading configs in {0}".format(CONFIG_FILE))
- print("Values will be updated from {0} except the following list: {1},
{2}".format(CONFIG_FILE_BACKUP, PROPERTIES_TO_REWRITE, SECTIONS_TO_REMOVE))
+ print(f"Upgrading configs in {CONFIG_FILE}")
+ print(f"Values will be updated from {CONFIG_FILE_BACKUP} except the
following list: {PROPERTIES_TO_REWRITE}, {SECTIONS_TO_REMOVE}")
agent_config_backup = configparser.ConfigParser()
agent_config_backup.read(CONFIG_FILE_BACKUP)
@@ -54,6 +54,6 @@ if os.path.isfile(CONFIG_FILE_BACKUP):
with (open(CONFIG_FILE, "w")) as new_agent_config:
agent_config.write(new_agent_config)
else:
- print("Values are not updated, configs {0} is not
found".format(CONFIG_FILE))
+ print(f"Values are not updated, configs {CONFIG_FILE} is not found")
else:
- print("Values are not updated, backup {0} is not
found".format(CONFIG_FILE_BACKUP))
+ print(f"Values are not updated, backup {CONFIG_FILE_BACKUP} is not found")
diff --git a/ambari-agent/conf/windows/service_wrapper.py
b/ambari-agent/conf/windows/service_wrapper.py
index fa24d1ec3a..6bbb1731c3 100644
--- a/ambari-agent/conf/windows/service_wrapper.py
+++ b/ambari-agent/conf/windows/service_wrapper.py
@@ -217,7 +217,7 @@ def agent_main():
possible_args = ' or '.join(str(x) for x in possible_args_numbers)
parser.error("Invalid number of arguments. Entered: " + str(len(args)) +
", required: " + possible_args)
- options.exit_message = "Ambari Agent '%s' completed successfully." % action
+ options.exit_message = f"Ambari Agent '{action}' completed successfully."
try:
if action == SETUP_ACTION:
setup(options)
@@ -236,14 +236,14 @@ def agent_main():
for warning in options.warnings:
print_warning_msg(warning)
pass
- options.exit_message = "Ambari Agent '%s' completed with warnings." %
action
+ options.exit_message = f"Ambari Agent '{action}' completed with
warnings."
pass
except FatalException as e:
if e.reason is not None:
- print_error_msg("Exiting with exit code {0}. \nREASON:
{1}".format(e.code, e.reason))
+ print_error_msg(f"Exiting with exit code {e.code}. \nREASON: {e.reason}")
sys.exit(e.code)
except NonFatalException as e:
- options.exit_message = "Ambari Agent '%s' completed with warnings." %
action
+ options.exit_message = f"Ambari Agent '{action}' completed with warnings."
if e.reason is not None:
print_warning_msg(e.reason)
diff --git a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
index 9b1e1d969e..b4e1402c94 100644
--- a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
+++ b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
@@ -98,7 +98,7 @@ class ActionQueue(threading.Thread):
def cancel(self, commands):
for command in commands:
- logger.info("Canceling command with taskId = {tid}".format(tid =
str(command['target_task_id'])))
+ logger.info(f"Canceling command with taskId =
{str(command['target_task_id'])}")
if logger.isEnabledFor(logging.DEBUG):
logger.debug(pprint.pformat(command))
@@ -151,7 +151,7 @@ class ActionQueue(threading.Thread):
if 'commandParams' in command and 'command_retry_enabled' in
command['commandParams']:
retry_able = command['commandParams']['command_retry_enabled']
== "true"
if retry_able:
- logger.info("Kicking off a thread for the command, id={}
taskId={}".format(command['commandId'], command['taskId']))
+ logger.info(f"Kicking off a thread for the command,
id={command['commandId']} taskId={command['taskId']}")
t = threading.Thread(target=self.process_command,
args=(command,))
t.daemon = True
t.start()
@@ -204,7 +204,7 @@ class ActionQueue(threading.Thread):
else:
logger.error("Unrecognized command %s", pprint.pformat(command))
except Exception:
- logger.exception("Exception while processing {0}
command".format(command_type))
+ logger.exception(f"Exception while processing {command_type} command")
def tasks_in_progress_or_pending(self):
return not self.commandQueue.empty() or
self.recovery_manager.has_active_command()
@@ -271,7 +271,7 @@ class ActionQueue(threading.Thread):
while retry_duration >= 0:
if taskId in self.taskIdsToCancel:
- logger.info('Command with taskId = {0} canceled'.format(taskId))
+ logger.info(f'Command with taskId = {taskId} canceled')
command_canceled = True
self.taskIdsToCancel.discard(taskId)
@@ -303,7 +303,7 @@ class ActionQueue(threading.Thread):
else:
status = CommandStatus.failed
if (command_result['exitcode'] == -signal.SIGTERM) or
(command_result['exitcode'] == -signal.SIGKILL):
- logger.info('Command with taskId = {cid} was
canceled!'.format(cid=taskId))
+ logger.info(f'Command with taskId = {taskId} was canceled!')
command_canceled = True
self.taskIdsToCancel.discard(taskId)
break
@@ -314,7 +314,7 @@ class ActionQueue(threading.Thread):
delay = retry_duration
retry_duration -= delay # allow one last attempt
command_result['stderr'] += "\n\nCommand failed. Retrying command
execution ...\n\n"
- logger.info("Retrying command with taskId = {cid} after a wait of
{delay}".format(cid=taskId, delay=delay))
+ logger.info(f"Retrying command with taskId = {taskId} after a wait of
{delay}")
if 'agentLevelParams' not in command:
command['agentLevelParams'] = {}
@@ -341,7 +341,7 @@ class ActionQueue(threading.Thread):
# final result to stdout
command_result['stdout'] += '\n\nCommand completed successfully!\n' if
status == CommandStatus.completed else '\n\nCommand failed after ' +
str(num_attempts) + ' tries\n'
- logger.info('Command with taskId = {cid} completed
successfully!'.format(cid=taskId) if status == CommandStatus.completed else
'Command with taskId = {cid} failed after {attempts} tries'.format(cid=taskId,
attempts=num_attempts))
+ logger.info(f'Command with taskId = {taskId} completed successfully!' if
status == CommandStatus.completed else f'Command with taskId = {taskId} failed
after {num_attempts} tries')
role_result = self.commandStatuses.generate_report_template(command)
role_result.update({
@@ -405,9 +405,9 @@ class ActionQueue(threading.Thread):
chunks = split_on_chunks(hide_passwords(text), MAX_SYMBOLS_PER_LOG_MESSAGE)
if len(chunks) > 1:
for i in range(len(chunks)):
- logger.info("Cmd log for taskId={0} and chunk {1}/{2} of log for
command: \n".format(taskId, i+1, len(chunks)) + chunks[i])
+ logger.info(f"Cmd log for taskId={taskId} and chunk {i +
1}/{len(chunks)} of log for command: \n" + chunks[i])
else:
- logger.info("Cmd log for taskId={0}: ".format(taskId) + chunks[0])
+ logger.info(f"Cmd log for taskId={taskId}: " + chunks[0])
def get_retry_delay(self, last_delay):
"""
diff --git a/ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py
b/ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py
index cf5bc3509f..eea2b2c647 100644
--- a/ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py
+++ b/ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py
@@ -184,7 +184,7 @@ class AlertSchedulerHandler():
# jobs without valid UUIDs should be unscheduled
if uuid_valid is False:
jobs_removed += 1
- logger.info("[AlertScheduler] Unscheduling
{0}".format(scheduled_job.name))
+ logger.info(f"[AlertScheduler] Unscheduling {scheduled_job.name}")
self._collector.remove_by_uuid(scheduled_job.name)
self.__scheduler.unschedule_job(scheduled_job)
@@ -222,7 +222,7 @@ class AlertSchedulerHandler():
# unschedule all scheduled jobs
for scheduled_job in scheduled_jobs:
jobs_removed += 1
- logger.info("[AlertScheduler] Unscheduling
{0}".format(scheduled_job.name))
+ logger.info(f"[AlertScheduler] Unscheduling {scheduled_job.name}")
self._collector.remove_by_uuid(scheduled_job.name)
self.__scheduler.unschedule_job(scheduled_job)
@@ -255,7 +255,7 @@ class AlertSchedulerHandler():
# cache the cluster and cluster hash after loading the JSON
if clusterName != '' and clusterHash is not None:
- logger.info('[AlertScheduler] Caching cluster {0} with alert hash
{1}'.format(clusterName, clusterHash))
+ logger.info(f'[AlertScheduler] Caching cluster {clusterName} with
alert hash {clusterHash}')
for definition in command_json['alertDefinitions']:
alert = self.__json_to_callable(clusterName, hostName, publicHostName,
Utils.get_mutable_copy(definition))
@@ -282,7 +282,7 @@ class AlertSchedulerHandler():
source_type = source.get('type', '')
if logger.isEnabledFor(logging.DEBUG):
- logger.debug("[AlertScheduler] Creating job type {0} with
{1}".format(source_type, str(json_definition)))
+ logger.debug(f"[AlertScheduler] Creating job type {source_type} with
{str(json_definition)}")
if source_type == AlertSchedulerHandler.TYPE_METRIC:
diff --git a/ambari-agent/src/main/python/ambari_agent/AmbariAgent.py
b/ambari-agent/src/main/python/ambari_agent/AmbariAgent.py
index 9d807cc368..3ce9c4197e 100644
--- a/ambari-agent/src/main/python/ambari_agent/AmbariAgent.py
+++ b/ambari-agent/src/main/python/ambari_agent/AmbariAgent.py
@@ -67,7 +67,7 @@ def check_native_libs_support():
not_loaded_extensions.append("simplejson")
if not_loaded_extensions:
- logger.warning("Some native extensions not available for module(s): {}, it
may affect execution performance".format(",".join(not_loaded_extensions)))
+ logger.warning(f"Some native extensions not available for module(s):
{','.join(not_loaded_extensions)}, it may affect execution performance")
def main():
diff --git a/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
b/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
index 4945d08a74..3a0bfcc400 100644
--- a/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
+++ b/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
@@ -147,7 +147,7 @@ class AmbariConfig:
if os.path.exists(configPath):
config.read(configPath)
else:
- raise Exception("No config found at {0}, use
default".format(configPath))
+ raise Exception(f"No config found at {configPath}, use default")
except Exception as err:
logger.warn(err)
diff --git
a/ambari-agent/src/main/python/ambari_agent/BackgroundCommandExecutionHandle.py
b/ambari-agent/src/main/python/ambari_agent/BackgroundCommandExecutionHandle.py
index 805eb64a17..b8061303ea 100644
---
a/ambari-agent/src/main/python/ambari_agent/BackgroundCommandExecutionHandle.py
+++
b/ambari-agent/src/main/python/ambari_agent/BackgroundCommandExecutionHandle.py
@@ -39,4 +39,4 @@ class BackgroundCommandExecutionHandle:
self.on_background_command_complete_callback =
on_background_command_complete_callback
def __str__(self):
- return "[BackgroundHandle: pid='{0}', status='{1}', exitCode='{2}',
commandId='{3}']".format(self.pid, self.status, self.exitCode, self.commandId)
+ return f"[BackgroundHandle: pid='{self.pid}', status='{self.status}',
exitCode='{self.exitCode}', commandId='{self.commandId}']"
diff --git
a/ambari-agent/src/main/python/ambari_agent/ClusterAlertDefinitionsCache.py
b/ambari-agent/src/main/python/ambari_agent/ClusterAlertDefinitionsCache.py
index 287effbac9..ef8b2638ad 100644
--- a/ambari-agent/src/main/python/ambari_agent/ClusterAlertDefinitionsCache.py
+++ b/ambari-agent/src/main/python/ambari_agent/ClusterAlertDefinitionsCache.py
@@ -81,7 +81,7 @@ class ClusterAlertDefinitionsCache(ClusterCache):
for cluster_id in cache_update:
if not cluster_id in mutable_dict:
- logger.error("Cannot do alert_definitions delete for cluster
cluster_id={0}, because do not have information about the
cluster".format(cluster_id))
+ logger.error(f"Cannot do alert_definitions delete for cluster
cluster_id={cluster_id}, because do not have information about the cluster")
continue
# deleting whole cluster
@@ -95,7 +95,7 @@ class ClusterAlertDefinitionsCache(ClusterCache):
index_of_alert = self.get_alert_definition_index_by_id(mutable_dict,
cluster_id, id_to_update)
if index_of_alert == None:
- raise Exception("Cannot delete an alert with
id={0}".format(id_to_update))
+ raise Exception(f"Cannot delete an alert with id={id_to_update}")
del mutable_dict[cluster_id]['alertDefinitions'][index_of_alert]
diff --git a/ambari-agent/src/main/python/ambari_agent/ClusterCache.py
b/ambari-agent/src/main/python/ambari_agent/ClusterCache.py
index bf70afc1dc..886fef83df 100644
--- a/ambari-agent/src/main/python/ambari_agent/ClusterCache.py
+++ b/ambari-agent/src/main/python/ambari_agent/ClusterCache.py
@@ -64,7 +64,7 @@ class ClusterCache(dict):
with open(self.__current_cache_hash_file, 'r') as fp:
self.hash = fp.read()
except (IOError,ValueError):
- logger.exception("Cannot load data from {0} and
{1}".format(self.__current_cache_json_file, self.__current_cache_hash_file))
+ logger.exception(f"Cannot load data from
{self.__current_cache_json_file} and {self.__current_cache_hash_file}")
self.hash = None
cache_dict = {}
@@ -72,7 +72,7 @@ class ClusterCache(dict):
self.rewrite_cache(cache_dict, self.hash)
except:
# Example: hostname change and restart causes old topology loading to
fail with exception
- logger.exception("Loading saved cache for {0}
failed".format(self.__class__.__name__))
+ logger.exception(f"Loading saved cache for {self.__class__.__name__}
failed")
self.rewrite_cache({}, None)
def get_cluster_indepedent_data(self):
@@ -118,7 +118,7 @@ class ClusterCache(dict):
:param cache:
:return:
"""
- logger.info("Rewriting cache {0} for cluster
{1}".format(self.__class__.__name__, cluster_id))
+ logger.info(f"Rewriting cache {self.__class__.__name__} for cluster
{cluster_id}")
# The cache should contain exactly the data received from server.
# Modifications on agent-side will lead to unnecessary cache sync every
agent registration. Which is a big concern on perf clusters!
@@ -151,7 +151,7 @@ class ClusterCache(dict):
try:
return super(ClusterCache, self).__getitem__(key)
except KeyError:
- raise KeyError("{0} for cluster_id={1} is missing. Check if server sent
it.".format(self.get_cache_name().title(), key))
+ raise KeyError(f"{self.get_cache_name().title()} for cluster_id={key} is
missing. Check if server sent it.")
def on_cache_update(self):
"""
diff --git a/ambari-agent/src/main/python/ambari_agent/ClusterTopologyCache.py
b/ambari-agent/src/main/python/ambari_agent/ClusterTopologyCache.py
index 75158a25de..d7c4582eae 100644
--- a/ambari-agent/src/main/python/ambari_agent/ClusterTopologyCache.py
+++ b/ambari-agent/src/main/python/ambari_agent/ClusterTopologyCache.py
@@ -73,7 +73,7 @@ class ClusterTopologyCache(ClusterCache):
if 'components' in cluster_topology:
for component_dict in cluster_topology.components:
- key = "{0}/{1}".format(component_dict.serviceName,
component_dict.componentName)
+ key = f"{component_dict.serviceName}/{component_dict.componentName}"
components_by_key[cluster_id][key] = component_dict
for cluster_id, cluster_topology in self.items():
@@ -115,7 +115,7 @@ class ClusterTopologyCache(ClusterCache):
hostnames.append(self.hosts_to_id[cluster_id][host_id].hostName)
else:
# In theory this should never happen. But in practice it happened
when ambari-server had corrupt DB cache.
- logger.warning("Cannot find host_id={} in
cluster_id={}".format(host_id, cluster_id))
+ logger.warning(f"Cannot find host_id={host_id} in
cluster_id={cluster_id}")
cluster_host_info[component_name.lower()+"_hosts"] += hostnames
@@ -140,7 +140,7 @@ class ClusterTopologyCache(ClusterCache):
"""
Find component by service_name and component_name in list of component
dictionaries.
"""
- key = "{0}/{1}".format(service_name, component_name)
+ key = f"{service_name}/{component_name}"
try:
return self.components_by_key[cluster_id][key]
@@ -255,7 +255,7 @@ class ClusterTopologyCache(ClusterCache):
for cluster_id, cluster_updates_dict in cache_update.items():
if not cluster_id in mutable_dict:
- logger.error("Cannot do topology delete for cluster cluster_id={0},
because do not have information about the cluster".format(cluster_id))
+ logger.error(f"Cannot do topology delete for cluster
cluster_id={cluster_id}, because do not have information about the cluster")
continue
if 'hosts' in cluster_updates_dict:
@@ -265,7 +265,7 @@ class ClusterTopologyCache(ClusterCache):
if host_to_delete is not None:
mutable_dict[cluster_id]['hosts'] = [host_dict for host_dict in
hosts_mutable_list if host_dict != host_to_delete]
else:
- logger.error("Cannot do topology delete for cluster_id={0},
host_id={1}, because cannot find the host in cache".format(cluster_id,
host_updates_dict['hostId']))
+ logger.error(f"Cannot do topology delete for
cluster_id={cluster_id}, host_id={host_updates_dict['hostId']}, because cannot
find the host in cache")
if 'components' in cluster_updates_dict:
components_mutable_list = mutable_dict[cluster_id]['components']
@@ -278,7 +278,7 @@ class ClusterTopologyCache(ClusterCache):
if component_mutable_dict is not None:
mutable_dict[cluster_id]['components'] = [component_dict for
component_dict in components_mutable_list if component_dict !=
component_mutable_dict]
else:
- logger.error("Cannot do component delete for cluster_id={0},
serviceName={1}, componentName={2}, because cannot find the host in
cache".format(cluster_id, component_updates_dict['serviceName'],
component_updates_dict['componentName']))
+ logger.error(f"Cannot do component delete for
cluster_id={cluster_id}, serviceName={component_updates_dict['serviceName']},
componentName={component_updates_dict['componentName']}, because cannot find
the host in cache")
if cluster_updates_dict == {}:
clusters_ids_to_delete.append(cluster_id)
diff --git
a/ambari-agent/src/main/python/ambari_agent/CommandHooksOrchestrator.py
b/ambari-agent/src/main/python/ambari_agent/CommandHooksOrchestrator.py
index 3c23274746..cd64b4bb36 100644
--- a/ambari-agent/src/main/python/ambari_agent/CommandHooksOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CommandHooksOrchestrator.py
@@ -97,7 +97,7 @@ class HookSequenceBuilder(object):
:rtype types.GeneratorType
"""
if prefix not in self._hooks_sequences:
- raise TypeError("Unable to locate hooks sequence definition for '{}'
prefix".format(prefix))
+ raise TypeError(f"Unable to locate hooks sequence definition for
'{prefix}' prefix")
for hook_definition in self._hooks_sequences[prefix]:
if "service" in hook_definition and service is None:
@@ -163,7 +163,7 @@ class HooksOrchestrator(object):
hook_script_path = os.path.join(hook_base_dir, "scripts", "hook.py")
if not os.path.isfile(hook_script_path):
- self._logger.debug("Hook script {0} not found,
skipping".format(hook_script_path))
+ self._logger.debug(f"Hook script {hook_script_path} not found,
skipping")
continue
yield hook_script_path, hook_base_dir
diff --git
a/ambari-agent/src/main/python/ambari_agent/ComponentStatusExecutor.py
b/ambari-agent/src/main/python/ambari_agent/ComponentStatusExecutor.py
index 50b5295274..e40b79a15d 100644
--- a/ambari-agent/src/main/python/ambari_agent/ComponentStatusExecutor.py
+++ b/ambari-agent/src/main/python/ambari_agent/ComponentStatusExecutor.py
@@ -105,7 +105,7 @@ class ComponentStatusExecutor(threading.Thread):
# do not run status commands for the component which is
starting/stopping or doing other action
if
self.customServiceOrchestrator.commandsRunningForComponent(cluster_id,
component_name):
- self.logger.info("Skipping status command for {0}. Since
command for it is running".format(component_name))
+ self.logger.info(f"Skipping status command for
{component_name}. Since command for it is running")
continue
result = self.check_component_status(cluster_id, service_name,
component_name, command_name)
@@ -140,7 +140,7 @@ class ComponentStatusExecutor(threading.Thread):
for cluster_report in cluster_reports:
for discarded_report in reports_to_discard:
if Utils.are_dicts_equal(cluster_report, discarded_report,
keys_to_skip=['status']):
- self.logger.info("Discarding outdated status {0} before
sending".format(cluster_report))
+ self.logger.info(f"Discarding outdated status {cluster_report}
before sending")
break
else:
new_cluster_reports[cluster_id].append(cluster_report)
@@ -170,7 +170,7 @@ class ComponentStatusExecutor(threading.Thread):
if status == LiveStatus.DEAD_STATUS:
stderr = component_status_result['stderr']
if "ComponentIsNotRunning" not in stderr and
"ClientComponentHasNoStatus" not in stderr:
- self.logger.info("Status command for {0}
failed:\n{1}".format(component_name, stderr))
+ self.logger.info(f"Status command for {component_name}
failed:\n{stderr}")
result = {
'serviceName': service_name,
@@ -180,8 +180,8 @@ class ComponentStatusExecutor(threading.Thread):
'clusterId': cluster_id,
}
- if status !=
self.reported_component_status[cluster_id]["{0}/{1}".format(service_name,
component_name)][command_name]:
- logging.info("Status for {0} has changed to {1}".format(component_name,
status))
+ if status !=
self.reported_component_status[cluster_id][f"{service_name}/{component_name}"][command_name]:
+ logging.info(f"Status for {component_name} has changed to {status}")
self.recovery_manager.handle_status_change(component_name, status)
if report:
@@ -232,7 +232,7 @@ class ComponentStatusExecutor(threading.Thread):
command = report['command']
status = report['status']
-
self.reported_component_status[cluster_id]["{0}/{1}".format(service_name,
component_name)][command] = status
+
self.reported_component_status[cluster_id][f"{service_name}/{component_name}"][command]
= status
def clean_not_existing_clusters_info(self):
"""
diff --git
a/ambari-agent/src/main/python/ambari_agent/ComponentVersionReporter.py
b/ambari-agent/src/main/python/ambari_agent/ComponentVersionReporter.py
index 3e88d2456d..a697707394 100644
--- a/ambari-agent/src/main/python/ambari_agent/ComponentVersionReporter.py
+++ b/ambari-agent/src/main/python/ambari_agent/ComponentVersionReporter.py
@@ -89,7 +89,7 @@ class ComponentVersionReporter(threading.Thread):
version_result =
self.customServiceOrchestrator.requestComponentStatus(command_dict,
command_name=AgentCommand.get_version)
if version_result['exitcode'] or not 'structuredOut' in version_result or
not 'version' in version_result['structuredOut']:
- logger.error("Could not get version for component {0} of {1} service
cluster_id={2}. Command returned: {3}".format(component_name, service_name,
cluster_id, version_result))
+ logger.error(f"Could not get version for component {component_name} of
{service_name} service cluster_id={cluster_id}. Command returned:
{version_result}")
return None
# TODO: check if no strout or version if not there
diff --git a/ambari-agent/src/main/python/ambari_agent/ConfigurationBuilder.py
b/ambari-agent/src/main/python/ambari_agent/ConfigurationBuilder.py
index af34ab7c6a..b37b95c0d3 100644
--- a/ambari-agent/src/main/python/ambari_agent/ConfigurationBuilder.py
+++ b/ambari-agent/src/main/python/ambari_agent/ConfigurationBuilder.py
@@ -32,7 +32,7 @@ class ConfigurationBuilder:
def get_configuration(self, cluster_id, service_name, component_name,
configurations_timestamp=None):
if cluster_id:
if configurations_timestamp and self.configurations_cache.timestamp <
configurations_timestamp:
- raise Exception("Command requires configs with timestamp={0} but
configs on agent have timestamp={1}".format(configurations_timestamp,
self.configurations_cache.timestamp))
+ raise Exception(f"Command requires configs with
timestamp={configurations_timestamp} but configs on agent have
timestamp={self.configurations_cache.timestamp}")
metadata_cache = self.metadata_cache[cluster_id]
configurations_cache = self.configurations_cache[cluster_id]
diff --git
a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
index b76949fcc8..9950e66614 100644
--- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
@@ -130,7 +130,7 @@ class CustomServiceOrchestrator(object):
log_process_information(logger)
shell.kill_process_with_children(pid)
else:
- logger.warn("Unable to find process associated with taskId = %s" %
task_id)
+ logger.warn(f"Unable to find process associated with taskId =
{task_id}")
def get_py_executor(self, forced_command_name):
"""
@@ -244,7 +244,7 @@ class CustomServiceOrchestrator(object):
value_names.append(value_name) # Gather the value_name for
deletion
if len(credentials) > 0:
configtype_credentials[config_type] = credentials
- logger.info("Identifying config {0} for CS: ".format(config_type))
+ logger.info(f"Identifying config {config_type} for CS: ")
for value_name in value_names:
# Remove the clear text password
config.pop(value_name, None)
@@ -266,11 +266,11 @@ class CustomServiceOrchestrator(object):
if 'taskId' in commandJson:
task_id = commandJson['taskId']
- logger.info('Generating the JCEKS file: roleCommand={0} and taskId =
{1}'.format(roleCommand, task_id))
+ logger.info(f'Generating the JCEKS file: roleCommand={roleCommand} and
taskId = {task_id}')
# Set up the variables for the external command to generate a JCEKS file
java_home = commandJson['ambariLevelParams']['java_home']
- java_bin = '{java_home}/bin/java'.format(java_home=java_home)
+ java_bin = f'{java_home}/bin/java'
cs_lib_path = self.credential_shell_lib_path
serviceName = commandJson['serviceName']
@@ -290,15 +290,15 @@ class CustomServiceOrchestrator(object):
config = commandJson['configurations'][config_type]
if 'role' in commandJson and commandJson['role']:
roleName = commandJson['role']
- file_path = os.path.join(self.getProviderDirectory(roleName),
"{0}.jceks".format(config_type))
+ file_path = os.path.join(self.getProviderDirectory(roleName),
f"{config_type}.jceks")
else:
- file_path = os.path.join(self.getProviderDirectory(serviceName),
"{0}.jceks".format(config_type))
+ file_path = os.path.join(self.getProviderDirectory(serviceName),
f"{config_type}.jceks")
if os.path.exists(file_path):
os.remove(file_path)
- provider_path = 'jceks://file{file_path}'.format(file_path=file_path)
- logger.info('provider_path={0}'.format(provider_path))
+ provider_path = f'jceks://file{file_path}'
+ logger.info(f'provider_path={provider_path}')
for alias, pwd in credentials.items():
- logger.debug("config={0}".format(config))
+ logger.debug(f"config={config}")
pwd = ensure_decrypted(pwd, self.encryption_key)
protected_pwd = PasswordString(pwd)
# Generate the JCEKS file
@@ -357,11 +357,11 @@ class CustomServiceOrchestrator(object):
script_tuple = (script_path, base_dir)
if not tmpstrucoutfile:
- tmpstrucoutfile = os.path.join(self.tmp_dir,
"structured-out-{0}.json".format(task_id))
+ tmpstrucoutfile = os.path.join(self.tmp_dir,
f"structured-out-{task_id}.json")
# We don't support anything else yet
if script_type.upper() != self.SCRIPT_TYPE_PYTHON:
- message = "Unknown script type {0}".format(script_type)
+ message = f"Unknown script type {script_type}"
raise AgentException(message)
# Execute command using proper interpreter
@@ -462,7 +462,7 @@ class CustomServiceOrchestrator(object):
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
- message = "Caught an exception while executing custom service command:
{0}: {1}; {2}".format(exc_type, exc_obj, e)
+ message = f"Caught an exception while executing custom service command:
{exc_type}: {exc_obj}; {e}"
logger.exception(message)
ret = {
'stdout': message,
@@ -493,7 +493,7 @@ class CustomServiceOrchestrator(object):
if not isinstance(pid, int):
reason = pid
if reason:
- return "\nCommand aborted. Reason: '{0}'".format(reason)
+ return f"\nCommand aborted. Reason: '{reason}'"
else:
return "\nCommand aborted."
return None
@@ -561,7 +561,7 @@ class CustomServiceOrchestrator(object):
"""
path = os.path.join(base_dir, script)
if not os.path.exists(path):
- message = "Script {0} does not exist".format(path)
+ message = f"Script {path} does not exist"
raise AgentException(message)
return path
@@ -574,12 +574,12 @@ class CustomServiceOrchestrator(object):
if is_status_command:
# make sure status commands that run in parallel don't use the same files
- file_path = os.path.join(self.tmp_dir,
"status_command_{0}.json".format(uuid.uuid4()))
+ file_path = os.path.join(self.tmp_dir,
f"status_command_{uuid.uuid4()}.json")
else:
task_id = command['taskId']
- file_path = os.path.join(self.tmp_dir,
"command-{0}.json".format(task_id))
+ file_path = os.path.join(self.tmp_dir, f"command-{task_id}.json")
if command_type == AgentCommand.auto_execution:
- file_path = os.path.join(self.tmp_dir,
"auto_command-{0}.json".format(task_id))
+ file_path = os.path.join(self.tmp_dir, f"auto_command-{task_id}.json")
# Json may contain passwords, that's why we need proper permissions
if os.path.isfile(file_path):
diff --git a/ambari-agent/src/main/python/ambari_agent/Facter.py
b/ambari-agent/src/main/python/ambari_agent/Facter.py
index a5bda8d5b3..f4bd0f9be0 100644
--- a/ambari-agent/src/main/python/ambari_agent/Facter.py
+++ b/ambari-agent/src/main/python/ambari_agent/Facter.py
@@ -146,7 +146,7 @@ class Facter(object):
def getMacAddress(self):
mac = uuid.getnode()
if uuid.getnode() == mac:
- mac = ':'.join('%02X' % ((mac >> 8 * i) & 0xff) for i in
reversed(list(range(6))))
+ mac = ':'.join(f'{mac >> 8 * i & 255:02X}' for i in
reversed(list(range(6))))
else:
mac = 'UNKNOWN'
return mac
@@ -189,17 +189,17 @@ class Facter(object):
if systemResourceDir:
if os.path.isdir(systemResourceDir) and
os.path.exists(systemResourceDir):
try:
- for filename in glob.glob('%s/*.json' % systemResourceDir):
+ for filename in glob.glob(f'{systemResourceDir}/*.json'):
with open(filename) as fp:
data = json.loads(fp.read())
for (key, value) in data.items():
systemResources[key] = data[key]
except:
log.warn(
- "Cannot read values from json files in %s. it won't be used for
gathering system resources." % systemResourceDir)
+ f"Cannot read values from json files in {systemResourceDir}. it
won't be used for gathering system resources.")
else:
log.info(
- "Directory: '%s' does not exist - it won't be used for gathering
system resources." % systemResourceDir)
+ f"Directory: '{systemResourceDir}' does not exist - it won't be
used for gathering system resources.")
else:
log.info("'system_resource_dir' is not set - it won't be used for
gathering system resources.")
return systemResources
@@ -259,12 +259,12 @@ class Facter(object):
#Convert kB to GB
@staticmethod
def convertSizeKbToGb(size):
- return "%0.2f GB" % round(float(size) // (1024.0 * 1024.0), 2)
+ return f"{round(float(size) // (1024.0 * 1024.0), 2):0.2f} GB"
#Convert MB to GB
@staticmethod
def convertSizeMbToGb(size):
- return "%0.2f GB" % round(float(size) // (1024.0), 2)
+ return f"{round(float(size) // 1024.0, 2):0.2f} GB"
@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
class FacterWindows(Facter):
@@ -403,7 +403,7 @@ class FacterLinux(Facter):
return_code, stdout, stderr =
run_os_command(FacterLinux.GET_IFCONFIG_SHORT_CMD)
return stdout
except OSError:
- log.warn("Can't execute {0}".format(FacterLinux.GET_IFCONFIG_SHORT_CMD))
+ log.warn(f"Can't execute {FacterLinux.GET_IFCONFIG_SHORT_CMD}")
return ""
# Returns the output of `ip link` command
@@ -414,7 +414,7 @@ class FacterLinux(Facter):
return_code, stdout, stderr = run_os_command(FacterLinux.GET_IP_LINK_CMD)
return stdout
except OSError:
- log.warn("Can't execute {0}".format(FacterLinux.GET_IP_LINK_CMD))
+ log.warn(f"Can't execute {FacterLinux.GET_IP_LINK_CMD}")
return ""
@staticmethod
@@ -424,7 +424,7 @@ class FacterLinux(Facter):
return_code, stdout, stderr = run_os_command(FacterLinux.GET_UPTIME_CMD)
return stdout
except OSError:
- log.warn("Can't execute {0}".format(FacterLinux.GET_UPTIME_CMD))
+ log.warn(f"Can't execute {FacterLinux.GET_UPTIME_CMD}")
return ""
@staticmethod
@@ -434,7 +434,7 @@ class FacterLinux(Facter):
return_code, stdout, stderr = run_os_command(FacterLinux.GET_MEMINFO_CMD)
return stdout
except OSError:
- log.warn("Can't execute {0}".format(FacterLinux.GET_MEMINFO_CMD))
+ log.warn(f"Can't execute {FacterLinux.GET_MEMINFO_CMD}")
return ""
# Returns the FQDN of the host
@@ -449,7 +449,7 @@ class FacterLinux(Facter):
if se_status:
return True
except OSError:
- log.warn("Could not run {0}: OK".format(FacterLinux.GET_SE_LINUX_ST_CMD))
+ log.warn(f"Could not run {FacterLinux.GET_SE_LINUX_ST_CMD}: OK")
return False
def return_first_words_from_list(self, list):
@@ -507,7 +507,7 @@ class FacterLinux(Facter):
struct.pack('256s', ifname[:15])
)[20:24])
except Exception as err:
- log.warn("Can't get the IP address for {0}".format(ifname))
+ log.warn(f"Can't get the IP address for {ifname}")
return ip_address_by_ifname
@@ -523,7 +523,7 @@ class FacterLinux(Facter):
if result != '':
return result
# If the host has neither `ifocnfig` command nor `ip` command, then return
"OS NOT SUPPORTED"
- log.warn("Can't get a network interfaces list from
{0}".format(self.DATA_IFCONFIG_SHORT_OUTPUT))
+ log.warn(f"Can't get a network interfaces list from
{self.DATA_IFCONFIG_SHORT_OUTPUT}")
return 'OS NOT SUPPORTED'
# Return uptime seconds
@@ -531,7 +531,7 @@ class FacterLinux(Facter):
try:
return int(self.data_return_first(FacterLinux.DIGITS_REGEXP,
self.DATA_UPTIME_OUTPUT))
except ValueError:
- log.warn("Can't get an uptime value from
{0}".format(self.DATA_UPTIME_OUTPUT))
+ log.warn(f"Can't get an uptime value from {self.DATA_UPTIME_OUTPUT}")
return 0
# Return memoryfree
@@ -540,7 +540,7 @@ class FacterLinux(Facter):
try:
return int(self.data_return_first(FacterLinux.FREEMEM_REGEXP,
self.DATA_MEMINFO_OUTPUT))
except ValueError:
- log.warn("Can't get free memory size from
{0}".format(self.DATA_MEMINFO_OUTPUT))
+ log.warn(f"Can't get free memory size from {self.DATA_MEMINFO_OUTPUT}")
return 0
# Return memorytotal
@@ -548,7 +548,7 @@ class FacterLinux(Facter):
try:
return int(self.data_return_first(FacterLinux.TOTALMEM_REGEXP,
self.DATA_MEMINFO_OUTPUT))
except ValueError:
- log.warn("Can't get total memory size from
{0}".format(self.DATA_MEMINFO_OUTPUT))
+ log.warn(f"Can't get total memory size from {self.DATA_MEMINFO_OUTPUT}")
return 0
# Return swapfree
@@ -557,7 +557,7 @@ class FacterLinux(Facter):
try:
return int(self.data_return_first(FacterLinux.SWAPFREE_REGEXP,
self.DATA_MEMINFO_OUTPUT))
except ValueError:
- log.warn("Can't get free swap memory size from
{0}".format(self.DATA_MEMINFO_OUTPUT))
+ log.warn(f"Can't get free swap memory size from
{self.DATA_MEMINFO_OUTPUT}")
return 0
# Return swapsize
@@ -566,7 +566,7 @@ class FacterLinux(Facter):
try:
return int(self.data_return_first(FacterLinux.SWAPTOTAL_REGEXP,
self.DATA_MEMINFO_OUTPUT))
except ValueError:
- log.warn("Can't get total swap memory size from
{0}".format(self.DATA_MEMINFO_OUTPUT))
+ log.warn(f"Can't get total swap memory size from
{self.DATA_MEMINFO_OUTPUT}")
return 0
# Return memorysize
@@ -575,7 +575,7 @@ class FacterLinux(Facter):
try:
return int(self.data_return_first(FacterLinux.TOTALMEM_REGEXP,
self.DATA_MEMINFO_OUTPUT))
except ValueError:
- log.warn("Can't get memory size from
{0}".format(self.DATA_MEMINFO_OUTPUT))
+ log.warn(f"Can't get memory size from {self.DATA_MEMINFO_OUTPUT}")
return 0
def facterInfo(self):
diff --git a/ambari-agent/src/main/python/ambari_agent/FileCache.py
b/ambari-agent/src/main/python/ambari_agent/FileCache.py
index 7c4ca8a0a6..9f38763ad0 100644
--- a/ambari-agent/src/main/python/ambari_agent/FileCache.py
+++ b/ambari-agent/src/main/python/ambari_agent/FileCache.py
@@ -150,7 +150,7 @@ class FileCache():
server_url_prefix: url of "resources" folder at the server
"""
full_path = os.path.join(cache_path, subdirectory)
- logger.debug("Trying to provide directory {0}".format(subdirectory))
+ logger.debug(f"Trying to provide directory {subdirectory}")
if not self.auto_cache_update_enabled():
logger.debug("Auto cache update is disabled.")
@@ -169,8 +169,7 @@ class FileCache():
try:
if full_path not in self.uptodate_paths:
- logger.debug("Checking if update is available for "
- "directory {0}".format(full_path))
+ logger.debug(f"Checking if update is available for directory
{full_path}")
# Need to check for updates at server
remote_url = self.build_download_url(server_url_prefix,
subdirectory, self.HASH_SUM_FILE)
@@ -178,7 +177,7 @@ class FileCache():
remote_hash = memory_buffer.getvalue().strip()
local_hash = self.read_hash_sum(full_path)
if not local_hash or local_hash != remote_hash:
- logger.debug("Updating directory {0}".format(full_path))
+ logger.debug(f"Updating directory {full_path}")
download_url = self.build_download_url(server_url_prefix,
subdirectory,
self.ARCHIVE_NAME)
membuffer = self.fetch_url(download_url)
@@ -187,7 +186,7 @@ class FileCache():
self.invalidate_directory(full_path)
self.unpack_archive(membuffer, full_path)
self.write_hash_sum(full_path, remote_hash)
- logger.info("Updated directory {0}".format(full_path))
+ logger.info(f"Updated directory {full_path}")
else:
logger.warn("Skipping empty archive: {0}. "
"Expected archive was not found. Cached copy will be
used.".format(download_url))
@@ -217,22 +216,20 @@ class FileCache():
directory - relative path
filename - file inside directory we are trying to fetch
"""
- return "{0}/{1}/{2}".format(server_url_prefix,
- urllib.request.pathname2url(directory),
filename)
+ return
f"{server_url_prefix}/{urllib.request.pathname2url(directory)}/{filename}"
def fetch_url(self, url):
"""
Fetches content on url to in-memory buffer and returns the resulting
buffer.
May throw exceptions because of various reasons
"""
- logger.debug("Trying to download {0}".format(url))
+ logger.debug(f"Trying to download {url}")
try:
memory_buffer = io.BytesIO()
proxy_handler = urllib.request.ProxyHandler({})
opener = urllib.request.build_opener(proxy_handler)
u = opener.open(url, timeout=self.SOCKET_TIMEOUT)
- logger.debug("Connected with {0} with code {1}".format(u.geturl(),
- u.getcode()))
+ logger.debug(f"Connected with {u.geturl()} with code {u.getcode()}")
buff = u.read(self.BLOCK_SIZE)
while buff:
memory_buffer.write(buff)
@@ -241,7 +238,7 @@ class FileCache():
break
return memory_buffer
except Exception as err:
- raise CachingException("Can not download file from url {0} :
{1}".format(url, str(err)))
+ raise CachingException(f"Can not download file from url {url} :
{str(err)}")
def read_hash_sum(self, directory):
"""
@@ -266,7 +263,7 @@ class FileCache():
fh.write(new_hash)
os.chmod(hash_file, 0o644)
except Exception as err:
- raise CachingException("Can not write to file {0} :
{1}".format(hash_file, str(err)))
+ raise CachingException(f"Can not write to file {hash_file} : {str(err)}")
def invalidate_directory(self, directory):
"""
@@ -277,7 +274,7 @@ class FileCache():
CLEAN_DIRECTORY_TRIES = 5
CLEAN_DIRECTORY_TRY_SLEEP = 0.25
- logger.debug("Invalidating directory {0}".format(directory))
+ logger.debug(f"Invalidating directory {directory}")
try:
if os.path.exists(directory):
if os.path.isfile(directory): # It would be a strange situation
@@ -292,7 +289,7 @@ class FileCache():
# create directory itself and any parent directories
os.makedirs(directory)
except Exception as err:
- logger.exception("Can not invalidate cache directory
{0}".format(directory))
+ logger.exception(f"Can not invalidate cache directory {directory}")
raise CachingException("Can not invalidate cache directory {0}: {1}",
directory, str(err))
@@ -308,8 +305,8 @@ class FileCache():
concrete_dir=os.path.abspath(os.path.join(target_directory, dirname))
if not os.path.isdir(concrete_dir):
os.makedirs(concrete_dir)
- logger.debug("Unpacking file {0} to {1}".format(name, concrete_dir))
+ logger.debug(f"Unpacking file {name} to {concrete_dir}")
if filename != '':
zfile.extract(name, target_directory)
except Exception as err:
- raise CachingException("Can not unpack zip file to directory {0} :
{1}".format(target_directory, str(err)))
+ raise CachingException(f"Can not unpack zip file to directory
{target_directory} : {str(err)}")
diff --git a/ambari-agent/src/main/python/ambari_agent/Hardware.py
b/ambari-agent/src/main/python/ambari_agent/Hardware.py
index 036596897d..d3d42f1082 100644
--- a/ambari-agent/src/main/python/ambari_agent/Hardware.py
+++ b/ambari-agent/src/main/python/ambari_agent/Hardware.py
@@ -185,7 +185,7 @@ class Hardware:
if len(ignored_mounts) > 0:
ignore_list = [el["mountpoint"] for el in ignored_mounts]
- logger.info("Some mount points were ignored: {0}".format(',
'.join(ignore_list)))
+ logger.info(f"Some mount points were ignored: {', '.join(ignore_list)}")
return result_mounts
@@ -201,10 +201,10 @@ class Hardware:
quiet=not logger.isEnabledFor(logging.DEBUG))
return call_result and call_result[0] == 0
except ExecuteTimeoutException:
- logger.exception("Exception happened while checking mount
{0}".format(mount_point))
+ logger.exception(f"Exception happened while checking mount
{mount_point}")
return False
except Fail:
- logger.exception("Exception happened while checking mount
{0}".format(mount_point))
+ logger.exception(f"Exception happened while checking mount
{mount_point}")
return False
@OsFamilyFuncImpl(OSConst.WINSRV_FAMILY)
diff --git a/ambari-agent/src/main/python/ambari_agent/HeartbeatHandlers.py
b/ambari-agent/src/main/python/ambari_agent/HeartbeatHandlers.py
index c1ef1bafc2..f30149fc09 100644
--- a/ambari-agent/src/main/python/ambari_agent/HeartbeatHandlers.py
+++ b/ambari-agent/src/main/python/ambari_agent/HeartbeatHandlers.py
@@ -79,7 +79,7 @@ class HeartbeatStopHandlersWindows(HeartbeatStopHandlers):
# linux impl
def signal_handler(signum, frame):
- logger.info("Ambari-agent received {0} signal, stopping...".format(signum))
+ logger.info(f"Ambari-agent received {signum} signal, stopping...")
_handler.set()
diff --git a/ambari-agent/src/main/python/ambari_agent/HeartbeatThread.py
b/ambari-agent/src/main/python/ambari_agent/HeartbeatThread.py
index 60b8ab5f04..0739ffe8a4 100644
--- a/ambari-agent/src/main/python/ambari_agent/HeartbeatThread.py
+++ b/ambari-agent/src/main/python/ambari_agent/HeartbeatThread.py
@@ -95,15 +95,15 @@ class HeartbeatThread(threading.Thread):
self.register()
heartbeat_body = self.get_heartbeat_body()
- logger.debug("Heartbeat body is {0}".format(heartbeat_body))
+ logger.debug(f"Heartbeat body is {heartbeat_body}")
response = self.blocking_request(heartbeat_body,
Constants.HEARTBEAT_ENDPOINT)
- logger.debug("Heartbeat response is {0}".format(response))
+ logger.debug(f"Heartbeat response is {response}")
self.handle_heartbeat_reponse(response)
except Exception as ex:
if isinstance(ex, (ConnectionIsAlreadyClosed)):
logger.info("Connection was closed. Re-running the registration")
elif isinstance(ex, (socket_error)):
- logger.info("Connection error \"{0}\". Re-running the
registration".format(str(ex)))
+ logger.info(f"Connection error \"{str(ex)}\". Re-running the
registration")
else:
logger.exception("Exception in HeartbeatThread. Re-running the
registration")
@@ -125,12 +125,12 @@ class HeartbeatThread(threading.Thread):
registration_request = self.registration_builder.build()
logger.info("Sending registration request")
- logger.debug("Registration request is {0}".format(registration_request))
+ logger.debug(f"Registration request is {registration_request}")
response = self.blocking_request(registration_request,
Constants.REGISTRATION_ENDPOINT)
logger.info("Registration response received")
- logger.debug("Registration response is {0}".format(response))
+ logger.debug(f"Registration response is {response}")
self.handle_registration_response(response)
@@ -143,7 +143,7 @@ class HeartbeatThread(threading.Thread):
try:
listener.on_event({}, response)
except:
- logger.exception("Exception while handing response to request at {0}
{1}".format(endpoint, response))
+ logger.exception(f"Exception while handing response to request at
{endpoint} {response}")
raise
finally:
with listener.event_queue_lock:
@@ -202,7 +202,7 @@ class HeartbeatThread(threading.Thread):
if exitstatus != 0:
# log - message, which will be printed to agents log
if 'log' in response.keys():
- error_message = "Registration failed due to:
{0}".format(response['log'])
+ error_message = f"Registration failed due to: {response['log']}"
else:
error_message = "Registration failed"
@@ -242,7 +242,7 @@ class HeartbeatThread(threading.Thread):
"""
Create a stomp connection
"""
- connection_url =
'wss://{0}:{1}/agent/stomp/v1'.format(self.config.server_hostname,
self.config.secured_url_port)
+ connection_url =
f'wss://{self.config.server_hostname}:{self.config.secured_url_port}/agent/stomp/v1'
connection_helper =
security.VerifiedHTTPSConnection(self.config.server_hostname, connection_url,
self.config)
self.connection = connection_helper.connect()
@@ -269,10 +269,10 @@ class HeartbeatThread(threading.Thread):
correlation_id = self.connection.send(message=message,
destination=destination, presend_hook=presend_hook)
except ConnectionIsAlreadyClosed:
# this happens when trying to connect to broken connection. Happens if
ambari-server is restarted.
- logger.warn("Connection failed while trying to connect to
{0}".format(destination))
+ logger.warn(f"Connection failed while trying to connect to
{destination}")
raise
try:
return
self.server_responses_listener.responses.blocking_pop(correlation_id,
timeout=timeout)
except BlockingDictionary.DictionaryPopTimeout:
- raise Exception("{0} seconds timeout expired waiting for response from
server at {1} to message from {2}".format(timeout,
Constants.SERVER_RESPONSES_TOPIC, destination))
+ raise Exception(f"{timeout} seconds timeout expired waiting for response
from server at {Constants.SERVER_RESPONSES_TOPIC} to message from
{destination}")
diff --git
a/ambari-agent/src/main/python/ambari_agent/HostCheckReportFileHandler.py
b/ambari-agent/src/main/python/ambari_agent/HostCheckReportFileHandler.py
index fef2d0521d..94550e197d 100644
--- a/ambari-agent/src/main/python/ambari_agent/HostCheckReportFileHandler.py
+++ b/ambari-agent/src/main/python/ambari_agent/HostCheckReportFileHandler.py
@@ -84,7 +84,7 @@ class HostCheckReportFileHandler:
with open(self.hostCheckCustomActionsFilePath, 'wt') as configfile:
config.write(configfile)
except Exception as err:
- logger.error("Can't write host check file at %s :%s " %
(self.hostCheckCustomActionsFilePath, err.message))
+ logger.error(f"Can't write host check file at
{self.hostCheckCustomActionsFilePath} :{err.message} ")
traceback.print_exc()
def _stack_list_directory(self):
@@ -102,7 +102,7 @@ class HostCheckReportFileHandler:
remlist_items_count = 0
for item in folder_content:
- full_path = "%s%s%s" % (HADOOP_ROOT_DIR, os.path.sep, item)
+ full_path = f"{HADOOP_ROOT_DIR}{os.path.sep}{item}"
if item in HADOOP_PERM_REMOVE_LIST:
remove_list.append(full_path)
remlist_items_count += 1
@@ -168,17 +168,17 @@ class HostCheckReportFileHandler:
with open(self.hostCheckFilePath, 'wt') as configfile:
config.write(configfile)
except Exception as err:
- logger.error("Can't write host check file at %s :%s " %
(self.hostCheckFilePath, err.message))
+ logger.error(f"Can't write host check file at {self.hostCheckFilePath}
:{err.message} ")
traceback.print_exc()
def removeFile(self, path):
if os.path.isfile(path):
- logger.debug("Removing old host check file at %s" % path)
+ logger.debug(f"Removing old host check file at {path}")
os.remove(path)
def touchFile(self, path):
if not os.path.isfile(path):
- logger.debug("Creating host check file at %s" % path)
+ logger.debug(f"Creating host check file at {path}")
open(path, 'w').close()
diff --git a/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
b/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
index 11f94c04bd..a6cb90b319 100644
--- a/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
+++ b/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
@@ -102,7 +102,7 @@ def get_erase_cmd():
elif OSCheck.is_ubuntu_family():
return "/usr/bin/apt-get -y -q remove {0}"
else:
- raise Exception("Unsupported OS family '{0}', cannot remove package.
".format(OSCheck.get_os_family()))
+ raise Exception(f"Unsupported OS family '{OSCheck.get_os_family()}',
cannot remove package. ")
class HostCleanup:
@@ -294,9 +294,9 @@ class HostCleanup:
for folder in file_map:
if isinstance(file_map[folder], list): # here is list of file
masks/files
for mask in file_map[folder]:
- remList += self.get_files_in_dir("%s/%s" % (cache_root, folder),
mask)
+ remList += self.get_files_in_dir(f"{cache_root}/{folder}", mask)
elif isinstance(file_map[folder], dict): # here described sub-folder
- remList += self.do_clear_cache("%s/%s" % (cache_root, folder),
file_map[folder])
+ remList += self.do_clear_cache(f"{cache_root}/{folder}",
file_map[folder])
if dir_map is not None: # push result list back as this is call from stack
return remList
@@ -474,7 +474,7 @@ class HostCleanup:
if pathList:
for aPath in pathList:
pathArr = glob.glob(aPath)
- logger.debug("Resolved {0} to {1}".format(aPath, ','.join(pathArr)))
+ logger.debug(f"Resolved {aPath} to {','.join(pathArr)}")
for path in pathArr:
if path:
if os.path.exists(path):
@@ -482,15 +482,15 @@ class HostCleanup:
try:
shutil.rmtree(path)
except:
- logger.warn("Failed to remove dir {0} , error:
{1}".format(path, str(sys.exc_info()[0])))
+ logger.warn(f"Failed to remove dir {path} , error:
{str(sys.exc_info()[0])}")
else:
- logger.info("{0} is a file, deleting file".format(path))
+ logger.info(f"{path} is a file, deleting file")
self.do_erase_files_silent([path])
elif os.path.islink(path):
- logger.info("Deleting broken symbolic link {0}".format(path))
+ logger.info(f"Deleting broken symbolic link {path}")
self.do_erase_files_silent([path])
else:
- logger.info("Path doesn't exists: {0}".format(path))
+ logger.info(f"Path doesn't exists: {path}")
return 0
def do_erase_files_silent(self, pathList):
@@ -500,9 +500,9 @@ class HostCleanup:
try:
os.remove(path)
except:
- logger.warn("Failed to delete file: {0}, error: {1}".format(path,
str(sys.exc_info()[0])))
+ logger.warn(f"Failed to delete file: {path}, error:
{str(sys.exc_info()[0])}")
else:
- logger.info("File doesn't exists: {0}".format(path))
+ logger.info(f"File doesn't exists: {path}")
return 0
def do_delete_group(self):
@@ -689,10 +689,10 @@ def main():
# Manage non UI install
if not os.path.exists(hostcheckfileca):
if options.silent:
- print('Host Check results not found. There is no {0}. Running host
checks.'.format(hostcheckfileca))
+ print(f'Host Check results not found. There is no {hostcheckfileca}.
Running host checks.')
h.run_check_hosts()
else:
- run_check_hosts_input = get_YN_input('Host Check results not found.
There is no {0}. Do you want to run host checks [y/n]
(y)'.format(hostcheckfileca), True)
+ run_check_hosts_input = get_YN_input(f'Host Check results not found.
There is no {hostcheckfileca}. Do you want to run host checks [y/n] (y)', True)
if run_check_hosts_input:
h.run_check_hosts()
@@ -710,7 +710,7 @@ def main():
if os.path.exists(config.get('agent', 'cache_dir')):
h.do_clear_cache(config.get('agent', 'cache_dir'))
- logging.info('Clean-up completed. The output is at %s' %
(str(options.outputfile)))
+ logging.info(f'Clean-up completed. The output is at
{str(options.outputfile)}')
if __name__ == '__main__':
diff --git a/ambari-agent/src/main/python/ambari_agent/HostInfo.py
b/ambari-agent/src/main/python/ambari_agent/HostInfo.py
index 6be2805c42..ae80b2fe00 100644
--- a/ambari-agent/src/main/python/ambari_agent/HostInfo.py
+++ b/ambari-agent/src/main/python/ambari_agent/HostInfo.py
@@ -191,9 +191,9 @@ class HostInfoLinux(HostInfo):
]
DEFAULT_SERVICE_NAME = "ntpd"
- SERVICE_STATUS_CMD = "%s %s status" % (SERVICE_CMD, DEFAULT_SERVICE_NAME)
+ SERVICE_STATUS_CMD = f"{SERVICE_CMD} {DEFAULT_SERVICE_NAME} status"
SERVICE_STATUS_CMD_LIST = shlex.split(SERVICE_STATUS_CMD)
- REDHAT7_SERVICE_STATUS_CMD = "%s status %s" % (REDHAT7_SERVICE_CMD,
DEFAULT_SERVICE_NAME)
+ REDHAT7_SERVICE_STATUS_CMD = f"{REDHAT7_SERVICE_CMD} status
{DEFAULT_SERVICE_NAME}"
REDHAT7_SERVICE_STATUS_CMD_LIST = shlex.split(REDHAT7_SERVICE_STATUS_CMD)
THP_FILE_REDHAT = "/sys/kernel/mm/redhat_transparent_hugepage/enabled"
@@ -381,7 +381,7 @@ class HostInfoLinux(HostInfo):
timeout = 5, quiet = True,
universal_newlines=True)
return out, err, code
except Exception as ex:
- logger.warn("Checking service {0} status failed".format(service_name))
+ logger.warn(f"Checking service {service_name} status failed")
return '', str(ex), 1
diff --git a/ambari-agent/src/main/python/ambari_agent/PingPortListener.py
b/ambari-agent/src/main/python/ambari_agent/PingPortListener.py
index 0ca87d179c..c824c2bd16 100644
--- a/ambari-agent/src/main/python/ambari_agent/PingPortListener.py
+++ b/ambari-agent/src/main/python/ambari_agent/PingPortListener.py
@@ -39,7 +39,7 @@ class PingPortListener(threading.Thread):
self.host = '0.0.0.0'
self.port = int(self.config.get('agent','ping_port'))
- logger.debug("Checking Ping port listener port {0}".format(self.port))
+ logger.debug(f"Checking Ping port listener port {self.port}")
if not self.port == None and not self.port == 0:
(stdoutdata, stderrdata) =
self.run_os_command_in_shell(FUSER_CMD.format(str(self.port), "{print $1}"))
diff --git a/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
b/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
index b58aaa2419..49d04388f4 100644
--- a/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
+++ b/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
@@ -202,6 +202,6 @@ class PythonExecutor(object):
def python_watchdog_func(self, process, timeout):
self.event.wait(timeout)
if process.returncode is None:
- self.logger.error("Executed command with pid {} timed out and will be killed".format(process.pid))
+ self.logger.error(f"Executed command with pid {process.pid} timed out and will be killed")
shell.kill_process_with_children(process.pid)
self.python_process_has_been_killed = True
diff --git a/ambari-agent/src/main/python/ambari_agent/RecoveryManager.py
b/ambari-agent/src/main/python/ambari_agent/RecoveryManager.py
index 6f3016691c..338073acce 100644
--- a/ambari-agent/src/main/python/ambari_agent/RecoveryManager.py
+++ b/ambari-agent/src/main/python/ambari_agent/RecoveryManager.py
@@ -457,7 +457,7 @@ class RecoveryManager:
if seconds_since_last_attempt > self.retry_gap_in_sec:
return True
else:
- logger.info("Not running recovery command due to retry_gap = {0} (seconds)".format(self.retry_gap_in_sec))
+ logger.info(f"Not running recovery command due to retry_gap = {self.retry_gap_in_sec} (seconds)")
else:
sec_since_last_reset = now - action_counter["lastReset"]
if sec_since_last_reset > self.window_in_sec:
diff --git a/ambari-agent/src/main/python/ambari_agent/RemoteDebugUtils.py
b/ambari-agent/src/main/python/ambari_agent/RemoteDebugUtils.py
index 3b201b8958..3d461daf81 100644
--- a/ambari-agent/src/main/python/ambari_agent/RemoteDebugUtils.py
+++ b/ambari-agent/src/main/python/ambari_agent/RemoteDebugUtils.py
@@ -30,12 +30,12 @@ def print_threads_stack_traces(sig, frame):
print("\n*** STACKTRACE - START ***\n", file=sys.stderr)
code = []
for threadId, stack in sys._current_frames().items():
- code.append("\n# ThreadID: %s" % threadId)
+ code.append(f"\n# ThreadID: {threadId}")
for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('File: "%s", line %d, in %s' % (filename,
lineno, name))
if line:
- code.append(" %s" % (line.strip()))
+ code.append(f" {line.strip()}")
for line in code:
print(line, file=sys.stderr)
@@ -72,7 +72,7 @@ class NamedPipe(object):
def put(self,msg):
if self.is_open():
data = pickle.dumps(msg,1)
- self.out.write("%d\n" % len(data))
+ self.out.write(f"{len(data)}\n")
self.out.write(data)
self.out.flush()
else:
diff --git a/ambari-agent/src/main/python/ambari_agent/Utils.py
b/ambari-agent/src/main/python/ambari_agent/Utils.py
index ada5698106..16edcd2d09 100644
--- a/ambari-agent/src/main/python/ambari_agent/Utils.py
+++ b/ambari-agent/src/main/python/ambari_agent/Utils.py
@@ -186,7 +186,7 @@ class ImmutableDictionary(dict):
try:
return self[name]
except KeyError:
- raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, name))
+ raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")
def raise_immutable_error(*args, **kwargs):
"""
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/ams_alert.py
b/ambari-agent/src/main/python/ambari_agent/alerts/ams_alert.py
index 8b69562b0e..f27ee4f590 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/ams_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/ams_alert.py
@@ -85,9 +85,9 @@ class AmsAlert(MetricAlert):
raw_data_points, http_code = self._load_metric(alert_uri.is_ssl_enabled,
host, port, self.metric_info)
if not raw_data_points and http_code not in [200, 307]:
collect_result = self.RESULT_UNKNOWN
- value_list.append('HTTP {0} response (metrics unavailable)'.format(str(http_code)))
+ value_list.append(f'HTTP {str(http_code)} response (metrics unavailable)')
elif not raw_data_points and http_code in [200, 307]:
- raise Exception("[Alert][{0}] Unable to extract JSON from HTTP response".format(self.get_name()))
+ raise Exception(f"[Alert][{self.get_name()}] Unable to extract JSON from HTTP response")
else:
data_points = self.metric_info.calculate_value(raw_data_points)
@@ -97,7 +97,7 @@ class AmsAlert(MetricAlert):
collect_result = self._get_result(value_list[0] if compute_result is
None else compute_result)
if logger.isEnabledFor(logging.DEBUG):
- logger.debug("[Alert][{0}] Computed result = {1}".format(self.get_name(), str(value_list)))
+ logger.debug(f"[Alert][{self.get_name()}] Computed result = {str(value_list)}")
return (collect_result, value_list)
@@ -132,21 +132,21 @@ class AmsAlert(MetricAlert):
data = response.read()
except Exception as exception:
if logger.isEnabledFor(logging.DEBUG):
- logger.exception("[Alert][{0}] Unable to retrieve metrics from AMS: {1}".format(self.get_name(), str(exception)))
+ logger.exception(f"[Alert][{self.get_name()}] Unable to retrieve metrics from AMS: {str(exception)}")
status = response.status if 'response' in vars() else None
return (None, status)
finally:
if logger.isEnabledFor(logging.DEBUG):
- logger.debug("""
- AMS request parameters - {0}
- AMS response - {1}
- """.format(encoded_get_metrics_parameters, data))
+ logger.debug(f"""
+ AMS request parameters - {encoded_get_metrics_parameters}
+ AMS response - {data}
+ """)
# explicitely close the connection as we've seen python hold onto these
if conn is not None:
try:
conn.close()
except:
- logger.debug("[Alert][{0}] Unable to close URL connection to {1}".format(self.get_name(), url))
+ logger.debug(f"[Alert][{self.get_name()}] Unable to close URL connection to {url}")
json_is_valid = True
try:
data_json = json.loads(data)
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/base_alert.py
b/ambari-agent/src/main/python/ambari_agent/alerts/base_alert.py
index 75c0ed5a39..c52343480a 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/base_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/base_alert.py
@@ -137,8 +137,7 @@ class BaseAlert(object):
res_base_text = self._get_reporting_text(result_state)
except Exception as exception:
- message = "[Alert][{0}] Unable to execute alert. {1}".format(
- self.get_name(), str(exception))
+ message = f"[Alert][{self.get_name()}] Unable to execute alert. {str(exception)}"
# print the exception if in DEBUG, otherwise just log the warning
# if logger.isEnabledFor(logging.DEBUG):
@@ -151,7 +150,7 @@ class BaseAlert(object):
if logger.isEnabledFor(logging.DEBUG):
- logger.debug("[Alert][{0}] result = {1}".format(self.get_name(), str(res)))
+ logger.debug(f"[Alert][{self.get_name()}] result = {str(res)}")
data = {}
data['name'] = self._get_alert_meta_value_safely('name')
@@ -167,7 +166,7 @@ class BaseAlert(object):
try:
data['text'] = res_base_text.format(*res[1])
except ValueError as value_error:
- logger.warn("[Alert][{0}] - {1}".format(self.get_name(), str(value_error)))
+ logger.warn(f"[Alert][{self.get_name()}] - {str(value_error)}")
# if there is a ValueError, it's probably because the text doesn't
match the type of
# positional arguemtns (ie {0:d} with a float)
@@ -176,14 +175,14 @@ class BaseAlert(object):
data['text'] = res_base_text.format(*data_as_strings)
if logger.isEnabledFor(logging.DEBUG):
- logger.debug("[Alert][{0}] text = {1}".format(self.get_name(), data['text']))
+ logger.debug(f"[Alert][{self.get_name()}] text = {data['text']}")
except Exception as exception:
- logger.exception("[Alert][{0}] - The alert's data is not properly formatted".format(self.get_name()))
+ logger.exception(f"[Alert][{self.get_name()}] - The alert's data is not properly formatted")
# if there's a problem with getting the data returned from collect()
then mark this
# alert as UNKNOWN
data['state'] = self.RESULT_UNKNOWN
- data['text'] = "There is a problem with the alert definition: {0}".format(str(exception))
+ data['text'] = f"There is a problem with the alert definition: {str(exception)}"
finally:
# put the alert into the collector so it can be collected on the next run
data['text'] = data['text'].replace('\x00', '')
@@ -263,7 +262,7 @@ class BaseAlert(object):
return curr_dict
except KeyError:
- logger.debug("Cache miss for configuration property {0}".format(key))
+ logger.debug(f"Cache miss for configuration property {key}")
return None
@@ -429,7 +428,7 @@ class BaseAlert(object):
if alert_uri_lookup_keys is None:
return None
- logger.debug("[Alert][{0}] HA URI structure detected in definition, attempting to lookup dynamic HA properties".format(self.get_name()))
+ logger.debug(f"[Alert][{self.get_name()}] HA URI structure detected in definition, attempting to lookup dynamic HA properties")
ha_nameservice = self._get_configuration_value(configurations,
alert_uri_lookup_keys.ha_nameservice)
ha_alias_key = alert_uri_lookup_keys.ha_alias_key
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/collector.py
b/ambari-agent/src/main/python/ambari_agent/alerts/collector.py
index 4673822f4b..b33414b567 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/collector.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/collector.py
@@ -67,7 +67,7 @@ class AlertCollector():
alert = alert_map[alert_name]
if not 'uuid' in alert:
- logger.warn("Alert {0} does not have uuid key.".format(alert))
+ logger.warn(f"Alert {alert} does not have uuid key.")
continue
if alert['uuid'] == alert_uuid:
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py
b/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py
index 29c217aed2..3abfbf39a7 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py
@@ -100,9 +100,9 @@ class MetricAlert(BaseAlert):
jmx_property_values, http_code =
self._load_jmx(alert_uri.is_ssl_enabled, host, port, self.metric_info)
if not jmx_property_values and http_code in [200, 307]:
collect_result = self.RESULT_UNKNOWN
- value_list.append('HTTP {0} response (metrics unavailable)'.format(str(http_code)))
+ value_list.append(f'HTTP {str(http_code)} response (metrics unavailable)')
elif not jmx_property_values and http_code not in [200, 307]:
- raise Exception("[Alert][{0}] Unable to extract JSON from JMX response".format(self.get_name()))
+ raise Exception(f"[Alert][{self.get_name()}] Unable to extract JSON from JMX response")
else:
value_list.extend(jmx_property_values)
check_value = self.metric_info.calculate(value_list)
@@ -111,7 +111,7 @@ class MetricAlert(BaseAlert):
collect_result = self._get_result(value_list[0] if check_value is None
else check_value)
if logger.isEnabledFor(logging.DEBUG):
- logger.debug("[Alert][{0}] Resolved values = {1}".format(self.get_name(), str(value_list)))
+ logger.debug(f"[Alert][{self.get_name()}] Resolved values = {str(value_list)}")
return (collect_result, value_list)
@@ -223,7 +223,7 @@ class MetricAlert(BaseAlert):
content = response.read()
except Exception as exception:
if logger.isEnabledFor(logging.DEBUG):
- logger.exception("[Alert][{0}] Unable to make a web request: {1}".format(self.get_name(), str(exception)))
+ logger.exception(f"[Alert][{self.get_name()}] Unable to make a web request: {str(exception)}")
finally:
# explicitely close the connection as we've seen python hold onto these
if response is not None:
@@ -250,7 +250,7 @@ class MetricAlert(BaseAlert):
for jmx_prop_list_item in beans:
if "name" in jmx_prop_list_item and jmx_prop_list_item["name"]
== jmx_property_key:
if attr not in jmx_prop_list_item:
- raise Exception("Unable to find {0} in JSON from {1} ".format(attr, url))
+ raise Exception(f"Unable to find {attr} in JSON from {url} ")
json_data = jmx_prop_list_item
value_list.append(json_data[attr])
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
b/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
index 59e40134c8..0bf533979e 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
@@ -124,13 +124,13 @@ class PortAlert(BaseAlert):
and self.host_name.lower() != self.public_host_name.lower():
hosts.append(self.public_host_name)
if logger.isEnabledFor(logging.DEBUG):
- logger.debug("[Alert][{0}] List of hosts = {1}".format(self.get_name(), hosts))
+ logger.debug(f"[Alert][{self.get_name()}] List of hosts = {hosts}")
try:
port = int(get_port_from_url(uri_value))
except:
if self.default_port is None:
- label = 'Unable to determine port from URI {0}'.format(uri_value)
+ label = f'Unable to determine port from URI {uri_value}'
return (self.RESULT_UNKNOWN, [label])
port = self.default_port
@@ -139,8 +139,7 @@ class PortAlert(BaseAlert):
for host in hosts:
if logger.isEnabledFor(logging.DEBUG):
- logger.debug("[Alert][{0}] Checking {1} on port {2}".format(
- self.get_name(), host, str(port)))
+ logger.debug(f"[Alert][{self.get_name()}] Checking {host} on port {str(port)}")
s = None
try:
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/script_alert.py
b/ambari-agent/src/main/python/ambari_agent/alerts/script_alert.py
index 2e1c507b83..d6dc04bb48 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/script_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/script_alert.py
@@ -107,7 +107,7 @@ class ScriptAlert(BaseAlert):
except AttributeError:
# it's OK if the module doesn't have get_tokens() ; no tokens will
# be passed in so hopefully the script doesn't need any
- logger.debug("The script {0} does not have a get_tokens() function".format(str(cmd_module)))
+ logger.debug(f"The script {str(cmd_module)} does not have a get_tokens() function")
Script.config = full_configurations
@@ -131,7 +131,7 @@ class ScriptAlert(BaseAlert):
return result
else:
- return (self.RESULT_UNKNOWN, ["Unable to execute script {0}".format(self.path)])
+ return (self.RESULT_UNKNOWN, [f"Unable to execute script {self.path}"])
def _load_source(self):
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
index 9e1aed9808..79dd1b149f 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
@@ -146,9 +146,9 @@ class WebAlert(BaseAlert):
host = resolve_address(host)
if uri_path:
- return "{0}://{1}:{2}/{3}".format(scheme, host, str(port), uri_path)
+ return f"{scheme}://{host}:{str(port)}/{uri_path}"
else:
- return "{0}://{1}:{2}".format(scheme, host, str(port))
+ return f"{scheme}://{host}:{str(port)}"
def _make_web_request(self, url):
"""
@@ -199,7 +199,7 @@ class WebAlert(BaseAlert):
except Exception as exception:
if logger.isEnabledFor(logging.DEBUG):
- logger.exception("[Alert][{0}] Unable to make a web request.".format(self.get_name()))
+ logger.exception(f"[Alert][{self.get_name()}] Unable to make a web request.")
return WebResponse(status_code=0, time_millis=0,
error_msg=str(exception))
@@ -232,7 +232,7 @@ class WebAlert(BaseAlert):
response.close()
except Exception as exception:
if logger.isEnabledFor(logging.DEBUG):
- logger.exception("[Alert][{0}] Unable to close socket connection".format(self.get_name()))
+ logger.exception(f"[Alert][{self.get_name()}] Unable to close socket connection")
def _get_reporting_text(self, state):
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/job.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/job.py
index 6689a7c349..3701c3402a 100644
--- a/ambari-agent/src/main/python/ambari_agent/apscheduler/job.py
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/job.py
@@ -131,7 +131,7 @@ class Job(object):
return NotImplemented
def __repr__(self):
- return '<Job (name=%s, trigger=%s)>' % (self.name, repr(self.trigger))
+ return f'<Job (name={self.name}, trigger={repr(self.trigger)})>'
def __str__(self):
return '%s (trigger: %s, next run at: %s)' % (
diff --git
a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/mongodb_store.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/mongodb_store.py
index 7ac042b38f..04db67f1ae 100644
---
a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/mongodb_store.py
+++
b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/mongodb_store.py
@@ -82,4 +82,4 @@ class MongoDBJobStore(JobStore):
def __repr__(self):
connection = self.collection.database.connection
- return '<%s (connection=%s)>' % (self.__class__.__name__, connection)
+ return f'<{self.__class__.__name__} (connection={connection})>'
diff --git
a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/ram_store.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/ram_store.py
index 799ffc26b9..a708460a2f 100644
---
a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/ram_store.py
+++
b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/ram_store.py
@@ -23,4 +23,4 @@ class RAMJobStore(JobStore):
pass
def __repr__(self):
- return '<%s>' % (self.__class__.__name__)
+ return f'<{self.__class__.__name__}>'
diff --git
a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/redis_store.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/redis_store.py
index 90062feae6..3fe6e8b635 100644
---
a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/redis_store.py
+++
b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/redis_store.py
@@ -89,4 +89,4 @@ class RedisJobStore(JobStore):
self.redis.connection_pool.disconnect()
def __repr__(self):
- return '<%s>' % self.__class__.__name__
+ return f'<{self.__class__.__name__}>'
diff --git
a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/shelve_store.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/shelve_store.py
index 57ec350d60..c3c5538a9c 100644
---
a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/shelve_store.py
+++
b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/shelve_store.py
@@ -72,4 +72,4 @@ class ShelveJobStore(JobStore):
self.store.close()
def __repr__(self):
- return '<%s (path=%s)>' % (self.__class__.__name__, self.path)
+ return f'<{self.__class__.__name__} (path={self.path})>'
diff --git
a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/sqlalchemy_store.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/sqlalchemy_store.py
index 705ee11883..80d3c8e382 100644
---
a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/sqlalchemy_store.py
+++
b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/sqlalchemy_store.py
@@ -89,4 +89,4 @@ class SQLAlchemyJobStore(JobStore):
self.engine.dispose()
def __repr__(self):
- return '<%s (url=%s)>' % (self.__class__.__name__, self.engine.url)
+ return f'<{self.__class__.__name__} (url={self.engine.url})>'
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/scheduler.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/scheduler.py
index a949e9f1d3..4af8445869 100644
--- a/ambari-agent/src/main/python/ambari_agent/apscheduler/scheduler.py
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/scheduler.py
@@ -164,7 +164,7 @@ class Scheduler(object):
self._jobstores_lock.acquire()
try:
if alias in self._jobstores:
- raise KeyError('Alias "%s" is already in use' % alias)
+ raise KeyError(f'Alias "{alias}" is already in use')
self._jobstores[alias] = jobstore
jobstore.load_jobs()
finally:
@@ -188,7 +188,7 @@ class Scheduler(object):
try:
jobstore = self._jobstores.pop(alias)
if not jobstore:
- raise KeyError('No such job store: %s' % alias)
+ raise KeyError(f'No such job store: {alias}')
finally:
self._jobstores_lock.release()
@@ -251,7 +251,7 @@ class Scheduler(object):
try:
store = self._jobstores[jobstore]
except KeyError:
- raise KeyError('No such job store: %s' % jobstore)
+ raise KeyError(f'No such job store: {jobstore}')
store.add_job(job)
finally:
self._jobstores_lock.release()
@@ -435,7 +435,7 @@ class Scheduler(object):
finally:
self._jobstores_lock.release()
- raise KeyError('Job "%s" is not scheduled in any job store' % job)
+ raise KeyError(f'Job "{job}" is not scheduled in any job store')
def unschedule_func(self, func):
"""
@@ -469,10 +469,10 @@ class Scheduler(object):
self._jobstores_lock.acquire()
try:
for alias, jobstore in iteritems(self._jobstores):
- job_strs.append('Jobstore %s:' % alias)
+ job_strs.append(f'Jobstore {alias}:')
if jobstore.jobs:
for job in jobstore.jobs:
- job_strs.append(' %s' % job)
+ job_strs.append(f' {job}')
else:
job_strs.append(' No scheduled jobs')
finally:
diff --git
a/ambari-agent/src/main/python/ambari_agent/apscheduler/threadpool.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/threadpool.py
index eaccb2e38c..9c218fb8c2 100644
--- a/ambari-agent/src/main/python/ambari_agent/apscheduler/threadpool.py
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/threadpool.py
@@ -141,4 +141,4 @@ class ThreadPool(object):
else:
threadcount = '%d' % self.num_threads
- return '<ThreadPool at %x; threads=%s>' % (id(self), threadcount)
+ return f'<ThreadPool at {id(self):x}; threads={threadcount}>'
diff --git
a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/__init__.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/__init__.py
index f813f29939..4a79272922 100644
---
a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/__init__.py
+++
b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/__init__.py
@@ -25,7 +25,7 @@ class CronTrigger(object):
# Check field names and yank out all None valued fields
for key, value in list(iteritems(values)):
if key not in self.FIELD_NAMES:
- raise TypeError('Invalid field name: %s' % key)
+ raise TypeError(f'Invalid field name: {key}')
if value is None:
del values[key]
@@ -133,13 +133,13 @@ class CronTrigger(object):
return next_date
def __str__(self):
- options = ["%s='%s'" % (f.name, str(f)) for f in self.fields
+ options = [f"{f.name}='{str(f)}'" for f in self.fields
if not f.is_default]
- return 'cron[%s]' % (', '.join(options))
+ return f"cron[{', '.join(options)}]"
def __repr__(self):
- options = ["%s='%s'" % (f.name, str(f)) for f in self.fields
+ options = [f"{f.name}='{str(f)}'" for f in self.fields
if not f.is_default]
if self.start_date:
- options.append("start_date='%s'" % self.start_date.isoformat(' '))
- return '<%s (%s)>' % (self.__class__.__name__, ', '.join(options))
+ options.append(f"start_date='{self.start_date.isoformat(' ')}'")
+ return f"<{self.__class__.__name__} ({', '.join(options)})>"
diff --git
a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/expressions.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/expressions.py
index 6c190fe939..4a786bb700 100644
---
a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/expressions.py
+++
b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/expressions.py
@@ -44,7 +44,7 @@ class AllExpression(object):
return '*'
def __repr__(self):
- return "%s(%s)" % (self.__class__.__name__, self.step)
+ return f"{self.__class__.__name__}({self.step})"
class RangeExpression(AllExpression):
@@ -99,7 +99,7 @@ class RangeExpression(AllExpression):
args.append(str(self.last))
if self.step:
args.append(str(self.step))
- return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
+ return f"{self.__class__.__name__}({', '.join(args)})"
class WeekdayRangeExpression(RangeExpression):
@@ -110,13 +110,13 @@ class WeekdayRangeExpression(RangeExpression):
try:
first_num = WEEKDAYS.index(first.lower())
except ValueError:
- raise ValueError('Invalid weekday name "%s"' % first)
+ raise ValueError(f'Invalid weekday name "{first}"')
if last:
try:
last_num = WEEKDAYS.index(last.lower())
except ValueError:
- raise ValueError('Invalid weekday name "%s"' % last)
+ raise ValueError(f'Invalid weekday name "{last}"')
else:
last_num = None
@@ -124,14 +124,14 @@ class WeekdayRangeExpression(RangeExpression):
def __str__(self):
if self.last != self.first and self.last is not None:
- return '%s-%s' % (WEEKDAYS[self.first], WEEKDAYS[self.last])
+ return f'{WEEKDAYS[self.first]}-{WEEKDAYS[self.last]}'
return WEEKDAYS[self.first]
def __repr__(self):
- args = ["'%s'" % WEEKDAYS[self.first]]
+ args = [f"'{WEEKDAYS[self.first]}'"]
if self.last != self.first and self.last is not None:
- args.append("'%s'" % WEEKDAYS[self.last])
- return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
+ args.append(f"'{WEEKDAYS[self.last]}'")
+ return f"{self.__class__.__name__}({', '.join(args)})"
class WeekdayPositionExpression(AllExpression):
@@ -143,12 +143,12 @@ class WeekdayPositionExpression(AllExpression):
try:
self.option_num = self.options.index(option_name.lower())
except ValueError:
- raise ValueError('Invalid weekday position "%s"' % option_name)
+ raise ValueError(f'Invalid weekday position "{option_name}"')
try:
self.weekday = WEEKDAYS.index(weekday_name.lower())
except ValueError:
- raise ValueError('Invalid weekday name "%s"' % weekday_name)
+ raise ValueError(f'Invalid weekday name "{weekday_name}"')
def get_next_value(self, date, field):
# Figure out the weekday of the month's first day and the number
@@ -170,8 +170,7 @@ class WeekdayPositionExpression(AllExpression):
return target_day
def __str__(self):
- return '%s %s' % (self.options[self.option_num],
- WEEKDAYS[self.weekday])
+ return f'{self.options[self.option_num]} {WEEKDAYS[self.weekday]}'
def __repr__(self):
return "%s('%s', '%s')" % (self.__class__.__name__,
@@ -192,4 +191,4 @@ class LastDayOfMonthExpression(AllExpression):
return 'last'
def __repr__(self):
- return "%s()" % self.__class__.__name__
+ return f"{self.__class__.__name__}()"
diff --git
a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/fields.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/fields.py
index 1c916f99da..486cfeb5f9 100644
---
a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/fields.py
+++
b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/fields.py
@@ -66,16 +66,14 @@ class BaseField(object):
self.expressions.append(compiled_expr)
return
- raise ValueError('Unrecognized expression "%s" for field "%s"' %
- (expr, self.name))
+ raise ValueError(f'Unrecognized expression "{expr}" for field "{self.name}"')
def __str__(self):
expr_strings = (str(e) for e in self.expressions)
return ','.join(expr_strings)
def __repr__(self):
- return "%s('%s', '%s')" % (self.__class__.__name__, self.name,
- str(self))
+ return f"{self.__class__.__name__}('{self.name}', '{str(self)}')"
class WeekField(BaseField):
diff --git
a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/interval.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/interval.py
index 230c8c9b14..9d35622bf1 100644
--- a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/interval.py
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/interval.py
@@ -32,7 +32,7 @@ class IntervalTrigger(object):
return self.start_date + self.interval * next_interval_num
def __str__(self):
- return 'interval[%s]' % str(self.interval)
+ return f'interval[{str(self.interval)}]'
def __repr__(self):
return "<%s (interval=%s, start_date=%s)>" % (
diff --git
a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/simple.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/simple.py
index 3753e9ae4b..2afec50e1a 100644
--- a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/simple.py
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/simple.py
@@ -11,8 +11,7 @@ class SimpleTrigger(object):
return self.run_date
def __str__(self):
- return 'date[%s]' % str(self.run_date)
+ return f'date[{str(self.run_date)}]'
def __repr__(self):
- return '<%s (run_date=%s)>' % (
- self.__class__.__name__, repr(self.run_date))
+ return f'<{self.__class__.__name__} (run_date={repr(self.run_date)})>'
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/util.py
b/ambari-agent/src/main/python/ambari_agent/apscheduler/util.py
index bb4a1e69de..4685a6f667 100644
--- a/ambari-agent/src/main/python/ambari_agent/apscheduler/util.py
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/util.py
@@ -38,7 +38,7 @@ def asbool(obj):
return True
if obj in ('false', 'no', 'off', 'n', 'f', '0'):
return False
- raise ValueError('Unable to interpret value "%s" as boolean' % obj)
+ raise ValueError(f'Unable to interpret value "{obj}" as boolean')
return bool(obj)
@@ -71,7 +71,7 @@ def convert_to_datetime(input):
values = [(k, int(v or 0)) for k, v in m.groupdict().items()]
values = dict(values)
return datetime(**values)
- raise TypeError('Unsupported input type: %s' % type(input))
+ raise TypeError(f'Unsupported input type: {type(input)}')
def timedelta_seconds(delta):
@@ -144,9 +144,9 @@ def get_callable_name(func):
if isinstance(f_self, type):
# class method
clsname = getattr(f_self, '__qualname__', None) or f_self.__name__
- return '%s.%s' % (clsname, func.__name__)
+ return f'{clsname}.{func.__name__}'
# bound method
- return '%s.%s' % (f_self.__class__.__name__, func.__name__)
+ return f'{f_self.__class__.__name__}.{func.__name__}'
if hasattr(func, '__call__'):
if hasattr(func, '__name__'):
@@ -163,13 +163,13 @@ def obj_to_ref(obj):
"""
Returns the path to the given object.
"""
- ref = '%s:%s' % (obj.__module__, get_callable_name(obj))
+ ref = f'{obj.__module__}:{get_callable_name(obj)}'
try:
obj2 = ref_to_obj(ref)
if obj != obj2:
raise ValueError
except Exception:
- raise ValueError('Cannot determine the reference to %s' % repr(obj))
+ raise ValueError(f'Cannot determine the reference to {repr(obj)}')
return ref
@@ -187,16 +187,14 @@ def ref_to_obj(ref):
try:
obj = __import__(modulename)
except ImportError:
- raise LookupError('Error resolving reference %s: '
- 'could not import module' % ref)
+ raise LookupError(f'Error resolving reference {ref}: could not import module')
try:
for name in modulename.split('.')[1:] + rest.split('.'):
obj = getattr(obj, name)
return obj
except Exception:
- raise LookupError('Error resolving reference %s: '
- 'error looking up object' % ref)
+ raise LookupError(f'Error resolving reference {ref}: error looking up object')
def maybe_ref(ref):
diff --git a/ambari-agent/src/main/python/ambari_agent/hostname.py
b/ambari-agent/src/main/python/ambari_agent/hostname.py
index b45342a8b8..0faa1573a7 100644
--- a/ambari-agent/src/main/python/ambari_agent/hostname.py
+++ b/ambari-agent/src/main/python/ambari_agent/hostname.py
@@ -53,18 +53,18 @@ def hostname(config):
out, err = osStat.communicate()
if (0 == osStat.returncode and 0 != len(out.strip())):
cached_hostname = out.strip()
- logger.info("Read hostname '{0}' using agent:hostname_script '{1}'".format(cached_hostname, scriptname))
+ logger.info(f"Read hostname '{cached_hostname}' using agent:hostname_script '{scriptname}'")
else:
- logger.warn("Execution of '{0}' failed with exit code {1}. err='{2}'\nout='{3}'".format(scriptname, osStat.returncode, err.strip(), out.strip()))
+ logger.warn(f"Execution of '{scriptname}' failed with exit code {osStat.returncode}. err='{err.strip()}'\nout='{out.strip()}'")
cached_hostname = socket.getfqdn()
- logger.info("Read hostname '{0}' using socket.getfqdn() as '{1}' failed".format(cached_hostname, scriptname))
+ logger.info(f"Read hostname '{cached_hostname}' using socket.getfqdn() as '{scriptname}' failed")
except:
cached_hostname = socket.getfqdn()
- logger.warn("Unexpected error while retrieving hostname: '{0}', defaulting to socket.getfqdn()".format(sys.exc_info()))
- logger.info("Read hostname '{0}' using socket.getfqdn().".format(cached_hostname))
+ logger.warn(f"Unexpected error while retrieving hostname: '{sys.exc_info()}', defaulting to socket.getfqdn()")
+ logger.info(f"Read hostname '{cached_hostname}' using socket.getfqdn().")
except:
cached_hostname = socket.getfqdn()
- logger.info("agent:hostname_script configuration not defined thus read hostname '{0}' using socket.getfqdn().".format(cached_hostname))
+ logger.info(f"agent:hostname_script configuration not defined thus read hostname '{cached_hostname}' using socket.getfqdn().")
cached_hostname = cached_hostname.lower()
return cached_hostname
@@ -87,7 +87,7 @@ def public_hostname(config):
logger.info("Read public hostname '" + cached_public_hostname + "'
using agent:public_hostname_script")
return cached_public_hostname
else:
- logger.warn("Execution of '{0}' returned {1}.
{2}\n{3}".format(scriptname, output.returncode, err.strip(), out.strip()))
+ logger.warn(f"Execution of '{scriptname}' returned
{output.returncode}. {err.strip()}\n{out.strip()}")
except:
#ignore for now.
trace_info = traceback.format_exc()
diff --git
a/ambari-agent/src/main/python/ambari_agent/listeners/AgentActionsListener.py
b/ambari-agent/src/main/python/ambari_agent/listeners/AgentActionsListener.py
index 2e72f2912a..0fa7351f57 100644
---
a/ambari-agent/src/main/python/ambari_agent/listeners/AgentActionsListener.py
+++
b/ambari-agent/src/main/python/ambari_agent/listeners/AgentActionsListener.py
@@ -54,7 +54,7 @@ class AgentActionsListener(EventListener):
if action_name == self.RESTART_AGENT_ACTION:
self.restart_agent()
else:
- logger.warn("Unknown action '{0}' requested by server. Ignoring
it".format(action_name))
+ logger.warn(f"Unknown action '{action_name}' requested by server.
Ignoring it")
def restart_agent(self):
logger.warn("Restarting the agent by the request from server")
diff --git
a/ambari-agent/src/main/python/ambari_agent/listeners/ServerResponsesListener.py
b/ambari-agent/src/main/python/ambari_agent/listeners/ServerResponsesListener.py
index 571009aab9..0d0fddeeee 100644
---
a/ambari-agent/src/main/python/ambari_agent/listeners/ServerResponsesListener.py
+++
b/ambari-agent/src/main/python/ambari_agent/listeners/ServerResponsesListener.py
@@ -64,7 +64,7 @@ class ServerResponsesListener(EventListener):
self.listener_functions_on_error[correlation_id](headers, message)
del self.listener_functions_on_error[correlation_id]
else:
- logger.warn("Received a message from server without a '{0}' header.
Ignoring the message".format(Constants.CORRELATION_ID_STRING))
+ logger.warn(f"Received a message from server without a
'{Constants.CORRELATION_ID_STRING}' header. Ignoring the message")
def get_handled_path(self):
return Constants.SERVER_RESPONSES_TOPIC
@@ -82,7 +82,7 @@ class ServerResponsesListener(EventListener):
message_json = message_json[2:]
del self.logging_handlers[correlation_id]
- return " (correlation_id={0}): {1}".format(correlation_id, message_json)
+ return f" (correlation_id={correlation_id}): {message_json}"
return str(message_json)
def reset_responses(self):
diff --git a/ambari-agent/src/main/python/ambari_agent/listeners/__init__.py
b/ambari-agent/src/main/python/ambari_agent/listeners/__init__.py
index 2eccd75fdf..b1445d3957 100644
--- a/ambari-agent/src/main/python/ambari_agent/listeners/__init__.py
+++ b/ambari-agent/src/main/python/ambari_agent/listeners/__init__.py
@@ -46,7 +46,7 @@ class EventListener(ambari_stomp.ConnectionListener):
while not self.unprocessed_messages_queue.empty():
payload = self.unprocessed_messages_queue.get_nowait()
if payload:
- logger.info("Processing event from unprocessed queue {0}
{1}".format(payload[0], payload[1]))
+ logger.info(f"Processing event from unprocessed queue {payload[0]}
{payload[1]}")
destination = payload[0]
headers = payload[1]
message_json = payload[2]
@@ -54,7 +54,7 @@ class EventListener(ambari_stomp.ConnectionListener):
try:
self.on_event(headers, message_json)
except Exception as ex:
- logger.exception("Exception while handing event from {0} {1}
{2}".format(destination, headers, message))
+ logger.exception(f"Exception while handing event from {destination}
{headers} {message}")
self.report_status_to_sender(headers, message, ex)
else:
self.report_status_to_sender(headers, message)
@@ -75,12 +75,12 @@ class EventListener(ambari_stomp.ConnectionListener):
try:
message_json = json.loads(message)
except ValueError as ex:
- logger.exception("Received from server event is not a valid message
json. Message is:\n{0}".format(message))
+ logger.exception(f"Received from server event is not a valid message
json. Message is:\n{message}")
self.report_status_to_sender(headers, message, ex)
return
if destination != Constants.ENCRYPTION_KEY_TOPIC:
- logger.info("Event from server at {0}{1}".format(destination,
self.get_log_message(headers, copy.deepcopy(message_json))))
+ logger.info(f"Event from server at
{destination}{self.get_log_message(headers, copy.deepcopy(message_json))}")
if not self.enabled:
with self.event_queue_lock:
@@ -97,7 +97,7 @@ class EventListener(ambari_stomp.ConnectionListener):
try:
self.on_event(headers, message_json)
except Exception as ex:
- logger.exception("Exception while handing event from {0} {1}
{2}".format(destination, headers, message))
+ logger.exception(f"Exception while handing event from {destination}
{headers} {message}")
self.report_status_to_sender(headers, message, ex)
else:
self.report_status_to_sender(headers, message)
@@ -127,7 +127,7 @@ class EventListener(ambari_stomp.ConnectionListener):
try:
connection.send(message=confirmation_of_received,
destination=Constants.AGENT_RESPONSES_TOPIC)
except:
- logger.exception("Could not send a confirmation '{0}' to
server".format(confirmation_of_received))
+ logger.exception(f"Could not send a confirmation
'{confirmation_of_received}' to server")
def on_event(self, headers, message):
"""
diff --git a/ambari-agent/src/main/python/ambari_agent/main.py
b/ambari-agent/src/main/python/ambari_agent/main.py
index 9c8432cb0a..3deb3591b0 100644
--- a/ambari-agent/src/main/python/ambari_agent/main.py
+++ b/ambari-agent/src/main/python/ambari_agent/main.py
@@ -93,7 +93,7 @@ def setup_logging(logger, filename, logging_level):
logging.basicConfig(format=formatstr, level=logging_level, filename=filename)
logger.setLevel(logging_level)
-
logger.info("loglevel=logging.{0}".format(logging._levelToName[logging_level]))
+ logger.info(f"loglevel=logging.{logging._levelToName[logging_level]}")
GRACEFUL_STOP_TRIES = 300
GRACEFUL_STOP_TRIES_SLEEP = 0.1
@@ -151,7 +151,7 @@ def resolve_ambari_config():
if os.path.exists(configPath):
config.read(configPath)
else:
- raise Exception("No config found at {0}, use default".format(configPath))
+ raise Exception(f"No config found at {configPath}, use default")
except Exception as err:
logger.warn(err)
@@ -188,10 +188,10 @@ def update_open_files_ulimit(config):
if open_files_ulimit >= soft_limit:
try:
resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit,
open_files_ulimit))
- logger.info('open files ulimit = {0}'.format(open_files_ulimit))
+ logger.info(f'open files ulimit = {open_files_ulimit}')
except ValueError as err:
- logger.error('Unable to set open files ulimit to {0}:
{1}'.format(open_files_ulimit, str(err)))
- logger.info('open files ulimit = {0}'.format(hard_limit))
+ logger.error(f'Unable to set open files ulimit to {open_files_ulimit}:
{str(err)}')
+ logger.info(f'open files ulimit = {hard_limit}')
def perform_prestart_checks(expected_hostname):
# Check if current hostname is equal to expected one (got from the server
@@ -211,7 +211,7 @@ def perform_prestart_checks(expected_hostname):
sys.exit(1)
# Check if there is another instance running
if os.path.isfile(agent_pidfile) and not OSCheck.get_os_family() ==
OSConst.WINSRV_FAMILY:
- print(("%s already exists, exiting" % agent_pidfile))
+ print(f"{agent_pidfile} already exists, exiting")
sys.exit(1)
# check if ambari prefix exists
elif config.has_option('agent', 'prefix') and not
os.path.isdir(os.path.abspath(config.get('agent', 'prefix'))):
@@ -357,13 +357,13 @@ def main(options, initializer_module,
heartbeat_stop_callback=None):
if home_dir != "":
# When running multiple Ambari Agents on this host for simulation, each
one will use a unique home directory.
- Logger.info("Agent is using Home Dir: %s" % str(home_dir))
+ Logger.info(f"Agent is using Home Dir: {str(home_dir)}")
# use the host's locale for numeric formatting
try:
locale.setlocale(locale.LC_ALL, '')
except locale.Error as ex:
- logger.warning("Cannot set locale for ambari-agent. Please check your
systemwide locale settings. Failed due to: {0}.".format(str(ex)))
+ logger.warning(f"Cannot set locale for ambari-agent. Please check your
systemwide locale settings. Failed due to: {str(ex)}.")
default_cfg = {'agent': {'prefix': '/home/ambari'}}
config.load(default_cfg)
diff --git a/ambari-agent/src/main/python/ambari_agent/security.py
b/ambari-agent/src/main/python/ambari_agent/security.py
index 24434e7f96..36da0570e3 100644
--- a/ambari-agent/src/main/python/ambari_agent/security.py
+++ b/ambari-agent/src/main/python/ambari_agent/security.py
@@ -58,7 +58,7 @@ class VerifiedHTTPSConnection:
logger.info(
'Server require two-way SSL authentication. Use it instead of
one-way...')
- logging.info("Connecting to {0}".format(self.connection_url))
+ logging.info(f"Connecting to {self.connection_url}")
if not self.two_way_ssl_required:
@@ -113,7 +113,7 @@ class VerifiedHTTPSConnection:
logger.exception("Exception during conn.disconnect()")
if isinstance(ex, socket_error):
- logger.warn("Could not connect to {0}.
{1}".format(self.connection_url, str(ex)))
+ logger.warn(f"Could not connect to {self.connection_url}. {str(ex)}")
raise
@@ -132,7 +132,7 @@ class AmbariStompConnection(WsConnection):
presend_hook(correlation_id)
logged_message = log_message_function(copy.deepcopy(message))
- logger.info("Event to server at {0} (correlation_id={1}):
{2}".format(destination, correlation_id, logged_message))
+ logger.info(f"Event to server at {destination}
(correlation_id={correlation_id}): {logged_message}")
body = json.dumps(message)
WsConnection.send(self, destination, body, content_type=content_type,
headers=headers, correlationId=correlation_id, **keyword_headers)
diff --git
a/ambari-agent/src/test/python/ambari_agent/BaseStompServerTestCase.py
b/ambari-agent/src/test/python/ambari_agent/BaseStompServerTestCase.py
index 7e94cd9f32..52b8acc0dc 100644
--- a/ambari-agent/src/test/python/ambari_agent/BaseStompServerTestCase.py
+++ b/ambari-agent/src/test/python/ambari_agent/BaseStompServerTestCase.py
@@ -221,8 +221,7 @@ class TestStompClient(object):
@param connect: Whether to connect socket to specified addr.
@type connect: C{bool}
"""
- self.log = logging.getLogger('%s.%s' % (
- self.__module__, self.__class__.__name__))
+ self.log =
logging.getLogger(f'{self.__module__}.{self.__class__.__name__}')
self.sock = None
self.addr = addr
self.received_frames = Queue()
@@ -261,7 +260,7 @@ class TestStompClient(object):
self.connected = True
self.read_stopped.clear()
t = threading.Thread(target=self._read_loop,
- name="client-receiver-%s" % hex(id(self)))
+ name=f"client-receiver-{hex(id(self))}")
t.start()
def _read_loop(self):
@@ -271,7 +270,7 @@ class TestStompClient(object):
data = self.sock.recv(1024)
self.buffer.append(data)
for frame in self.buffer:
- self.log.debug("Processing frame: %s" % frame)
+ self.log.debug(f"Processing frame: {frame}")
self.received_frames.put(frame)
self.read_stopped.set()
# print "Read loop has been quit! for %s" % id(self)
@@ -296,7 +295,7 @@ class TestCaseTcpConnection(ambari_stomp.Connection):
with self.lock:
self.correlation_id += 1
- logger.info("Event to server at {0} (correlation_id={1}):
{2}".format(destination, self.correlation_id, message))
+ logger.info(f"Event to server at {destination}
(correlation_id={self.correlation_id}): {message}")
body = json.dumps(message)
ambari_stomp.Connection.send(self, destination, body,
content_type=content_type, headers=headers, correlationId=self.correlation_id,
**keyword_headers)
diff --git a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
index a9010286b5..5b97639fd6 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
@@ -628,9 +628,9 @@ class TestActionQueue(TestCase):
if len(reports) != 0:
break
expected = {'status': 'IN_PROGRESS',
- 'stderr': 'Read from {0}'.format(os.path.join(tempdir,
"errors-3.txt")),
- 'stdout': 'Read from {0}'.format(os.path.join(tempdir,
"output-3.txt")),
- 'structuredOut' : 'Read from
{0}'.format(os.path.join(tempdir, "structured-out-3.json")),
+ 'stderr': f"Read from {os.path.join(tempdir,
'errors-3.txt')}",
+ 'stdout': f"Read from {os.path.join(tempdir,
'output-3.txt')}",
+ 'structuredOut' : f"Read from {os.path.join(tempdir,
'structured-out-3.json')}",
'clusterId': CLUSTER_ID,
'roleCommand': 'INSTALL',
'serviceName': 'HDFS',
diff --git a/ambari-agent/src/test/python/ambari_agent/TestAlerts.py
b/ambari-agent/src/test/python/ambari_agent/TestAlerts.py
index 1536ceba1e..ef66cd4650 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestAlerts.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestAlerts.py
@@ -645,7 +645,7 @@ class TestAlerts(TestCase):
self.assertEqual(0, len(collector.alerts()))
self.assertEqual('OK', alerts[0]['state'])
- self.assertEqual('(Unit Tests) ok: {code}'.format(code=code),
alerts[0]['text'])
+ self.assertEqual(f'(Unit Tests) ok: {code}', alerts[0]['text'])
def test_reschedule(self):
test_file_path = os.path.join('ambari_agent', 'dummy_files')
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
b/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
index 9f6489c54e..ce2cc51368 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
@@ -299,7 +299,7 @@ class TestHostInfo:#(TestCase):
self.assertEqual(result[0]['status'], 'Unhealthy')
self.assertEqual(result[0]['name'], 'service1 or service2')
- self.assertEqual(result[0]['desc'], 'out{0}out'.format(os.linesep))
+ self.assertEqual(result[0]['desc'], f'out{os.linesep}out')
msg = 'thrown by shell call'
shell_call.side_effect = Exception(msg)
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHostname.py
b/ambari-agent/src/test/python/ambari_agent/TestHostname.py
index 9570d67246..09310b9613 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestHostname.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestHostname.py
@@ -50,7 +50,7 @@ class TestHostname(TestCase):
config.set('server', 'hostname', 'ambari-host')
server_hostnames = hostname.server_hostnames(config)
self.assertEqual(['ambari-host'], server_hostnames,
- "expected host name ['ambari-host']; got
{0}".format(server_hostnames))
+ f"expected host name ['ambari-host']; got
{server_hostnames}")
config.set('server', 'hostname', default_server_hostname)
pass
@@ -62,7 +62,7 @@ class TestHostname(TestCase):
server_hostnames = hostname.server_hostnames(config)
self.assertEqual(len(server_hostnames), 3)
self.assertEqual(['ambari-host', 'ambari-host2', 'ambari-host3'],
server_hostnames,
- "expected host name ['ambari-host']; got
{0}".format(server_hostnames))
+ f"expected host name ['ambari-host']; got
{server_hostnames}")
config.set('server', 'hostname', default_server_hostname)
pass
@@ -83,7 +83,7 @@ class TestHostname(TestCase):
config.set('server', 'hostname_script', tmpname)
server_hostnames = hostname.server_hostnames(config)
- self.assertEqual(server_hostnames, ['test.example.com'], "expected
hostname ['test.example.com']; got {0}".format(server_hostnames))
+ self.assertEqual(server_hostnames, ['test.example.com'], f"expected
hostname ['test.example.com']; got {server_hostnames}")
finally:
os.remove(tmpname)
config.remove_option('server', 'hostname_script')
@@ -108,7 +108,7 @@ class TestHostname(TestCase):
expected_hostnames = ['host1.example.com', 'host2.example.com',
'host3.example.com']
server_hostnames = hostname.server_hostnames(config)
- self.assertEqual(server_hostnames, expected_hostnames, "expected
hostnames {0}; got {1}".format(expected_hostnames, server_hostnames))
+ self.assertEqual(server_hostnames, expected_hostnames, f"expected
hostnames {expected_hostnames}; got {server_hostnames}")
finally:
os.remove(tmpname)
config.remove_option('server', 'hostname_script')
diff --git
a/ambari-agent/src/test/python/ambari_agent/dummy_files/test_script.py
b/ambari-agent/src/test/python/ambari_agent/dummy_files/test_script.py
index c2d54129ad..aed0ed99a4 100644
--- a/ambari-agent/src/test/python/ambari_agent/dummy_files/test_script.py
+++ b/ambari-agent/src/test/python/ambari_agent/dummy_files/test_script.py
@@ -45,5 +45,5 @@ def execute(configurations={}, parameters={}, host_name=None):
if '{{foo-site/skip}}' in configurations:
return ('SKIPPED', ['This alert is skipped and will not be in the
collector'])
- label = "bar is {0}, baz is {1}".format(bar, baz)
+ label = f"bar is {bar}, baz is {baz}"
return ('WARNING', [label])
diff --git
a/ambari-agent/src/test/python/ambari_agent/examples/ControllerTester.py
b/ambari-agent/src/test/python/ambari_agent/examples/ControllerTester.py
index a0ba685f41..88eb0c5793 100644
--- a/ambari-agent/src/test/python/ambari_agent/examples/ControllerTester.py
+++ b/ambari-agent/src/test/python/ambari_agent/examples/ControllerTester.py
@@ -117,7 +117,7 @@ def main():
run_file_py_method.side_effect = \
lambda command, file, tmpoutfile, tmperrfile: {
'exitcode' : 0,
- 'stdout' : "Simulated run of py %s" % file,
+ 'stdout' : f"Simulated run of py {file}",
'stderr' : 'None'
}
run_simulation()
@@ -150,7 +150,7 @@ def run_simulation():
queue.put(responce)
def send_stub(url, data):
- logger.info("Controller sends data to %s :" % url)
+ logger.info(f"Controller sends data to {url} :")
logger.info(pprint.pformat(data))
if not queue.empty():
responce = queue.get()
diff --git
a/ambari-agent/src/test/python/resource_management/TestDatanodeHelper.py
b/ambari-agent/src/test/python/resource_management/TestDatanodeHelper.py
index e6b8408ecf..23cb66528d 100644
--- a/ambari-agent/src/test/python/resource_management/TestDatanodeHelper.py
+++ b/ambari-agent/src/test/python/resource_management/TestDatanodeHelper.py
@@ -44,14 +44,14 @@ class StubParams(object):
name = self.__class__.__name__
mocks = set(dir(self))
mocks = [x for x in mocks if not str(x).startswith("__")] # Exclude
private methods
- return "<StubParams: {0}; mocks: {1}>".format(name, str(mocks))
+ return f"<StubParams: {name}; mocks: {str(mocks)}>"
def fake_create_dir(directory):
"""
Fake function used as function pointer.
"""
- print("Fake function to create directory {0}".format(directory))
+ print(f"Fake function to create directory {directory}")
@patch.object(Script, "get_config",
new=MagicMock(return_value={'configurations':{'cluster-env':
{'ignore_bad_mounts': False, 'manage_dirs_on_root': True,
'one_dir_per_partition': False}}}))
@@ -67,7 +67,7 @@ class TestDatanodeHelper(TestCase):
params = StubParams()
params.data_dir_mount_file =
"/var/lib/ambari-agent/data/datanode/dfs_data_dir_mount.hist"
- params.dfs_data_dir = "{0},{1},{2}".format(grid0, grid1, grid2)
+ params.dfs_data_dir = f"{grid0},{grid1},{grid2}"
params.hdfs_user = "hdfs_test"
params.user_group = "hadoop_test"
diff --git
a/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py
b/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py
index 24d3cc7d1e..e23b8e7b8e 100644
--- a/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py
+++ b/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py
@@ -250,7 +250,7 @@ class TestSecurityCommons(TestCase):
hostname ="hostnamne"
keytab_file ="/etc/security/keytabs/nn.service.keytab"
principal = "nn/[email protected]"
- key = str(hash("%s|%s" % (principal, keytab_file)))
+ key = str(hash(f"{principal}|{keytab_file}"))
expiration_time = 30
filename = key + "_tmp.txt"
file_path = temp_dir + os.sep + "kinit_executor_cache"
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]