This is an automated email from the ASF dual-hosted git repository.
jialiang pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git
The following commit(s) were added to refs/heads/trunk by this push:
new e92b1feab9 AMBARI-26563: Replace deprecated logger.warn() with logger.warning() (#4095)
e92b1feab9 is described below
commit e92b1feab94ff37c2b0a7fd5ea1a67ec4d81aa23
Author: Peng Lu <[email protected]>
AuthorDate: Sat Dec 13 15:52:07 2025 +0800
AMBARI-26563: Replace deprecated logger.warn() with logger.warning() (#4095)
---
.../python/ambari_agent/AlertStatusReporter.py | 4 +--
.../src/main/python/ambari_agent/AmbariConfig.py | 2 +-
.../python/ambari_agent/CommandStatusReporter.py | 2 +-
.../python/ambari_agent/ComponentStatusExecutor.py | 2 +-
.../ambari_agent/CustomServiceOrchestrator.py | 2 +-
.../src/main/python/ambari_agent/DataCleaner.py | 8 ++---
.../src/main/python/ambari_agent/Facter.py | 42 +++++++++++-----------
.../src/main/python/ambari_agent/FileCache.py | 4 +--
.../src/main/python/ambari_agent/Hardware.py | 2 +-
.../main/python/ambari_agent/HeartbeatThread.py | 2 +-
.../ambari_agent/HostCheckReportFileHandler.py | 2 +-
.../src/main/python/ambari_agent/HostCleanup.py | 42 +++++++++++-----------
.../src/main/python/ambari_agent/HostInfo.py | 2 +-
.../src/main/python/ambari_agent/NetUtil.py | 2 +-
.../src/main/python/ambari_agent/PythonExecutor.py | 2 +-
.../main/python/ambari_agent/RecoveryManager.py | 16 ++++-----
.../main/python/ambari_agent/alerts/base_alert.py | 2 +-
.../main/python/ambari_agent/alerts/collector.py | 2 +-
.../main/python/ambari_agent/alerts/port_alert.py | 4 +--
.../src/main/python/ambari_agent/hostname.py | 6 ++--
.../ambari_agent/listeners/AgentActionsListener.py | 4 +--
.../listeners/ServerResponsesListener.py | 2 +-
.../main/python/ambari_agent/listeners/__init__.py | 2 +-
ambari-agent/src/main/python/ambari_agent/main.py | 6 ++--
.../src/main/python/ambari_agent/security.py | 4 +--
.../src/main/python/ambari_commons/shell.py | 12 +++----
.../src/main/python/ambari_ws4py/websocket.py | 2 +-
.../resource_management/libraries/script/script.py | 4 +--
.../src/main/repo/install_ambari_tarball.py | 2 +-
.../src/test/python/coilmq/config/__init__.py | 2 +-
ambari-server/src/main/python/bootstrap.py | 4 +--
.../src/main/resources/scripts/configs.py | 4 +--
.../main/resources/scripts/export_ams_metrics.py | 10 +++---
.../resources/scripts/takeover_config_merge.py | 12 +++----
.../BIGTOP/3.2.0/services/HDFS/service_advisor.py | 2 +-
.../BIGTOP/3.2.0/services/YARN/service_advisor.py | 4 +--
36 files changed, 113 insertions(+), 113 deletions(-)
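For context (not part of the commit): in the Python standard library, Logger.warn() has long been a deprecated alias of Logger.warning() (deprecated since Python 3.3), and newer interpreters drop the alias entirely, which is why this mechanical rename touches every agent and server script. A minimal before/after sketch follows; the logger name and message text are illustrative, not taken from the diff.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("ambari_agent.example")  # hypothetical logger name

# Before: deprecated alias (emits DeprecationWarning where it still exists)
#   logger.warn("Checking disk usage failed: %s", err)

# After: the supported spelling used throughout this commit
logger.warning("Checking disk usage failed: %s", "No space left on device")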
diff --git a/ambari-agent/src/main/python/ambari_agent/AlertStatusReporter.py b/ambari-agent/src/main/python/ambari_agent/AlertStatusReporter.py
index 3850097416..dc8c97d11f 100644
--- a/ambari-agent/src/main/python/ambari_agent/AlertStatusReporter.py
+++ b/ambari-agent/src/main/python/ambari_agent/AlertStatusReporter.py
@@ -53,7 +53,7 @@ class AlertStatusReporter(threading.Thread):
Run an endless loop which reports all the alert statuses got from collector
"""
if self.alert_reports_interval == 0:
- logger.warn(
+ logger.warning(
"AlertStatusReporter is turned off. Some functionality might not work
correctly."
)
return
@@ -142,7 +142,7 @@ class AlertStatusReporter(threading.Thread):
):
changed_alerts.append(alert)
else:
- logger.warn(
+ logger.warning(
"Cannot find alert definition for alert='{0}',
alert_state='{1}'.".format(
alert_name, alert_state
)
diff --git a/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py b/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
index 89df8711fe..46c2a7aed3 100644
--- a/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
+++ b/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
@@ -162,7 +162,7 @@ class AmbariConfig:
raise Exception(f"No config found at {configPath}, use default")
except Exception as err:
- logger.warn(err)
+ logger.warning(err)
setattr(cls, "_conf_cache", config)
return config
diff --git a/ambari-agent/src/main/python/ambari_agent/CommandStatusReporter.py b/ambari-agent/src/main/python/ambari_agent/CommandStatusReporter.py
index 04b4469a8e..4214cddef4 100644
--- a/ambari-agent/src/main/python/ambari_agent/CommandStatusReporter.py
+++ b/ambari-agent/src/main/python/ambari_agent/CommandStatusReporter.py
@@ -37,7 +37,7 @@ class CommandStatusReporter(threading.Thread):
Run an endless loop which reports all the commands results (IN_PROGRESS,
FAILED, COMPLETE) every self.command_reports_interval seconds.
"""
if self.command_reports_interval == 0:
- logger.warn(
+ logger.warning(
"CommandStatusReporter is turned off. Some functionality might not
work correctly."
)
return
diff --git a/ambari-agent/src/main/python/ambari_agent/ComponentStatusExecutor.py b/ambari-agent/src/main/python/ambari_agent/ComponentStatusExecutor.py
index 2de24e876b..d0b3955d11 100644
--- a/ambari-agent/src/main/python/ambari_agent/ComponentStatusExecutor.py
+++ b/ambari-agent/src/main/python/ambari_agent/ComponentStatusExecutor.py
@@ -56,7 +56,7 @@ class ComponentStatusExecutor(threading.Thread):
Run an endless loop which executes all status commands every
'status_commands_run_interval' seconds.
"""
if self.status_commands_run_interval == 0:
- self.logger.warn(
+ self.logger.warning(
"ComponentStatusExecutor is turned off. Some functionality might not
work correctly."
)
return
diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
index 0a4b4e780f..e733729a15 100644
--- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
@@ -147,7 +147,7 @@ class CustomServiceOrchestrator(object):
log_process_information(logger)
shell.kill_process_with_children(pid)
else:
- logger.warn(f"Unable to find process associated with taskId =
{task_id}")
+ logger.warning(f"Unable to find process associated with taskId =
{task_id}")
def get_py_executor(self, forced_command_name):
"""
diff --git a/ambari-agent/src/main/python/ambari_agent/DataCleaner.py b/ambari-agent/src/main/python/ambari_agent/DataCleaner.py
index 2808bada8a..e6ea41475f 100644
--- a/ambari-agent/src/main/python/ambari_agent/DataCleaner.py
+++ b/ambari-agent/src/main/python/ambari_agent/DataCleaner.py
@@ -44,7 +44,7 @@ class DataCleaner(threading.Thread):
self.file_max_age = config.get("agent", "data_cleanup_max_age", 86400)
self.file_max_age = int(self.file_max_age) if self.file_max_age else None
if self.file_max_age is None or self.file_max_age < 86400: # keep for at least 24h
- logger.warn(
+ logger.warning(
"The minimum value allowed for data_cleanup_max_age is 1 "
"day. Setting data_cleanup_max_age to 86400."
)
@@ -57,7 +57,7 @@ class DataCleaner(threading.Thread):
if (
self.cleanup_interval is None or self.cleanup_interval < 3600
): # wait at least 1 hour between runs
- logger.warn(
+ logger.warning(
"The minimum value allowed for data_cleanup_interval is 1 "
"hour. Setting data_cleanup_interval to 3600."
)
@@ -70,7 +70,7 @@ class DataCleaner(threading.Thread):
if (
self.cleanup_max_size_MB is None or self.cleanup_max_size_MB > 10000
): # no more than 10 GBs
- logger.warn(
+ logger.warning(
"The maximum value allowed for cleanup_max_size_MB is 10000 MB (10
GB). "
"Setting cleanup_max_size_MB to 10000."
)
@@ -132,7 +132,7 @@ class DataCleaner(threading.Thread):
pass
else:
# Did not reach below cap.
- logger.warn(
+ logger.warning(
"DataCleaner deleted an additional %d files, currently log files
occupy %d bytes."
% (count, total_size_bytes)
)
diff --git a/ambari-agent/src/main/python/ambari_agent/Facter.py b/ambari-agent/src/main/python/ambari_agent/Facter.py
index 7256756417..033b67ad89 100644
--- a/ambari-agent/src/main/python/ambari_agent/Facter.py
+++ b/ambari-agent/src/main/python/ambari_agent/Facter.py
@@ -74,7 +74,7 @@ class Facter(object):
raise Exception("No config found, use default")
except Exception as err:
- log.warn(err)
+ log.warning(err)
return config
# Return first ip adress
@@ -200,7 +200,7 @@ class Facter(object):
for key, value in data.items():
systemResources[key] = data[key]
except:
- log.warn(
+ log.warning(
f"Cannot read values from json files in {systemResourceDir}. it
won't be used for gathering system resources."
)
else:
@@ -308,7 +308,7 @@ class FacterWindows(Facter):
)
return int(result)
except:
- log.warn("Can not get SwapFree")
+ log.warning("Can not get SwapFree")
return 0
# Return memoryfree
@@ -323,7 +323,7 @@ class FacterWindows(Facter):
)
return result
except:
- log.warn("Can not get MemoryFree")
+ log.warning("Can not get MemoryFree")
return 0
# Return memorytotal
@@ -338,7 +338,7 @@ class FacterWindows(Facter):
)
return result
except:
- log.warn("Can not get MemoryTotal")
+ log.warning("Can not get MemoryTotal")
return 0
# Return swapfree
@@ -353,7 +353,7 @@ class FacterWindows(Facter):
)
return result
except:
- log.warn("Can not get SwapFree")
+ log.warning("Can not get SwapFree")
return 0
# Return swapsize
@@ -368,7 +368,7 @@ class FacterWindows(Facter):
)
return result
except:
- log.warn("Can not get SwapFree")
+ log.warning("Can not get SwapFree")
return 0
# Return memorysize
@@ -383,7 +383,7 @@ class FacterWindows(Facter):
)
return result
except:
- log.warn("Can not get MemorySize")
+ log.warning("Can not get MemorySize")
return 0
def facterInfo(self):
@@ -437,7 +437,7 @@ class FacterLinux(Facter):
return_code, stdout, stderr = run_os_command(FacterLinux.GET_IFCONFIG_SHORT_CMD)
return stdout
except OSError:
- log.warn(f"Can't execute {FacterLinux.GET_IFCONFIG_SHORT_CMD}")
+ log.warning(f"Can't execute {FacterLinux.GET_IFCONFIG_SHORT_CMD}")
return ""
# Returns the output of `ip link` command
@@ -447,7 +447,7 @@ class FacterLinux(Facter):
return_code, stdout, stderr = run_os_command(FacterLinux.GET_IP_LINK_CMD)
return stdout
except OSError:
- log.warn(f"Can't execute {FacterLinux.GET_IP_LINK_CMD}")
+ log.warning(f"Can't execute {FacterLinux.GET_IP_LINK_CMD}")
return ""
@staticmethod
@@ -456,7 +456,7 @@ class FacterLinux(Facter):
return_code, stdout, stderr = run_os_command(FacterLinux.GET_UPTIME_CMD)
return stdout
except OSError:
- log.warn(f"Can't execute {FacterLinux.GET_UPTIME_CMD}")
+ log.warning(f"Can't execute {FacterLinux.GET_UPTIME_CMD}")
return ""
@staticmethod
@@ -465,7 +465,7 @@ class FacterLinux(Facter):
return_code, stdout, stderr = run_os_command(FacterLinux.GET_MEMINFO_CMD)
return stdout
except OSError:
- log.warn(f"Can't execute {FacterLinux.GET_MEMINFO_CMD}")
+ log.warning(f"Can't execute {FacterLinux.GET_MEMINFO_CMD}")
return ""
# Returns the FQDN of the host
@@ -479,7 +479,7 @@ class FacterLinux(Facter):
if se_status:
return True
except OSError:
- log.warn(f"Could not run {FacterLinux.GET_SE_LINUX_ST_CMD}: OK")
+ log.warning(f"Could not run {FacterLinux.GET_SE_LINUX_ST_CMD}: OK")
return False
def return_first_words_from_list(self, list):
@@ -549,7 +549,7 @@ class FacterLinux(Facter):
)[20:24]
)
except Exception as err:
- log.warn(f"Can't get the IP address for {ifname}")
+ log.warning(f"Can't get the IP address for {ifname}")
return ip_address_by_ifname
@@ -566,7 +566,7 @@ class FacterLinux(Facter):
if result != "":
return result
# If the host has neither `ifocnfig` command nor `ip` command, then return "OS NOT SUPPORTED"
- log.warn(
+ log.warning(
f"Can't get a network interfaces list from
{self.DATA_IFCONFIG_SHORT_OUTPUT}"
)
return "OS NOT SUPPORTED"
@@ -578,7 +578,7 @@ class FacterLinux(Facter):
self.data_return_first(FacterLinux.DIGITS_REGEXP, self.DATA_UPTIME_OUTPUT)
)
except ValueError:
- log.warn(f"Can't get an uptime value from {self.DATA_UPTIME_OUTPUT}")
+ log.warning(f"Can't get an uptime value from {self.DATA_UPTIME_OUTPUT}")
return 0
# Return memoryfree
@@ -589,7 +589,7 @@ class FacterLinux(Facter):
self.data_return_first(FacterLinux.FREEMEM_REGEXP, self.DATA_MEMINFO_OUTPUT)
)
except ValueError:
- log.warn(f"Can't get free memory size from {self.DATA_MEMINFO_OUTPUT}")
+ log.warning(f"Can't get free memory size from
{self.DATA_MEMINFO_OUTPUT}")
return 0
# Return memorytotal
@@ -599,7 +599,7 @@ class FacterLinux(Facter):
self.data_return_first(FacterLinux.TOTALMEM_REGEXP, self.DATA_MEMINFO_OUTPUT)
)
except ValueError:
- log.warn(f"Can't get total memory size from {self.DATA_MEMINFO_OUTPUT}")
+ log.warning(f"Can't get total memory size from
{self.DATA_MEMINFO_OUTPUT}")
return 0
# Return swapfree
@@ -610,7 +610,7 @@ class FacterLinux(Facter):
self.data_return_first(FacterLinux.SWAPFREE_REGEXP, self.DATA_MEMINFO_OUTPUT)
)
except ValueError:
- log.warn(f"Can't get free swap memory size from
{self.DATA_MEMINFO_OUTPUT}")
+ log.warning(f"Can't get free swap memory size from
{self.DATA_MEMINFO_OUTPUT}")
return 0
# Return swapsize
@@ -621,7 +621,7 @@ class FacterLinux(Facter):
self.data_return_first(FacterLinux.SWAPTOTAL_REGEXP, self.DATA_MEMINFO_OUTPUT)
)
except ValueError:
- log.warn(f"Can't get total swap memory size from
{self.DATA_MEMINFO_OUTPUT}")
+ log.warning(f"Can't get total swap memory size from
{self.DATA_MEMINFO_OUTPUT}")
return 0
# Return memorysize
@@ -632,7 +632,7 @@ class FacterLinux(Facter):
self.data_return_first(FacterLinux.TOTALMEM_REGEXP, self.DATA_MEMINFO_OUTPUT)
)
except ValueError:
- log.warn(f"Can't get memory size from {self.DATA_MEMINFO_OUTPUT}")
+ log.warning(f"Can't get memory size from {self.DATA_MEMINFO_OUTPUT}")
return 0
def facterInfo(self):
diff --git a/ambari-agent/src/main/python/ambari_agent/FileCache.py b/ambari-agent/src/main/python/ambari_agent/FileCache.py
index b8f232dbcb..6af654a679 100644
--- a/ambari-agent/src/main/python/ambari_agent/FileCache.py
+++ b/ambari-agent/src/main/python/ambari_agent/FileCache.py
@@ -212,7 +212,7 @@ class FileCache:
self.write_hash_sum(full_path, remote_hash)
logger.info(f"Updated directory {full_path}")
else:
- logger.warn(
+ logger.warning(
"Skipping empty archive: {0}. "
"Expected archive was not found. Cached copy will be
used.".format(
download_url
@@ -224,7 +224,7 @@ class FileCache:
except CachingException as e:
if self.tolerate_download_failures:
# ignore
- logger.warn(
+ logger.warning(
"Error occurred during cache update. "
"Error tolerate setting is set to true, so"
" ignoring this error and continuing with current cache. "
diff --git a/ambari-agent/src/main/python/ambari_agent/Hardware.py b/ambari-agent/src/main/python/ambari_agent/Hardware.py
index 5de14273e3..6a1641a739 100644
--- a/ambari-agent/src/main/python/ambari_agent/Hardware.py
+++ b/ambari-agent/src/main/python/ambari_agent/Hardware.py
@@ -184,7 +184,7 @@ class Hardware:
)
dfdata = out
except Exception as ex:
- logger.warn("Checking disk usage failed: " + str(ex))
+ logger.warning("Checking disk usage failed: " + str(ex))
dfdata = ""
result_mounts = []
diff --git a/ambari-agent/src/main/python/ambari_agent/HeartbeatThread.py b/ambari-agent/src/main/python/ambari_agent/HeartbeatThread.py
index 08d7a96677..a8bb70740c 100644
--- a/ambari-agent/src/main/python/ambari_agent/HeartbeatThread.py
+++ b/ambari-agent/src/main/python/ambari_agent/HeartbeatThread.py
@@ -330,7 +330,7 @@ class HeartbeatThread(threading.Thread):
)
except ConnectionIsAlreadyClosed:
# this happens when trying to connect to broken connection. Happens if ambari-server is restarted.
- logger.warn(f"Connection failed while trying to connect to {destination}")
+ logger.warning(f"Connection failed while trying to connect to {destination}")
raise
try:
diff --git a/ambari-agent/src/main/python/ambari_agent/HostCheckReportFileHandler.py b/ambari-agent/src/main/python/ambari_agent/HostCheckReportFileHandler.py
index 3885ec9221..b69a72ac54 100644
--- a/ambari-agent/src/main/python/ambari_agent/HostCheckReportFileHandler.py
+++ b/ambari-agent/src/main/python/ambari_agent/HostCheckReportFileHandler.py
@@ -57,7 +57,7 @@ class HostCheckReportFileHandler:
raise Exception("No config found, use default")
except Exception as err:
- logger.warn(err)
+ logger.warning(err)
return config
def writeHostChecksCustomActionsFile(self, structuredOutput):
diff --git a/ambari-agent/src/main/python/ambari_agent/HostCleanup.py b/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
index 17cc5732ed..061658e52b 100644
--- a/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
+++ b/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
@@ -116,7 +116,7 @@ class HostCleanup:
raise Exception("No config found, use default")
except Exception as err:
- logger.warn(err)
+ logger.warning(err)
return config
def get_additional_dirs(self):
@@ -210,7 +210,7 @@ class HostCleanup:
","
)
except:
- logger.warn("Cannot read package list: " + str(sys.exc_info()[0]))
+ logger.warning("Cannot read package list: " + str(sys.exc_info()[0]))
try:
proc_map = {}
@@ -230,13 +230,13 @@ class HostCleanup:
if proc_map:
propertyMap[PROCESS_SECTION] = proc_map
except:
- logger.warn("Cannot read process list: " + str(sys.exc_info()))
+ logger.warning("Cannot read process list: " + str(sys.exc_info()))
try:
if config.has_option(USER_SECTION, USER_KEY):
propertyMap[USER_SECTION] = config.get(USER_SECTION, USER_KEY).split(",")
except:
- logger.warn("Cannot read user list: " + str(sys.exc_info()[0]))
+ logger.warning("Cannot read user list: " + str(sys.exc_info()[0]))
try:
if config.has_option(USER_SECTION, USER_HOMEDIR_KEY):
@@ -244,19 +244,19 @@ class HostCleanup:
USER_SECTION, USER_HOMEDIR_KEY
).split(",")
except:
- logger.warn("Cannot read user homedir list: " + str(sys.exc_info()[0]))
+ logger.warning("Cannot read user homedir list: " +
str(sys.exc_info()[0]))
try:
if config.has_option(REPO_SECTION, REPOS_KEY):
propertyMap[REPO_SECTION] = config.get(REPO_SECTION, REPOS_KEY).split(",")
except:
- logger.warn("Cannot read repositories list: " + str(sys.exc_info()[0]))
+ logger.warning("Cannot read repositories list: " +
str(sys.exc_info()[0]))
try:
if config.has_option(DIR_SECTION, DIR_KEY):
propertyMap[DIR_SECTION] = config.get(DIR_SECTION, DIR_KEY).split(",")
except:
- logger.warn("Cannot read dir list: " + str(sys.exc_info()[0]))
+ logger.warning("Cannot read dir list: " + str(sys.exc_info()[0]))
try:
alt_map = {}
@@ -267,7 +267,7 @@ class HostCleanup:
if alt_map:
propertyMap[ALT_SECTION] = alt_map
except:
- logger.warn("Cannot read alternates list: " + str(sys.exc_info()[0]))
+ logger.warning("Cannot read alternates list: " + str(sys.exc_info()[0]))
return propertyMap
@@ -286,7 +286,7 @@ class HostCleanup:
out = p2.communicate()[0]
logger.debug("alternatives --display " + alt_name + "\n, out = " + out)
except:
- logger.warn(
+ logger.warning(
"Cannot process alternative named: "
+ alt_name
+ ","
@@ -333,7 +333,7 @@ class HostCleanup:
out = self.get_alternatives_desc(alt_name)
if not out:
- logger.warn("No alternatives found for: " + alt_name)
+ logger.warning("No alternatives found for: " + alt_name)
continue
else:
alternates = out.split("\n")
@@ -349,7 +349,7 @@ class HostCleanup:
command = ALT_ERASE_CMD.format(alt_name, alt_path)
(returncode, stdoutdata, stderrdata) = self.run_os_command(command)
if returncode != 0:
- logger.warn(
+ logger.warning(
"Failed to remove alternative: "
+ alt_name
+ ", path: "
@@ -465,7 +465,7 @@ class HostCleanup:
elif OSCheck.is_ubuntu_family():
fileList = self.get_files_in_dir(REPO_PATH_UBUNTU)
else:
- logger.warn("Unsupported OS type, cannot get repository location.")
+ logger.warning("Unsupported OS type, cannot get repository location.")
return []
if fileList:
@@ -493,7 +493,7 @@ class HostCleanup:
logger.debug("Executing: " + str(command))
(returncode, stdoutdata, stderrdata) = self.run_os_command(command)
if returncode != 0:
- logger.warn("Erasing packages failed: " + stderrdata)
+ logger.warning("Erasing packages failed: " + stderrdata)
else:
logger.info("Erased packages successfully.\n" + stdoutdata)
return 0
@@ -510,7 +510,7 @@ class HostCleanup:
try:
shutil.rmtree(path)
except:
- logger.warn(
+ logger.warning(
f"Failed to remove dir {path} , error:
{str(sys.exc_info()[0])}"
)
else:
@@ -530,7 +530,7 @@ class HostCleanup:
try:
os.remove(path)
except:
- logger.warn(
+ logger.warning(
f"Failed to delete file: {path}, error: {str(sys.exc_info()[0])}"
)
else:
@@ -541,7 +541,7 @@ class HostCleanup:
groupDelCommand = GROUP_ERASE_CMD.format(HADOOP_GROUP)
(returncode, stdoutdata, stderrdata) = self.run_os_command(groupDelCommand)
if returncode != 0:
- logger.warn("Cannot delete group : " + HADOOP_GROUP + ", " + stderrdata)
+ logger.warning("Cannot delete group : " + HADOOP_GROUP + ", " +
stderrdata)
else:
logger.info("Successfully deleted group: " + HADOOP_GROUP)
@@ -553,7 +553,7 @@ class HostCleanup:
stat = os.stat(fileToCheck)
except OSError:
stat = None
- logger.warn("Cannot stat file, skipping: " + fileToCheck)
+ logger.warning("Cannot stat file, skipping: " + fileToCheck)
if stat and stat.st_uid in userIds:
self.do_erase_dir_silent([fileToCheck])
@@ -576,7 +576,7 @@ class HostCleanup:
try:
userIds.append(getpwnam(user).pw_uid)
except Exception:
- logger.warn("Cannot find user : " + user)
+ logger.warning("Cannot find user : " + user)
return userIds
def do_delete_users(self, userList):
@@ -586,7 +586,7 @@ class HostCleanup:
command = USER_ERASE_CMD.format(user)
(returncode, stdoutdata, stderrdata) = self.run_os_command(command)
if returncode != 0:
- logger.warn("Cannot delete user : " + user + ", " + stderrdata)
+ logger.warning("Cannot delete user : " + user + ", " + stderrdata)
else:
logger.info("Successfully deleted user: " + user)
self.do_delete_group()
@@ -627,7 +627,7 @@ class HostCleanup:
)
(returncode, stdoutdata, stderrdata) = self.run_os_command(run_checks_command)
if returncode != 0:
- logger.warn(
+ logger.warning(
"Failed to run host checks,\nstderr:\n "
+ stderrdata
+ "\n\nstdout:\n"
@@ -643,7 +643,7 @@ def backup_file(filePath):
try:
shutil.copyfile(filePath, filePath + "." + timestamp.strftime(format))
except Exception as e:
- logger.warn('Could not backup file "%s": %s' % (str(filePath, e)))
+ logger.warning('Could not backup file "%s": %s' % (str(filePath, e)))
return 0
diff --git a/ambari-agent/src/main/python/ambari_agent/HostInfo.py b/ambari-agent/src/main/python/ambari_agent/HostInfo.py
index c981c9036e..e01ac4d11f 100644
--- a/ambari-agent/src/main/python/ambari_agent/HostInfo.py
+++ b/ambari-agent/src/main/python/ambari_agent/HostInfo.py
@@ -465,7 +465,7 @@ class HostInfoLinux(HostInfo):
)
return out, err, code
except Exception as ex:
- logger.warn(f"Checking service {service_name} status failed")
+ logger.warning(f"Checking service {service_name} status failed")
return "", str(ex), 1
diff --git a/ambari-agent/src/main/python/ambari_agent/NetUtil.py b/ambari-agent/src/main/python/ambari_agent/NetUtil.py
index 0c18307d1d..4da6e7b4be 100644
--- a/ambari-agent/src/main/python/ambari_agent/NetUtil.py
+++ b/ambari-agent/src/main/python/ambari_agent/NetUtil.py
@@ -123,7 +123,7 @@ class NetUtil:
break
else:
if logger is not None:
- logger.warn(
+ logger.warning(
"Server at {0} is not reachable, sleeping for {1}
seconds...".format(
server_url, self.connect_retry_delay
)
diff --git a/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py b/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
index 96a246ed68..dd22909078 100644
--- a/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
+++ b/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
@@ -210,7 +210,7 @@ class PythonExecutor(object):
structured_out = {
"msg": "Unable to read structured output from " + structured_out_path
}
- self.logger.warn(structured_out)
+ self.logger.warning(structured_out)
except (OSError, IOError):
structured_out = {}
return out, error, structured_out
diff --git a/ambari-agent/src/main/python/ambari_agent/RecoveryManager.py b/ambari-agent/src/main/python/ambari_agent/RecoveryManager.py
index a612c579a6..0a2e693da0 100644
--- a/ambari-agent/src/main/python/ambari_agent/RecoveryManager.py
+++ b/ambari-agent/src/main/python/ambari_agent/RecoveryManager.py
@@ -442,7 +442,7 @@ class RecoveryManager:
else:
if action_counter["warnedLastAttempt"] is False:
action_counter["warnedLastAttempt"] = True
- logger.warn(
+ logger.warning(
"%s seconds has not passed since last occurrence %s seconds back
for %s. "
+ "Will silently skip execution without warning till retry gap
is passed",
self.retry_gap_in_sec,
@@ -469,7 +469,7 @@ class RecoveryManager:
else:
if action_counter["warnedLastReset"] is False:
action_counter["warnedLastReset"] = True
- logger.warn(
+ logger.warning(
"%s occurrences in %s minutes reached the limit for %s. "
+ "Will silently skip execution without warning till window is
reset",
action_counter["count"],
@@ -486,7 +486,7 @@ class RecoveryManager:
else:
if action_counter["warnedThresholdReached"] is False:
action_counter["warnedThresholdReached"] = True
- logger.warn(
+ logger.warning(
"%s occurrences in agent life time reached the limit for %s. "
+ "Will silently skip execution without warning till window is
reset",
action_counter["lifetimeCount"],
@@ -652,23 +652,23 @@ class RecoveryManager:
"""
self.recovery_enabled = False
if max_count <= 0:
- logger.warn("Recovery disabled: max_count must be a non-negative number")
+ logger.warning("Recovery disabled: max_count must be a non-negative
number")
return
if window_in_min <= 0:
- logger.warn("Recovery disabled: window_in_min must be a non-negative
number")
+ logger.warning("Recovery disabled: window_in_min must be a non-negative
number")
return
if retry_gap < 1:
- logger.warn(
+ logger.warning(
"Recovery disabled: retry_gap must be a positive number and at least 1"
)
return
if retry_gap >= window_in_min:
- logger.warn("Recovery disabled: retry_gap must be smaller than
window_in_min")
+ logger.warning("Recovery disabled: retry_gap must be smaller than
window_in_min")
return
if max_lifetime_count < 0 or max_lifetime_count < max_count:
- logger.warn(
+ logger.warning(
"Recovery disabled: max_lifetime_count must more than 0 and >=
max_count"
)
return
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/base_alert.py b/ambari-agent/src/main/python/ambari_agent/alerts/base_alert.py
index f513883bab..15f08a94c0 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/base_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/base_alert.py
@@ -162,7 +162,7 @@ class BaseAlert(object):
try:
data["text"] = res_base_text.format(*res[1])
except ValueError as value_error:
- logger.warn(f"[Alert][{self.get_name()}] - {str(value_error)}")
+ logger.warning(f"[Alert][{self.get_name()}] - {str(value_error)}")
# if there is a ValueError, it's probably because the text doesn't match the type of
# positional arguemtns (ie {0:d} with a float)
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/collector.py b/ambari-agent/src/main/python/ambari_agent/alerts/collector.py
index b81c508003..c717452013 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/collector.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/collector.py
@@ -66,7 +66,7 @@ class AlertCollector:
alert = alert_map[alert_name]
if not "uuid" in alert:
- logger.warn(f"Alert {alert} does not have uuid key.")
+ logger.warning(f"Alert {alert} does not have uuid key.")
continue
if alert["uuid"] == alert_uuid:
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py b/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
index 9442a31c36..c9ee58cf93 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
@@ -77,7 +77,7 @@ class PortAlert(BaseAlert):
# check warning threshold for sanity
if self.warning_timeout >= 30:
- logger.warn(
+ logger.warning(
"[Alert][{0}] The warning threshold of {1}s is too large, resetting to
{2}s".format(
self.get_name(), str(self.warning_timeout), str(DEFAULT_WARNING_TIMEOUT)
)
@@ -87,7 +87,7 @@ class PortAlert(BaseAlert):
# check critical threshold for sanity
if self.critical_timeout >= 30:
- logger.warn(
+ logger.warning(
"[Alert][{0}] The critical threshold of {1}s is too large, resetting
to {2}s".format(
self.get_name(), str(self.critical_timeout), str(DEFAULT_CRITICAL_TIMEOUT)
)
diff --git a/ambari-agent/src/main/python/ambari_agent/hostname.py b/ambari-agent/src/main/python/ambari_agent/hostname.py
index 69a358fa2f..d5b98889f4 100644
--- a/ambari-agent/src/main/python/ambari_agent/hostname.py
+++ b/ambari-agent/src/main/python/ambari_agent/hostname.py
@@ -64,7 +64,7 @@ def hostname(config):
f"Read hostname '{cached_hostname}' using agent:hostname_script
'{scriptname}'"
)
else:
- logger.warn(
+ logger.warning(
f"Execution of '{scriptname}' failed with exit code
{osStat.returncode}. err='{err.strip()}'\nout='{out.strip()}'"
)
cached_hostname = socket.getfqdn()
@@ -73,7 +73,7 @@ def hostname(config):
)
except:
cached_hostname = socket.getfqdn()
- logger.warn(
+ logger.warning(
f"Unexpected error while retrieving hostname: '{sys.exc_info()}',
defaulting to socket.getfqdn()"
)
logger.info(f"Read hostname '{cached_hostname}' using socket.getfqdn().")
@@ -114,7 +114,7 @@ def public_hostname(config):
)
return cached_public_hostname
else:
- logger.warn(
+ logger.warning(
f"Execution of '{scriptname}' returned {output.returncode}.
{err.strip()}\n{out.strip()}"
)
except:
diff --git a/ambari-agent/src/main/python/ambari_agent/listeners/AgentActionsListener.py b/ambari-agent/src/main/python/ambari_agent/listeners/AgentActionsListener.py
index 0453e7b3f5..b1c4b66d66 100644
--- a/ambari-agent/src/main/python/ambari_agent/listeners/AgentActionsListener.py
+++ b/ambari-agent/src/main/python/ambari_agent/listeners/AgentActionsListener.py
@@ -56,10 +56,10 @@ class AgentActionsListener(EventListener):
if action_name == self.RESTART_AGENT_ACTION:
self.restart_agent()
else:
- logger.warn(f"Unknown action '{action_name}' requested by server.
Ignoring it")
+ logger.warning(f"Unknown action '{action_name}' requested by server.
Ignoring it")
def restart_agent(self):
- logger.warn("Restarting the agent by the request from server")
+ logger.warning("Restarting the agent by the request from server")
Utils.restartAgent(self.stop_event)
def get_handled_path(self):
diff --git a/ambari-agent/src/main/python/ambari_agent/listeners/ServerResponsesListener.py b/ambari-agent/src/main/python/ambari_agent/listeners/ServerResponsesListener.py
index 7bd39c0a32..f7933b4faf 100644
--- a/ambari-agent/src/main/python/ambari_agent/listeners/ServerResponsesListener.py
+++ b/ambari-agent/src/main/python/ambari_agent/listeners/ServerResponsesListener.py
@@ -69,7 +69,7 @@ class ServerResponsesListener(EventListener):
self.listener_functions_on_error[correlation_id](headers, message)
del self.listener_functions_on_error[correlation_id]
else:
- logger.warn(
+ logger.warning(
f"Received a message from server without a
'{Constants.CORRELATION_ID_STRING}' header. Ignoring the message"
)
diff --git a/ambari-agent/src/main/python/ambari_agent/listeners/__init__.py b/ambari-agent/src/main/python/ambari_agent/listeners/__init__.py
index 9d70321a0b..4ef36fc8aa 100644
--- a/ambari-agent/src/main/python/ambari_agent/listeners/__init__.py
+++ b/ambari-agent/src/main/python/ambari_agent/listeners/__init__.py
@@ -71,7 +71,7 @@ class EventListener(ambari_stomp.ConnectionListener):
Here we handle some decode the message to json and check if it addressed
to this specific event listener.
"""
if not "destination" in headers:
- logger.warn(
+ logger.warning(
"Received event from server which does not contain 'destination'
header"
)
return
diff --git a/ambari-agent/src/main/python/ambari_agent/main.py b/ambari-agent/src/main/python/ambari_agent/main.py
index 8e32f8b0eb..892ec96e99 100644
--- a/ambari-agent/src/main/python/ambari_agent/main.py
+++ b/ambari-agent/src/main/python/ambari_agent/main.py
@@ -174,7 +174,7 @@ def resolve_ambari_config():
raise Exception(f"No config found at {configPath}, use default")
except Exception as err:
- logger.warn(err)
+ logger.warning(err)
def check_sudo():
@@ -201,7 +201,7 @@ def check_sudo():
) # bad sudo configurations
if run_time > 2:
- logger.warn(
+ logger.warning(
(
"Sudo commands on this host are running slowly ('{0}' took {1}
seconds).\n"
+ "This will create a significant slow down for ambari-agent service
tasks."
@@ -515,7 +515,7 @@ def main(options, initializer_module, heartbeat_stop_callback=None):
server_ip = socket.gethostbyname(server_hostname)
logger.info("Connecting to Ambari server at %s (%s)", server_url,
server_ip)
except socket.error:
- logger.warn(
+ logger.warning(
"Unable to determine the IP address of the Ambari server '%s'",
server_hostname,
)
diff --git a/ambari-agent/src/main/python/ambari_agent/security.py b/ambari-agent/src/main/python/ambari_agent/security.py
index 75d8936bdb..10410a5959 100644
--- a/ambari-agent/src/main/python/ambari_agent/security.py
+++ b/ambari-agent/src/main/python/ambari_agent/security.py
@@ -125,7 +125,7 @@ class VerifiedHTTPSConnection:
logger.exception("Exception during conn.disconnect()")
if isinstance(ex, socket_error):
- logger.warn(f"Could not connect to {self.connection_url}. {str(ex)}")
+ logger.warning(f"Could not connect to {self.connection_url}.
{str(ex)}")
raise
@@ -306,7 +306,7 @@ class CertificateManager:
if logger.isEnabledFor(logging.DEBUG):
logger.debug("Sign response from Server: \n" + pprint.pformat(data))
except Exception:
- logger.warn("Malformed response! data: %s", data)
+ logger.warning("Malformed response! data: %s", data)
data = {"result": "ERROR"}
result = data["result"]
if result == "OK":
diff --git a/ambari-common/src/main/python/ambari_commons/shell.py b/ambari-common/src/main/python/ambari_commons/shell.py
index b3f7ed8817..e5b5e686f6 100644
--- a/ambari-common/src/main/python/ambari_commons/shell.py
+++ b/ambari-common/src/main/python/ambari_commons/shell.py
@@ -709,9 +709,9 @@ def kill_process_with_children(base_pid):
if (
get_existing_pids(all_child_pids) and error_log
): # we're unable to kill all requested PIDs
- _logger.warn("Process termination error log:\n")
+ _logger.warning("Process termination error log:\n")
for error_item in error_log:
- _logger.warn(
+ _logger.warning(
f"PID: {error_item[0]}, Process: {error_item[1]}, Exception message:
{error_item[2]}"
)
@@ -842,7 +842,7 @@ class shellRunnerWindows(shellRunner):
# Run any command
def run(self, script, user=None):
global _logger
- _logger.warn("user argument ignored on windows")
+ _logger.warning("user argument ignored on windows")
code = 0
if isinstance(script, list):
cmd = " ".join(script)
@@ -862,7 +862,7 @@ class shellRunnerWindows(shellRunner):
def runPowershell(self, f=None, script_block=None, args=set()):
global _logger
- _logger.warn("user argument ignored on windows")
+ _logger.warning("user argument ignored on windows")
cmd = None
if f:
@@ -894,7 +894,7 @@ class shellRunnerLinux(shellRunner):
if self._threadLocal is not None:
os.setuid(self._threadLocal.uid)
except Exception as e:
- _logger.warn(f"Unable to switch user for running command. Error details:
{e}")
+ _logger.warning(f"Unable to switch user for running command. Error
details: {e}")
# Run any command
def run(self, script, user=None):
@@ -908,7 +908,7 @@ class shellRunnerLinux(shellRunner):
if self._threadLocal is not None:
self._threadLocal.uid = user
except Exception as e:
- _logger.warn(f"Unable to switch user for RUN_COMMAND. Error details:
{e}")
+ _logger.warning(f"Unable to switch user for RUN_COMMAND. Error details:
{e}")
cmd = script
diff --git a/ambari-common/src/main/python/ambari_ws4py/websocket.py b/ambari-common/src/main/python/ambari_ws4py/websocket.py
index 8057ac3f91..53e353235b 100644
--- a/ambari-common/src/main/python/ambari_ws4py/websocket.py
+++ b/ambari-common/src/main/python/ambari_ws4py/websocket.py
@@ -510,7 +510,7 @@ class WebSocket(object):
if s.errors:
for error in s.errors:
- logger.warn("Error message received (%d) '%s'" % (error.code,
error.reason))
+ logger.warning("Error message received (%d) '%s'" % (error.code,
error.reason))
self.close(error.code, error.reason)
s.errors = []
return False
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index 6b231349d3..291c50ff61 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -469,11 +469,11 @@ class Script(object):
user = self.get_user()
if log_folder == "":
- Logger.logger.warn("Log folder for current script is not defined")
+ Logger.logger.warning("Log folder for current script is not defined")
return
if user == "":
- Logger.logger.warn("User for current script is not defined")
+ Logger.logger.warning("User for current script is not defined")
return
show_logs(
diff --git a/ambari-common/src/main/repo/install_ambari_tarball.py b/ambari-common/src/main/repo/install_ambari_tarball.py
index d21a8fb6c4..aecad945ca 100644
--- a/ambari-common/src/main/repo/install_ambari_tarball.py
+++ b/ambari-common/src/main/repo/install_ambari_tarball.py
@@ -199,7 +199,7 @@ class Installer:
if packages_string is None:
err_msg = "No os dependencies found. "
if self.skip_dependencies:
- logger.warn(err_msg)
+ logger.warning(err_msg)
else:
raise Exception(err_msg)
diff --git a/ambari-common/src/test/python/coilmq/config/__init__.py b/ambari-common/src/test/python/coilmq/config/__init__.py
index 2331fc4bf1..90da9beb7f 100644
--- a/ambari-common/src/test/python/coilmq/config/__init__.py
+++ b/ambari-common/src/test/python/coilmq/config/__init__.py
@@ -102,7 +102,7 @@ def init_logging(logfile=None, loglevel=logging.INFO, configfile=None):
logging.config.fileConfig(configfile)
if logfile:
msg = "Config file conflicts with explicitly specified logfile; config
file takes precedence."
- logging.warn(msg)
+ logging.warning(msg)
else:
format = "%(asctime)s [%(threadName)s] %(name)s - %(levelname)s -
%(message)s"
if logfile:
diff --git a/ambari-server/src/main/python/bootstrap.py b/ambari-server/src/main/python/bootstrap.py
index f0ed933ba1..1edb39e31a 100755
--- a/ambari-server/src/main/python/bootstrap.py
+++ b/ambari-server/src/main/python/bootstrap.py
@@ -1101,7 +1101,7 @@ class BootstrapDefault(Bootstrap):
"at {0}. Please delete it manually".format(self.getPasswordFile())
)
self.host_log.write(message)
- logging.warn(message)
+ logging.warning(message)
self.createDoneFile(last_retcode)
self.status["return_code"] = last_retcode
@@ -1139,7 +1139,7 @@ class PBootstrap:
elapsedtime = time.time() - starttime
if elapsedtime > HOST_BOOTSTRAP_TIMEOUT:
# bootstrap timed out
- logging.warn(
+ logging.warning(
f"Bootstrap at host {bootstrap.host} timed out and will be
interrupted"
)
bootstrap.interruptBootstrap()
diff --git a/ambari-server/src/main/resources/scripts/configs.py b/ambari-server/src/main/resources/scripts/configs.py
index a3394ce85c..11574bd467 100644
--- a/ambari-server/src/main/resources/scripts/configs.py
+++ b/ambari-server/src/main/resources/scripts/configs.py
@@ -195,13 +195,13 @@ def read_xml_data_to_map(path):
if name != None:
name_text = name.text if name.text else ""
else:
- logger.warn(f"No name is found for one of the properties in {path},
ignoring it")
+ logger.warning(f"No name is found for one of the properties in {path},
ignoring it")
continue
if value != None:
value_text = value.text if value.text else ""
else:
- logger.warn(
+ logger.warning(
f'No value is found for "{name_text}" in {path}, using empty string for it'
)
value_text = ""
diff --git a/ambari-server/src/main/resources/scripts/export_ams_metrics.py b/ambari-server/src/main/resources/scripts/export_ams_metrics.py
index 0457a548ca..07675526d7 100644
--- a/ambari-server/src/main/resources/scripts/export_ams_metrics.py
+++ b/ambari-server/src/main/resources/scripts/export_ams_metrics.py
@@ -117,7 +117,7 @@ class Utils:
try:
response_data = json.loads(connection.read())
except Exception as e:
- logger.warn(f"Error parsing json data returned from URI:
{collector_uri}")
+ logger.warning(f"Error parsing json data returned from URI:
{collector_uri}")
logger.debug(str(e))
return response_data
@@ -159,7 +159,7 @@ class Utils:
Params.END_TIME = json["END_TIME"]
Params.AGGREGATE = json["AGGREGATE"]
else:
- logger.warn(
+ logger.warning(
"Not found config file in {0}".format(
os.path.join(Params.INPUT_DIR), "configs"
)
@@ -576,7 +576,7 @@ def main():
)
if Params.START_TIME == -1:
- logger.warn(
+ logger.warning(
"No start time provided, or it is in the wrong format. Please "
"provide milliseconds since epoch or a value in YYYY-MM-DDTHH:mm:ssZ
format"
)
@@ -586,7 +586,7 @@ def main():
Params.END_TIME = Utils.get_epoch(options.end_time)
if Params.END_TIME == -1:
- logger.warn(
+ logger.warning(
"No end time provided, or it is in the wrong format. Please "
"provide milliseconds since epoch or a value in YYYY-MM-DDTHH:mm:ssZ
format"
)
@@ -609,7 +609,7 @@ def main():
FlaskServer(ams_metrics_processor)
else:
- logger.warn(
+ logger.warning(
"Action '{0}' not supported. Please use action 'export' for exporting
AMS metrics "
"or use action 'run' for starting REST server".format(Params.ACTION)
)
diff --git a/ambari-server/src/main/resources/scripts/takeover_config_merge.py b/ambari-server/src/main/resources/scripts/takeover_config_merge.py
index b5632a667a..66875bf6d4 100644
--- a/ambari-server/src/main/resources/scripts/takeover_config_merge.py
+++ b/ambari-server/src/main/resources/scripts/takeover_config_merge.py
@@ -136,7 +136,7 @@ class XmlParser(Parser): # Used DOM parser to read data into a map
if name != None:
name_text = name.text if name.text else ""
else:
- logger.warn(
+ logger.warning(
f"No name is found for one of the properties in {path}, ignoring it"
)
continue
@@ -144,7 +144,7 @@ class XmlParser(Parser): # Used DOM parser to read data into a map
if value != None:
value_text = value.text if value.text else ""
else:
- logger.warn(
+ logger.warning(
f'No value is found for "{name_text}" in {path}, using empty string for it'
)
value_text = ""
@@ -205,7 +205,7 @@ class ConfigMerge:
ext in ConfigMerge.SUPPORTED_FILENAME_ENDINGS
and not ConfigMerge.SUPPORTED_FILENAME_ENDINGS[ext] in root
):
- logger.warn(f"File {file_path} is not configurable by Ambari.
Skipping...")
+ logger.warning(f"File {file_path} is not configurable by Ambari.
Skipping...")
continue
config_name = None
@@ -356,7 +356,7 @@ class ConfigMerge:
conflict_filename = os.path.join(
self.OUTPUT_DIR, configuration_type + "-conflicts.txt"
)
- logger.warn(
+ logger.warning(
f"You have configurations conflicts for {configuration_type}. Please
check {conflict_filename}"
)
with open(conflict_filename, "w") as fp:
@@ -367,7 +367,7 @@ class ConfigMerge:
conflict_filename = os.path.join(
self.OUTPUT_DIR, configuration_type + "-attributes-conflicts.txt"
)
- logger.warn(
+ logger.warning(
f"You have property attribute conflicts for {configuration_type}.
Please check {conflict_filename}"
)
with open(conflict_filename, "w") as fp:
@@ -430,7 +430,7 @@ class ConfigMerge:
if configuration_diff_output and configuration_diff_output != "":
conflict_filename = os.path.join(ConfigMerge.OUTPUT_DIR, "file-diff.txt")
- logger.warn(f"You have file diff conflicts. Please check
{conflict_filename}")
+ logger.warning(f"You have file diff conflicts. Please check
{conflict_filename}")
with open(conflict_filename, "w") as fp:
fp.write(configuration_diff_output)
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/service_advisor.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/service_advisor.py
index fdd9ad296c..518b648c74 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/service_advisor.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/service_advisor.py
@@ -747,7 +747,7 @@ class HDFSRecommender(service_advisor.ServiceAdvisor):
)
else:
# Since Kerberos is not enabled, we can not enable SSO
- self.logger.warn(
+ self.logger.warning(
"Enabling SSO integration for HDFS requires Kerberos, Since
Kerberos is not enabled, SSO integration is not being recommended."
)
putHdfsSiteProperty("hadoop.http.authentication.type", "simple")
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/service_advisor.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/service_advisor.py
index 1a9cf3255e..a1345e4a9b 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/service_advisor.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/service_advisor.py
@@ -1460,7 +1460,7 @@ class YARNRecommender(service_advisor.ServiceAdvisor):
)
else:
# Since Kerberos is not enabled, we can not enable SSO
- self.logger.warn(
+ self.logger.warning(
"Enabling SSO integration for Yarn requires Kerberos, Since
Kerberos is not enabled, SSO integration is not being recommended."
)
putYarnSiteProperty("hadoop.http.authentication.type", "simple")
@@ -3814,7 +3814,7 @@ class MAPREDUCE2Recommender(YARNRecommender):
)
else:
# Since Kerberos is not enabled, we can not enable SSO
- self.logger.warn(
+ self.logger.warning(
"Enabling SSO integration for MapReduce requires Kerberos, Since
Kerberos is not enabled, SSO integration is not being recommended."
)
putMapRedSiteProperty("hadoop.http.authentication.type", "simple")
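A note for anyone auditing similar code (again, not part of the commit, just a sketch): on interpreters that still ship the alias, escalating DeprecationWarning to an error makes any lingering logger.warn() call fail fast, while logger.warning() stays silent; on interpreters that have already dropped the alias, calling it would raise AttributeError instead. The logger name and messages below are hypothetical.

import logging
import warnings

logger = logging.getLogger("ambari_agent.example")  # hypothetical logger name

with warnings.catch_warnings():
    # Turn DeprecationWarning into an exception for the duration of this block.
    warnings.simplefilter("error", DeprecationWarning)
    logger.warning("supported spelling, raises nothing")
    if hasattr(logger, "warn"):
        try:
            logger.warn("deprecated spelling")  # DeprecationWarning escalated to an error
        except DeprecationWarning:
            print("logger.warn() is still only a deprecated alias here")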
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]