ArielGlenn has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/242458

Change subject: runnerutils: pylint, convert many camelcases
......................................................................

runnerutils: pylint, convert many camelcases

Change-Id: Ie92862f2dac41f985f5808fb1ad9139aa2561a1d
---
M xmldumps-backup/dumps/fileutils.py
M xmldumps-backup/dumps/jobs.py
M xmldumps-backup/dumps/runnerutils.py
M xmldumps-backup/worker.py
4 files changed, 182 insertions(+), 182 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/operations/dumps 
refs/changes/58/242458/1

diff --git a/xmldumps-backup/dumps/fileutils.py 
b/xmldumps-backup/dumps/fileutils.py
index f38db40..465d714 100644
--- a/xmldumps-backup/dumps/fileutils.py
+++ b/xmldumps-backup/dumps/fileutils.py
@@ -378,7 +378,7 @@
             date_string = self._wiki.date
         return os.path.join(self._wiki.publicDir(), date_string, 
dump_file.filename)
 
-    def latestDir(self):
+    def latest_dir(self):
         """Return 'latest' directory for the current project being dumped, e.g.
         if the current project is enwiki, this would return something like
         /mnt/data/xmldatadumps/public/enwiki/latest (if the directory 
/mnt/data/xmldatadumps/public
diff --git a/xmldumps-backup/dumps/jobs.py b/xmldumps-backup/dumps/jobs.py
index a5edea0..48c5e5a 100644
--- a/xmldumps-backup/dumps/jobs.py
+++ b/xmldumps-backup/dumps/jobs.py
@@ -188,7 +188,7 @@
                 runner.log.add_to_log_queue(line)
             sys.stderr.write(line)
         self.progress = line.strip()
-        runner.status.updateStatusFiles()
+        runner.status.update_status_files()
         
runner.runInfoFile.saveDumpRunInfoFile(runner.dumpItemList.report_dump_runinfo())
 
     def timeToWait(self):
diff --git a/xmldumps-backup/dumps/runnerutils.py 
b/xmldumps-backup/dumps/runnerutils.py
index d52baa8..cb2175e 100644
--- a/xmldumps-backup/dumps/runnerutils.py
+++ b/xmldumps-backup/dumps/runnerutils.py
@@ -11,110 +11,110 @@
 from CommandManagement import CommandPipeline, CommandSeries, 
CommandsInParallel
 from dumps.jobs import *
 
-def xmlEscape(text):
+def xml_escape(text):
         return text.replace("&", "&amp;").replace("<", "&lt;").replace(">", 
"&gt;")
 
 class Maintenance(object):
 
-    def inMaintenanceMode():
+    def in_maintenance_mode():
         """Use this to let callers know that we really should not
         be running.  Callers should try to exit the job
         they are running as soon as possible."""
         return exists("maintenance.txt")
 
-    def exitIfInMaintenanceMode(message=None):
+    def exit_if_in_maintenance_mode(message=None):
         """Call this from possible exit points of running jobs
         in order to exit if we need to"""
-        if Maintenance.inMaintenanceMode():
+        if Maintenance.in_maintenance_mode():
             if message:
                 raise BackupError(message)
             else:
                 raise BackupError("In maintenance mode, exiting.")
 
-    inMaintenanceMode = staticmethod(inMaintenanceMode)
-    exitIfInMaintenanceMode = staticmethod(exitIfInMaintenanceMode)
+    in_maintenance_mode = staticmethod(in_maintenance_mode)
+    exit_if_in_maintenance_mode = staticmethod(exit_if_in_maintenance_mode)
 
 
 class Checksummer(object):
-    def __init__(self, wiki, dumpDir, enabled=True, verbose=False):
+    def __init__(self, wiki, dump_dir, enabled=True, verbose=False):
         self.wiki = wiki
-        self.dumpDir = dumpDir
+        self.dumpDir = dump_dir
         self.verbose = verbose
         self.timestamp = time.strftime("%Y%m%d%H%M%S", time.gmtime())
         self._enabled = enabled
 
-    def prepareChecksums(self):
+    def prepare_checksums(self):
         """Create a temporary md5 checksum file.
         Call this at the start of the dump run, and move the file
         into the final location at the completion of the dump run."""
         if self._enabled:
-            checksumFileName = self._getChecksumFileNameTmp()
-            output = file(checksumFileName, "w")
+            checksum_filename = self._get_checksum_filename_tmp()
+            output = file(checksum_filename, "w")
 
-    def checksum(self, fileObj, runner):
+    def checksum(self, file_obj, runner):
         """Run checksum for an output file, and append to the list."""
         if self._enabled:
-            checksumFileName = self._getChecksumFileNameTmp()
-            output = file(checksumFileName, "a")
-            runner.debug("Checksumming %s" % fileObj.filename)
-            dumpfile = DumpFile(self.wiki, 
runner.dumpDir.filenamePublicPath(fileObj), None, self.verbose)
+            checksum_filename = self._get_checksum_filename_tmp()
+            output = file(checksum_filename, "a")
+            runner.debug("Checksumming %s" % file_obj.filename)
+            dumpfile = DumpFile(self.wiki, 
runner.dumpDir.filenamePublicPath(file_obj), None, self.verbose)
             checksum = dumpfile.md5sum()
             if checksum != None:
-                output.write("%s  %s\n" % (checksum, fileObj.filename))
+                output.write("%s  %s\n" % (checksum, file_obj.filename))
             output.close()
 
-    def moveMd5FileIntoPlace(self):
+    def move_md5file_into_place(self):
         if self._enabled:
-            tmpFileName = self._getChecksumFileNameTmp()
-            realFileName = self._getChecksumFileName()
-            os.rename(tmpFileName, realFileName)
+            tmp_filename = self._get_checksum_filename_tmp()
+            real_filename = self._get_checksum_filename()
+            os.rename(tmp_filename, real_filename)
 
-    def cpMd5TmpFileToPermFile(self):
+    def cp_md5_tmpfile_to_permfile(self):
         if self._enabled:
-            tmpFileName = self._getChecksumFileNameTmp()
-            realFileName = self._getChecksumFileName()
-            text = FileUtils.readFile(tmpFileName)
-            FileUtils.writeFile(self.wiki.config.tempDir, realFileName, text, 
self.wiki.config.fileperms)
+            tmp_filename = self._get_checksum_filename_tmp()
+            real_filename = self._get_checksum_filename()
+            text = FileUtils.readFile(tmp_filename)
+            FileUtils.writeFile(self.wiki.config.tempDir, real_filename, text, 
self.wiki.config.fileperms)
 
-    def getChecksumFileNameBasename(self):
+    def get_checksum_filename_basename(self):
         return "md5sums.txt"
 
     #
     # functions internal to the class
     #
-    def _getChecksumFileName(self):
-        fileObj = DumpFilename(self.wiki, None, 
self.getChecksumFileNameBasename())
-        return self.dumpDir.filenamePublicPath(fileObj)
+    def _get_checksum_filename(self):
+        file_obj = DumpFilename(self.wiki, None, 
self.get_checksum_filename_basename())
+        return self.dumpDir.filenamePublicPath(file_obj)
 
-    def _getChecksumFileNameTmp(self):
-        fileObj = DumpFilename(self.wiki, None, 
self.getChecksumFileNameBasename() + "." + self.timestamp + ".tmp")
-        return self.dumpDir.filenamePublicPath(fileObj)
+    def _get_checksum_filename_tmp(self):
+        file_obj = DumpFilename(self.wiki, None, 
self.get_checksum_filename_basename() + "." + self.timestamp + ".tmp")
+        return self.dumpDir.filenamePublicPath(file_obj)
 
-    def _getMd5FileDirName(self):
+    def _getmd5file_dir_name(self):
         return os.path.join(self.wiki.publicDir(), self.wiki.date)
 
 
 # everything that has to do with reporting the status of a piece
 # of a dump is collected here
 class Status(object):
-    def __init__(self, wiki, dumpDir, items, checksums, enabled, email=True, 
noticeFile=None, errorCallback=None, verbose=False):
+    def __init__(self, wiki, dump_dir, items, checksums, enabled, email=True, 
notice_file=None, error_callback=None, verbose=False):
         self.wiki = wiki
         self.dbName = wiki.dbName
-        self.dumpDir = dumpDir
+        self.dumpDir = dump_dir
         self.items = items
         self.checksums = checksums
-        self.noticeFile = noticeFile
-        self.errorCallback = errorCallback
-        self.failCount = 0
+        self.notice_file = notice_file
+        self.error_callback = error_callback
+        self.fail_count = 0
         self.verbose = verbose
         self._enabled = enabled
         self.email = email
 
-    def updateStatusFiles(self, done=False):
+    def update_status_files(self, done=False):
         if self._enabled:
-            self._saveStatusSummaryAndDetail(done)
+            self._save_status_summary_and_detail(done)
 
-    def reportFailure(self):
+    def report_failure(self):
         if self._enabled and self.email:
             if self.wiki.config.adminMail and 
self.wiki.config.adminMail.lower() != 'nomail':
                 subject = "Dump failure for " + self.dbName
@@ -127,84 +127,84 @@
 
     # this is a per-dump-item report (well, per file generated by the item)
     # Report on the file size & item status of the current output and output a 
link if we are done
-    def reportFile(self, fileObj, itemStatus):
-        filename = self.dumpDir.filenamePublicPath(fileObj)
+    def report_file(self, file_obj, item_status):
+        filename = self.dumpDir.filenamePublicPath(file_obj)
         if exists(filename):
             size = os.path.getsize(filename)
         else:
-            itemStatus = "missing"
+            item_status = "missing"
             size = 0
         size = FileUtils.prettySize(size)
-        if itemStatus == "in-progress":
-            return "<li class='file'>%s %s (written) </li>" % 
(fileObj.filename, size)
-        elif itemStatus == "done":
-            webpathRelative = self.dumpDir.web_path_relative(fileObj)
-            return "<li class='file'><a href=\"%s\">%s</a> %s</li>" % 
(webpathRelative, fileObj.filename, size)
+        if item_status == "in-progress":
+            return "<li class='file'>%s %s (written) </li>" % 
(file_obj.filename, size)
+        elif item_status == "done":
+            webpath_relative = self.dumpDir.web_path_relative(file_obj)
+            return "<li class='file'><a href=\"%s\">%s</a> %s</li>" % 
(webpath_relative, file_obj.filename, size)
         else:
-            return "<li class='missing'>%s</li>" % fileObj.filename
+            return "<li class='missing'>%s</li>" % file_obj.filename
 
     #
     # functions internal to the class
     #
-    def _saveStatusSummaryAndDetail(self, done=False):
+    def _save_status_summary_and_detail(self, done=False):
         """Write out an HTML file with the status for this wiki's dump
         and links to completed files, as well as a summary status in a 
separate file."""
         try:
             # Comprehensive report goes here
-            
self.wiki.writePerDumpIndex(self._reportDatabaseStatusDetailed(done))
+            
self.wiki.writePerDumpIndex(self._report_database_status_detailed(done))
             # Short line for report extraction goes here
-            self.wiki.writeStatus(self._reportDatabaseStatusSummary(done))
+            self.wiki.writeStatus(self._report_database_status_summary(done))
         except:
             if self.verbose:
                 exc_type, exc_value, exc_traceback = sys.exc_info()
                 sys.stderr.write(repr(traceback.format_exception(exc_type, 
exc_value, exc_traceback)))
             message = "Couldn't update status files. Continuing anyways"
-            if self.errorCallback:
-                self.errorCallback(message)
+            if self.error_callback:
+                self.error_callback(message)
             else:
                 sys.stderr.write("%s\n" % message)
 
-    def _reportDatabaseStatusSummary(self, done=False):
+    def _report_database_status_summary(self, done=False):
         """Put together a brief status summary and link for the current 
database."""
-        status = self._reportStatusSummaryLine(done)
+        status = self._report_status_summary_line(done)
         html = self.wiki.reportStatusLine(status)
 
-        activeItems = [x for x in self.items if x.status() == "in-progress"]
-        if activeItems:
-            return html + "<ul>" + "\n".join([self._reportItem(x) for x in 
activeItems]) + "</ul>"
+        active_items = [x for x in self.items if x.status() == "in-progress"]
+        if active_items:
+            return html + "<ul>" + "\n".join([self._report_item(x) for x in 
active_items]) + "</ul>"
         else:
             return html
 
-    def _reportDatabaseStatusDetailed(self, done=False):
+    def _report_database_status_detailed(self, done=False):
         """Put together a status page for this database, with all its 
component dumps."""
-        self.noticeFile.refreshNotice()
-        statusItems = [self._reportItem(item) for item in self.items]
-        statusItems.reverse()
-        html = "\n".join(statusItems)
-        f = DumpFilename(self.wiki, None, 
self.checksums.getChecksumFileNameBasename())
+        self.notice_file.refresh_notice()
+        status_items = [self._report_item(item) for item in self.items]
+        status_items.reverse()
+        html = "\n".join(status_items)
+        fname = DumpFilename(self.wiki, None, 
self.checksums.get_checksum_filename_basename())
         return self.wiki.config.readTemplate("report.html") % {
             "db": self.dbName,
             "date": self.wiki.date,
-            "notice": self.noticeFile.notice,
-            "status": self._reportStatusSummaryLine(done),
-            "previous": self._reportPreviousDump(done),
+            "notice": self.notice_file.notice,
+            "status": self._report_status_summary_line(done),
+            "previous": self._report_previous_dump(done),
             "items": html,
-            "checksum": self.dumpDir.web_path_relative(f),
+            "checksum": self.dumpDir.web_path_relative(fname),
             "index": self.wiki.config.index}
 
-    def _reportPreviousDump(self, done):
+    def _report_previous_dump(self, done):
         """Produce a link to the previous dump, if any"""
         # get the list of dumps for this wiki in order, find me in the list, 
find the one prev to me.
         # why? we might be rerunning a job from an older dumps. we might have 
two
         # runs going at once (think en pedia, one finishing up the history, 
another
         # starting at the beginning to get the new abstracts and stubs).
         try:
-            dumpsInOrder = self.wiki.latestDump(all=True)
-            meIndex = dumpsInOrder.index(self.wiki.date)
+            dumps_in_order = self.wiki.latestDump(all=True)
+            me_index = dumps_in_order.index(self.wiki.date)
             # don't wrap around to the newest dump in the list!
-            if meIndex > 0:
-                rawDate = dumpsInOrder[meIndex-1]
-            elif meIndex == 0:
+            if me_index > 0:
+                raw_date = dumps_in_order[me_index-1]
+            elif me_index == 0:
                 # We are the first item in the list. This is not an error, but 
there is no
                 # previous dump
                 return "No prior dumps of this database stored."
@@ -215,16 +215,16 @@
                 exc_type, exc_value, exc_traceback = sys.exc_info()
                 sys.stderr.write(repr(traceback.format_exception(exc_type, 
exc_value, exc_traceback)))
             return "No prior dumps of this database stored."
-        prettyDate = TimeUtils.prettyDate(rawDate)
+        pretty_date = TimeUtils.prettyDate(raw_date)
         if done:
             prefix = ""
             message = "Last dumped on"
         else:
             prefix = "This dump is in progress; see also the "
             message = "previous dump from"
-        return "%s<a href=\"../%s/\">%s %s</a>" % (prefix, rawDate, message, 
prettyDate)
+        return "%s<a href=\"../%s/\">%s %s</a>" % (prefix, raw_date, message, 
pretty_date)
 
-    def _reportStatusSummaryLine(self, done=False):
+    def _report_status_summary_line(self, done=False):
         if done == "done":
             classes = "done"
             text = "Dump complete"
@@ -234,16 +234,16 @@
         else:
             classes = "in-progress"
             text = "Dump in progress"
-        if self.failCount > 0:
+        if self.fail_count > 0:
             classes += " failed"
-            if self.failCount == 1:
+            if self.fail_count == 1:
                 ess = ""
             else:
                 ess = "s"
-            text += ", %d item%s failed" % (self.failCount, ess)
+            text += ", %d item%s failed" % (self.fail_count, ess)
         return "<span class='%s'>%s</span>" % (classes, text)
 
-    def _reportItem(self, item):
+    def _report_item(self, item):
         """Return an HTML fragment with info on the progress of this item."""
         item.status()
         item.updated()
@@ -251,14 +251,14 @@
         html = "<li class='%s'><span class='updates'>%s</span> <span 
class='status'>%s</span> <span class='title'>%s</span>" % (item.status(), 
item.updated(), item.status(), item.description())
         if item.progress:
             html += "<div class='progress'>%s</div>\n" % item.progress
-        fileObjs = item.listOutputFilesToPublish(self.dumpDir)
-        if fileObjs:
-            listItems = [self.reportFile(fileObj, item.status()) for fileObj 
in fileObjs]
+        file_objs = item.listOutputFilesToPublish(self.dumpDir)
+        if file_objs:
+            list_items = [self.report_file(file_obj, item.status()) for 
file_obj in file_objs]
             html += "<ul>"
             detail = item.detail()
             if detail:
                 html += "<li class='detail'>%s</li>\n" % detail
-            html += "\n".join(listItems)
+            html += "\n".join(list_items)
             html += "</ul>"
         html += "</li>"
         return html
@@ -268,31 +268,31 @@
         self.wiki = wiki
         self.notice = notice
         self._enabled = enabled
-        self.writeNoticeFile()
+        self.write_notice_file()
 
-    def writeNoticeFile(self):
+    def write_notice_file(self):
         if self._enabled:
-            noticeFile = self._getNoticeFilename()
+            notice_file = self._get_notice_filename()
             # delnotice.  toss any existing file
             if self.notice == False:
-                if exists(noticeFile):
-                    os.remove(noticeFile)
+                if exists(notice_file):
+                    os.remove(notice_file)
                 self.notice = ""
             # addnotice, stuff notice in a file for other jobs etc
             elif self.notice != "":
-                noticeDir = self._getNoticeDir()
-                FileUtils.writeFile(self.wiki.config.tempDir, noticeFile, 
self.notice, self.wiki.config.fileperms)
+                notice_dir = self._get_notice_dir()
+                FileUtils.writeFile(self.wiki.config.tempDir, notice_file, 
self.notice, self.wiki.config.fileperms)
             # default case. if there is a file get the contents, otherwise
             # we have empty contents, all good
             else:
-                if exists(noticeFile):
-                    self.notice = FileUtils.readFile(noticeFile)
+                if exists(notice_file):
+                    self.notice = FileUtils.readFile(notice_file)
 
-    def refreshNotice(self):
+    def refresh_notice(self):
         # if the notice file has changed or gone away, we comply.
-        noticeFile = self._getNoticeFilename()
-        if exists(noticeFile):
-            self.notice = FileUtils.readFile(noticeFile)
+        notice_file = self._get_notice_filename()
+        if exists(notice_file):
+            self.notice = FileUtils.readFile(notice_file)
         else:
             self.notice = ""
 
@@ -300,22 +300,22 @@
     #
     # functions internal to class
     #
-    def _getNoticeFilename(self):
+    def _get_notice_filename(self):
         return os.path.join(self.wiki.publicDir(), self.wiki.date, 
"notice.txt")
 
-    def _getNoticeDir(self):
+    def _get_notice_dir(self):
         return os.path.join(self.wiki.publicDir(), self.wiki.date)
 
 
 class SymLinks(object):
-    def __init__(self, wiki, dumpDir, logfn, debugfn, enabled):
+    def __init__(self, wiki, dump_dir, logfn, debugfn, enabled):
         self.wiki = wiki
-        self.dumpDir = dumpDir
+        self.dumpDir = dump_dir
         self._enabled = enabled
         self.logfn = logfn
         self.debugfn = debugfn
 
-    def makeDir(self, dir):
+    def make_dir(self, dir):
         if self._enabled:
             if exists(dir):
                 self.debugfn("Checkdir dir %s ..." % dir)
@@ -323,12 +323,12 @@
                 self.debugfn("Creating %s ..." % dir)
                 os.makedirs(dir)
 
-    def saveSymlink(self, dumpFile):
+    def save_symlink(self, dumpfile):
         if self._enabled:
-            self.makeDir(self.dumpDir.latestDir())
-            realfile = self.dumpDir.filenamePublicPath(dumpFile)
-            latestFilename = dumpFile.newFilename(dumpFile.dumpName, 
dumpFile.file_type, dumpFile.file_ext, 'latest', dumpFile.chunk, 
dumpFile.checkpoint, dumpFile.temp)
-            link = os.path.join(self.dumpDir.latestDir(), latestFilename)
+            self.make_dir(self.dumpDir.latest_dir())
+            realfile = self.dumpDir.filenamePublicPath(dumpfile)
+            latest_filename = dumpfile.newFilename(dumpfile.dumpName, 
dumpfile.file_type, dumpfile.file_ext, 'latest', dumpfile.chunk, 
dumpfile.checkpoint, dumpfile.temp)
+            link = os.path.join(self.dumpDir.latest_dir(), latest_filename)
             if exists(link) or os.path.islink(link):
                 if os.path.islink(link):
                     oldrealfile = os.readlink(link)
@@ -353,55 +353,55 @@
                 self.debugfn("Adding symlink %s -> %s" % (link, relative))
                 os.symlink(relative, link)
 
-    def cleanupSymLinks(self):
+    def cleanup_symlinks(self):
         if self._enabled:
-            latestDir = self.dumpDir.latestDir()
-            files = os.listdir(latestDir)
-            for f in files:
-                link = os.path.join(latestDir, f)
+            latest_dir = self.dumpDir.latest_dir()
+            files = os.listdir(latest_dir)
+            for filename in files:
+                link = os.path.join(latest_dir, filename)
                 if os.path.islink(link):
                     realfile = os.readlink(link)
-                    if not exists(os.path.join(latestDir, realfile)):
+                    if not exists(os.path.join(latest_dir, realfile)):
                         os.remove(link)
 
     # if the args are False or None, we remove all the old links for all 
values of the arg.
     # example: if chunk is False or None then we remove all old values for all 
chunks
     # "old" means "older than the specified datestring".
-    def removeSymLinksFromOldRuns(self, dateString, dumpName=None, chunk=None, 
checkpoint=None, onlychunks=False):
+    def remove_symlinks_from_old_runs(self, date_string, dump_name=None, 
chunk=None, checkpoint=None, onlychunks=False):
         # fixme this needs to do more work if there are chunks or checkpoint 
files linked in here from
         # earlier dates. checkpoint ranges change, and configuration of chunks 
changes too, so maybe
         # old files still exist and the links need to be removed because we 
have newer files for the
         # same phase of the dump.
 
         if self._enabled:
-            latestDir = self.dumpDir.latestDir()
-            files = os.listdir(latestDir)
-            for f in files:
-                link = os.path.join(latestDir, f)
+            latest_dir = self.dumpDir.latest_dir()
+            files = os.listdir(latest_dir)
+            for filename in files:
+                link = os.path.join(latest_dir, filename)
                 if os.path.islink(link):
                     realfile = os.readlink(link)
-                    fileObj = DumpFilename(self.dumpDir._wiki)
-                    fileObj.newFromFilename(os.path.basename(realfile))
-                    if fileObj.date < dateString:
+                    file_obj = DumpFilename(self.dumpDir._wiki)
+                    file_obj.newFromFilename(os.path.basename(realfile))
+                    if file_obj.date < date_string:
                         # fixme check that these are ok if the value is None
-                        if dumpName and (fileObj.dumpName != dumpName):
+                        if dump_name and (file_obj.dumpName != dump_name):
                             continue
-                        if (chunk or onlychunks) and (fileObj.chunk != chunk):
+                        if (chunk or onlychunks) and (file_obj.chunk != chunk):
                             continue
-                        if checkpoint and (fileObj.checkpoint != checkpoint):
+                        if checkpoint and (file_obj.checkpoint != checkpoint):
                             continue
                         self.debugfn("Removing old symlink %s -> %s" % (link, 
realfile))
                         os.remove(link)
 
 class Feeds(object):
-    def __init__(self, wiki, dumpDir, dbName, debugfn, enabled):
+    def __init__(self, wiki, dump_dir, dbname, debugfn, enabled):
         self.wiki = wiki
-        self.dumpDir = dumpDir
-        self.dbName = dbName
+        self.dumpDir = dump_dir
+        self.dbName = dbname
         self.debugfn = debugfn
         self._enabled = enabled
 
-    def makeDir(self, dirname):
+    def make_dir(self, dirname):
         if self._enabled:
             if exists(dirname):
                 self.debugfn("Checkdir dir %s ..." % dirname)
@@ -409,36 +409,36 @@
                 self.debugfn("Creating %s ..." % dirname)
                 os.makedirs(dirname)
 
-    def saveFeed(self, fileObj):
+    def save_feed(self, file_obj):
         if self._enabled:
-            self.makeDir(self.dumpDir.latestDir())
-            filenameAndPath = self.dumpDir.webPath(fileObj)
-            webPath = os.path.dirname(filenameAndPath)
-            rssText = self.wiki.config.readTemplate("feed.xml") % {
-                "chantitle": fileObj.basename,
-                "chanlink": webPath,
+            self.make_dir(self.dumpDir.latest_dir())
+            filename_and_path = self.dumpDir.webPath(file_obj)
+            web_path = os.path.dirname(filename_and_path)
+            rss_text = self.wiki.config.readTemplate("feed.xml") % {
+                "chantitle": file_obj.basename,
+                "chanlink": web_path,
                 "chandesc": "Wikimedia dump updates for %s" % self.dbName,
-                "title": webPath,
-                "link": webPath,
-                "description": xmlEscape("<a href=\"%s\">%s</a>" % 
(filenameAndPath, fileObj.filename)),
+                "title": web_path,
+                "link": web_path,
+                "description": xml_escape("<a href=\"%s\">%s</a>" % 
(filename_and_path, file_obj.filename)),
                 "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", 
time.gmtime()) }
-            directory = self.dumpDir.latestDir()
-            rssPath = os.path.join(self.dumpDir.latestDir(), self.dbName + 
"-latest-" + fileObj.basename + "-rss.xml")
-            self.debugfn("adding rss feed file %s " % rssPath)
-            FileUtils.writeFile(self.wiki.config.tempDir, rssPath, rssText, 
self.wiki.config.fileperms)
+            directory = self.dumpDir.latest_dir()
+            rss_path = os.path.join(self.dumpDir.latest_dir(), self.dbName + 
"-latest-" + file_obj.basename + "-rss.xml")
+            self.debugfn("adding rss feed file %s " % rss_path)
+            FileUtils.writeFile(self.wiki.config.tempDir, rss_path, rss_text, 
self.wiki.config.fileperms)
 
-    def cleanupFeeds(self):
+    def cleanup_feeds(self):
         # call this after sym links in this dir have been cleaned up.
         # we should probably fix this so there is no such dependency,
         # but it would mean parsing the contents of the rss file, bleah
         if self._enabled:
-            latestDir = self.dumpDir.latestDir()
-            files = os.listdir(latestDir)
-            for f in files:
-                if f.endswith("-rss.xml"):
-                    filename = f[:-8]
-                    link = os.path.join(latestDir, filename)
+            latest_dir = self.dumpDir.latest_dir()
+            files = os.listdir(latest_dir)
+            for fname in files:
+                if fname.endswith("-rss.xml"):
+                    filename = fname[:-8]
+                    link = os.path.join(latest_dir, filename)
                     if not exists(link):
-                        self.debugfn("Removing old rss feed %s for link %s" % 
(os.path.join(latestDir, f), link))
-                        os.remove(os.path.join(latestDir, f))
+                        self.debugfn("Removing old rss feed %s for link %s" % 
(os.path.join(latest_dir, fname), link))
+                        os.remove(os.path.join(latest_dir, fname))
 
diff --git a/xmldumps-backup/worker.py b/xmldumps-backup/worker.py
index acec904..de1f5ef 100644
--- a/xmldumps-backup/worker.py
+++ b/xmldumps-backup/worker.py
@@ -482,7 +482,7 @@
             self.pretty_print_commands([series])
             return 0
         else:
-            return self.runCommand([series], callbackTimed = 
self.status.updateStatusFiles)
+            return self.runCommand([series], callbackTimed = 
self.status.update_status_files)
 
     def pretty_print_commands(self, command_series_list):
         for series in command_series_list:
@@ -531,10 +531,10 @@
         self.log_and_print("%s: %s %s" % (TimeUtils.prettyTime(), self.dbName, 
stuff))
 
     def run_handle_failure(self):
-        if self.status.failCount < 1:
+        if self.status.fail_count < 1:
             # Email the site administrator just once per database
-            self.status.reportFailure()
-        self.status.failCount += 1
+            self.status.report_failure()
+        self.status.fail_count += 1
 
     def run_update_item_fileinfo(self, item):
         # this will include checkpoint files if they are enabled.
@@ -543,11 +543,11 @@
                 # why would the file not exist? because we changed chunk 
numbers in the
                 # middle of a run, and now we list more files for the next 
stage than there
                 # were for earlier ones
-                self.sym_links.saveSymlink(file_obj)
-                self.feeds.saveFeed(file_obj)
+                self.sym_links.save_symlink(file_obj)
+                self.feeds.save_feed(file_obj)
                 self.checksums.checksum(file_obj, self)
-                self.sym_links.cleanupSymLinks()
-                self.feeds.cleanupFeeds()
+                self.sym_links.cleanup_symlinks()
+                self.feeds.cleanup_feeds()
 
     def run(self):
         if self.job_requested:
@@ -574,7 +574,7 @@
         else:
             self.dumpItemList.mark_all_jobs_to_run(self.skipdone);
 
-        Maintenance.exitIfInMaintenanceMode("In maintenance mode, exiting dump 
of %s" % self.dbName)
+        Maintenance.exit_if_in_maintenance_mode("In maintenance mode, exiting 
dump of %s" % self.dbName)
 
         self.make_dir(os.path.join(self.wiki.publicDir(), self.wiki.date))
         self.make_dir(os.path.join(self.wiki.privateDir(), self.wiki.date))
@@ -592,13 +592,13 @@
         else:
             self.show_runner_state("Starting backup of %s" % self.dbName)
 
-        self.checksums.prepareChecksums()
+        self.checksums.prepare_checksums()
 
         for item in self.dumpItemList.dumpItems:
-            Maintenance.exitIfInMaintenanceMode("In maintenance mode, exiting 
dump of %s at step %s" % (self.dbName, item.name()))
+            Maintenance.exit_if_in_maintenance_mode("In maintenance mode, 
exiting dump of %s at step %s" % (self.dbName, item.name()))
             if item.toBeRun():
                 item.start(self)
-                self.status.updateStatusFiles()
+                self.status.update_status_files()
                 
self.runInfoFile.saveDumpRunInfoFile(self.dumpItemList.report_dump_runinfo())
                 try:
                     item.dump(self)
@@ -615,7 +615,7 @@
                             item.setStatus("failed")
 
             if item.status() == "done":
-                self.checksums.cpMd5TmpFileToPermFile()
+                self.checksums.cp_md5_tmpfile_to_permfile()
                 self.run_update_item_fileinfo(item)
             elif item.status() == "waiting" or item.status() == "skipped":
                 # don't update the md5 file for this item.
@@ -635,26 +635,26 @@
 
         if self.dumpItemList.all_possible_jobs_done(self.skip_jobs):
             # All jobs are either in status "done", "waiting", "failed", "skipped"
-            self.status.updateStatusFiles("done")
+            self.status.update_status_files("done")
         else:
             # This may happen if we start a dump now and abort before all items are
             # done. Then some are left for example in state "waiting". When
             # afterwards running a specific job, all (but one) of the jobs
             # previously in "waiting" are still in status "waiting"
-            self.status.updateStatusFiles("partialdone")
+            self.status.update_status_files("partialdone")
 
         self.runInfoFile.saveDumpRunInfoFile(self.dumpItemList.report_dump_runinfo())
 
         # if any job succeeds we might as well make the sym link
-        if self.status.failCount < 1:
+        if self.status.fail_count < 1:
             self.complete_dump()
 
         if self.job_requested:
             # special case...
             if self.job_requested == "latestlinks":
                 if self.dumpItemList.all_possible_jobs_done(self.skip_jobs):
-                    self.sym_links.removeSymLinksFromOldRuns(self.wiki.date)
-                    self.feeds.cleanupFeeds()
+                    self.sym_links.remove_symlinks_from_old_runs(self.wiki.date)
+                    self.feeds.cleanup_feeds()
 
         # Informing about completion
         if self.job_requested:
@@ -666,7 +666,7 @@
             self.show_runner_state_complete()
 
         # let caller know if this was a successful run
-        if self.status.failCount > 0:
+        if self.status.fail_count > 0:
             return False
         else:
             return True
@@ -712,10 +712,10 @@
         # files from different runs, in which case the md5sums file
         # will have accurate checksums for the run for which it was
         # produced, but not the other files. FIXME
-        self.checksums.moveMd5FileIntoPlace()
-        dumpFile = DumpFilename(self.wiki, None, self.checksums.getChecksumFileNameBasename())
-        self.sym_links.saveSymlink(dumpFile)
-        self.sym_links.cleanupSymLinks()
+        self.checksums.move_md5file_into_place()
+        dumpFile = DumpFilename(self.wiki, None, self.checksums.get_checksum_filename_basename())
+        self.sym_links.save_symlink(dumpFile)
+        self.sym_links.cleanup_symlinks()
 
         for item in self.dumpItemList.dumpItems:
             if item.toBeRun():
@@ -742,9 +742,9 @@
                         checkpoint = item.checkpoint_file.checkpoint
 
                 for dump in dump_names:
-                    self.sym_links.removeSymLinksFromOldRuns(self.wiki.date, dump, chunk, checkpoint, onlychunks=item.onlychunks)
+                    self.sym_links.remove_symlinks_from_old_runs(self.wiki.date, dump, chunk, checkpoint, onlychunks=item.onlychunks)
 
-                self.feeds.cleanupFeeds()
+                self.feeds.cleanup_feeds()
 
     def make_dir(self, dir):
         if self._makedir_enabled:

-- 
To view, visit https://gerrit.wikimedia.org/r/242458
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Ie92862f2dac41f985f5808fb1ad9139aa2561a1d
Gerrit-PatchSet: 1
Gerrit-Project: operations/dumps
Gerrit-Branch: ariel
Gerrit-Owner: ArielGlenn <ar...@wikimedia.org>

_______________________________________________
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits

Reply via email to