jenkins-bot has submitted this change and it was merged.

Change subject: Alphabetize functions and objects in modules
......................................................................


Alphabetize functions and objects in modules

Clean up the source code ordering of the log, main, ssh, tasks and utils
modules in an attempt to make it easier for future maintainers to find
things in the code base.

Change-Id: I8b8ccadbbea0c956bad62d0e2de91493de11a7f4
---
M scap/log.py
M scap/main.py
M scap/ssh.py
M scap/tasks.py
M scap/utils.py
5 files changed, 380 insertions(+), 379 deletions(-)

Approvals:
  Hashar: Looks good to me, approved
  jenkins-bot: Verified



diff --git a/scap/log.py b/scap/log.py
index 44f3935..353140b 100644
--- a/scap/log.py
+++ b/scap/log.py
@@ -22,6 +22,35 @@
 CONSOLE_LOG_FORMAT = '%(asctime)s %(levelname)-8s - %(message)s'
 
 
+class AnsiColorFormatter(logging.Formatter):
+    """Colorize output according to logging level."""
+
+    colors = {
+        'CRITICAL': '41;37',  # white on red
+        'ERROR': '31',        # red
+        'WARNING': '33',      # yellow
+        'INFO': '32',         # green
+        'DEBUG': '36',        # cyan
+    }
+
+    def __init__(self, fmt=None, datefmt=None, colors=None):
+        """
+        :param fmt: Message format string
+        :param datefmt: Time format string
+        :param colors: Dict of {'levelname': ANSI SGR parameters}
+
+        .. seealso:: https://en.wikipedia.org/wiki/ANSI_escape_code
+        """
+        super(self.__class__, self).__init__(fmt, datefmt)
+        if colors:
+            self.colors.update(colors)  # dict has no extend(); merge overrides
+
+    def format(self, record):
+        msg = super(self.__class__, self).format(record)
+        color = self.colors.get(record.levelname, '0')
+        return '\x1b[%sm%s\x1b[0m' % (color, msg)
+
+
 class IRCSocketHandler(logging.Handler):
     """Log handler for logmsgbot on #wikimedia-operation.
 
@@ -53,68 +82,6 @@
             sock.close()
         except (socket.timeout, socket.error, socket.gaierror):
             self.handleError(record)
-
-
-class Udp2LogHandler(logging.handlers.DatagramHandler):
-    """Log handler for udp2log."""
-
-    def __init__(self, host, port, prefix='scap'):
-        """
-        :param host: Hostname or ip address
-        :param port: Port
-        :param prefix: Line prefix (udp2log destination)
-        """
-        super(self.__class__, self).__init__(host, port)
-        self.prefix = prefix
-
-    def makePickle(self, record):
-        """Format record as a udp2log packet.
-
-        >>> Udp2LogHandler('127.0.0.1', 12345).makePickle(
-        ...     logging.makeLogRecord({'msg':'line1\\nline2'}))
-        'scap line1\\nscap line2\\n'
-        >>> Udp2LogHandler('127.0.0.1', 12345).makePickle(
-        ...     logging.makeLogRecord({'msg':'%s12'% ('0'*65500)}))
-        ...     # doctest: +ELLIPSIS
-        'scap 00000...00001\\n'
-        """
-        text = self.format(record)
-        if self.prefix:
-            text = re.sub(r'^', self.prefix + ' ', text, flags=re.MULTILINE)
-        if len(text) > 65506:
-            text = text[:65506]
-        if text[-1] != '\n':
-            text = text + '\n'
-        return text
-
-
-class AnsiColorFormatter(logging.Formatter):
-    """Colorize output according to logging level."""
-
-    colors = {
-        'CRITICAL': '41;37',  # white on red
-        'ERROR': '31',        # red
-        'WARNING': '33',      # yellow
-        'INFO': '32',         # green
-        'DEBUG': '36',        # cyan
-    }
-
-    def __init__(self, fmt=None, datefmt=None, colors=None):
-        """
-        :param fmt: Message format string
-        :param datefmt: Time format string
-        :param colors: Dict of {'levelname': ANSI SGR parameters}
-
-        .. seealso:: https://en.wikipedia.org/wiki/ANSI_escape_code
-        """
-        super(self.__class__, self).__init__(fmt, datefmt)
-        if colors:
-            self.colors.extend(colors)
-
-    def format(self, record):
-        msg = super(self.__class__, self).format(record)
-        color = self.colors.get(record.levelname, '0')
-        return '\x1b[%sm%s\x1b[0m' % (color, msg)
 
 
 class LogstashFormatter(logging.Formatter):
@@ -190,6 +157,77 @@
         }
 
 
+class ProgressReporter(object):
+    """Track and display progress of a process.
+
+    Report on the status of a multi-step process by displaying the completion
+    percentage and success, failure and remaining task counts on a single
+    output line.
+    """
+
+    def __init__(self, name, expect=0, fd=sys.stderr):
+        """
+        :param name: Name of command being monitored
+        :param expect: Number of results to expect
+        :param fd: File handle to write status messages to
+        """
+        self._name = name
+        self._expect = expect
+        self._done = 0
+        self._ok = 0
+        self._failed = 0
+        self._fd = fd
+
+    @property
+    def ok(self):
+        return self._ok
+
+    @property
+    def failed(self):
+        return self._failed
+
+    @property
+    def remaining(self):
+        return self._expect - self._done
+
+    @property
+    def percent_complete(self):
+        return 100.0 * (float(self._done) / max(self._expect, 1))
+
+    def expect(self, count):
+        """Set expected result count."""
+        self._expect = count
+
+    def start(self):
+        """Start tracking progress."""
+        self._progress()
+
+    def finish(self):
+        """Finish tracking progress."""
+        self._progress()
+        self._fd.write('\n')
+
+    def add_success(self):
+        """Record a sucessful task completion."""
+        self._done += 1
+        self._ok += 1
+        self._progress()
+
+    def add_failure(self):
+        """Record a failed task completion."""
+        self._done += 1
+        self._failed += 1
+        self._progress()
+
+    def _progress(self):
+        self._fd.write('%-80s\r' % self._output())
+
+    def _output(self):
+        return '%s: %3.0f%% (ok: %d; fail: %d; left: %d)' % (
+            self._name, self.percent_complete,
+            self.ok, self.failed, self.remaining)
+
+
 class Stats(object):
     """A simple StatsD metric client that can log measurements and counts to
     a remote StatsD host.
@@ -217,35 +255,6 @@
             self.socket.sendto(metric.encode('utf-8'), self.address)
         except Exception:
             self.logger.exception('Failed to send metric "%s"', metric)
-
-
-def setup_loggers(cfg):
-    """Setup the logging system.
-
-    * Configure the root logger to use :class:`AnsiColorFormatter`
-    * Optionally add a :class:`Udp2LogHandler` to send logs to a udp2log server
-    * Optional add a :class:`IRCSocketHandler` for the `scap.announce` log
-      channel to send messages to a tcpircbot server
-
-    :param cfg: Dict of global configuration values
-    """
-    # Colorize log messages sent to stderr
-    logging.root.handlers[0].setFormatter(AnsiColorFormatter(
-        '%(asctime)s %(message)s', '%H:%M:%S'))
-
-    if cfg['udp2log_host']:
-        # Send a copy of all logs to the udp2log relay
-        udp_handler = Udp2LogHandler(
-            cfg['udp2log_host'], int(cfg['udp2log_port']))
-        udp_handler.setLevel(logging.DEBUG)
-        udp_handler.setFormatter(LogstashFormatter())
-        logging.root.addHandler(udp_handler)
-
-    if cfg['tcpircbot_host']:
-        # Send 'scap.announce' messages to irc relay
-        irc_logger = logging.getLogger('scap.announce')
-        irc_logger.addHandler(IRCSocketHandler(
-            cfg['tcpircbot_host'], int(cfg['tcpircbot_port'])))
 
 
 class Timer(object):
@@ -320,72 +329,63 @@
             self.stats.timing('scap.%s' % label, elapsed * 1000)
 
 
-class ProgressReporter(object):
-    """Track and display progress of a process.
+class Udp2LogHandler(logging.handlers.DatagramHandler):
+    """Log handler for udp2log."""
 
-    Report on the status of a multi-step process by displaying the completion
-    percentage and succes, failure and remaining task counts on a single
-    output line.
+    def __init__(self, host, port, prefix='scap'):
+        """
+        :param host: Hostname or ip address
+        :param port: Port
+        :param prefix: Line prefix (udp2log destination)
+        """
+        super(self.__class__, self).__init__(host, port)
+        self.prefix = prefix
+
+    def makePickle(self, record):
+        """Format record as a udp2log packet.
+
+        >>> Udp2LogHandler('127.0.0.1', 12345).makePickle(
+        ...     logging.makeLogRecord({'msg':'line1\\nline2'}))
+        'scap line1\\nscap line2\\n'
+        >>> Udp2LogHandler('127.0.0.1', 12345).makePickle(
+        ...     logging.makeLogRecord({'msg':'%s12'% ('0'*65500)}))
+        ...     # doctest: +ELLIPSIS
+        'scap 00000...00001\\n'
+        """
+        text = self.format(record)
+        if self.prefix:
+            text = re.sub(r'^', self.prefix + ' ', text, flags=re.MULTILINE)
+        if len(text) > 65506:
+            text = text[:65506]
+        if text[-1] != '\n':
+            text = text + '\n'
+        return text
+
+
+def setup_loggers(cfg):
+    """Setup the logging system.
+
+    * Configure the root logger to use :class:`AnsiColorFormatter`
+    * Optionally add a :class:`Udp2LogHandler` to send logs to a udp2log server
+    * Optionally add an :class:`IRCSocketHandler` for the `scap.announce` log
+      channel to send messages to a tcpircbot server
+
+    :param cfg: Dict of global configuration values
     """
+    # Colorize log messages sent to stderr
+    logging.root.handlers[0].setFormatter(AnsiColorFormatter(
+        '%(asctime)s %(message)s', '%H:%M:%S'))
 
-    def __init__(self, name, expect=0, fd=sys.stderr):
-        """
-        :param name: Name of command being monitored
-        :param expect: Number of results to expect
-        :param fd: File handle to write status messages to
-        """
-        self._name = name
-        self._expect = expect
-        self._done = 0
-        self._ok = 0
-        self._failed = 0
-        self._fd = fd
+    if cfg['udp2log_host']:
+        # Send a copy of all logs to the udp2log relay
+        udp_handler = Udp2LogHandler(
+            cfg['udp2log_host'], int(cfg['udp2log_port']))
+        udp_handler.setLevel(logging.DEBUG)
+        udp_handler.setFormatter(LogstashFormatter())
+        logging.root.addHandler(udp_handler)
 
-    @property
-    def ok(self):
-        return self._ok
-
-    @property
-    def failed(self):
-        return self._failed
-
-    @property
-    def remaining(self):
-        return self._expect - self._done
-
-    @property
-    def percent_complete(self):
-        return 100.0 * (float(self._done) / max(self._expect, 1))
-
-    def expect(self, count):
-        """Set expected result count."""
-        self._expect = count
-
-    def start(self):
-        """Start tracking progress."""
-        self._progress()
-
-    def finish(self):
-        """Finish tracking progress."""
-        self._progress()
-        self._fd.write('\n')
-
-    def add_success(self):
-        """Record a sucessful task completion."""
-        self._done += 1
-        self._ok += 1
-        self._progress()
-
-    def add_failure(self):
-        """Record a failed task completion."""
-        self._done += 1
-        self._failed += 1
-        self._progress()
-
-    def _progress(self):
-        self._fd.write('%-80s\r' % self._output())
-
-    def _output(self):
-        return '%s: %3.0f%% (ok: %d; fail: %d; left: %d)' % (
-            self._name, self.percent_complete,
-            self.ok, self.failed, self.remaining)
+    if cfg['tcpircbot_host']:
+        # Send 'scap.announce' messages to irc relay
+        irc_logger = logging.getLogger('scap.announce')
+        irc_logger.addHandler(IRCSocketHandler(
+            cfg['tcpircbot_host'], int(cfg['tcpircbot_port'])))
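
A note for reviewers tracing the reordered log module: the classes keep
their public signatures through the move, so call sites are unaffected.
A minimal usage sketch of the relocated pieces (the handler wiring below
is an illustration, not scap's actual bootstrap in setup_loggers):

    # Illustrative only: exercises AnsiColorFormatter and ProgressReporter
    # exactly as defined in the diff above. Run from a scap checkout.
    import logging
    import sys

    from scap import log

    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(log.AnsiColorFormatter(
        '%(asctime)s %(message)s', '%H:%M:%S'))
    logging.root.addHandler(handler)
    logging.root.setLevel(logging.INFO)

    logging.info('rendered green')  # INFO maps to SGR '32' in colors

    reporter = log.ProgressReporter('demo', expect=3)
    reporter.start()
    reporter.add_success()
    reporter.add_success()
    reporter.add_failure()
    reporter.finish()  # prints: demo: 100% (ok: 2; fail: 1; left: 0)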
diff --git a/scap/main.py b/scap/main.py
index 082b5be..c3b05d5 100644
--- a/scap/main.py
+++ b/scap/main.py
@@ -89,35 +89,6 @@
             tasks.merge_cdb_updates(cache_dir, use_cores, True)
 
 
-class SyncCommon(cli.Application):
-    """Sync local MediaWiki deployment directory with deploy server state."""
-
-    @cli.argument('servers', nargs=argparse.REMAINDER,
-        help='Rsync server(s) to copy from')
-    def main(self, *extra_args):
-        tasks.sync_common(self.config, self.arguments.servers)
-        return 0
-
-
-class SyncWikiversions(cli.Application):
-    """Rebuild and sync wikiversions.cdb to the cluster."""
-
-    def _process_arguments(self, args, extra_args):
-        args.message = ' '.join(args.message) or '(no message)'
-        return args, extra_args
-
-    @cli.argument('message', nargs='*', help='Log message for SAL')
-    def main(self, *extra_args):
-        self._assert_auth_sock()
-
-        mw_install_hosts = utils.read_dsh_hosts_file('mediawiki-installation')
-        tasks.sync_wikiversions(mw_install_hosts, self.config)
-
-        self.announce(
-            'rebuilt wikiversions.cdb and synchronized wikiversions files: %s',
-            self.arguments.message)
-
-
 class Scap(cli.Application):
     """Deploy MediaWiki to the cluster."""
 
@@ -262,6 +233,35 @@
         return exit_status
 
 
+class SyncCommon(cli.Application):
+    """Sync local MediaWiki deployment directory with deploy server state."""
+
+    @cli.argument('servers', nargs=argparse.REMAINDER,
+        help='Rsync server(s) to copy from')
+    def main(self, *extra_args):
+        tasks.sync_common(self.config, self.arguments.servers)
+        return 0
+
+
+class SyncWikiversions(cli.Application):
+    """Rebuild and sync wikiversions.cdb to the cluster."""
+
+    def _process_arguments(self, args, extra_args):
+        args.message = ' '.join(args.message) or '(no message)'
+        return args, extra_args
+
+    @cli.argument('message', nargs='*', help='Log message for SAL')
+    def main(self, *extra_args):
+        self._assert_auth_sock()
+
+        mw_install_hosts = utils.read_dsh_hosts_file('mediawiki-installation')
+        tasks.sync_wikiversions(mw_install_hosts, self.config)
+
+        self.announce(
+            'rebuilt wikiversions.cdb and synchronized wikiversions files: %s',
+            self.arguments.message)
+
+
 class UpdateL10n(cli.Application):
     """Update localization files"""
 
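
The moved command classes all follow the same cli.Application contract
visible above: an optional cli.argument decorator on main(), parsed
values on self.arguments, and an integer exit status. A hypothetical
subcommand for reference; the class name and its argument are invented,
and only the decorator/attribute pattern shown in this diff is assumed:

    # Hypothetical subcommand modeled on SyncCommon/SyncWikiversions above.
    from scap import cli


    class EchoMessage(cli.Application):
        """Echo back a message (illustration only, not a real command)."""

        @cli.argument('message', nargs='*', help='Words to echo back')
        def main(self, *extra_args):
            print(' '.join(self.arguments.message) or '(no message)')
            return 0  # exit status, as in SyncCommon.main()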
diff --git a/scap/ssh.py b/scap/ssh.py
index 5f7e5fd..c43e1cd 100644
--- a/scap/ssh.py
+++ b/scap/ssh.py
@@ -20,43 +20,6 @@
 SSH = ('/usr/bin/ssh', '-oBatchMode=yes', '-oSetupTimeout=10')
 
 
-def cluster_ssh(hosts, command, limit=80):
-    """Run a command via SSH on multiple hosts concurrently."""
-    hosts = set(hosts)
-
-    try:
-        command = shlex.split(command)
-    except AttributeError:
-        pass
-
-    procs = {}
-    fds = {}
-    poll = select.epoll()
-    try:
-        while hosts or procs:
-            if hosts and len(procs) < limit:
-                host = hosts.pop()
-                ssh_command = SSH + (host,) + tuple(command)
-                proc = subprocess.Popen(ssh_command, stdout=subprocess.PIPE,
-                        stderr=subprocess.STDOUT, preexec_fn=os.setsid)
-                procs[proc.pid] = (proc, host)
-                poll.register(proc.stdout, select.EPOLLIN)
-            else:
-                pid, status = os.waitpid(-1, os.WNOHANG)
-                for fd, event in poll.poll(0.01):
-                    fds[fd] = fds.get(fd, '') + os.read(fd, 1048576)
-                if pid:
-                    status = -(status & 255) or (status >> 8)
-                    proc, host = procs.pop(pid)
-                    poll.unregister(proc.stdout)
-                    output = fds.pop(proc.stdout.fileno(), '')
-                    yield host, status, output
-    finally:
-        poll.close()
-        for pid, (proc, host) in procs.items():
-            proc.kill()
-
-
 class Job(object):
     """Execute a job on a group of remote hosts via ssh."""
     _logger = None
@@ -143,3 +106,40 @@
                 self._reporter.add_failure()
         self._reporter.finish()
         return self._reporter.ok, self._reporter.failed
+
+
+def cluster_ssh(hosts, command, limit=80):
+    """Run a command via SSH on multiple hosts concurrently."""
+    hosts = set(hosts)
+
+    try:
+        command = shlex.split(command)
+    except AttributeError:
+        pass
+
+    procs = {}
+    fds = {}
+    poll = select.epoll()
+    try:
+        while hosts or procs:
+            if hosts and len(procs) < limit:
+                host = hosts.pop()
+                ssh_command = SSH + (host,) + tuple(command)
+                proc = subprocess.Popen(ssh_command, stdout=subprocess.PIPE,
+                        stderr=subprocess.STDOUT, preexec_fn=os.setsid)
+                procs[proc.pid] = (proc, host)
+                poll.register(proc.stdout, select.EPOLLIN)
+            else:
+                pid, status = os.waitpid(-1, os.WNOHANG)
+                for fd, event in poll.poll(0.01):
+                    fds[fd] = fds.get(fd, '') + os.read(fd, 1048576)
+                if pid:
+                    status = -(status & 255) or (status >> 8)
+                    proc, host = procs.pop(pid)
+                    poll.unregister(proc.stdout)
+                    output = fds.pop(proc.stdout.fileno(), '')
+                    yield host, status, output
+    finally:
+        poll.close()
+        for pid, (proc, host) in procs.items():
+            proc.kill()
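
cluster_ssh() remains a generator after the move, so callers consume it
unchanged. A minimal caller sketch (the host names are placeholders;
select.epoll makes this Linux-only, and the SSH options at the top of
the module assume passwordless batch-mode keys):

    # Illustrative caller for the relocated cluster_ssh generator.
    from scap import ssh

    hosts = ['mw1001.example.org', 'mw1002.example.org']  # placeholders
    for host, status, output in ssh.cluster_ssh(hosts, 'uptime', limit=10):
        if status != 0:
            print('%s failed (%d): %s' % (host, status, output))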
diff --git a/scap/tasks.py b/scap/tasks.py
index f54badc..b2d7d93 100644
--- a/scap/tasks.py
+++ b/scap/tasks.py
@@ -144,6 +144,44 @@
     logger.debug('Compiled %s to %s', json_file, cdb_file)
 
 
+def merge_cdb_updates(directory, pool_size, trust_mtime=False):
+    """Update l10n CDB files using JSON data.
+
+    :param directory: L10n cache directory
+    :param pool_size: Number of parallel processes to use
+    :param trust_mtime: Trust file modification time?
+    """
+    logger = logging.getLogger('merge_cdb_updates')
+
+    cache_dir = os.path.realpath(directory)
+    upstream_dir = os.path.join(cache_dir, 'upstream')
+
+    files = [os.path.splitext(os.path.basename(f))[0]
+        for f in glob.glob('%s/*.json' % upstream_dir)]
+    if not files:
+        logger.warning('Directory %s is empty', upstream_dir)
+        return 0
+
+    pool = multiprocessing.Pool(pool_size)
+    updated = 0
+
+    reporter = log.ProgressReporter('l10n merge')
+    reporter.expect(len(files))
+    reporter.start()
+
+    for i, result in enumerate(pool.imap_unordered(
+        update_l10n_cdb_wrapper, itertools.izip(
+            itertools.repeat(cache_dir),
+            files,
+            itertools.repeat(trust_mtime))), 1):
+        if result:
+            updated += 1
+        reporter.add_success()
+
+    reporter.finish()
+    logger.info('Updated %d CDB file(s) in %s', updated, cache_dir)
+
+
 def purge_l10n_cache(version, cfg):
     """Purge the localization cache for a given version.
 
@@ -227,44 +265,6 @@
             '%(master_rsync)s::common/wikiversions*.{json,cdb} '
             '%(deploy_dir)s' % cfg)
         return rsync.progress('sync_wikiversions').run()
-
-
-def merge_cdb_updates(directory, pool_size, trust_mtime=False):
-    """Update l10n CDB files using JSON data.
-
-    :param directory: L10n cache directory
-    :param pool_size: Number of parallel processes to use
-    :param trust_mtime: Trust file modification time?
-    """
-    logger = logging.getLogger('merge_cdb_updates')
-
-    cache_dir = os.path.realpath(directory)
-    upstream_dir = os.path.join(cache_dir, 'upstream')
-
-    files = [os.path.splitext(os.path.basename(f))[0]
-        for f in glob.glob('%s/*.json' % upstream_dir)]
-    if not files:
-        logger.warning('Directory %s is empty', upstream_dir)
-        return 0
-
-    pool = multiprocessing.Pool(pool_size)
-    updated = 0
-
-    reporter = log.ProgressReporter('l10n merge')
-    reporter.expect(len(files))
-    reporter.start()
-
-    for i, result in enumerate(pool.imap_unordered(
-        update_l10n_cdb_wrapper, itertools.izip(
-            itertools.repeat(cache_dir),
-            files,
-            itertools.repeat(trust_mtime))), 1):
-        if result:
-            updated += 1
-        reporter.add_success()
-
-    reporter.finish()
-    logger.info('Updated %d CDB files(s) in %s', updated, cache_dir)
 
 
 def update_l10n_cdb(cache_dir, cdb_file, trust_mtime=False):
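
merge_cdb_updates() keeps the same call contract in its new position. A
hedged invocation example (the cache path is a placeholder, and using
multiprocessing.cpu_count() for the pool size is one reasonable choice,
not a scap default):

    # Illustrative invocation of the relocated merge_cdb_updates.
    import multiprocessing

    from scap import tasks

    tasks.merge_cdb_updates(
        '/srv/l10n-cache',            # placeholder; holds upstream/*.json
        multiprocessing.cpu_count(),  # parallel worker processes
        trust_mtime=True)             # skip rebuilds when mtimes look current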
diff --git a/scap/utils.py b/scap/utils.py
index decddeb..d2a3cb6 100644
--- a/scap/utils.py
+++ b/scap/utils.py
@@ -19,55 +19,9 @@
 import subprocess
 
 
-def read_dsh_hosts_file(path):
-    """Reads hosts from a file into a list.
-
-    Blank lines and comments are ignored.
-    """
-    try:
-        with open(os.path.join('/etc/dsh/group', path)) as hosts_file:
-            return re.findall(r'^[\w\.\-]+', hosts_file.read(), re.MULTILINE)
-    except IOError, e:
-        raise IOError(e.errno, e.strerror, path)
-
-
 class LockFailedError(Exception):
     """Signal that a locking attempt failed."""
     pass
-
-
-@contextlib.contextmanager
-def lock(filename):
-    """Context manager. Acquires a file lock on entry, releases on exit.
-
-    :param filename: File to lock
-    :raises: LockFailedError on failure
-    """
-    lock_fd = None
-    try:
-        lock_fd = open(filename, 'w+')
-        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
-    except IOError as e:
-        raise LockFailedError('Failed to lock %s: %s' % (filename, e))
-    else:
-        yield
-    finally:
-        if lock_fd:
-            fcntl.lockf(lock_fd, fcntl.LOCK_UN)
-            lock_fd.close()
-
-
-def human_duration(elapsed):
-    """Format an elapsed seconds count as human readable duration.
-
-    >>> human_duration(1)
-    '00m 01s'
-    >>> human_duration(65)
-    '01m 05s'
-    >>> human_duration(60*30+11)
-    '30m 11s'
-    """
-    return '%02dm %02ds' % divmod(elapsed, 60)
 
 
 def find_nearest_host(hosts, port=22, timeout=1):
@@ -118,6 +72,19 @@
                 s.close()
 
 
+def get_real_username():
+    """Get the username of the real user."""
+    try:
+        # Get the username of the user owning the terminal (ie the user
+        # that is running scap even if they are sudo-ing something)
+        return os.getlogin()
+    except OSError:
+        # When running under Jenkins there is no terminal so os.getlogin()
+        # blows up. Use the username matching the effective user id
+        # instead.
+        return get_username()
+
+
 def get_realm_specific_filename(filename, realm, datacenter):
     """Find the most specific file for the given realm and datacenter.
 
@@ -156,79 +123,6 @@
 def get_username():
     """Get the username of the effective user."""
     return pwd.getpwuid(os.getuid())[0]
-
-
-def get_real_username():
-    """Get the username of the real user."""
-    try:
-        # Get the username of the user owning the terminal (ie the user
-        # that is running scap even if they are sudo-ing something)
-        return os.getlogin()
-    except OSError:
-        # When running under Jenkins there is no terminal so os.getlogin()
-        # blows up. Use the username matching the effective user id
-        # instead.
-        return get_username()
-
-
-def md5_file(path):
-    """Compute the md5 checksum of a file's contents.
-
-    :param path: Path to file
-    :returns: hexdigest of md5 checksum
-    """
-    crc = hashlib.md5()
-    with open(path, 'rb') as f:
-        # Digest file in 1M chunks just in case it's huge
-        for block in iter(lambda: f.read(1048576), b''):
-            crc.update(block)
-    return crc.hexdigest()
-
-
-def sudo_check_call(user, cmd, logger=None):
-    """Run a command as a specific user. Reports stdout/stderr of process
-    to logger during execution.
-
-    :param user: User to run command as
-    :param cmd: Command to execute
-    :param logger: Logger to send process output to
-    :raises: subprocess.CalledProcessError on non-zero process exit
-    """
-    if logger is None:
-        logger = logging.getLogger('sudo_check_call')
-
-    proc = subprocess.Popen('sudo -u %s -- %s' % (user, cmd),
-        stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
-
-    while proc.poll() is None:
-        line = proc.stdout.readline().strip()
-        if line:
-            logger.debug(line)
-
-    if proc.returncode:
-        raise subprocess.CalledProcessError(proc.returncode, cmd)
-
-
-def iterate_subdirectories(root):
-    for name in os.listdir(root):
-        subdir = os.path.join(root, name)
-        if os.path.isdir(subdir):
-            yield subdir
-
-
-def git_info_filename(directory, install_path, cache_path):
-    """Compute the path for a git_info cache file related to a given
-    directory.
-
-    >>> git_info_filename('foo', 'foo', '')
-    'info.json'
-    >>> git_info_filename('foo/bar/baz', 'foo', 'xyzzy')
-    'xyzzy/info-bar-baz.json'
-    """
-    path = directory
-    if path.startswith(install_path):
-        path = path[len(install_path):]
-    return os.path.join(cache_path, 'info%s.json' % path.replace('/', '-'))
 
 
 def git_info(directory):
@@ -294,3 +188,110 @@
         'branch': branch,
         'remoteURL': remote_url,
     }
+
+
+def git_info_filename(directory, install_path, cache_path):
+    """Compute the path for a git_info cache file related to a given
+    directory.
+
+    >>> git_info_filename('foo', 'foo', '')
+    'info.json'
+    >>> git_info_filename('foo/bar/baz', 'foo', 'xyzzy')
+    'xyzzy/info-bar-baz.json'
+    """
+    path = directory
+    if path.startswith(install_path):
+        path = path[len(install_path):]
+    return os.path.join(cache_path, 'info%s.json' % path.replace('/', '-'))
+
+
+def human_duration(elapsed):
+    """Format an elapsed seconds count as human readable duration.
+
+    >>> human_duration(1)
+    '00m 01s'
+    >>> human_duration(65)
+    '01m 05s'
+    >>> human_duration(60*30+11)
+    '30m 11s'
+    """
+    return '%02dm %02ds' % divmod(elapsed, 60)
+
+
+def iterate_subdirectories(root):
+    """Generator over the child directories of a given directory."""
+    for name in os.listdir(root):
+        subdir = os.path.join(root, name)
+        if os.path.isdir(subdir):
+            yield subdir
+
+
+@contextlib.contextmanager
+def lock(filename):
+    """Context manager. Acquires a file lock on entry, releases on exit.
+
+    :param filename: File to lock
+    :raises: LockFailedError on failure
+    """
+    lock_fd = None
+    try:
+        lock_fd = open(filename, 'w+')
+        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+    except IOError as e:
+        raise LockFailedError('Failed to lock %s: %s' % (filename, e))
+    else:
+        yield
+    finally:
+        if lock_fd:
+            fcntl.lockf(lock_fd, fcntl.LOCK_UN)
+            lock_fd.close()
+
+
+def md5_file(path):
+    """Compute the md5 checksum of a file's contents.
+
+    :param path: Path to file
+    :returns: hexdigest of md5 checksum
+    """
+    crc = hashlib.md5()
+    with open(path, 'rb') as f:
+        # Digest file in 1M chunks just in case it's huge
+        for block in iter(lambda: f.read(1048576), b''):
+            crc.update(block)
+    return crc.hexdigest()
+
+
+def read_dsh_hosts_file(path):
+    """Reads hosts from a file into a list.
+
+    Blank lines and comments are ignored.
+    """
+    try:
+        with open(os.path.join('/etc/dsh/group', path)) as hosts_file:
+            return re.findall(r'^[\w\.\-]+', hosts_file.read(), re.MULTILINE)
+    except IOError as e:
+        raise IOError(e.errno, e.strerror, path)
+
+
+def sudo_check_call(user, cmd, logger=None):
+    """Run a command as a specific user. Reports stdout/stderr of process
+    to logger during execution.
+
+    :param user: User to run command as
+    :param cmd: Command to execute
+    :param logger: Logger to send process output to
+    :raises: subprocess.CalledProcessError on non-zero process exit
+    """
+    if logger is None:
+        logger = logging.getLogger('sudo_check_call')
+
+    proc = subprocess.Popen('sudo -u %s -- %s' % (user, cmd),
+        stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
+
+    while proc.poll() is None:
+        line = proc.stdout.readline().strip()
+        if line:
+            logger.debug(line)
+
+    if proc.returncode:
+        raise subprocess.CalledProcessError(proc.returncode, cmd)
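
Likewise the relocated utils helpers compose exactly as before; a short
sketch combining lock() and sudo_check_call() (the lock path and user
name are placeholders, not scap defaults):

    # Illustrative use of the relocated utils helpers.
    from scap import utils

    try:
        with utils.lock('/var/lock/scap-demo'):  # placeholder lock file
            utils.sudo_check_call('mwdeploy', 'echo deploying')
    except utils.LockFailedError as e:
        print('another deployment holds the lock: %s' % e)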

-- 
To view, visit https://gerrit.wikimedia.org/r/134272
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: I8b8ccadbbea0c956bad62d0e2de91493de11a7f4
Gerrit-PatchSet: 2
Gerrit-Project: mediawiki/tools/scap
Gerrit-Branch: master
Gerrit-Owner: BryanDavis <bda...@wikimedia.org>
Gerrit-Reviewer: Hashar <has...@free.fr>
Gerrit-Reviewer: jenkins-bot <>
