ArielGlenn has submitted this change and it was merged.
Change subject: don't modify sys.path, convert remaining execs into local and
remote
......................................................................
don't modify sys.path, convert remaining execs into local and remote
split examiner code into two files, local/remote
split user cf retrieval into two files, local/remote
use confdir var instead of hardcoded path to ignores.yaml
make sure self.ignored always has the four basic dicts in it
fix up a few static method references (now module fn refs)
Change-Id: I3996ba7700580c959692dda70297de4b290b7b8f
---
M dataretention/data_auditor.py
M dataretention/retention/cli.py
M dataretention/retention/cliutils.py
M dataretention/retention/completion.py
M dataretention/retention/config.yaml
D dataretention/retention/examiner.py
M dataretention/retention/ignored.yaml
M dataretention/retention/ignores.py
A dataretention/retention/localexaminer.py
M dataretention/retention/localfileaudit.py
M dataretention/retention/localhomeaudit.py
M dataretention/retention/locallogaudit.py
A dataretention/retention/localusercfgrabber.py
A dataretention/retention/remoteexaminer.py
M dataretention/retention/remotefileauditor.py
M dataretention/retention/remotehomeauditor.py
M dataretention/retention/remotelogauditor.py
A dataretention/retention/remoteusercfgrabber.py
M dataretention/retention/retentionaudit.py
M dataretention/rulestore.py
20 files changed, 510 insertions(+), 499 deletions(-)
Approvals:
ArielGlenn: Verified; Looks good to me, approved
diff --git a/dataretention/data_auditor.py b/dataretention/data_auditor.py
index 37800af..a2b355b 100644
--- a/dataretention/data_auditor.py
+++ b/dataretention/data_auditor.py
@@ -1,20 +1,19 @@
import sys
import getopt
-sys.path.append('/srv/audits/retention/scripts/')
from retention.cli import CommandLine
#from retention.auditor import HomesAuditor
from retention.remotefileauditor import RemoteFilesAuditor
from retention.remotelogauditor import RemoteLogsAuditor
from retention.remotehomeauditor import RemoteHomesAuditor
-from retention.examiner import RemoteFileExaminer, RemoteDirExaminer
-from retention.ignores import RemoteUserCfRetriever
+from retention.remoteexaminer import RemoteFileExaminer, RemoteDirExaminer
+from retention.remoteusercfgrabber import RemoteUserCfGrabber
def usage(message=None):
if message:
sys.stderr.write(message + "\n")
- usage_message = """Usage: data_auditor.py --target <hostexpr>
- [--prettyprint] [-report] [--depth <number>] [--dirsizes]
+ usage_message = """Usage: data_auditor.py --audit <audit-type> --target
<hostexpr>
+    [--confdir <path>] [--prettyprint] [--report] [--depth <number>]
[--dirsizes]
[--maxfiles <number>] [--sample] [--files <filelist>]
[--ignore <filelist>] [--examine <path>]
[--timeout <number>] [--verbose]
@@ -40,6 +39,8 @@
audit (-a) -- specify the type of audit to be done, one of 'root',
'logs' or 'homes'; this may not be specified with
the 'info' option.
+ confdir (-d) -- path to dir where ignores.yaml is located
+ default: /srv/salt/audits/retention/configs
target (-t) -- for local runs, this must be 'localhost' or '127.0.1'
for remote hosts, this should be a host expression
recognizable by salt, in the following format:
@@ -109,6 +110,7 @@
def main():
hosts_expr = None
audit_type = None
+ confdir = '/srv/salt/audits/retention/configs'
files_to_check = None
prettyprint = False
show_sample_content = False
@@ -131,8 +133,8 @@
try:
(options, remainder) = getopt.gnu_getopt(
- sys.argv[1:], "a:b:d:Df:F:l:i:Ie:m:oprsSt:T:uvh",
- ["audit=", "files=",
+ sys.argv[1:], "a:b:c:d:Df:F:l:i:Ie:m:oprsSt:T:uvh",
+ ["audit=", "confdir=", "files=",
"filecontents=", "linecount=",
"ignore=",
"interactive",
@@ -151,6 +153,8 @@
hosts_expr = val
elif opt in ["-a", "--audit"]:
audit_type = val
+        elif opt in ["-c", "--confdir"]:
+ confdir = val
elif opt in ["-d", "--depth"]:
if not val.isdigit():
usage("depth must be a number")
@@ -226,7 +230,7 @@
fileexam.run()
sys.exit(0)
elif getuserconfs:
- getconfs = RemoteUserCfRetriever(hosts_expr, timeout, 'homes')
+ getconfs = RemoteUserCfGrabber(hosts_expr, timeout, 'homes')
getconfs.run()
sys.exit(0)
@@ -240,7 +244,8 @@
usage("'oldest' argument may only be used with logs audit")
if audit_type == 'logs':
- logsaudit = RemoteLogsAuditor(hosts_expr, audit_type, prettyprint,
+ logsaudit = RemoteLogsAuditor(hosts_expr, audit_type, confdir,
+ prettyprint,
oldest_only, show_sample_content,
dirsizes,
show_system_logs,
summary_report, depth, files_to_check,
ignore_also,
@@ -251,7 +256,8 @@
cmdline.run(report, ignored)
elif audit_type == 'root':
- filesaudit = RemoteFilesAuditor(hosts_expr, audit_type, prettyprint,
+ filesaudit = RemoteFilesAuditor(hosts_expr, audit_type, confdir,
+ prettyprint,
show_sample_content, dirsizes,
summary_report,
depth, files_to_check, ignore_also,
@@ -262,7 +268,8 @@
cmdline.run(report, ignored)
elif audit_type == 'homes':
- homesaudit = RemoteHomesAuditor(hosts_expr, audit_type, prettyprint,
+ homesaudit = RemoteHomesAuditor(hosts_expr, audit_type, confdir,
+ prettyprint,
show_sample_content, dirsizes,
summary_report,
depth, files_to_check, ignore_also,
diff --git a/dataretention/retention/cli.py b/dataretention/retention/cli.py
index b13174a..e31e450 100644
--- a/dataretention/retention/cli.py
+++ b/dataretention/retention/cli.py
@@ -5,8 +5,6 @@
import readline
import traceback
-sys.path.append('/srv/audits/retention/scripts/')
-
from retention.status import Status
from retention.rule import RuleStore
import retention.remotefileauditor
@@ -15,11 +13,12 @@
import retention.utils
from retention.utils import JsonHelper
import retention.config
-from retention.examiner import RemoteDirExaminer, RemoteFileExaminer
+from retention.remoteexaminer import RemoteDirExaminer, RemoteFileExaminer
import retention.fileutils
import retention.ruleutils
import retention.cliutils
-from retention.ignores import Ignores, RemoteUserCfRetriever
+from retention.ignores import Ignores
+from retention.remoteusercfgrabber import RemoteUserCfGrabber
import retention.ignores
from retention.completion import Completion
@@ -277,7 +276,7 @@
print "exiting at user request"
break
else:
- local_ign = RemoteUserCfRetriever(host_todo, self.timeout,
self.audit_type)
+ local_ign = RemoteUserCfGrabber(host_todo, self.timeout,
self.audit_type)
self.local_ignores = local_ign.run(True)
local_ignored_dirs, local_ignored_files =
retention.ignores.process_local_ignores(
self.local_ignores, self.ignored)
@@ -383,18 +382,18 @@
return False
elif entrytype == 'dir':
- if retention.fileutils.dir_is_ignored(path, self.ignored):
+ if retention.ignores.dir_is_ignored(path, self.ignored):
return False
# check perhost file
if self.cenv.host in self.ignores.perhost_ignores:
- if retention.fileutils.dir_is_ignored(
+ if retention.ignores.dir_is_ignored(
path, self.ignores.perhost_ignores[self.cenv.host]):
return False
# check perhost rules
if self.cenv.host in self.ignores.perhost_ignores_from_rules:
- if retention.fileutils.dir_is_ignored(
+ if retention.ignores.dir_is_ignored(
path,
self.ignores.perhost_ignores_from_rules[self.cenv.host]):
return False
else:
diff --git a/dataretention/retention/cliutils.py
b/dataretention/retention/cliutils.py
index 3909b02..e5089ff 100644
--- a/dataretention/retention/cliutils.py
+++ b/dataretention/retention/cliutils.py
@@ -3,8 +3,6 @@
import readline
import atexit
-sys.path.append('/srv/audits/retention/scripts/')
-
import retention.remotefileauditor
import retention.utils
import retention.fileutils
diff --git a/dataretention/retention/completion.py
b/dataretention/retention/completion.py
index 59f9931..133becc 100644
--- a/dataretention/retention/completion.py
+++ b/dataretention/retention/completion.py
@@ -2,8 +2,6 @@
import sys
import readline
-sys.path.append('/srv/audits/retention/scripts/')
-
import retention.remotefileauditor
import retention.utils
import retention.fileutils
diff --git a/dataretention/retention/config.yaml
b/dataretention/retention/config.yaml
index 137edca..a2fc86e 100644
--- a/dataretention/retention/config.yaml
+++ b/dataretention/retention/config.yaml
@@ -21,3 +21,22 @@
# run on this many hosts at once
batchsize: 20
+
+ignored_types:
+ - script
+ - package
+ - python
+ - debian
+ - HTML
+ - RPM
+ - GIF
+ - JPEG
+ - PNG
+ - SVG
+ - program
+ - DSA
+ - PDF
+ - symbolic link
+ - executable
+ - shared object
+ - MS Windows icon
diff --git a/dataretention/retention/examiner.py
b/dataretention/retention/examiner.py
deleted file mode 100644
index 8d8dc1f..0000000
--- a/dataretention/retention/examiner.py
+++ /dev/null
@@ -1,300 +0,0 @@
-import os
-import sys
-import stat
-import json
-import logging
-
-sys.path.append('/srv/audits/retention/scripts/')
-
-from salt.client import LocalClient
-#from retention.saltclientplus import LocalClientPlus
-import retention.utils
-from retention.utils import JsonHelper
-from retention.fileinfo import FileInfo, EntryInfo
-
-log = logging.getLogger(__name__)
-
-
-class RemoteFileExaminer(object):
- '''
- retrieval and display of file contents on remote host
- '''
- def __init__(self, path, host, num_lines, timeout=20, quiet=False):
- self.path = path
- self.host = host
- self.timeout = timeout
- self.num_lines = num_lines
- self.quiet = quiet
-
- def run(self):
- '''
- do all the work
- '''
-# client = LocalClientPlus()
- client = LocalClient()
- module_args = [self.path,
- self.num_lines,
- self.timeout]
-
- result = client.cmd([self.host],
- "retentionaudit.examine_file",
- module_args, expr_form='list',
- timeout=self.timeout)
-
- if self.host in result:
- if not self.quiet:
- print result[self.host]
- return result[self.host]
-
-
-class LocalFileExaminer(object):
- '''
- retrieval and display of file contents on local host
- '''
- def __init__(self, path, num_lines, timeout=20, quiet=False):
- self.path = path
- self.timeout = timeout
- self.num_lines = num_lines
- self.quiet = quiet
-
- def run(self):
- '''
- do all the work
- '''
- finf = FileInfo(self.path, None)
- if finf.get_is_binary(self.num_lines):
- result = "BINARY CONTENT\n"
- else:
- result = finf.start_content
- if not self.quiet:
- print result,
- return result
-
-
-class DirContents(object):
- '''
- retrieval and display directory contents on local host
- '''
- def __init__(self, path, batchno=1, batchsize=50, prettyprint=False):
- self.path = path
- self.st = None
- self.full_contents = None
- self.batch_contents = None
- self.batch_entryinfo = None
- self.batchno = batchno
- self.batchsize = batchsize
- self.prettyprint = prettyprint
-
- def get_dir_stats(self, path=None):
- '''
- return results of stat call on the specified dir
- '''
- if path is None:
- path = self.path
- if self.st is None:
- try:
- self.st = os.stat(self.path)
- except:
- return None
- return self.st
-
- def read_dir_batch(self):
- '''
- retrieve directory contents if not already cached,
- grab the specified batch of entries (counting from 1)
- if there there are fewer batches than the
- requested batch number, the batch is set to the empty list
-
- NOTE this is horrid, os.listdir reads the whole dir anyways
- so batching rereads the whole list and tosses everything
- we don't want
- '''
- if self.full_contents is None:
- try:
- # can be a problem for directories with hundreds
- # of thousands of entries, will we encounter that?
- self.full_contents = os.listdir(self.path)
- except:
- self.full_contents = None
- return
-
- if len(self.full_contents) < (self.batchno - 1) * self.batchsize:
- self.batch_contents = []
- else:
- self.batch_contents = self.full_contents[
- (self.batchno - 1) * self.batchsize: self.batchno
- * self.batchsize]
-
- def get_contents(self):
- if self.batch_contents is None:
- self.get_dir_stats()
- if self.st is None:
- return "dir stat failed"
- if stat.S_ISLNK(self.st.st_mode):
- return "link"
- if not stat.S_ISDIR(self.st.st_mode):
- return "not dir"
- self.read_dir_batch()
- if self.batch_contents is None:
- return "dir read failed"
-
- return "ok"
-
- @staticmethod
- def get_entryinfo(path):
- '''
- get entry info object for path, populated
- '''
- finfo = EntryInfo(path)
- finfo.produce_json()
- return finfo.json
-
- def get_batch_entryinfo(self):
- '''
- get entry info for the entries in self.batch_contents
- (stat, first line of contents if not binary)
- '''
- if self.batch_contents is None:
- self.batch_entryinfo = None
- return
-
- results = []
- for dname in self.batch_contents:
- info = DirContents.get_entryinfo(os.path.join(self.path, dname))
- if info is not None:
- results.append(info)
-
- self.batch_entryinfo = results
-
- def display_json(self, json_text):
- if not self.prettyprint:
- print json_text
- return json_text
-
- try:
- item = json.loads(json_text, object_hook=JsonHelper.decode_dict)
- except:
- print json_text
- return json_text
- output = FileInfo.format_pretty_output_from_dict(item, path_justify=50)
- print output
- return output
-
- def show_batch(self):
- output = []
- for entry in self.batch_entryinfo:
- output.append(self.display_json(entry))
- output = '\n'.join(output)
- return output
-
-
-class RemoteDirExaminer(object):
- '''
- retrieval and display of directory contents on remote host
- '''
- def __init__(self, path, host, batchno=1, batchsize=300, timeout=20,
- prettyprint=False):
- self.path = path
- self.st = None
- self.host = host
- self.timeout = timeout
- self.batchno = batchno
- self.batchsize = batchsize
- self.prettyprint = prettyprint
-
- def run(self, quiet=False):
- '''
- do all the work
-
- note that 'quiet' applies only to remotely
- run, and the same is true for returning the contents.
- maybe we want to fix that
- '''
-
- while True:
-# client = LocalClientPlus()
- client = LocalClient()
- module_args = [self.path, self.batchno,
- self.batchsize, self.timeout,
- quiet]
-
- result = client.cmd([self.host],
- "retentionaudit.examine_dir",
- module_args, expr_form='list',
- timeout=self.timeout)
-
- if self.host in result:
- lines = result[self.host].split("\n")
-
- maxlen = 0
- for line in lines:
- if (line.startswith("WARNING:") or
- line.startswith("INFO:")):
- continue
- else:
- try:
- entry = json.loads(
- line, object_hook=JsonHelper.decode_dict)
- if len(entry['path']) > maxlen:
- maxlen = len(entry['path'])
- except:
- continue
-
- if not quiet:
- for line in lines:
- if (line.startswith("WARNING:") or
- line.startswith("INFO:")):
- print line
- else:
- try:
- entry = json.loads(
- line,
- object_hook=JsonHelper.decode_dict)
- EntryInfo.display_from_dict(
- entry, True, maxlen)
- except:
- print line
- return result[self.host]
- else:
- print "Failed to retrieve dir content for", self.path, "on",
self.host
- continuing = ("Try again? Y/N [N]: ")
- if continuing == "":
- continuing = "N"
- if continuing.upper() != "Y":
- return None
-
-
-class LocalDirExaminer(object):
- '''
- retrieval and display of directory contents on local host
- '''
- def __init__(self, path, batchno=1, batchsize=300, timeout=20,
quiet=False):
- self.path = path
- self.st = None
- self.timeout = timeout
- self.batchno = batchno
- self.batchsize = batchsize
- self.quiet = quiet
-
- def run(self, quiet=False):
- '''
- do all the work
-
- note that 'quiet' applies only to remotely
- run, and the same is true for returning the contents.
- maybe we want to fix that
- '''
-
- print ('WARNING: trying to get directory contents')
- dcont = DirContents(self.path, self.batchno, self.batchsize, False)
- result = dcont.get_contents()
- if result != 'ok':
- print ('WARNING: failed to get directory contents'
- 'for <%s> (%s)'
- % (self.path, result))
- else:
- dcont.get_batch_entryinfo()
- output = dcont.show_batch()
- return output
-
-
diff --git a/dataretention/retention/ignored.yaml
b/dataretention/retention/ignored.yaml
index afffecd..83d471c 100644
--- a/dataretention/retention/ignored.yaml
+++ b/dataretention/retention/ignored.yaml
@@ -225,25 +225,6 @@
- udev
- ufw.log
-ignored_types:
- - script
- - package
- - python
- - debian
- - HTML
- - RPM
- - GIF
- - JPEG
- - PNG
- - SVG
- - program
- - DSA
- - PDF
- - symbolic link
- - executable
- - shared object
- - MS Windows icon
-
ignored_extensions:
"*":
- amd64.changes
diff --git a/dataretention/retention/ignores.py
b/dataretention/retention/ignores.py
index 4a0270b..2284655 100644
--- a/dataretention/retention/ignores.py
+++ b/dataretention/retention/ignores.py
@@ -1,19 +1,13 @@
import os
import sys
import runpy
-import json
import salt.client
import salt.utils.yamlloader
-sys.path.append('/srv/audits/retention/scripts/')
-
from retention.status import Status
-import retention.remotefileauditor
import retention.utils
-from retention.utils import JsonHelper
import retention.fileutils
import retention.ruleutils
-import retention.cliutils
import retention.config
def expand_ignored_dirs(basedir, ignored):
@@ -58,7 +52,7 @@
os.path.dirname(dirname), ignored)
if dirname in expanded_dirs:
return True
- if wildcard_matches(dirname, wildcard_dirs):
+ if retention.fileutils.wildcard_matches(dirname, wildcard_dirs):
return True
return False
@@ -73,15 +67,15 @@
basename = os.path.basename(fname)
if 'prefixes' in ignored:
- if startswith(basename, ignored['prefixes']):
+ if retention.fileutils.startswith(basename, ignored['prefixes']):
return True
if 'extensions' in ignored:
if '*' in ignored['extensions']:
- if endswith(basename, ignored['extensions']['*']):
+ if retention.fileutils.endswith(basename,
ignored['extensions']['*']):
return True
if basedir in ignored['extensions']:
- if endswith(
+ if retention.fileutils.endswith(
basename, ignored['extensions'][basedir]):
return True
@@ -89,18 +83,18 @@
if basename in ignored['files']:
return True
if '*' in ignored['files']:
- if endswith(basename, ignored['files']['*']):
+ if retention.fileutils.endswith(basename, ignored['files']['*']):
return True
if '/' in ignored['files']:
if fname in ignored['files']['/']:
return True
- if wildcard_matches(
+ if retention.fileutils.wildcard_matches(
fname, [w for w in ignored['files']['/'] if '*' in w]):
return True
if basedir in ignored['files']:
- if endswith(basename, ignored['files'][basedir]):
+ if retention.fileutils.endswith(basename,
ignored['files'][basedir]):
return True
return False
@@ -110,8 +104,13 @@
specified in the Config class (see 'home_locations'), by reading
these root location dirs and grabbing all subdirectory names from them
'''
+ retention.config.set_up_conf()
home_dirs = []
+# filep =
open('/home/ariel/src/wmf/git-ops-software/software/dataretention/retention/junk',
'w+')
+# filep.write('INFO: ' + ','.join(dir('retention.config')))
+# filep.close()
+# print 'INFO:', dir('retention.config')
for location in retention.config.cf[locations]:
if not os.path.isdir(location):
continue
@@ -200,28 +199,32 @@
self.get_perhost_cf_from_file()
self.ignored = {}
- def set_up_ignored(self, ignore_also):
+ def set_up_ignored(self, confdir, ignore_also=None):
'''
collect up initial list of files/dirs to skip during audit
'''
- if os.path.exists('/srv/salt/audits/retention/configs/ignored.yaml'):
- try:
- contents =
open('/srv/salt/audits/retention/configs/ignored.yaml').read()
- ign = salt.utils.yamlloader.load(contents,
Loader=salt.utils.yamlloader.SaltYamlSafeLoader)
- if 'ignored_files' in ign:
- self.ignored['files'] = ign['ignored_files']
- if 'ignored_dirs' in ign:
- self.ignored['dirs'] = ign['ignored_dirs']
- if 'ignored_prefixes' in ign:
- self.ignored['prefixes'] = ign['ignored_prefixes']
- if 'ignored_extensions' in ign:
- self.ignored['extensions'] = ign['ignored_extensions']
- except:
- self.ignored['files'] = {}
- self.ignored['dirs'] = {}
- self.ignored['prefixes'] = {}
- self.ignored['extensions'] = {}
+ self.ignored['files'] = {}
+ self.ignored['dirs'] = {}
+ self.ignored['prefixes'] = {}
+ self.ignored['extensions'] = {}
+
+ if confdir is not None:
+ configfile = os.path.join(confdir, 'ignored.yaml')
+ if os.path.exists(configfile):
+ try:
+ contents = open(configfile).read()
+ ign = salt.utils.yamlloader.load(contents,
Loader=salt.utils.yamlloader.SaltYamlSafeLoader)
+ if 'ignored_files' in ign:
+ self.ignored['files'] = ign['ignored_files']
+ if 'ignored_dirs' in ign:
+ self.ignored['dirs'] = ign['ignored_dirs']
+ if 'ignored_prefixes' in ign:
+ self.ignored['prefixes'] = ign['ignored_prefixes']
+ if 'ignored_extensions' in ign:
+ self.ignored['extensions'] = ign['ignored_extensions']
+ except:
+ pass
if ignore_also is not None:
# silently skip paths that are not absolute
@@ -361,75 +364,3 @@
sys.stderr.write("INFO: " + ','.join(
self.ignored['extensions'][basedir])
+ " in " + basedir + '\n')
-
-
-class RemoteUserCfRetriever(object):
- '''
- retrieval and display dirs / files listed as to
- be ignored in per-user lists on remote host
- '''
- def __init__(self, host, timeout, audit_type):
- self.host = host
- self.timeout = timeout
- self.audit_type = audit_type
- self.locations = audit_type + "_locations"
-
- def run(self, quiet=False):
- '''
- do all the work
-
- note that 'quiet' applies only to remotely
- run, and the same is true for returning the contents.
- maybe we want to fix that
- '''
-
- local_ignores = {}
-
- client = salt.client.LocalClient()
- module_args = [self.timeout, self.audit_type]
-
- result = client.cmd([self.host], "retentionaudit.retrieve_usercfs",
- module_args, expr_form='list',
- timeout=self.timeout)
-
- if self.host in result:
- input = result[self.host]
- try:
- local_ignores = json.loads(
- input, object_hook=JsonHelper.decode_dict)
- except:
- print "WARNING: failed to get local ignores on host",
- print self.host,
- print "got this:", input
- local_ignores = {}
-
- if not quiet:
- print local_ignores
-
- return local_ignores
-
-class LocalUserCfRetriever(object):
- '''
- retrieval and display dirs / files listed as to
- be ignored in per-user lists on local host
- '''
- def __init__(self, timeout, audit_type='homes'):
- self.timeout = timeout
- self.audit_type = audit_type
- self.locations = audit_type + "_locations"
-
- def run(self, quiet=False):
- '''
- do all the work
-
- note that 'quiet' applies only to remotely
- run, and the same is true for returning the contents.
- maybe we want to fix that
- '''
-
- local_ignores = {}
-
- local_ignores = get_local_ignores(self.locations)
- output = json.dumps(local_ignores)
- print output
- return output
diff --git a/dataretention/retention/localexaminer.py
b/dataretention/retention/localexaminer.py
new file mode 100644
index 0000000..976c038
--- /dev/null
+++ b/dataretention/retention/localexaminer.py
@@ -0,0 +1,187 @@
+import os
+import stat
+import json
+import logging
+
+import retention.utils
+from retention.utils import JsonHelper
+from retention.fileinfo import FileInfo, EntryInfo
+
+log = logging.getLogger(__name__)
+
+
+class LocalFileExaminer(object):
+ '''
+ retrieval and display of file contents on local host
+ '''
+ def __init__(self, path, num_lines, timeout=20, quiet=False):
+ self.path = path
+ self.timeout = timeout
+ self.num_lines = num_lines
+ self.quiet = quiet
+
+ def run(self):
+ '''
+ do all the work
+ '''
+ finf = FileInfo(self.path, None)
+ if finf.get_is_binary(self.num_lines):
+ result = "BINARY CONTENT\n"
+ else:
+ result = finf.start_content
+ if not self.quiet:
+ print result,
+ return result
+
+
+class DirContents(object):
+ '''
+ retrieval and display directory contents on local host
+ '''
+ def __init__(self, path, batchno=1, batchsize=50, prettyprint=False):
+ self.path = path
+ self.st = None
+ self.full_contents = None
+ self.batch_contents = None
+ self.batch_entryinfo = None
+ self.batchno = batchno
+ self.batchsize = batchsize
+ self.prettyprint = prettyprint
+
+ def get_dir_stats(self, path=None):
+ '''
+ return results of stat call on the specified dir
+ '''
+ if path is None:
+ path = self.path
+ if self.st is None:
+ try:
+ self.st = os.stat(self.path)
+ except:
+ return None
+ return self.st
+
+ def read_dir_batch(self):
+ '''
+ retrieve directory contents if not already cached,
+ grab the specified batch of entries (counting from 1)
+        if there are fewer batches than the
+ requested batch number, the batch is set to the empty list
+
+ NOTE this is horrid, os.listdir reads the whole dir anyways
+ so batching rereads the whole list and tosses everything
+ we don't want
+ '''
+ if self.full_contents is None:
+ try:
+ # can be a problem for directories with hundreds
+ # of thousands of entries, will we encounter that?
+ self.full_contents = os.listdir(self.path)
+ except:
+ self.full_contents = None
+ return
+
+ if len(self.full_contents) < (self.batchno - 1) * self.batchsize:
+ self.batch_contents = []
+ else:
+ self.batch_contents = self.full_contents[
+ (self.batchno - 1) * self.batchsize: self.batchno
+ * self.batchsize]
+
+ def get_contents(self):
+ if self.batch_contents is None:
+ self.get_dir_stats()
+ if self.st is None:
+ return "dir stat failed"
+ if stat.S_ISLNK(self.st.st_mode):
+ return "link"
+ if not stat.S_ISDIR(self.st.st_mode):
+ return "not dir"
+ self.read_dir_batch()
+ if self.batch_contents is None:
+ return "dir read failed"
+
+ return "ok"
+
+ @staticmethod
+ def get_entryinfo(path):
+ '''
+ get entry info object for path, populated
+ '''
+ finfo = EntryInfo(path)
+ finfo.produce_json()
+ return finfo.json
+
+ def get_batch_entryinfo(self):
+ '''
+ get entry info for the entries in self.batch_contents
+ (stat, first line of contents if not binary)
+ '''
+ if self.batch_contents is None:
+ self.batch_entryinfo = None
+ return
+
+ results = []
+ for dname in self.batch_contents:
+ info = DirContents.get_entryinfo(os.path.join(self.path, dname))
+ if info is not None:
+ results.append(info)
+
+ self.batch_entryinfo = results
+
+ def display_json(self, json_text):
+ if not self.prettyprint:
+ print json_text
+ return json_text
+
+ try:
+ item = json.loads(json_text, object_hook=JsonHelper.decode_dict)
+ except:
+ print json_text
+ return json_text
+ output = FileInfo.format_pretty_output_from_dict(item, path_justify=50)
+ print output
+ return output
+
+ def show_batch(self):
+ output = []
+ for entry in self.batch_entryinfo:
+ output.append(self.display_json(entry))
+ output = '\n'.join(output)
+ return output
+
+
+class LocalDirExaminer(object):
+ '''
+ retrieval and display of directory contents on local host
+ '''
+ def __init__(self, path, batchno=1, batchsize=300, timeout=20,
quiet=False):
+ self.path = path
+ self.st = None
+ self.timeout = timeout
+ self.batchno = batchno
+ self.batchsize = batchsize
+ self.quiet = quiet
+
+ def run(self, quiet=False):
+ '''
+ do all the work
+
+ note that 'quiet' applies only to remotely
+ run, and the same is true for returning the contents.
+ maybe we want to fix that
+ '''
+
+ print ('WARNING: trying to get directory contents')
+ dcont = DirContents(self.path, self.batchno, self.batchsize, False)
+ result = dcont.get_contents()
+ if result != 'ok':
+            print ('WARNING: failed to get directory contents '
+ 'for <%s> (%s)'
+ % (self.path, result))
+ else:
+ dcont.get_batch_entryinfo()
+ output = dcont.show_batch()
+ return output
+
+
diff --git a/dataretention/retention/localfileaudit.py
b/dataretention/retention/localfileaudit.py
index dcef02d..fcd57c4 100644
--- a/dataretention/retention/localfileaudit.py
+++ b/dataretention/retention/localfileaudit.py
@@ -6,8 +6,6 @@
import stat
import locale
-sys.path.append('/srv/audits/retention/scripts/')
-
import retention.utils
import retention.magic
from retention.rule import Rule
@@ -23,7 +21,7 @@
audit files on the local host
in a specified set of directories
'''
- def __init__(self, audit_type,
+ def __init__(self, audit_type, confdir=None,
show_content=False, dirsizes=False,
depth=2, to_check=None, ignore_also=None,
timeout=60, maxfiles=None):
@@ -67,7 +65,7 @@
self.ignored = {}
self.ignores = Ignores(None)
- self.ignores.set_up_ignored()
+ self.ignores.set_up_ignored(confdir)
self.hostname = socket.getfqdn()
@@ -170,7 +168,7 @@
def get_subdirs_to_do(self, dirname, dirname_depth, todo):
locale.setlocale(locale.LC_ALL, '')
- if retention.fileutils.dir_is_ignored(dirname, self.ignores.ignored):
+ if retention.ignores.dir_is_ignored(dirname, self.ignores.ignored):
return todo
if retention.fileutils.dir_is_wrong_type(dirname):
return todo
@@ -284,7 +282,7 @@
if not retention.fileutils.dirtree_check(subdirpath,
self.dirs_to_check):
return
- if retention.fileutils.dir_is_ignored(subdirpath,
self.ignores.ignored):
+ if retention.ignores.dir_is_ignored(subdirpath, self.ignores.ignored):
return True
count = 0
diff --git a/dataretention/retention/localhomeaudit.py
b/dataretention/retention/localhomeaudit.py
index 3b323f8..17867db 100644
--- a/dataretention/retention/localhomeaudit.py
+++ b/dataretention/retention/localhomeaudit.py
@@ -1,7 +1,5 @@
import sys
-sys.path.append('/srv/audits/retention/scripts/')
-
import retention.utils
import retention.magic
from retention.localfileaudit import LocalFilesAuditor
@@ -16,14 +14,14 @@
or directories (dirs must end in '/') to skip during the audit
'''
- def __init__(self, audit_type,
+ def __init__(self, audit_type, confdir=None,
show_content=False, dirsizes=False,
depth=2, to_check=None, ignore_also=None, timeout=60,
maxfiles=None):
'''
see FilesAuditor for the arguments to the constructor
'''
- super(LocalHomesAuditor, self).__init__(audit_type,
+ super(LocalHomesAuditor, self).__init__(audit_type, confdir,
show_content, dirsizes,
depth, to_check, ignore_also,
timeout, maxfiles)
diff --git a/dataretention/retention/locallogaudit.py
b/dataretention/retention/locallogaudit.py
index 71d86f8..9852218 100644
--- a/dataretention/retention/locallogaudit.py
+++ b/dataretention/retention/locallogaudit.py
@@ -2,8 +2,6 @@
import sys
import glob
-sys.path.append('/srv/audits/retention/scripts/')
-
import retention.utils
import retention.magic
import retention.config
@@ -12,13 +10,13 @@
import retention.fileutils
class LocalLogsAuditor(LocalFilesAuditor):
- def __init__(self, audit_type,
+ def __init__(self, audit_type, confdir=None,
oldest=False,
show_content=False, show_system_logs=False,
dirsizes=False, depth=2,
to_check=None, ignore_also=None,
timeout=60, maxfiles=None):
- super(LocalLogsAuditor, self).__init__(audit_type,
+ super(LocalLogsAuditor, self).__init__(audit_type, confdir,
show_content, dirsizes,
depth, to_check, ignore_also,
timeout, maxfiles)
@@ -85,7 +83,7 @@
continue
if '*' in line:
log_group.extend(glob.glob(
- os.path.join(Config.cf['rotate_basedir'], line)))
+ os.path.join(retention.config.cf['rotate_basedir'],
line)))
else:
log_group.append(line)
elif state == 'want_rbracket':
@@ -111,7 +109,7 @@
def get_logrotate_defaults(self):
retention.config.set_up_conf()
- contents = open(Config.cf['rotate_mainconf']).read()
+ contents = open(retention.config.cf['rotate_mainconf']).read()
lines = contents.split('\n')
skip = False
freq = '-'
@@ -148,10 +146,10 @@
rotated_logs = {}
default_freq, default_keep = self.get_logrotate_defaults()
rotated_logs.update(LocalLogsAuditor.parse_logrotate_contents(
- open(Config.cf['rotate_mainconf']).read(),
+ open(retention.config.cf['rotate_mainconf']).read(),
default_freq, default_keep))
- for fname in os.listdir(Config.cf['rotate_basedir']):
- pathname = os.path.join(Config.cf['rotate_basedir'], fname)
+ for fname in os.listdir(retention.config.cf['rotate_basedir']):
+ pathname = os.path.join(retention.config.cf['rotate_basedir'],
fname)
if os.path.isfile(pathname):
rotated_logs.update(LocalLogsAuditor.parse_logrotate_contents(
open(pathname).read(), default_freq, default_keep))
@@ -164,7 +162,7 @@
# note that I also see my.cnf.s3 and we don't check those (yet)
retention.config.set_up_conf()
output = ''
- for filename in Config.cf['mysqlconf']:
+ for filename in retention.config.cf['mysqlconf']:
found = False
try:
contents = open(filename).read()
@@ -217,7 +215,7 @@
if not fields[1].isdigit():
continue
found = True
- if int(fields[1]) > Config.cf['cutoff']/86400:
+ if int(fields[1]) > retention.config.cf['cutoff']/86400:
if output:
output = output + '\n'
output = output + ('WARNING: some mysql logs expired
after %s days in %s'
@@ -264,7 +262,7 @@
for fname in all_files_sorted:
if retention.fileutils.contains(all_files[fname].filetype,
- Config.cf['ignored_types']):
+ retention.config.cf['ignored_types']):
continue
if (self.oldest_only and
diff --git a/dataretention/retention/localusercfgrabber.py b/dataretention/retention/localusercfgrabber.py
new file mode 100644
index 0000000..1b67a7e
--- /dev/null
+++ b/dataretention/retention/localusercfgrabber.py
@@ -0,0 +1,28 @@
+import json
+import retention.ignores
+
+class LocalUserCfGrabber(object):
+ '''
+ retrieval and display dirs / files listed as to
+ be ignored in per-user lists on local host
+ '''
+ def __init__(self, timeout, audit_type='homes'):
+ self.timeout = timeout
+ self.audit_type = audit_type
+ self.locations = audit_type + "_locations"
+
+ def run(self, quiet=False):
+ '''
+ do all the work
+
+ note that 'quiet' applies only to remotely
+ run, and the same is true for returning the contents.
+ maybe we want to fix that
+ '''
+
+ local_ignores = {}
+
+ local_ignores = retention.ignores.get_local_ignores(self.locations)
+ output = json.dumps(local_ignores)
+ print output
+ return output
diff --git a/dataretention/retention/remoteexaminer.py b/dataretention/retention/remoteexaminer.py
new file mode 100644
index 0000000..d14c3de
--- /dev/null
+++ b/dataretention/retention/remoteexaminer.py
@@ -0,0 +1,114 @@
+import json
+import logging
+
+from salt.client import LocalClient
+import retention.utils
+from retention.utils import JsonHelper
+from retention.fileinfo import EntryInfo
+
+log = logging.getLogger(__name__)
+
+class RemoteFileExaminer(object):
+ '''
+ retrieval and display of file contents on remote host
+ '''
+ def __init__(self, path, host, num_lines, timeout=20, quiet=False):
+ self.path = path
+ self.host = host
+ self.timeout = timeout
+ self.num_lines = num_lines
+ self.quiet = quiet
+
+ def run(self):
+ '''
+ do all the work
+ '''
+ client = LocalClient()
+ module_args = [self.path,
+ self.num_lines,
+ self.timeout]
+
+ result = client.cmd([self.host],
+ "retentionaudit.examine_file",
+ module_args, expr_form='list',
+ timeout=self.timeout)
+
+ if self.host in result:
+ if not self.quiet:
+ print result[self.host]
+ return result[self.host]
+
+
+class RemoteDirExaminer(object):
+ '''
+ retrieval and display of directory contents on remote host
+ '''
+ def __init__(self, path, host, batchno=1, batchsize=300, timeout=20,
+ prettyprint=False):
+ self.path = path
+ self.st = None
+ self.host = host
+ self.timeout = timeout
+ self.batchno = batchno
+ self.batchsize = batchsize
+ self.prettyprint = prettyprint
+
+ def run(self, quiet=False):
+ '''
+ do all the work
+
+ note that 'quiet' applies only to remotely
+ run, and the same is true for returning the contents.
+ maybe we want to fix that
+ '''
+
+ while True:
+ client = LocalClient()
+ module_args = [self.path, self.batchno,
+ self.batchsize, self.timeout,
+ quiet]
+
+ result = client.cmd([self.host],
+ "retentionaudit.examine_dir",
+ module_args, expr_form='list',
+ timeout=self.timeout)
+
+ if self.host in result:
+ lines = result[self.host].split("\n")
+
+ maxlen = 0
+ for line in lines:
+ if (line.startswith("WARNING:") or
+ line.startswith("INFO:")):
+ continue
+ else:
+ try:
+ entry = json.loads(
+ line, object_hook=JsonHelper.decode_dict)
+ if len(entry['path']) > maxlen:
+ maxlen = len(entry['path'])
+ except:
+ continue
+
+ if not quiet:
+ for line in lines:
+ if (line.startswith("WARNING:") or
+ line.startswith("INFO:")):
+ print line
+ else:
+ try:
+ entry = json.loads(
+ line,
+ object_hook=JsonHelper.decode_dict)
+ EntryInfo.display_from_dict(
+ entry, True, maxlen)
+ except:
+ print line
+ return result[self.host]
+ else:
+ print "Failed to retrieve dir content for", self.path, "on", self.host
+ continuing = ("Try again? Y/N [N]: ")
+ if continuing == "":
+ continuing = "N"
+ if continuing.upper() != "Y":
+ return None
diff --git a/dataretention/retention/remotefileauditor.py b/dataretention/retention/remotefileauditor.py
index 8cf61a4..f7f38a1 100644
--- a/dataretention/retention/remotefileauditor.py
+++ b/dataretention/retention/remotefileauditor.py
@@ -5,8 +5,6 @@
import socket
import runpy
-sys.path.append('/srv/audits/retention/scripts/')
-
import retention.utils
import retention.magic
from retention.status import Status
@@ -16,7 +14,6 @@
from retention.fileinfo import FileInfo
from retention.utils import JsonHelper
from retention.runner import Runner
-from retention.localfileaudit import LocalFilesAuditor
import retention.ruleutils
from retention.ignores import Ignores
@@ -70,7 +67,9 @@
audit files across a set of remote hosts,
in a specified set of directories
'''
- def __init__(self, hosts_expr, audit_type, prettyprint=False,
+ def __init__(self, hosts_expr, audit_type,
+ confdir=None,
+ prettyprint=False,
show_content=False, dirsizes=False, summary_report=False,
depth=2, to_check=None, ignore_also=None,
timeout=60, maxfiles=None,
@@ -80,6 +79,7 @@
hosts_expr: list or grain-based or wildcard expr for hosts
to be audited
audit_type: type of audit e.g. 'logs', 'homes'
+ confdir: directory where the ignores.yaml file is stored,
prettyprint: nicely format the output display
show_content: show the first line or so from problematic files
dirsizes: show only directories which have too many files to
@@ -108,6 +108,7 @@
self.hosts_expr = hosts_expr
self.audit_type = audit_type
+ self.confdir = confdir
self.locations = audit_type + "_locations"
self.prettyprint = prettyprint
self.show_sample_content = show_content
@@ -153,7 +154,7 @@
self.set_up_and_export_rule_store()
self.ignores = Ignores(self.cdb)
- self.ignores.set_up_ignored(self.ignore_also)
+ self.ignores.set_up_ignored(self.confdir, self.ignore_also)
if self.verbose:
self.ignores.show_ignored(retention.config.cf[self.locations])
@@ -164,7 +165,8 @@
self.display_from_dict = FileInfo.display_from_dict
def get_audit_args(self):
- audit_args = [self.show_sample_content,
+ audit_args = [self.confdir,
+ self.show_sample_content,
self.dirsizes,
self.depth - 1,
self.to_check,
diff --git a/dataretention/retention/remotehomeauditor.py b/dataretention/retention/remotehomeauditor.py
index 1f0e040..0adb819 100644
--- a/dataretention/retention/remotehomeauditor.py
+++ b/dataretention/retention/remotehomeauditor.py
@@ -1,8 +1,6 @@
import os
import sys
-sys.path.append('/srv/audits/retention/scripts/')
-
import retention.utils
import retention.magic
from retention.remotefileauditor import RemoteFilesAuditor
@@ -17,14 +15,15 @@
or directories (dirs must end in '/') to skip during the audit
'''
- def __init__(self, hosts_expr, audit_type, prettyprint=False,
+ def __init__(self, hosts_expr, audit_type, confdir=None, prettyprint=False,
show_content=False, dirsizes=False, summary_report=False,
depth=2, to_check=None, ignore_also=None, timeout=60,
maxfiles=None, store_filepath=None, verbose=False):
'''
see FilesAuditor for the arguments to the constructor
'''
- super(RemoteHomesAuditor, self).__init__(hosts_expr, audit_type, prettyprint,
+ super(RemoteHomesAuditor, self).__init__(hosts_expr, audit_type,
+ confdir, prettyprint,
show_content, dirsizes,
summary_report, depth,
to_check, ignore_also,
timeout,
@@ -32,7 +31,8 @@
self.homes_owners = {}
def get_audit_args(self):
- audit_args = [self.show_sample_content,
+ audit_args = [self.confdir,
+ self.show_sample_content,
self.dirsizes,
self.depth - 1,
self.to_check,
diff --git a/dataretention/retention/remotelogauditor.py b/dataretention/retention/remotelogauditor.py
index d917d9f..83fafda 100644
--- a/dataretention/retention/remotelogauditor.py
+++ b/dataretention/retention/remotelogauditor.py
@@ -1,8 +1,6 @@
import sys
import json
-sys.path.append('/srv/audits/retention/scripts/')
-
import retention.utils
import retention.magic
from retention.fileinfo import LogInfo
@@ -11,14 +9,15 @@
class RemoteLogsAuditor(RemoteFilesAuditor):
- def __init__(self, hosts_expr, audit_type, prettyprint=False,
+ def __init__(self, hosts_expr, audit_type, confdir=None, prettyprint=False,
oldest=False,
show_content=False, show_system_logs=False,
dirsizes=False, summary_report=False, depth=2,
to_check=None, ignore_also=None,
timeout=60, maxfiles=None, store_filepath=None,
verbose=False):
- super(RemoteLogsAuditor, self).__init__(hosts_expr, audit_type, prettyprint,
+ super(RemoteLogsAuditor, self).__init__(hosts_expr, audit_type,
+ confdir, prettyprint,
show_content, dirsizes,
summary_report, depth,
to_check, ignore_also, timeout,
@@ -26,12 +25,13 @@
self.oldest_only = oldest
self.show_system_logs = show_system_logs
if self.show_system_logs:
- self.ignored['files'].pop("/var/log")
+ self.ignores.ignored['files'].pop("/var/log")
self.display_from_dict = LogInfo.display_from_dict
def get_audit_args(self):
# fixme check if locallogauditor wants the oldest_only param
- audit_args = [self.oldest_only,
+ audit_args = [self.confdir,
+ self.oldest_only,
self.show_sample_content,
self.show_system_logs,
self.dirsizes,
diff --git a/dataretention/retention/remoteusercfgrabber.py b/dataretention/retention/remoteusercfgrabber.py
new file mode 100644
index 0000000..7f31af3
--- /dev/null
+++ b/dataretention/retention/remoteusercfgrabber.py
@@ -0,0 +1,58 @@
+import json
+import salt.client
+import salt.utils.yamlloader
+
+import retention.remotefileauditor
+import retention.utils
+from retention.utils import JsonHelper
+import retention.fileutils
+import retention.ruleutils
+import retention.cliutils
+import retention.config
+
+
+class RemoteUserCfGrabber(object):
+ '''
+ retrieval and display dirs / files listed as to
+ be ignored in per-user lists on remote host
+ '''
+ def __init__(self, host, timeout, audit_type):
+ self.host = host
+ self.timeout = timeout
+ self.audit_type = audit_type
+ self.locations = audit_type + "_locations"
+
+ def run(self, quiet=False):
+ '''
+ do all the work
+
+ note that 'quiet' applies only to remotely
+ run, and the same is true for returning the contents.
+ maybe we want to fix that
+ '''
+
+ local_ignores = {}
+
+ client = salt.client.LocalClient()
+ module_args = [self.timeout, self.audit_type]
+
+ result = client.cmd([self.host], "retentionaudit.retrieve_usercfs",
+ module_args, expr_form='list',
+ timeout=self.timeout)
+
+ if self.host in result:
+ input = result[self.host]
+ try:
+ local_ignores = json.loads(
+ input, object_hook=JsonHelper.decode_dict)
+ except:
+ print "WARNING: failed to get local ignores on host",
+ print self.host,
+ print "got this:", input
+ local_ignores = {}
+
+ if not quiet:
+ print local_ignores
+
+ return local_ignores
+
diff --git a/dataretention/retention/retentionaudit.py b/dataretention/retention/retentionaudit.py
index 0711ec1..87bf64d 100644
--- a/dataretention/retention/retentionaudit.py
+++ b/dataretention/retention/retentionaudit.py
@@ -1,32 +1,29 @@
# salt module
-import sys
import logging
-sys.path.append('/srv/audits/retention/scripts/')
+log = logging.getLogger(__name__)
from retention.localfileaudit import LocalFilesAuditor
from retention.locallogaudit import LocalLogsAuditor
from retention.localhomeaudit import LocalHomesAuditor
-from retention.examiner import LocalFileExaminer, LocalDirExaminer
-from retention.ignores import LocalUserCfRetriever
+from retention.localexaminer import LocalFileExaminer, LocalDirExaminer
+from retention.localusercfgrabber import LocalUserCfGrabber
-log = logging.getLogger(__name__)
-
-def fileaudit_host(show_content, dirsizes, depth,
+def fileaudit_host(confdir,show_content, dirsizes, depth,
to_check, ignore_also, timeout,
maxfiles):
- fauditor = LocalFilesAuditor('root', show_content,
+ fauditor = LocalFilesAuditor('root', confdir, show_content,
dirsizes, depth, to_check,
ignore_also, timeout,
maxfiles)
result = fauditor.do_local_audit()
return result
-def logaudit_host(oldest, show_content, show_system_logs,
+def logaudit_host(confdir, oldest, show_content, show_system_logs,
dirsizes, depth,
to_check, ignore_also, timeout,
maxfiles):
- lauditor = LocalLogsAuditor('logs', oldest, show_content,
+ lauditor = LocalLogsAuditor('logs', confdir, oldest, show_content,
show_system_logs,
dirsizes, depth, to_check,
ignore_also, timeout,
@@ -34,11 +31,11 @@
result = lauditor.do_local_audit()
return result
-def homeaudit_host(show_content,
+def homeaudit_host(confdir, show_content,
dirsizes, depth,
to_check, ignore_also, timeout,
maxfiles):
- hauditor = LocalHomesAuditor('homes', show_content,
+ hauditor = LocalHomesAuditor('homes', confdir, show_content,
dirsizes, depth, to_check,
ignore_also, timeout,
maxfiles)
@@ -60,6 +57,6 @@
return result
def retrieve_usercfs(timeout, audit_type):
- ucfsretriever = LocalUserCfRetriever(timeout, audit_type)
+ ucfsretriever = LocalUserCfGrabber(timeout, audit_type)
result = ucfsretriever.run()
return result
diff --git a/dataretention/rulestore.py b/dataretention/rulestore.py
index 976590d..515acf0 100644
--- a/dataretention/rulestore.py
+++ b/dataretention/rulestore.py
@@ -6,8 +6,6 @@
import sys
import getopt
-sys.path.append('/srv/audits/retention/scripts/')
-
from retention.saltclientplus import LocalClientPlus
import retention.utils
import retention.ruleutils
--
To view, visit https://gerrit.wikimedia.org/r/233467
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I3996ba7700580c959692dda70297de4b290b7b8f
Gerrit-PatchSet: 2
Gerrit-Project: operations/software
Gerrit-Branch: master
Gerrit-Owner: ArielGlenn <[email protected]>
Gerrit-Reviewer: ArielGlenn <[email protected]>
_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits