Author: sebb
Date: Fri Sep 15 12:02:08 2023
New Revision: 1912325
URL: http://svn.apache.org/viewvc?rev=1912325&view=rev
Log:
Pylint
Modified:
comdev/projects.apache.org/trunk/scripts/cronjobs/countaccounts.py
comdev/projects.apache.org/trunk/scripts/cronjobs/errtee.py
comdev/projects.apache.org/trunk/scripts/cronjobs/generaterepos.py
comdev/projects.apache.org/trunk/scripts/cronjobs/jsonutils.py
comdev/projects.apache.org/trunk/scripts/cronjobs/parsecommitteeinfo.py
comdev/projects.apache.org/trunk/scripts/cronjobs/parsecommitters.py
comdev/projects.apache.org/trunk/scripts/cronjobs/parseprojects.py
comdev/projects.apache.org/trunk/scripts/cronjobs/parsereleases.py
comdev/projects.apache.org/trunk/scripts/cronjobs/podlings.py
comdev/projects.apache.org/trunk/scripts/cronjobs/sendmail.py
comdev/projects.apache.org/trunk/scripts/cronjobs/testlogging.py
comdev/projects.apache.org/trunk/scripts/cronjobs/urlutils.py
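
The hunks below apply a handful of recurring Pylint fixes rather than functional changes: explicit text encodings on open(), timeouts on requests calls, and specific exception classes instead of the bare Exception. As a rough illustration only (the function names and values here are invented, not taken from these scripts), the warning classes being addressed look like this:

    # Hypothetical sketch of the recurring Pylint fixes; names are not from the repo.
    import json
    import requests

    def load_counts(path):
        # unspecified-encoding: name the text encoding explicitly when opening files.
        with open(path, "r", encoding='utf-8') as f:
            return json.loads(f.read())

    def fetch_repositories(url):
        # missing-timeout: never call requests.get() without a timeout.
        response = requests.get(url, timeout=120)
        response.raise_for_status()
        return json.loads(response.content.decode("utf-8"))

    def check_positive(value):
        if value <= 0:
            # broad-exception-raised: raise a specific exception instead of Exception.
            raise ValueError("value must be positive")
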
Modified: comdev/projects.apache.org/trunk/scripts/cronjobs/countaccounts.py
URL: http://svn.apache.org/viewvc/comdev/projects.apache.org/trunk/scripts/cronjobs/countaccounts.py?rev=1912325&r1=1912324&r2=1912325&view=diff
==============================================================================
--- comdev/projects.apache.org/trunk/scripts/cronjobs/countaccounts.py (original)
+++ comdev/projects.apache.org/trunk/scripts/cronjobs/countaccounts.py Fri Sep 15 12:02:08 2023
@@ -64,7 +64,7 @@ def loadJson(url):
return j
js = {}
-with open("../../site/json/foundation/accounts-evolution.json") as f:
+with open("../../site/json/foundation/accounts-evolution.json", "r", encoding='utf-8') as f:
js = json.loads(f.read())
f.close()
@@ -101,7 +101,7 @@ for p in ldappeople:
if stamp.startswith(tym1):
js[ym1] += 1
-with open("../../site/json/foundation/accounts-evolution.json", "w") as f:
+with open("../../site/json/foundation/accounts-evolution.json", "w", encoding='utf-8') as f:
json.dump(js, f, sort_keys=True, indent=0)
f.close()
Modified: comdev/projects.apache.org/trunk/scripts/cronjobs/errtee.py
URL: http://svn.apache.org/viewvc/comdev/projects.apache.org/trunk/scripts/cronjobs/errtee.py?rev=1912325&r1=1912324&r2=1912325&view=diff
==============================================================================
--- comdev/projects.apache.org/trunk/scripts/cronjobs/errtee.py (original)
+++ comdev/projects.apache.org/trunk/scripts/cronjobs/errtee.py Fri Sep 15 12:02:08 2023
@@ -33,4 +33,4 @@ if __name__ == '__main__': # simple test
sys.stderr.write("STDERR2\n")
sys.stderr=ErrTee() # enable for testing
sys.stderr.write("STDERR3 (should also appear on STDOUT)\n")
- raise Exception("STDERR4 (should also appear on STDOUT)")
\ No newline at end of file
+ raise ValueError("STDERR4 (should also appear on STDOUT)")
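
errtee appears to be imported by the other scripts purely for its side effect of tee-ing stderr onto stdout (hence the test strings above), which is why the diffs below silence unused-import inline instead of dropping the import. A minimal sketch of that idea, not the real ErrTee implementation, would be:

    import sys

    class _Tee:
        """Copy everything written to stderr onto stdout as well."""
        def __init__(self):
            self._stderr = sys.stderr
        def write(self, text):
            self._stderr.write(text)
            sys.stdout.write(text)
        def flush(self):
            self._stderr.flush()
            sys.stdout.flush()

    sys.stderr = _Tee()  # installing the tee is the whole point of importing the module

A consumer then writes `import errtee  # pylint: disable=unused-import`, exactly as the hunks below do.
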
Modified: comdev/projects.apache.org/trunk/scripts/cronjobs/generaterepos.py
URL: http://svn.apache.org/viewvc/comdev/projects.apache.org/trunk/scripts/cronjobs/generaterepos.py?rev=1912325&r1=1912324&r2=1912325&view=diff
==============================================================================
--- comdev/projects.apache.org/trunk/scripts/cronjobs/generaterepos.py (original)
+++ comdev/projects.apache.org/trunk/scripts/cronjobs/generaterepos.py Fri Sep 15 12:02:08 2023
@@ -35,7 +35,7 @@ class SVNRepoParser(HTMLParser):
# Parse svn repos
try:
- svnResponse = requests.get("https://svn.apache.org/repos/asf/")
+    svnResponse = requests.get("https://svn.apache.org/repos/asf/", timeout=120)
svnResponse.raise_for_status()
parser = SVNRepoParser()
@@ -46,7 +46,7 @@ except requests.exceptions.RequestExcept
# Parse git repos
try:
- gitResponse = requests.get("https://gitbox.apache.org/repositories.json")
+    gitResponse = requests.get("https://gitbox.apache.org/repositories.json", timeout=120)
gitResponse.raise_for_status()
gitData = json.loads(gitResponse.content.decode("utf-8"))
@@ -57,7 +57,7 @@ except requests.exceptions.RequestExcept
print("ERROR: Unable to retrieve git repos: %s", e)
print("Writing json/foundation/repositories.json...")
-with open("../../site/json/foundation/repositories.json", "w") as f:
+with open("../../site/json/foundation/repositories.json", "w", encoding='utf-8') as f:
json.dump(repos, f, sort_keys=True, indent=0)
f.close()
Modified: comdev/projects.apache.org/trunk/scripts/cronjobs/jsonutils.py
URL: http://svn.apache.org/viewvc/comdev/projects.apache.org/trunk/scripts/cronjobs/jsonutils.py?rev=1912325&r1=1912324&r2=1912325&view=diff
==============================================================================
--- comdev/projects.apache.org/trunk/scripts/cronjobs/jsonutils.py (original)
+++ comdev/projects.apache.org/trunk/scripts/cronjobs/jsonutils.py Fri Sep 15 12:02:08 2023
@@ -24,4 +24,4 @@ def read_utf8(path):
Read and parse JSON from the given file path assuming UTF-8 encoding
"""
with open(path, "rb") as f:
- input = json.loads(f.read().decode('UTF-8', errors='replace'))
+ return json.loads(f.read().decode('UTF-8', errors='replace'))
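
For context, the whole helper after this one-line change reads roughly as follows (reconstructed from the hunk above; the rest of the module is not shown and may differ). Returning the parsed value also removes the local that previously shadowed the `input` builtin and was never used:

    import json

    def read_utf8(path):
        """Read and parse JSON from the given file path assuming UTF-8 encoding."""
        with open(path, "rb") as f:
            return json.loads(f.read().decode('UTF-8', errors='replace'))
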
Modified: comdev/projects.apache.org/trunk/scripts/cronjobs/parsecommitteeinfo.py
URL: http://svn.apache.org/viewvc/comdev/projects.apache.org/trunk/scripts/cronjobs/parsecommitteeinfo.py?rev=1912325&r1=1912324&r2=1912325&view=diff
==============================================================================
--- comdev/projects.apache.org/trunk/scripts/cronjobs/parsecommitteeinfo.py (original)
+++ comdev/projects.apache.org/trunk/scripts/cronjobs/parsecommitteeinfo.py Fri Sep 15 12:02:08 2023
@@ -13,13 +13,12 @@ Updates:
"""
-import errtee
+import errtee # pylint: disable=unused-import
import re
import json
import sys
if sys.hexversion < 0x03000000:
raise ImportError("This script requires Python 3")
-import io
import os
import os.path
import xml.etree.ElementTree as ET
@@ -82,7 +81,7 @@ skipImageTest = len(sys.argv) >= 2 and s
# get PMC Data from /data/committees.xml
print("Reading PMC Data (/data/committees.xml)")
-with open("../../data/committees.xml", "r") as f:
+with open("../../data/committees.xml", "r", encoding='utf-8') as f:
xmldoc = minidom.parseString(f.read())
f.close()
@@ -220,7 +219,7 @@ for group in sorted(committees, key=keyo
committee['charter'] = compress(pmcs[committeeId]['charter'])
committeesList.append(committee)
- committeesMap[committeeId] = committee;
+ committeesMap[committeeId] = committee
else:
print("INFO: %s ignored - not yet in section 3" % fullName)
else:
@@ -229,12 +228,12 @@ for group in sorted(committees, key=keyo
# detect retired committees to add to committees-retired.json
-with open("../../site/json/foundation/committees-retired.json", "r") as f:
+with open("../../site/json/foundation/committees-retired.json", "r", encoding='utf-8') as f:
committeesRetired = json.loads(f.read())
f.close()
committeesRetiredIds = [item['id'] for item in committeesRetired]
-with open("../../site/json/foundation/committees.json", "r") as f:
+with open("../../site/json/foundation/committees.json", "r", encoding='utf-8') as f:
committeesPrevious = json.loads(f.read())
f.close()
committeesPreviousIds = [item['id'] for item in committeesPrevious]
@@ -271,13 +270,13 @@ for previous in committeesPrevious:
committeesRetired.append(previous)
print("Writing json/foundation/committees.json...")
-with open("../../site/json/foundation/committees.json", "w") as f:
+with open("../../site/json/foundation/committees.json", "w", encoding='utf-8') as f:
json.dump(committeesList, f, sort_keys=True, indent=0)
f.close()
print("Writing json/foundation/committees-retired.json...")
-with open("../../site/json/foundation/committees-retired.json", "w") as f:
+with open("../../site/json/foundation/committees-retired.json", "w", encoding='utf-8') as f:
json.dump(committeesRetired, f, sort_keys=True, indent=0)
f.close()
-print("All done")
\ No newline at end of file
+print("All done")
Modified: comdev/projects.apache.org/trunk/scripts/cronjobs/parsecommitters.py
URL: http://svn.apache.org/viewvc/comdev/projects.apache.org/trunk/scripts/cronjobs/parsecommitters.py?rev=1912325&r1=1912324&r2=1912325&view=diff
==============================================================================
--- comdev/projects.apache.org/trunk/scripts/cronjobs/parsecommitters.py (original)
+++ comdev/projects.apache.org/trunk/scripts/cronjobs/parsecommitters.py Fri Sep 15 12:02:08 2023
@@ -20,9 +20,8 @@ Creates:
"""
-import errtee
+import errtee # pylint: disable=unused-import
import sys
-import io
import json
from urlutils import UrlCache
@@ -137,4 +136,4 @@ with open("../../site/json/foundation/ac
json.dump(accounts, f, sort_keys=True, indent=0, ensure_ascii=False)
###### Test of alternate account evolution counting - end #####
-print("All done!")
\ No newline at end of file
+print("All done!")
Modified: comdev/projects.apache.org/trunk/scripts/cronjobs/parseprojects.py
URL: http://svn.apache.org/viewvc/comdev/projects.apache.org/trunk/scripts/cronjobs/parseprojects.py?rev=1912325&r1=1912324&r2=1912325&view=diff
==============================================================================
--- comdev/projects.apache.org/trunk/scripts/cronjobs/parseprojects.py (original)
+++ comdev/projects.apache.org/trunk/scripts/cronjobs/parseprojects.py Fri Sep 15 12:02:08 2023
@@ -18,7 +18,7 @@ Deletes any obsolete files from:
"""
-import errtee # N.B. this is imported for its side-effect
+import errtee # pylint: disable=unused-import
import sys
if sys.hexversion < 0x03000000:
raise ImportError("This script requires Python 3")
@@ -45,7 +45,7 @@ FAILURES_DIR = '../../failures'
# grab the validation criteria
validation = {}
-with open(os.path.join(SITEDIR, "validation.json")) as f:
+with open(os.path.join(SITEDIR, "validation.json"), encoding='utf-8') as f:
validation = json.loads(f.read())
langs = {}
lang = validation['languages'].keys()
@@ -114,7 +114,7 @@ if '--test' in sys.argv:
print(f"Test mode; will cache DOAPs under {tmpdir}")
filecache = urlutils.UrlCache(cachedir=tmpdir, interval=-1, silent=True)
-with open(projectsList, "r") as f:
+with open(projectsList, "r", encoding='utf-8') as f:
data = f.read()
f.close()
xmldoc = minidom.parseString(data)
@@ -170,9 +170,8 @@ ATTIC = 'Attic <[email protected]
# Print to log and send a conditional email to Attic
def printAtticMail(msg, file=sys.stdout):
print(msg, file=file)
- import datetime
# Only send the mail once a week
- if datetime.datetime.now().day % 7 != 0:
+ if datetime.now().day % 7 != 0:
print("Not sending the email to '" + str(ATTIC) +"'" , file=file)
return
try:
@@ -185,7 +184,7 @@ def site2committee(s):
return siteMap[s]
return s
-with open("../../site/json/foundation/committees-retired.json", "r") as f:
+with open("../../site/json/foundation/committees-retired.json", "r", encoding='utf-8') as f:
committeesRetired = json.loads(f.read())
f.close()
retired = []
@@ -422,7 +421,7 @@ for s in itemlist :
urlname = urlname.split(';')[0] # trim any trailing qualifiers
urlname = join(FAILURES_DIR, urlname)
print("Saving invalid data in %s " % urlname)
- with open (urlname, "wb") as f:
+ with open (urlname, "wb", encoding='utf-8') as f:
f.write(rdf)
f.close()
@@ -439,7 +438,7 @@ for f in os.listdir(PROJECTS_DIR):
os.remove(join(PROJECTS_DIR,f))
if len(failures) > 0:
- with open ("parseprojects-failures.xml", "w") as f:
+ with open ("parseprojects-failures.xml", "w", encoding='utf-8') as f:
f.write("<doapFiles>\n")
for fail in failures:
f.write("<location>%s</location>\n" % fail)
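
The printAtticMail change relies on the module already importing the datetime class at the top level; the hunk only shows the redundant local `import datetime` being dropped. Under that assumption, the weekly throttle now reads roughly like this (helper name invented for illustration):

    from datetime import datetime

    def should_mail_today():
        # Hypothetical helper: only send the Attic reminder roughly once a week,
        # i.e. on days of the month divisible by 7, mirroring the check above.
        return datetime.now().day % 7 == 0
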
Modified: comdev/projects.apache.org/trunk/scripts/cronjobs/parsereleases.py
URL: http://svn.apache.org/viewvc/comdev/projects.apache.org/trunk/scripts/cronjobs/parsereleases.py?rev=1912325&r1=1912324&r2=1912325&view=diff
==============================================================================
--- comdev/projects.apache.org/trunk/scripts/cronjobs/parsereleases.py (original)
+++ comdev/projects.apache.org/trunk/scripts/cronjobs/parsereleases.py Fri Sep 15 12:02:08 2023
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
-import errtee # this is imported for its side-effects
+import errtee # pylint: disable=unused-import
from collections import defaultdict
import gzip
import json
@@ -145,9 +145,9 @@ if __name__ == '__main__':
jsondir = join(myhome, 'site', 'json', 'foundation') # where the JSON files go
main()
print("Writing releases.json")
- with open(join(jsondir, "releases.json"), "w") as f:
+ with open(join(jsondir, "releases.json"), "w", encoding='utf-8') as f:
json.dump(releases, f, sort_keys=True, indent=0)
print("Writing releases-files.json")
- with open(join(jsondir, "releases-files.json"), "w") as f:
+    with open(join(jsondir, "releases-files.json"), "w", encoding='utf-8') as f:
json.dump(files, f, sort_keys=True, indent=0)
print("All done!")
Modified: comdev/projects.apache.org/trunk/scripts/cronjobs/podlings.py
URL: http://svn.apache.org/viewvc/comdev/projects.apache.org/trunk/scripts/cronjobs/podlings.py?rev=1912325&r1=1912324&r2=1912325&view=diff
==============================================================================
--- comdev/projects.apache.org/trunk/scripts/cronjobs/podlings.py (original)
+++ comdev/projects.apache.org/trunk/scripts/cronjobs/podlings.py Fri Sep 15 12:02:08 2023
@@ -1,4 +1,4 @@
-import errtee
+import errtee # pylint: disable=unused-import
from xml.dom import minidom
import re
import json
Modified: comdev/projects.apache.org/trunk/scripts/cronjobs/sendmail.py
URL: http://svn.apache.org/viewvc/comdev/projects.apache.org/trunk/scripts/cronjobs/sendmail.py?rev=1912325&r1=1912324&r2=1912325&view=diff
==============================================================================
--- comdev/projects.apache.org/trunk/scripts/cronjobs/sendmail.py (original)
+++ comdev/projects.apache.org/trunk/scripts/cronjobs/sendmail.py Fri Sep 15 12:02:08 2023
@@ -24,7 +24,7 @@ def sendMail(subject, body='', recipient
smtp.sendmail(sender, recipients, msg.as_string())
smtp.quit()
-if __name__ == '__main__':
+def main():
import sys
port = 25
if len(sys.argv) > 1: # argv[0] is the script name
@@ -37,3 +37,6 @@ if __name__ == '__main__':
print("Sent")
sendMail('Another Test message, please ignore', "Thanks again!",
recipients=['a.b.c','d.e.f'], port=port)
print("Sent second")
+
+if __name__ == '__main__':
+ main()
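
Moving the body of the `if __name__ == '__main__':` block into a `main()` function, here and again in urlutils.py below, keeps the self-test behaviour while turning its variables into function locals, which sidesteps Pylint's naming checks for module-level variables. A stripped-down sketch of the shape, with a hypothetical body:

    import sys

    def main():
        port = 25
        if len(sys.argv) > 1:  # argv[0] is the script name
            port = int(sys.argv[1])
        print("would send the test mails via port", port)

    if __name__ == '__main__':
        main()
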
Modified: comdev/projects.apache.org/trunk/scripts/cronjobs/testlogging.py
URL: http://svn.apache.org/viewvc/comdev/projects.apache.org/trunk/scripts/cronjobs/testlogging.py?rev=1912325&r1=1912324&r2=1912325&view=diff
==============================================================================
--- comdev/projects.apache.org/trunk/scripts/cronjobs/testlogging.py (original)
+++ comdev/projects.apache.org/trunk/scripts/cronjobs/testlogging.py Fri Sep 15 12:02:08 2023
@@ -2,7 +2,7 @@
import sys
import os
-import errtee
+import errtee # pylint: disable=unused-import
print("Stdout1")
if 'ERRTEE' in os.environ:
@@ -11,4 +11,4 @@ else:
print("ERRTEE is not defined")
print("Stderr2", file=sys.stderr) # should appear in log file if ERRTEE is
defined
print("Stdout3")
-raise Exception("Except") # should appear in log file if ERRTEE is defined
+raise ValueError("Except") # should appear in log file if ERRTEE is defined
Modified: comdev/projects.apache.org/trunk/scripts/cronjobs/urlutils.py
URL: http://svn.apache.org/viewvc/comdev/projects.apache.org/trunk/scripts/cronjobs/urlutils.py?rev=1912325&r1=1912324&r2=1912325&view=diff
==============================================================================
--- comdev/projects.apache.org/trunk/scripts/cronjobs/urlutils.py (original)
+++ comdev/projects.apache.org/trunk/scripts/cronjobs/urlutils.py Fri Sep 15 12:02:08 2023
@@ -15,7 +15,7 @@ try:
from urllib.error import HTTPError
from urllib.parse import urlparse
_PY3 = True
-except:
+except ImportError:
from urllib2 import urlopen, Request
from urllib2 import HTTPError
from urlparse import urlparse
@@ -113,14 +113,14 @@ def URLexists(url):
try:
getIfNewer(url, method='HEAD', silent=True)
return True
- except:
+ except Exception: # narrow this
return False
def findRelPath(relpath):
for d in ['./','../','../../']: # we may located at same level or 1 or 2 below
- dir = join(d,relpath)
- if os.path.isdir(dir):
- return dir
+ folder = join(d,relpath)
+ if os.path.isdir(folder):
+ return folder
raise OSError("Cannot find path " + relpath)
class UrlCache(object):
@@ -272,7 +272,7 @@ class UrlCache(object):
if useFileModTime:
os.utime(check, None) # touch the marker file
else:
- with open(check,'a'):
+ with open(check,'a', encoding='utf-8'):
os.utime(check, None) # touch the marker file
if encoding:
@@ -280,7 +280,7 @@ class UrlCache(object):
else:
return open(target, 'rb')
-if __name__ == '__main__':
+def main():
print(URLexists('https://www.apache.org/'))
print(URLexists('https://www.apache.org/__'))
print(URLexists('https://__.apache.org/'))
@@ -289,16 +289,19 @@ if __name__ == '__main__':
try:
fc = UrlCache(cachedir='x')
- raise Error("Expected OSError")
- except OSError as e:
- print('Expected: %s' % e)
+ raise AssertionError("Expected OSError")
+ except OSError as ex:
+ print('Expected: %s' % ex)
fc = UrlCache(interval=0)
name = "_wao.html"
- fc._deleteCacheFile(name)
+ fc._deleteCacheFile(name) # pylint: disable=protected-access
icla_info = fc.get("http://www.apache.org/", name, encoding='utf-8')
print(icla_info.readline().rstrip())
print(icla_info.readline().rstrip())
print(icla_info.readline().rstrip())
print(icla_info.readline().rstrip())
icla_info = fc.get("http://www.apache.org/", name, encoding='utf-8')
- fc._deleteCacheFile(name)
+ fc._deleteCacheFile(name) # pylint: disable=protected-access
+
+if __name__ == '__main__':
+ main()
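
The exception-handling changes in urlutils.py follow the same rule as the rest of the commit: name the exception you expect instead of using a bare `except:`. A minimal illustration of the import-fallback case, simplified from the hunk above (the real module imports several more names):

    try:
        from urllib.request import urlopen  # Python 3
        _PY3 = True
    except ImportError:
        from urllib2 import urlopen  # Python 2 fallback
        _PY3 = False
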