Modified: trunk/Tools/Scripts/validate-committer-lists (290507 => 290508)
--- trunk/Tools/Scripts/validate-committer-lists 2022-02-25 15:49:58 UTC (rev 290507)
+++ trunk/Tools/Scripts/validate-committer-lists 2022-02-25 16:04:56 UTC (rev 290508)
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2009, Google Inc. All rights reserved.
#
@@ -66,8 +66,8 @@
class CommitterListFromMailingList(object):
- committers_list_url = "http://lists.webkit.org/mailman/roster/webkit-committers"
- reviewers_list_url = "http://lists.webkit.org/mailman/roster/webkit-reviewers"
+ committers_list_url = "https://lists.webkit.org/mailman/roster/webkit-committers"
+ reviewers_list_url = "https://lists.webkit.org/mailman/roster/webkit-reviewers"
def _fetch_emails_from_page(self, url):
page = urlopen(url)
@@ -201,10 +201,10 @@
def _fetch_authors_and_last_commit_time_from_git_log(self):
last_commit_dates = {}
git_log_args = ['git', 'log', '--reverse', '--pretty=format:%ae %at']
- process = subprocess.Popen(git_log_args, stdout=subprocess.PIPE)
+ process = subprocess.Popen(git_log_args, stdout=subprocess.PIPE, encoding='utf-8')
# [email protected]@268f45cc-cd09-0410-ab3c-d52691b4dbfc 1257090899
- line_regexp = re.compile("^(?P<author>.+)@\S+ (?P<timestamp>\d+)$")
+ line_regexp = re.compile(r"^(?P<author>.+@.+) (?P<timestamp>\d+)$")
while True:
output_line = process.stdout.readline()
if output_line == '' and process.poll() != None:
@@ -212,9 +212,9 @@
match_result = line_regexp.match(output_line)
if not match_result:
- _log.error("Failed to match line: %s" % output_line)
- exit(1)
- last_commit_dates[match_result.group('author')] = float(match_result.group('timestamp'))
+ continue
+ last_commit_dates['@'.join(match_result.group('author').split('@')[:2])] = float(match_result.group('timestamp'))
+ return last_commit_dates
def _fill_in_emails_for_old_logins(self):
authors_missing_email = [author for author in self._last_commit_time_by_author_cache if author.find('@') == -1]
@@ -246,7 +246,8 @@
if not self._last_commit_time_by_author_cache:
self._last_commit_time_by_author_cache = self._fetch_authors_and_last_commit_time_from_git_log()
self._fill_in_emails_for_old_logins()
- del self._last_commit_time_by_author_cache['(no author)'] # The initial svn import isn't very useful.
+ if '(no author)' in self._last_commit_time_by_author_cache:
+ del self._last_commit_time_by_author_cache['(no author)'] # The initial svn import isn't very useful.
return self._last_commit_time_by_author_cache
@staticmethod
@@ -255,7 +256,7 @@
def possibly_expired_committers(self, committer_list):
authors_and_last_commits = list(self._last_commit_by_author().items())
- authors_and_last_commits.sort(lambda a,b: cmp(a[1], b[1]), reverse=True)
+ authors_and_last_commits = reversed(sorted(authors_and_last_commits, key=lambda pair: pair[1]))
committer_cutof = date.today() - timedelta(days=365)
retired_authors_and_last_commits = []
for (author, last_commit) in authors_and_last_commits:
@@ -266,7 +267,7 @@
def possibly_inactive_reviewers(self, committer_list):
git_log_args = ['git', 'log', '--since=1.year']
- process = subprocess.Popen(git_log_args, stdout=subprocess.PIPE)
+ process = subprocess.Popen(git_log_args, stdout=subprocess.PIPE, encoding='utf-8')
git_output, err = process.communicate()
comment_regex = re.compile(r'^Date: .+?\n+(.+?)(?:^commit |\Z)', re.MULTILINE | re.DOTALL)
@@ -277,7 +278,7 @@
for comment in comment_regex.findall(git_output):
reviewer_match = reviewed_by_regexp.search(comment)
if reviewer_match:
- reviewers_text = reviewer_match.group('reviewer').decode('utf-8', 'backslashreplace')
+ reviewers_text = reviewer_match.group('reviewer')
# reviewers might be something like "Darin Adler and Dave Hyatt".
# Rather than trying to fuzzy match names, find known reviewers and remove them from the list.
for reviewer in reviewers:
@@ -294,7 +295,9 @@
print("Committers who have not committed within one year:")
self._print_three_column_row(column_widths, ("Last Commit", "Committer Email", "Committer Record"))
for (author, last_commit) in retired_authors_and_last_commits:
- committer_record = committer_list.committer_by_email(author)
+ committer_record = committer_list.committer_by_email(author) or committer_list.committer_by_email(author.split('@')[0])
+ if not committer_record or not committer_record.can_commit:
+ continue
last_commit_date = date.fromtimestamp(last_commit)
self._print_three_column_row(column_widths, (str(last_commit_date), author, committer_record))
@@ -305,13 +308,13 @@
print()
print("Reviewers who have not reviewed within one year:")
for contributor in inactive_reviewers:
- print("\"{}\" {}".format(contributor.full_name.encode("utf-8"), contributor.bugzilla_email()))
+ print("\"{}\" {}".format(contributor.full_name, contributor.bugzilla_email()))
def print_committers_missing_from_committer_list(self, committer_list):
missing_from_contributors_json = []
last_commit_time_by_author = self._last_commit_by_author()
for author in last_commit_time_by_author:
- if not committer_list.committer_by_email(author):
+ if not committer_list.contributor_by_email(author):
missing_from_contributors_json.append(author)
never_committed = []
Modified: trunk/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla.py (290507 => 290508)
--- trunk/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla.py 2022-02-25 15:49:58 UTC (rev 290507)
+++ trunk/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla.py 2022-02-25 16:04:56 UTC (rev 290508)
@@ -36,7 +36,6 @@
import re
import socket
import sys
-import urllib
from datetime import datetime # used in timestamp()
from webkitcorepy import BytesIO, StringIO, string_utils, unicode
@@ -50,6 +49,11 @@
from webkitpy.common.system.user import User
from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup, BeautifulStoneSoup, SoupStrainer
+if sys.version_info > (3, 0):
+ from urllib.parse import quote as urlquote
+else:
+ from urllib import quote as urlquote
+
_log = logging.getLogger(__name__)
@@ -233,7 +237,7 @@
# We may want to use a more explicit query than "quicksearch".
# If quicksearch changes we should probably change to use
# a normal buglist.cgi?query_format=advanced query.
- quicksearch_url = "buglist.cgi?quicksearch=%s" % urllib.quote(search_string)
+ quicksearch_url = "buglist.cgi?quicksearch=%s" % urlquote(search_string)
return self._fetch_bugs_from_advanced_query(quicksearch_url)
# Currently this returns all bugs across all components.
@@ -241,7 +245,7 @@
def fetch_bugs_matching_search(self, search_string):
query = "buglist.cgi?query_format=advanced"
if search_string:
- query += "&short_desc_type=allwordssubstr&short_desc=%s" % urllib.quote(search_string)
+ query += "&short_desc_type=allwordssubstr&short_desc=%s" % urlquote(search_string)
return self._fetch_bugs_from_advanced_query(query)
def fetch_patches_from_pending_commit_list(self):
@@ -252,7 +256,7 @@
query = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=review?"
if cc_email:
- query += "&emailcc1=1&emailtype1=substring&email1=%s" % urllib.quote(cc_email)
+ query += "&emailcc1=1&emailtype1=substring&email1=%s" % urlquote(cc_email)
return self._fetch_bugs_from_advanced_query(query)
@@ -286,13 +290,13 @@
# We could easily parse https://bugs.webkit.org/userprefs.cgi?tab=permissions to
# check permissions, but bugzilla will just return an error if we don't have them.
def fetch_login_userid_pairs_matching_substring(self, search_string):
- review_queue_url = "editusers.cgi?action=list&matchvalue=login_name&matchstr=%s&matchtype=substr" % urllib.quote(search_string)
+ review_queue_url = "editusers.cgi?action=list&matchvalue=login_name&matchstr=%s&matchtype=substr" % urlquote(search_string)
results_page = self._load_query(review_queue_url)
# We could pull the EditUsersParser off Bugzilla if needed.
return EditUsersParser().login_userid_pairs_from_edit_user_results(results_page)
def is_invalid_bugzilla_email(self, search_string):
- review_queue_url = "request.cgi?action=queue&requester=%s&product=&type=review&requestee=&component=&group=requestee" % urllib.quote(search_string)
+ review_queue_url = "request.cgi?action=queue&requester=%s&product=&type=review&requestee=&component=&group=requestee" % urlquote(search_string)
results_page = self._load_query(review_queue_url)
return bool(re.search('did not match anything', string_utils.decode(results_page.read(), target_type=str)))