jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/908966 )
Change subject: [bugfix] Remove sites from FamilyFileGenerator.langs on error ...................................................................... [bugfix] Remove sites from FamilyFileGenerator.langs on error - If loading a site fails, remove it from langs attribute of FamilyFileGenerator - [s]trict: add the ability to process sites with the same domain only - with [e]dit sites can be given delimited by space, comma or both - collect self.prefixes for tests after FamilyFileGenerator.getapis() was processed - use [s]trict option for Wikimedia site within tests - remove old implementation removing obsolete sites in tests Bug: T334775 Bug: T334714 Change-Id: I5b049ead55e19f2a4c2ba522f34412e541737669 --- M tests/generate_family_file_tests.py M pywikibot/scripts/generate_family_file.py 2 files changed, 79 insertions(+), 22 deletions(-) Approvals: Xqt: Looks good to me, approved jenkins-bot: Verified diff --git a/pywikibot/scripts/generate_family_file.py b/pywikibot/scripts/generate_family_file.py index d5c00cc..8b0607c 100755 --- a/pywikibot/scripts/generate_family_file.py +++ b/pywikibot/scripts/generate_family_file.py @@ -12,7 +12,7 @@ <url>: an url from where the family settings are loaded <name>: the family name without "_family.py" tail. - <dointerwiki>: predefined answer (y|n) to add multiple site codes + <dointerwiki>: predefined answer (y|s|n) to add multiple site codes <verify>: disable certificate validation (y|n) Example:: @@ -25,14 +25,18 @@ .. versionchanged:: 7.0 moved to pywikibot.scripts folder; create family files in families folder of your base directory instead of pywikibot/families. +.. versionchanged:: 8.1 + [s]trict can be given for <dointerwiki> parameter to ensure that + sites are from the given domain. 
""" # -# (C) Pywikibot team, 2010-2022 +# (C) Pywikibot team, 2010-2023 # # Distributed under the terms of the MIT license # import codecs import os +import re import string import sys from contextlib import suppress @@ -63,8 +67,10 @@ :param url: an url from where the family settings are loaded :param name: the family name without "_family.py" tail. :param dointerwiki: Predefined answer to add multiple site - codes. Pass `Y` or `y` for yes `N` or `n` for no and - `E` or `e` if you want to edit the collection of sites. + codes. Pass `Y` or `y` for yes, `S` or `s` for strict which + only includes sites of the same domain (usually for Wikimedia + sites), `N` or `n` for no and `E` or `e` if you want to edit + the collection of sites. :param verify: If a certificate verification fails, you may pass `Y` or `y` to disable certificate validation `N` or `n` to keep it enabled. @@ -149,7 +155,14 @@ self.writefile(verify) def getlangs(self, w) -> None: - """Determine site code of a family.""" + """Determine site code of a family. + + .. versionchanged:: 8.1 + with [e]dit the interwiki list can be given delimited by + space or comma or both. With [s]trict only sites with the + same domain are collected. A [h]elp answer was added to show + more information about possible answers. + """ print('Determining other sites...', end='') try: self.langs = w.langs @@ -169,21 +182,39 @@ code_len = len(self.langs) if code_len > 1: if self.dointerwiki is None: - makeiw = input( - '\nThere are {} sites available.' - '\nDo you want to generate interwiki links? ' - 'This might take a long time. ([y]es/[N]o/[e]dit)' - .format(code_len)).lower() + while True: + makeiw = input( + '\n' + f'There are {code_len} sites available.' + ' Do you want to generate interwiki links?\n' + 'This might take a long time. 
' + '([y]es, [s]trict, [N]o, [e]dit, [h]elp) ').lower() + if makeiw in ('y', 's', 'n', 'e', ''): + break + print( + '\n' + '[y]es: create interwiki links for all sites\n' + '[s]trict: yes, but for sites with same domain only\n' + '[N]o: no, use the current site only (default)\n' + '[e]dit: get a list delimited with space or comma\n' + '[h]elp: this help message' + ) else: makeiw = self.dointerwiki - if makeiw == 'n': + if makeiw in ('n', ''): self.langs = [wiki for wiki in self.langs if wiki['url'] == w.iwpath] + elif makeiw == 's': + domain = '.'.join(urlparse(w.server).hostname.split('.')[1:]) + self.langs = [wiki for wiki in self.langs + if domain in wiki['url']] + elif makeiw == 'e': for wiki in self.langs: print(wiki['prefix'], wiki['url']) - do_langs = input('Which sites do you want: ') + do_langs = re.split(' *,| +', + input('Which sites do you want: ')) self.langs = [wiki for wiki in self.langs if wiki['prefix'] in do_langs or wiki['url'] == w.iwpath] @@ -196,7 +227,8 @@ def getapis(self) -> None: """Load other site pages.""" - print('Loading wikis... ') + print(f'Loading {len(self.langs)} wikis... ') + remove = [] for lang in self.langs: key = lang['prefix'] print(f' * {key}... ', end='') @@ -206,17 +238,21 @@ print('downloaded') except Exception as e: # pragma: no cover print(e) + remove.append(lang) else: print('in cache') + for lang in remove: + self.langs.remove(lang) + def writefile(self, verify) -> None: """Write the family file.""" fn = os.path.join(self.base_dir, 'families', f'{self.name}_family.py') print(f'Writing {fn}... ') - if os.path.exists(fn) and input('{} already exists. Overwrite? (y/n)' - .format(fn)).lower() == 'n': + if os.path.exists(fn) and input( + f'{fn} already exists. Overwrite? 
(y/n) ').lower() == 'n': print('Terminating.') sys.exit(1) diff --git a/tests/generate_family_file_tests.py b/tests/generate_family_file_tests.py index f219ac6..e97f884 100755 --- a/tests/generate_family_file_tests.py +++ b/tests/generate_family_file_tests.py @@ -11,6 +11,7 @@ from urllib.parse import urlparse from pywikibot import Site +from pywikibot.family import WikimediaFamily from pywikibot.scripts import generate_family_file from tests.aspects import DefaultSiteTestCase from tests.utils import skipping @@ -29,15 +30,15 @@ self.langs.append(wiki) break - self.prefixes = [item['prefix'] for item in self.langs] super().getapis() + self.prefixes = [item['prefix'] for item in self.langs] self.langs = save def writefile(self, verify): """Pass writing.""" -class TestGenerateFamilyFiles(DefaultSiteTestCase): +class TestGenerateFamilyFile(DefaultSiteTestCase): """Test generate_family_file functionality.""" @@ -55,15 +56,17 @@ def setUp(self): """Set up tests.""" super().setUp() + answer = 's' if isinstance(self.site.family, WikimediaFamily) else 'y' self.generator_instance = FamilyTestGenerator( - url=self.site.base_url(''), name=self.familyname, dointerwiki='y') + url=self.site.base_url(''), name=self.familyname, + dointerwiki=answer) def test_initial_attributes(self): """Test initial FamilyFileGenerator attributes.""" self.assertEqual(self.generator_instance.base_url, self.site.base_url('')) self.assertEqual(self.generator_instance.name, self.familyname) - self.assertEqual(self.generator_instance.dointerwiki, 'y') + self.assertIn(self.generator_instance.dointerwiki, ['s', 'y']) self.assertIsInstance(self.generator_instance.wikis, dict) self.assertIsInstance(self.generator_instance.langs, list) @@ -80,9 +83,7 @@ with self.subTest(test='Test element counts'): if self.site.lang not in gen.prefixes: gen.prefixes.append(self.site.lang) - obsolete = self.site.family.interwiki_removals - self.assertCountEqual(set(gen.prefixes) - obsolete, - set(gen.wikis) - obsolete) + 
self.assertCountEqual(gen.prefixes, gen.wikis) # test creating Site from url # only test Sites for downloaded wikis (T241413) -- To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/908966 To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings Gerrit-Project: pywikibot/core Gerrit-Branch: master Gerrit-Change-Id: I5b049ead55e19f2a4c2ba522f34412e541737669 Gerrit-Change-Number: 908966 Gerrit-PatchSet: 3 Gerrit-Owner: Xqt <i...@gno.de> Gerrit-Reviewer: Xqt <i...@gno.de> Gerrit-Reviewer: jenkins-bot Gerrit-MessageType: merged
_______________________________________________ Pywikibot-commits mailing list -- pywikibot-commits@lists.wikimedia.org To unsubscribe send an email to pywikibot-commits-le...@lists.wikimedia.org