jenkins-bot has submitted this change and it was merged.

Change subject: APISite.editpage should not reload the Page
......................................................................


APISite.editpage should not reload the Page

APISite.editpage reloads the revisions after a successful save to
ensure that the Page is up to date, as the actual text of the page
may differ from the submitted text due to merges performed by the
server to resolve edit conflicts.

Previously, APISite and api achieved this by modifying private
variables of BasePage.

By exposing a public setter and deleter for latest_revision_id,
APISite and api can instead use this property to tell BasePage when
its caches are invalid; BasePage then clears those caches so that
subsequent fetches retrieve the actual text of the saved revision
when it is needed.

Normalise WikibasePage to use the attribute _revid instead of
lastrevid, deprecating the latter.

Also add missing text deleter to ProofreadPage.
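
As a quick illustration of the new pattern, here is a minimal
standalone sketch (not the pywikibot implementation; the CachedPage
class and the literal revid 42 are invented for the example, while
the attribute and property names mirror the patch):

    class CachedPage(object):

        """Toy page whose revision id property clears dependent caches."""

        @property
        def latest_revision_id(self):
            """Return the cached latest revision id."""
            return self._revid

        @latest_revision_id.deleter
        def latest_revision_id(self):
            """Clear values only valid for a known latest revision."""
            # The text cache is handled by the separate text deleter below.
            for attr in ('_revid', '_redirtarget', '_getexception'):
                if hasattr(self, attr):
                    delattr(self, attr)

        @latest_revision_id.setter
        def latest_revision_id(self, value):
            """Record a new latest revision id, invalidating stale caches."""
            del self.latest_revision_id
            self._revid = value

        @property
        def text(self):
            """Return the page text, fetching it if it is not cached."""
            if not hasattr(self, '_text'):
                self._text = 'fetched from the server'  # stand-in for a load
            return self._text

        @text.deleter
        def text(self):
            """Discard the cached text so the next access refetches it."""
            if hasattr(self, '_text'):
                del self._text


    page = CachedPage()
    page.text                      # populates the text cache
    page.latest_revision_id = 42   # e.g. result['edit']['newrevid']
    del page.text                  # saved text may differ; refetch lazily

The last three lines mirror what APISite.editpage does after a
successful save in this change: store the new revid via the public
property, then delete the cached text instead of eagerly reloading
the revision.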

Change-Id: I55a49d115a8f207216f24f9acbfa64d87da88578
---
M pywikibot/data/api.py
M pywikibot/page.py
M pywikibot/proofreadpage.py
M pywikibot/site.py
M tests/aspects.py
A tests/basepage_tests.py
M tests/flow_tests.py
M tests/pagegenerators_tests.py
M tests/proofreadpage_tests.py
M tests/site_tests.py
M tests/wikibase_tests.py
M tox.ini
12 files changed, 414 insertions(+), 79 deletions(-)

Approvals:
  XZise: Looks good to me, approved
  jenkins-bot: Verified



diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index 38ddee2..fbdb02d 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -2786,9 +2786,8 @@
             page._revisions[revision.revid] = revision
 
     if 'lastrevid' in pagedict:
-        page._revid = pagedict['lastrevid']
-        if page._revid in page._revisions:
-            page._text = page._revisions[page._revid].text
+        page.latest_revision_id = pagedict['lastrevid']
+        del page.text
 
     if 'imageinfo' in pagedict:
         assert(isinstance(page, pywikibot.FilePage))
diff --git a/pywikibot/page.py b/pywikibot/page.py
index 7c27551..2c49270 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -53,8 +53,8 @@
     UserRightsError,
 )
 from pywikibot.tools import (
-    MediaWikiVersion, UnicodeMixin, DotReadableDict,
-    ComparableMixin, deprecated, deprecate_arg, deprecated_args,
+    MediaWikiVersion, UnicodeMixin, ComparableMixin, DotReadableDict,
+    deprecated, deprecate_arg, deprecated_args, issue_deprecation_warning,
     first_upper, remove_last_args, _NotImplementedWarning,
     OrderedDict, Counter,
 )
@@ -347,13 +347,7 @@
 
         """
         if force:
-            # When forcing, we retry the page no matter what:
-            # * Old exceptions do not apply any more
-            # * Deleting _revid to force reload
-            # * Deleting _redirtarget, that info is now obsolete.
-            for attr in ['_redirtarget', '_getexception', '_revid']:
-                if hasattr(self, attr):
-                    delattr(self, attr)
+            del self.latest_revision_id
         try:
             self._getInternals(sysop)
         except pywikibot.IsRedirectPage:
@@ -431,6 +425,34 @@
             self.revisions(self)
         return self._revid
 
+    @latest_revision_id.deleter
+    def latest_revision_id(self):
+        """Remove the latest revision id set for this Page.
+
+        All internal cached values specifically for the latest revision
+        of this page are cleared.
+
+        The following cached values are not cleared:
+        - text property
+        - page properties, and page coordinates
+        - lastNonBotUser
+        - isDisambig and isCategoryRedirect status
+        - langlinks, templates and deleted revisions
+        """
+        # When forcing, we retry the page no matter what:
+        # * Old exceptions do not apply any more
+        # * Deleting _revid to force reload
+        # * Deleting _redirtarget, that info is now obsolete.
+        for attr in ['_redirtarget', '_getexception', '_revid']:
+            if hasattr(self, attr):
+                delattr(self, attr)
+
+    @latest_revision_id.setter
+    def latest_revision_id(self, value):
+        """Set the latest revision for this Page."""
+        del self.latest_revision_id
+        self._revid = value
+
     @deprecated('latest_revision_id')
     def latestRevision(self):
         """Return the current revision id for this page."""
@@ -480,6 +502,8 @@
         """Delete the current (edited) wikitext."""
         if hasattr(self, "_text"):
             del self._text
+        if hasattr(self, '_expanded_text'):
+            del self._expanded_text
 
     def preloadText(self):
         """The text returned by EditFormPreloadText.
@@ -967,6 +991,11 @@
         @return: bool
         """
         # TODO: move this to Site object?
+
+        # FIXME: templatesWithParams is defined in Page only.
+        if not hasattr(self, 'templatesWithParams'):
+            return True
+
         if config.ignore_bot_templates:  # Check the "master ignore switch"
             return True
         username = self.site.user()
@@ -3134,6 +3163,30 @@
 
         return params
 
+    def __getattribute__(self, name):
+        """Low-level attribute getter. Deprecates lastrevid."""
+        if name == 'lastrevid':
+            issue_deprecation_warning(
+                'WikibasePage.lastrevid', 'latest_revision_id', 2)
+            name = '_revid'
+        return super(WikibasePage, self).__getattribute__(name)
+
+    def __setattr__(self, attr, value):
+        """Attribute setter. Deprecates lastrevid."""
+        if attr == 'lastrevid':
+            issue_deprecation_warning(
+                'WikibasePage.lastrevid', 'latest_revision_id', 2)
+            attr = '_revid'
+        return super(WikibasePage, self).__setattr__(attr, value)
+
+    def __delattr__(self, attr):
+        """Attribute deleter. Deprecates lastrevid."""
+        if attr == 'lastrevid':
+            issue_deprecation_warning(
+                'WikibasePage.lastrevid', 'latest_revision_id', 2)
+            attr = '_revid'
+        return super(WikibasePage, self).__delattr__(attr)
+
     def namespace(self):
         """Return the number of the namespace of the entity.
 
@@ -3191,7 +3244,7 @@
 
             self._content = data[item_index]
         if 'lastrevid' in self._content:
-            self.lastrevid = self._content['lastrevid']
+            self.latest_revision_id = self._content['lastrevid']
         else:
             if lazy_loading_id:
                 p = Page(self._site, self._title)
@@ -3301,9 +3354,17 @@
 
         @return: long
         """
-        if not hasattr(self, 'lastrevid'):
+        if not hasattr(self, '_revid'):
             self.get()
-        return self.lastrevid
+        return self._revid
+
+    @latest_revision_id.setter
+    def latest_revision_id(self, value):
+        self._revid = value
+
+    @latest_revision_id.deleter
+    def latest_revision_id(self):
+        del self._revid
 
     @staticmethod
     def _normalizeLanguages(data):
@@ -3374,8 +3435,8 @@
         @param data: Data to be saved
         @type data: dict, or None to save the current content of the entity.
         """
-        if hasattr(self, 'lastrevid'):
-            baserevid = self.lastrevid
+        if hasattr(self, '_revid'):
+            baserevid = self.latest_revision_id
         else:
             baserevid = None
 
@@ -3386,7 +3447,7 @@
 
         updates = self.repo.editEntity(self._defined_by(singular=True), data,
                                        baserevid=baserevid, **kwargs)
-        self.lastrevid = updates['entity']['lastrevid']
+        self.latest_revision_id = updates['entity']['lastrevid']
 
         lazy_loading_id = not hasattr(self, 'id') and hasattr(self, '_site')
         if lazy_loading_id or self.id == '-1':
@@ -4203,7 +4264,7 @@
         source = defaultdict(list)
         for claim in claims:
             claim.hash = data['reference']['hash']
-            self.on_item.lastrevid = data['pageinfo']['lastrevid']
+            self.on_item.latest_revision_id = data['pageinfo']['lastrevid']
             source[claim.getID()].append(claim)
         self.sources.append(source)
 
@@ -4237,7 +4298,7 @@
         """
         data = self.repo.editQualifier(self, qualifier, **kwargs)
         qualifier.isQualifier = True
-        self.on_item.lastrevid = data['pageinfo']['lastrevid']
+        self.on_item.latest_revision_id = data['pageinfo']['lastrevid']
         if qualifier.getID() in self.qualifiers:
             self.qualifiers[qualifier.getID()].append(qualifier)
         else:
diff --git a/pywikibot/proofreadpage.py b/pywikibot/proofreadpage.py
index 4b0f1d3..2acb362 100644
--- a/pywikibot/proofreadpage.py
+++ b/pywikibot/proofreadpage.py
@@ -235,6 +235,11 @@
         if not self._text:
             self._create_empty_page()
 
+    @text.deleter
+    def text(self):
+        if hasattr(self, '_text'):
+            del self._text
+
     def _decompose_page(self):
         """Split Proofread Page text in header, body and footer.
 
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 9bc9d84..572e489 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -4333,10 +4333,10 @@
                     pywikibot.log(u"Page [[%s]] saved without any changes."
                                   % page.title())
                     return True
-                page._revid = result["edit"]["newrevid"]
+                page.latest_revision_id = result["edit"]["newrevid"]
                 # see https://www.mediawiki.org/wiki/API:Wikimania_2006_API_discussion#Notes
                 # not safe to assume that saved text is the same as sent
-                self.loadrevisions(page, getText=True)
+                del page.text
                 return True
             elif result["edit"]["result"] == "Failure":
                 if "captcha" in result["edit"]:
@@ -5885,7 +5885,7 @@
             item.claims[claim.getID()].append(claim)
         else:
             item.claims[claim.getID()] = [claim]
-        item.lastrevid = data['pageinfo']['lastrevid']
+        item.latest_revision_id = data['pageinfo']['lastrevid']
 
     @must_be(group='user')
     def changeClaimTarget(self, claim, snaktype='value', bot=True, **kwargs):
@@ -5914,7 +5914,7 @@
         if snaktype == 'value':
             params['value'] = json.dumps(claim._formatValue())
 
-        params['baserevid'] = claim.on_item.lastrevid
+        params['baserevid'] = claim.on_item.latest_revision_id
         req = self._simple_request(**params)
         data = req.submit()
         return data
@@ -5935,7 +5935,7 @@
         params = {'action': 'wbsetclaim',
                   'claim': json.dumps(claim.toJSON()),
                   'token': self.tokens['edit'],
-                  'baserevid': claim.on_item.lastrevid,
+                  'baserevid': claim.on_item.latest_revision_id,
                   }
         if 'bot' not in kwargs or kwargs['bot']:
             params['bot'] = True
@@ -5964,7 +5964,7 @@
                       statement=claim.snak,
                       )
         if claim.on_item:  # I think this wouldn't be false, but lets be safe
-            params['baserevid'] = claim.on_item.lastrevid
+            params['baserevid'] = claim.on_item.latest_revision_id
         if bot:
             params['bot'] = 1
         params['token'] = self.tokens['edit']
@@ -6016,7 +6016,7 @@
                       claim=claim.snak,
                       )
         if claim.on_item:  # I think this wouldn't be false, but lets be safe
-            params['baserevid'] = claim.on_item.lastrevid
+            params['baserevid'] = claim.on_item.latest_revision_id
         if bot:
             params['bot'] = 1
         if (not new and
diff --git a/tests/aspects.py b/tests/aspects.py
index 446fe6f..3e6f6c2 100644
--- a/tests/aspects.py
+++ b/tests/aspects.py
@@ -896,12 +896,19 @@
         # Create an instance method named the same as the class method
         self.get_site = lambda name=None: self.__class__.get_site(name)
 
-    def get_mainpage(self, site=None):
-        """Create a Page object for the sites main page."""
+    def get_mainpage(self, site=None, force=False):
+        """Create a Page object for the sites main page.
+
+        @param site: Override current site, obtained using L{get_site}.
+        @type site: APISite or None
+        @param force: Get a new Page object, bypassing the cache
+        @type force: bool
+        @rtype: Page
+        """
         if not site:
             site = self.get_site()
 
-        if hasattr(self, '_mainpage'):
+        if hasattr(self, '_mainpage') and not force:
             # For multi-site test classes, or site is specified as a param,
             # the cached mainpage object may not be the desired site.
             if self._mainpage.site == site:
@@ -911,6 +918,9 @@
         if mainpage.isRedirectPage():
             mainpage = mainpage.getRedirectTarget()
 
+        if force:
+            mainpage = pywikibot.Page(site, mainpage.title())
+
         self._mainpage = mainpage
 
         return mainpage
diff --git a/tests/basepage_tests.py b/tests/basepage_tests.py
new file mode 100644
index 0000000..c7cd907
--- /dev/null
+++ b/tests/basepage_tests.py
@@ -0,0 +1,113 @@
+# -*- coding: utf-8  -*-
+"""Tests for BasePage subclasses."""
+#
+# (C) Pywikibot team, 2015
+#
+# Distributed under the terms of the MIT license.
+#
+from __future__ import unicode_literals
+
+__version__ = '$Id$'
+
+from pywikibot.page import BasePage
+
+from tests.aspects import (
+    unittest, TestCase,
+)
+
+
+class BasePageTestBase(TestCase):
+
+    """Base of BasePage test classes."""
+
+    _page = None
+
+    def setUp(self):
+        """Set up test."""
+        super(BasePageTestBase, self).setUp()
+        assert self._page, 'setUp() must assign an unloaded BasePage to self._page'
+        assert isinstance(self._page, BasePage)
+
+
+class BasePageLoadRevisionsCachingTestBase(BasePageTestBase):
+
+    """
+    Test site.loadrevisions() caching.
+
+    This test class monkey patches site.loadrevisions, which will cause
+    the pickling tests in site_tests and page_tests to fail, if it
+    is done on the same site as those tests use (the default site).
+    """
+
+    cached = False
+
+    def setUp(self):
+        """Set up test."""
+        super(BasePageLoadRevisionsCachingTestBase, self).setUp()
+        assert self.cached is False, 'Tests do not support caching'
+
+    def _test_page_text(self):
+        """Test site.loadrevisions() with .text."""
+        page = self._page
+
+        self.assertFalse(hasattr(page, '_revid'))
+        self.assertFalse(hasattr(page, '_text'))
+        self.assertTrue(hasattr(page, '_revisions'))
+        self.assertFalse(page._revisions)
+
+        self.site.loadrevisions(page, total=1)
+
+        self.assertTrue(hasattr(page, '_revid'))
+        self.assertTrue(hasattr(page, '_revisions'))
+        self.assertEqual(len(page._revisions), 1)
+        self.assertIn(page._revid, page._revisions)
+
+        self.assertFalse(hasattr(page, '_text'))
+        self.assertIsNone(page._revisions[page._revid].text)
+        self.assertFalse(hasattr(page, '_text'))
+        self.assertIsNone(page._latest_cached_revision())
+
+        self.site.loadrevisions(page, total=1, getText=True)
+        self.assertFalse(hasattr(page, '_text'))
+        self.assertIsNotNone(page._latest_cached_revision())
+
+        # Verify that calling .text doesn't call loadrevisions again
+        loadrevisions = self.site.loadrevisions
+        try:
+            self.site.loadrevisions = None
+            self.assertIsNotNone(page.text)
+        finally:
+            self.site.loadrevisions = loadrevisions
+
+        self.assertTrue(hasattr(page, '_text'))
+
+
+class BasePageMethodsTestBase(BasePageTestBase):
+
+    """Test base methods."""
+
+    def _test_invoke(self):
+        """Basic invocation of some base methods and properties."""
+        self.assertTrue(self._page.exists())
+        self._page.previous_revision_id
+        self._page.botMayEdit()
+
+    def _test_return_datatypes(self):
+        """Test the base methods have correct datatypes only."""
+        self.assertIsInstance(self._page.langlinks(), list)
+        self.assertIsInstance(self._page.templates(), list)
+        self.assertIsInstance(self._page.isCategoryRedirect(), int)
+
+    def _test_no_wikitext(self):
+        """Test the base methods responses simulate no wikitext."""
+        self._test_return_datatypes()
+        self.assertEqual(self._page.langlinks(), [])
+        self.assertEqual(self._page.templates(), [])
+        self.assertFalse(self._page.isCategoryRedirect())
+        self.assertTrue(self._page.botMayEdit())
+
+if __name__ == '__main__':
+    try:
+        unittest.main()
+    except SystemExit:
+        pass
diff --git a/tests/flow_tests.py b/tests/flow_tests.py
index 575fcb6..6500c49 100644
--- a/tests/flow_tests.py
+++ b/tests/flow_tests.py
@@ -16,29 +16,50 @@
     TestCase,
 )
 
+from tests.basepage_tests import (
+    BasePageMethodsTestBase,
+    BasePageLoadRevisionsCachingTestBase,
+)
 
-class TestFlowBasePage(TestCase):
+
+class TestBoardBasePageMethods(BasePageMethodsTestBase):
 
     """Test Flow pages using BasePage-defined methods."""
 
     family = 'mediawiki'
     code = 'mediawiki'
 
-    cached = True
+    def setUp(self):
+        self._page = pywikibot.flow.Board(
+            self.site, 'Talk:Sandbox')
+        super(TestBoardBasePageMethods, self).setUp()
 
-    def test_methods(self):
+    def test_basepage_methods(self):
         """Test basic Page methods on a Flow page."""
-        site = self.get_site()
-        page = pywikibot.Page(site, u'Talk:Sandbox')
-        self.assertEqual(page.exists(), True)
-        page.get()
-        self.assertEqual(page.isRedirectPage(), False)
+        self._test_invoke()
+        self._test_return_datatypes()
+        self.assertEqual(self._page.isRedirectPage(), False)
 
     def test_content_model(self):
         """Test Flow page content model."""
-        site = self.get_site()
-        page = pywikibot.Page(site, u'Talk:Sandbox')
-        self.assertEqual(page.content_model, 'flow-board')
+        self.assertEqual(self._page.content_model, 'flow-board')
+
+
+class TestLoadRevisionsCaching(BasePageLoadRevisionsCachingTestBase):
+
+    """Test site.loadrevisions() caching."""
+
+    family = 'mediawiki'
+    code = 'mediawiki'
+
+    def setUp(self):
+        self._page = pywikibot.flow.Board(
+            self.site, 'Talk:Sandbox')
+        super(TestLoadRevisionsCaching, self).setUp()
+
+    def test_page_text(self):
+        """Test site.loadrevisions() with Page.text."""
+        self._test_page_text()
 
 
 class TestFlowLoading(TestCase):
diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py
index 967b95e..d845b84 100755
--- a/tests/pagegenerators_tests.py
+++ b/tests/pagegenerators_tests.py
@@ -351,8 +351,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
             count += 1
         self.assertEqual(len(links), count)
@@ -366,8 +366,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
             count += 1
         self.assertEqual(len(links), count)
diff --git a/tests/proofreadpage_tests.py b/tests/proofreadpage_tests.py
index 00abb32..b13e9e2 100644
--- a/tests/proofreadpage_tests.py
+++ b/tests/proofreadpage_tests.py
@@ -14,6 +14,11 @@
 
 from tests.aspects import unittest, TestCase
 
+from tests.basepage_tests import (
+    BasePageMethodsTestBase,
+    BasePageLoadRevisionsCachingTestBase,
+)
+
 
 class TestProofreadPageInvalidSite(TestCase):
 
@@ -30,6 +35,41 @@
                           ProofreadPage, self.site, 'title')
 
 
+class TestBasePageMethods(BasePageMethodsTestBase):
+
+    """Test behavior of ProofreadPage methods inherited from BasePage."""
+
+    family = 'wikisource'
+    code = 'en'
+
+    def setUp(self):
+        self._page = ProofreadPage(
+            self.site, 'Page:Popular Science Monthly Volume 1.djvu/12')
+        super(TestBasePageMethods, self).setUp()
+
+    def test_basepage_methods(self):
+        """Test ProofreadPage methods inherited from superclass BasePage."""
+        self._test_invoke()
+        self._test_return_datatypes()
+
+
+class TestLoadRevisionsCaching(BasePageLoadRevisionsCachingTestBase):
+
+    """Test site.loadrevisions() caching."""
+
+    family = 'wikisource'
+    code = 'en'
+
+    def setUp(self):
+        self._page = ProofreadPage(
+            self.site, 'Page:Popular Science Monthly Volume 1.djvu/12')
+        super(TestLoadRevisionsCaching, self).setUp()
+
+    def test_page_text(self):
+        """Test site.loadrevisions() with Page.text."""
+        self._test_page_text()
+
+
 class TestProofreadPageValidSite(TestCase):
 
     """Test ProofreadPage class."""
diff --git a/tests/site_tests.py b/tests/site_tests.py
index e6e8bb5..6d54121 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8  -*-
 """Tests for the site module."""
 #
-# (C) Pywikibot team, 2008-2014
+# (C) Pywikibot team, 2008-2015
 #
 # Distributed under the terms of the MIT license.
 #
@@ -35,6 +35,7 @@
     AlteredDefaultSiteTestCase,
 )
 from tests.utils import allowed_failure, allowed_failure_if
+from tests.basepage_tests import BasePageLoadRevisionsCachingTestBase
 
 if sys.version_info[0] > 2:
     basestring = (str, )
@@ -1669,6 +1670,20 @@
         self.assertIn('statistics', self.site.siteinfo)
 
 
+class TestSiteLoadRevisionsCaching(BasePageLoadRevisionsCachingTestBase,
+                                   DefaultSiteTestCase):
+
+    """Test site.loadrevisions() caching."""
+
+    def setUp(self):
+        self._page = self.get_mainpage(force=True)
+        super(TestSiteLoadRevisionsCaching, self).setUp()
+
+    def test_page_text(self):
+        """Test site.loadrevisions() with Page.text."""
+        self._test_page_text()
+
+
 class TestSiteLoadRevisions(TestCase):
 
     """Test cases for Site.loadrevision() method."""
@@ -1687,17 +1702,23 @@
 
     def testLoadRevisions_basic(self):
         """Test the site.loadrevisions() method."""
+        # Load revisions without content
         self.mysite.loadrevisions(self.mainpage, total=15)
-        self.assertTrue(hasattr(self.mainpage, "_revid"))
-        self.assertTrue(hasattr(self.mainpage, "_revisions"))
-        self.assertIn(self.mainpage._revid, self.mainpage._revisions)
+        self.assertFalse(hasattr(self.mainpage, '_text'))
         self.assertEqual(len(self.mainpage._revisions), 15)
-        self.assertEqual(self.mainpage._text, None)
+        self.assertIn(self.mainpage._revid, self.mainpage._revisions)
+        self.assertIsNone(self.mainpage._revisions[self.mainpage._revid].text)
+        # The revision content will be loaded by .text
+        self.assertIsNotNone(self.mainpage.text)
 
     def testLoadRevisions_getText(self):
         """Test the site.loadrevisions() method with getText=True."""
         self.mysite.loadrevisions(self.mainpage, getText=True, total=5)
-        self.assertGreater(len(self.mainpage._text), 0)
+        self.assertFalse(hasattr(self.mainpage, '_text'))
+        self.assertIn(self.mainpage._revid, self.mainpage._revisions)
+        self.assertIsNotNone(self.mainpage._revisions[self.mainpage._revid].text)
+        self.assertTrue(self.mainpage._revisions[self.mainpage._revid].text)
+        self.assertIsNotNone(self.mainpage.text)
 
     def testLoadRevisions_revids(self):
         """Test the site.loadrevisions() method, listing based on revid."""
@@ -1911,8 +1932,10 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
+                self.assertTrue(hasattr(page, '_revid'))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIn(page._revid, page._revisions)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
             count += 1
             if count >= 5:
@@ -1934,8 +1957,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
             count += 1
             if count >= 5:
@@ -1951,8 +1974,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
             count += 1
             if count >= 6:
@@ -1977,8 +2000,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
             count += 1
         self.assertEqual(count, link_count)
@@ -2002,8 +2025,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
             count += 1
         self.assertEqual(count, link_count)
@@ -2030,8 +2053,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
             count += 1
             if count > 5:
@@ -2059,8 +2082,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
             count += 1
             if count > 5:
@@ -2100,8 +2123,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 self.assertTrue(hasattr(page, '_langlinks'))
             count += 1
@@ -2122,8 +2145,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
             count += 1
 
@@ -2142,8 +2165,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 self.assertTrue(hasattr(page, '_langlinks'))
             count += 1
@@ -2162,8 +2185,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 self.assertTrue(hasattr(page, '_templates'))
             count += 1
@@ -2182,8 +2205,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 self.assertTrue(hasattr(page, '_templates'))
                 self.assertTrue(hasattr(page, '_langlinks'))
diff --git a/tests/wikibase_tests.py b/tests/wikibase_tests.py
index 5a37136..bbac998 100644
--- a/tests/wikibase_tests.py
+++ b/tests/wikibase_tests.py
@@ -18,10 +18,19 @@
 
 from pywikibot import pagegenerators
 from pywikibot.tools import SelfCallDict
-from pywikibot.page import WikibasePage
+from pywikibot.page import WikibasePage, ItemPage
 from pywikibot.site import Namespace
 
-from tests.aspects import unittest, WikidataTestCase, TestCase
+from tests.aspects import (
+    unittest, TestCase,
+    WikidataTestCase,
+    DeprecationTestCase,
+)
+
+from tests.basepage_tests import (
+    BasePageMethodsTestBase,
+    BasePageLoadRevisionsCachingTestBase,
+)
 
 
 # fetch a page which is very likely to be unconnected, which doesnt have
@@ -33,6 +42,54 @@
     for page in gen:
         if not page.properties().get('wikibase_item'):
             return page
+
+
+class TestLoadRevisionsCaching(BasePageLoadRevisionsCachingTestBase,
+                               WikidataTestCase):
+
+    """Test site.loadrevisions() caching."""
+
+    def setUp(self):
+        self._page = ItemPage(self.get_repo(), 'Q60')
+        super(TestLoadRevisionsCaching, self).setUp()
+
+    def test_page_text(self):
+        """Test site.loadrevisions() with Page.text."""
+        self._test_page_text()
+
+
+class TestDeprecatedAttributes(WikidataTestCase, DeprecationTestCase):
+
+    """Test deprecated lastrevid."""
+
+    def test_lastrevid(self):
+        """Test deprecated lastrevid."""
+        item = ItemPage(self.get_repo(), 'Q60')
+        self.assertFalse(hasattr(item, 'lastrevid'))
+        item.get()
+        self.assertTrue(hasattr(item, 'lastrevid'))
+        self.assertIsInstance(item.lastrevid, int)
+        self.assertDeprecation()
+        self._reset_messages()
+
+        item.lastrevid = 1
+        self.assertTrue(hasattr(item, 'lastrevid'))
+        self.assertTrue(hasattr(item, '_revid'))
+        self.assertEqual(item.lastrevid, 1)
+        self.assertEqual(item._revid, 1)
+        self.assertDeprecation()
+
+    def test_lastrevid_del(self):
+        """Test del with deprecated lastrevid."""
+        item = ItemPage(self.get_repo(), 'Q60')
+        item.get()
+        self.assertTrue(hasattr(item, 'lastrevid'))
+        self.assertTrue(hasattr(item, '_revid'))
+
+        del item.lastrevid
+        self.assertFalse(hasattr(item, 'lastrevid'))
+        self.assertFalse(hasattr(item, '_revid'))
+        self.assertDeprecation()
 
 
 class TestGeneral(WikidataTestCase):
@@ -540,27 +597,32 @@
         self.assertRaises(ValueError, claim.setTarget, pywikibot.WbTime(2001, site=wikidata))
 
 
-class TestPageMethods(WikidataTestCase):
+class TestItemBasePageMethods(WikidataTestCase, BasePageMethodsTestBase):
 
-    """Test behavior of WikibasePage methods inherited from BasePage."""
+    """Test behavior of ItemPage methods inherited from BasePage."""
 
-    def test_page_methods(self):
+    def setUp(self):
+        self._page = ItemPage(self.get_repo(), 'Q60')
+        super(TestItemBasePageMethods, self).setUp()
+
+    def test_basepage_methods(self):
         """Test ItemPage methods inherited from superclass BasePage."""
-        self.wdp = pywikibot.ItemPage(self.get_repo(), 'Q60')
-        self.wdp.previousRevision()
-        self.assertEqual(self.wdp.langlinks(), [])
-        self.assertEqual(self.wdp.templates(), [])
-        self.assertFalse(self.wdp.isCategoryRedirect())
+        self._test_invoke()
+        self._test_no_wikitext()
 
-    def test_item_bot_may_edit(self):
-        """Test botMayEdit."""
-        site = self.get_site()
-        page = pywikibot.Page(site, 'Q60')
-        self.assertTrue(page.botMayEdit())
 
-        repo = self.get_repo()
-        item = pywikibot.ItemPage(repo, 'Q60')
-        self.assertTrue(item.botMayEdit())
+class TestPageMethodsWithItemTitle(WikidataTestCase, BasePageMethodsTestBase):
+
+    """Test behavior of Page methods for wikibase item."""
+
+    def setUp(self):
+        self._page = pywikibot.Page(self.site, 'Q60')
+        super(TestPageMethodsWithItemTitle, self).setUp()
+
+    def test_basepage_methods(self):
+        """Test Page methods inherited from superclass BasePage with Q60."""
+        self._test_invoke()
+        self._test_no_wikitext()
 
 
 class TestLinks(WikidataTestCase):
diff --git a/tox.ini b/tox.ini
index 7e2d4d7..5459eda 100644
--- a/tox.ini
+++ b/tox.ini
@@ -109,6 +109,7 @@
     tests/__init__.py \
     tests/utils.py \
     tests/archivebot_tests.py \
+    tests/basepage_tests.py \
     tests/category_tests.py \
     tests/cache_tests.py \
     tests/cosmetic_changes_tests.py \

-- 
To view, visit https://gerrit.wikimedia.org/r/216523
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: I55a49d115a8f207216f24f9acbfa64d87da88578
Gerrit-PatchSet: 12
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <[email protected]>
Gerrit-Reviewer: John Vandenberg <[email protected]>
Gerrit-Reviewer: Ladsgroup <[email protected]>
Gerrit-Reviewer: Merlijn van Deen <[email protected]>
Gerrit-Reviewer: Mpaa <[email protected]>
Gerrit-Reviewer: XZise <[email protected]>
Gerrit-Reviewer: jenkins-bot <>
