jenkins-bot has submitted this change and it was merged.

Change subject: APISite.siteinfo automatically fetches the results
......................................................................


APISite.siteinfo automatically fetches the results

This changes it from a dict to a Wrapper which automatically
queries the results if they aren't cached. It is basically merging
APISite._add_siteinfo, APISite._getsiteinfo and APISite.siteinfo.

It'll always cache the result of the siprop "general", because
otherwise it would be unable to determine if a requested property
is in fact a property of "general" (e.g. APISite.case()).

It also supports the "in" operator which tells you if the value is
cached. Setting it from the outside is not possible anymore.

If a property isn't supported (e.g. 'restrictions' before 1.23) it
will cache either an empty dummy object (which acts like an empty
string/iterable/dict) or a specific default constant.

Change-Id: Ib5b6d84f0baf60376cdc25bc715d346c1579da1f
---
M pywikibot/data/api.py
M pywikibot/site.py
M pywikibot/tools.py
M tests/dry_api_tests.py
M tests/dry_site_tests.py
M tests/site_tests.py
M tests/utils.py
7 files changed, 378 insertions(+), 147 deletions(-)

Approvals:
  John Vandenberg: Looks good to me, approved
  jenkins-bot: Verified



diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index efe88b3..9ff7ba7 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -157,7 +157,7 @@
         # otherwise be a problem.
         # This situation is only tripped when one of the first actions
         # on the site is a write action and the extension isn't installed.
-        if hasattr(self.site, "_extensions"):
+        if 'extensions' in self.site.siteinfo:
             use_assert_edit_extension = self.site.hasExtension('AssertEdit', 
False)
         else:
             use_assert_edit_extension = True
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 63fca60..30b07c9 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -20,13 +20,15 @@
 import re
 import sys
 from distutils.version import LooseVersion as LV
-from collections import Iterable
+from collections import Iterable, Container
 import threading
 import time
 import urllib
 import json
+import copy
 
 import pywikibot
+import pywikibot.tools as tools
 from pywikibot import deprecate_arg
 from pywikibot import config
 from pywikibot import deprecated
@@ -890,6 +892,226 @@
     return decorator
 
 
+class Siteinfo(Container):
+
+    """
+    A 'dictionary' like container for siteinfo.
+
+    This class queries the server to get the requested siteinfo property.
+    Optionally it can cache this directly in the instance so that later
+    requests don't need to query the server.
+
+    All values of the siteinfo property 'general' are directly available.
+    """
+
+    def __init__(self, site):
+        """Initialise it with an empty cache."""
+        self._site = site
+        self._cache = {}
+
+    @staticmethod
+    def _get_default(key):
+        """
+        Return the default value for different properties.
+
+        If the property is 'restrictions' it returns a dictionary with:
+            'cascadinglevels': 'sysop'
+            'semiprotectedlevels': 'autoconfirmed'
+            'levels': '' (everybody), 'autoconfirmed', 'sysop'
+            'types': 'create', 'edit', 'move', 'upload'
+        Otherwise it returns L{tools.EMPTY_DEFAULT}.
+
+        @param key: The property name
+        @type key: str
+        @return: The default value
+        @rtype: dict or L{tools.EmptyDefault}
+        """
+        if key == 'restrictions':
+            # implemented in b73b5883d486db0e9278ef16733551f28d9e096d
+            return {
+                'cascadinglevels': ['sysop'],
+                'semiprotectedlevels': ['autoconfirmed'],
+                'levels': ['', 'autoconfirmed', 'sysop'],
+                'types': ['create', 'edit', 'move', 'upload']
+            }
+        else:
+            return tools.EMPTY_DEFAULT
+
+    def _get_siteinfo(self, prop, force=False):
+        """
+        Retrieve a siteinfo property. All properties which the site doesn't
+        support contain the default value. Because pre-1.12 no data was
+        returned when a property doesn't exist, it queries each property
+        independently if a property is invalid.
+
+        @param prop: The property names of the siteinfo.
+        @type prop: str or iterable
+        @param force: Don't access the cached request.
+        @type force: bool
+        @return: A dictionary with the properties of the site. Each entry in
+            the dictionary is a tuple of the value and a boolean to save if it
+            is the default value.
+        @rtype: dict (the values)
+        @see: U{https://www.mediawiki.org/wiki/API:Meta#siteinfo_.2F_si}
+        """
+        if isinstance(prop, basestring):
+            props = [prop]
+        else:
+            props = prop
+        if len(props) == 0:
+            raise ValueError('At least one property name must be provided.')
+        try:
+            data = pywikibot.data.api.CachedRequest(
+                expiry=0 if force else pywikibot.config.API_config_expiry,
+                site=self._site,
+                action='query',
+                meta='siteinfo',
+                siprop='|'.join(props)).submit()
+        except api.APIError as e:
+            if e.code == 'siunknown_siprop':
+                if len(props) == 1:
+                    pywikibot.log(u"Unable to get siprop 
'{0}'".format(props[0]))
+                    return {props[0]: (Siteinfo._get_default(props[0]), True)}
+                else:
+                    pywikibot.log(u"Unable to get siteinfo, because at least "
+                                  u"one property is unknown: '{0}'".format(
+                                  u"', '".join(props)))
+                    results = {}
+                    for prop in props:
+                        results.update(self._get_siteinfo(prop, force))
+                    return results
+            else:
+                raise
+        else:
+            result = {}
+            if 'warnings' in data:
+                invalid_properties = []
+                for prop in re.match(u"^Unrecognized values? for parameter "
+                                     u"'siprop': ([^,]+(?:, [^,]+)*)$",
+                                     
data['warnings']['siteinfo']['*']).group(1).split(','):
+                    prop = prop.strip()
+                    invalid_properties += [prop]
+                    result[prop] = (Siteinfo._get_default(prop), True)
+                pywikibot.log(u"Unable to get siprop(s) '{0}'".format(
+                    u"', '".join(invalid_properties)))
+            if 'query' in data:
+                # todo iterate through the properties!
+                for prop in props:
+                    if prop in data['query']:
+                        result[prop] = (data['query'][prop], False)
+            return result
+
+    def _get_general(self, key, force):
+        """
+        Return a siteinfo property which is loaded by default.
+
+        The property 'general' will be queried if it wasn't yet or it's forced.
+        Additionally all uncached default properties are queried. This way
+        multiple default properties are queried with one request. It'll cache
+        always all results.
+
+        @param key: The key to search for.
+        @type key: str
+        @param force: If 'general' should be queried in any case.
+        @type force: bool
+        @return: If that property was retrieved via this method. Returns an
+            empty tuple if it wasn't retrieved.
+        @rtype: various (the value), bool (if the default value is used)
+        """
+        if 'general' not in self._cache:
+            pywikibot.debug('general siteinfo not loaded yet.', _logger)
+            force = True
+            props = ['namespaces', 'namespacealiases']
+        else:
+            props = []
+        if force:
+            props = [prop for prop in props if prop not in self._cache]
+            if props:
+                pywikibot.debug(
+                    u"Load siteinfo properties '{0}' along with 
'general'".format(
+                        u"', '".join(props)), _logger)
+            props += ['general']
+            default_info = self._get_siteinfo(props, force)
+            for prop in props:
+                self._cache[prop] = default_info[prop]
+            if key in default_info:
+                return default_info[key]
+        if key in self._cache['general'][0]:
+            return self._cache['general'][0][key], False
+        else:
+            return tuple()
+
+    def __getitem__(self, key):
+        """Return a siteinfo property, caching and not forcing it."""
+        return self.get(key, False)  # caches and doesn't force it
+
+    def get(self, key, get_default=True, cache=True, force=False):
+        """
+        Return a siteinfo property.
+
+        @param key: The name of the siteinfo property.
+        @type key: str
+        @param get_default: Whether to return the default value instead of
+            raising a KeyError if the key is invalid.
+        @type get_default: bool
+        @param cache: Caches the result internally so that future accesses via
+            this method won't query the server.
+        @type cache: bool
+        @param force: Ignores the cache and always queries the server to get
+            the newest value.
+        @type force: bool
+        @return: The gathered property
+        @rtype: various
+        @see: L{_get_siteinfo}
+        """
+        if not force:
+            try:
+                cached = self._get_cached(key)
+            except KeyError:
+                cached = None
+            # a not recognised result was cached, but defaults aren't requested
+            if cached:
+                if cached[1] and not get_default:
+                    raise KeyError(key)
+                else:
+                    return copy.deepcopy(cached[0])
+        preloaded = self._get_general(key, force)
+        if not preloaded:
+            preloaded = self._get_siteinfo(key, force)[key]
+        else:
+            cache = False
+        if preloaded[1] and not get_default:
+            raise KeyError(key)
+        else:
+            if cache:
+                self._cache[key] = preloaded
+            return copy.deepcopy(preloaded[0])
+
+    def _get_cached(self, key):
+        """Return the cached value or a KeyError exception if not cached."""
+        if 'general' in self._cache:
+            if key in self._cache['general'][0]:
+                return (self._cache['general'][0][key], False)
+            else:
+                return self._cache[key]
+        raise KeyError(key)
+
+    def __contains__(self, key):
+        """Return whether the value is cached."""
+        try:
+            self._get_cached(key)
+        except KeyError:
+            return False
+        else:
+            return True
+
+    def is_recognised(self, key):
+        """Return if 'key' is a valid property name. 'None' if not cached."""
+        try:
+            return not self._get_cached(key)[1]
+        except KeyError:
+            return None
+
+
 class APISite(BaseSite):
 
     """API interface to MediaWiki site.
@@ -924,6 +1146,7 @@
         BaseSite.__init__(self, code, fam, user, sysop)
         self._msgcache = {}
         self._loginstatus = LoginStatus.NOT_ATTEMPTED
+        self._siteinfo = Siteinfo(self)
         return
 
     @staticmethod
@@ -1067,8 +1290,6 @@
                                  if sysop else LoginStatus.AS_USER)
         else:
             self._loginstatus = LoginStatus.NOT_LOGGED_IN  # failure
-        if not hasattr(self, "_siteinfo"):
-            self._getsiteinfo()
 
     forceLogin = login  # alias for backward-compatibility
 
@@ -1383,15 +1604,9 @@
     def getmagicwords(self, word):
         """Return list of localized "word" magic words for the site."""
         if not hasattr(self, "_magicwords"):
-            try:
-                # don't cache in _siteinfo, because we cache it in _magicwords
-                magicwords = self._add_siteinfo("magicwords", False)
-                self._magicwords = dict((item["name"], item["aliases"])
+            magicwords = self.siteinfo.get("magicwords", cache=False)
+            self._magicwords = dict((item["name"], item["aliases"])
                                         for item in magicwords)
-            except api.APIError:
-                # hack for older sites that don't support 1.13 properties
-                # probably should delete if we're not going to support pre-1.13
-                self._magicwords = {}
 
         if word in self._magicwords:
             return self._magicwords[word]
@@ -1432,76 +1647,7 @@
         """Return list of localized PAGENAMEE tags for the site."""
         return self.getmagicwords("pagenamee")
 
-    def _add_siteinfo(self, prop, cache, force=False):
-        """
-        Retrieve additional siteinfo and optionally cache it.
-
-        Queries the site and returns the properties. It can cache the value
-        so that future queries will access the cache. With C{force} set to
-        True it won't access the cache but it can still cache the value. If
-        the property doesn't exists it returns None.
-
-        @param prop: The property name of the siteinfo.
-        @type prop: str
-        @param cache: Should this be cached?
-        @type cache: bool
-        @param force: Should the cache be skipped?
-        @type force: bool
-        @return: The properties of the site.
-        @rtype: various (depends on prop)
-        """
-        if not hasattr(self, '_siteinfo'):
-            force = True  # if it doesn't exists there won't be a cache
-            if cache:  # but only initialise cache if that is requested
-                self._getsiteinfo()
-        if not force and prop in self._siteinfo:
-            return self._siteinfo[prop]
-        data = pywikibot.data.api.CachedRequest(
-            expiry=0 if force else pywikibot.config.API_config_expiry,
-            site=self,
-            action='query',
-            meta='siteinfo',
-            siprop=prop).submit()
-        try:
-            prop_data = data['query'][prop]
-        except KeyError:
-            prop_data = None
-        if cache:
-            self._siteinfo[prop] = prop_data
-        return prop_data
-
-    def _getsiteinfo(self, force=False):
-        """Retrieve siteinfo and namespaces from site."""
-        sirequest = api.CachedRequest(
-            expiry=(0 if force else config.API_config_expiry),
-            site=self,
-            action="query",
-            meta="siteinfo",
-            siprop="general|namespaces|namespacealiases|extensions"
-        )
-        try:
-            sidata = sirequest.submit()
-        except api.APIError:
-            # hack for older sites that don't support 1.12 properties
-            # probably should delete if we're not going to support pre-1.12
-            sirequest = api.Request(
-                site=self,
-                action="query",
-                meta="siteinfo",
-                siprop="general|namespaces"
-            )
-            sidata = sirequest.submit()
-
-        assert 'query' in sidata, \
-               "API siteinfo response lacks 'query' key"
-        sidata = sidata['query']
-        assert 'general' in sidata, \
-               "API siteinfo response lacks 'general' key"
-        assert 'namespaces' in sidata, \
-               "API siteinfo response lacks 'namespaces' key"
-        self._siteinfo = sidata['general']
-
-        nsdata = sidata['namespaces']
+    def _build_namespaces(self):
 
         self._namespaces = {}
 
@@ -1511,36 +1657,27 @@
         # the defaults defined in Namespace.
         is_mw114 = LV(self.version()) >= LV('1.14')
 
-        for nskey in nsdata:
-            ns = int(nskey)
+        for nsdata in self.siteinfo.get('namespaces', cache=False).values():
+            ns = nsdata.pop('id')
             custom_name = None
             canonical_name = None
             if ns == 0:
-                canonical_name = nsdata[nskey].pop('*')
+                canonical_name = nsdata.pop('*')
                 custom_name = canonical_name
             else:
-                custom_name = nsdata[nskey].pop('*')
+                custom_name = nsdata.pop('*')
                 if is_mw114:
-                    canonical_name = nsdata[nskey].pop('canonical')
+                    canonical_name = nsdata.pop('canonical')
 
-            # Remove the 'id' from nsdata
-            nsdata[nskey].pop('id')
             namespace = Namespace(ns, canonical_name, custom_name,
-                                  use_image_name=not is_mw114, **nsdata[nskey])
-
+                                  use_image_name=not is_mw114,
+                                  **nsdata)
             self._namespaces[ns] = namespace
 
-        if 'namespacealiases' in sidata:
-            aliasdata = sidata['namespacealiases']
-            for item in aliasdata:
-                ns = int(item['id'])
-                if item['*'] not in self._namespaces[ns]:
-                    self._namespaces[ns].aliases.append(item['*'])
-
-        if 'extensions' in sidata:
-            self._extensions = sidata['extensions']
-        else:
-            self._extensions = None
+        for item in self.siteinfo.get('namespacealiases'):
+            ns = int(item['id'])
+            if item['*'] not in self._namespaces[ns]:
+                self._namespaces[ns].aliases.append(item['*'])
 
     def hasExtension(self, name, unknown=NotImplementedError):
         """ Determine whether extension `name` is loaded.
@@ -1552,15 +1689,15 @@
 
         @return: bool
         """
-        if not hasattr(self, '_extensions'):
-            self._getsiteinfo()
-        if self._extensions is None:
+        try:
+            extensions = self.siteinfo['extensions']
+        except KeyError:
             if isinstance(unknown, type) and issubclass(unknown, Exception):
                 raise unknown(
                     "Feature 'hasExtension' only available in MW 1.14+")
             else:
                 return unknown
-        for ext in self._extensions:
+        for ext in extensions:
             if ext['name'].lower() == name.lower():
                 return True
         return False
@@ -1568,8 +1705,6 @@
     @property
     def siteinfo(self):
         """Site information dict."""
-        if not hasattr(self, "_siteinfo"):
-            self._getsiteinfo()
         return self._siteinfo
 
     def case(self):
@@ -1632,8 +1767,8 @@
 
     def namespaces(self):
         """Return dict of valid namespaces on this wiki."""
-        if not hasattr(self, "_siteinfo"):
-            self._getsiteinfo()
+        if not hasattr(self, '_namespaces'):
+            self._build_namespaces()
         return self._namespaces
 
     def namespace(self, num, all=False):
@@ -1655,9 +1790,7 @@
 
         """
         try:
-            if force:
-                self._getsiteinfo(force=True)    # drop/expire cache and reload
-            versionstring = self.siteinfo['generator']
+            versionstring = self.siteinfo.get('generator', force=force)
             m = re.match(r"^MediaWiki ([0-9]+)\.([0-9]+)(.*)$", versionstring)
             if m:
                 return (int(m.group(1)), int(m.group(2)), m.group(3))
@@ -3504,37 +3637,22 @@
         """
         Return the protection types available on this site.
 
-        With MediaWiki version 1.23 protection types can be retrieved. To
-        support older wikis, the default protection types 'create', 'edit',
-        'move' and 'upload' are returned.
-
         @return protection types available
         @rtype: set of unicode instances
+        @see: L{Siteinfo._get_default()}
         """
-        # implemented in b73b5883d486db0e9278ef16733551f28d9e096d
-        restrictions = self._add_siteinfo('restrictions', True)
-        if restrictions is None or 'types' not in restrictions:
-            return set([u'create', u'edit', u'move', u'upload'])
-        else:
-            return set(restrictions['types'])
+        return set(self.siteinfo.get('restrictions')['types'])
 
     def protection_levels(self):
         """
         Return the protection levels available on this site.
 
-        With MediaWiki version 1.23 protection levels can be retrieved. To
-        support older wikis, the default protection levels '', 'autoconfirmed',
-        and 'sysop' are returned.
-
         @return protection types available
         @rtype: set of unicode instances
+        @see: L{Siteinfo._get_default()}
         """
         # implemented in b73b5883d486db0e9278ef16733551f28d9e096d
-        restrictions = self._add_siteinfo('restrictions', True)
-        if restrictions is None or 'levels' not in restrictions:
-            return set([u'', u'autoconfirmed', u'sysop'])
-        else:
-            return set(restrictions['levels'])
+        return set(self.siteinfo.get('restrictions')['levels'])
 
     @must_be(group='sysop')
     @deprecate_arg("summary", "reason")
diff --git a/pywikibot/tools.py b/pywikibot/tools.py
index 6f0c26c..bf3dbc9 100644
--- a/pywikibot/tools.py
+++ b/pywikibot/tools.py
@@ -10,6 +10,7 @@
 import sys
 import threading
 import time
+from collections import Mapping
 
 if sys.version_info[0] > 2:
     import queue as Queue
@@ -216,6 +217,44 @@
         thd.start()
 
 
+class CombinedError(KeyError, IndexError):
+
+    """An error that gets caught by both KeyError and IndexError."""
+
+
+class EmptyDefault(str, Mapping):
+
+    """
+    A default for a not existing siteinfo property.
+
+    It should be chosen if there is no better default known. It acts like an
+    empty collection, so it can be iterated through safely if treated as a
+    list, tuple, set or dictionary. It is also basically an empty string.
+
+    Accessing a value via __getitem__ will result in a combined KeyError and
+    IndexError.
+    """
+
+    def __init__(self):
+        """Initialise the default as an empty string."""
+        str.__init__(self)
+
+    # http://stackoverflow.com/a/13243870/473890
+    def _empty_iter(self):
+        """An iterator which does nothing."""
+        return
+        yield
+
+    def __getitem__(self, key):
+        """Always raise a L{CombinedError}."""
+        raise CombinedError(key)
+
+    iteritems = itervalues = iterkeys = __iter__ = _empty_iter
+
+
+EMPTY_DEFAULT = EmptyDefault()
+
+
 if __name__ == "__main__":
     def _test():
         import doctest
diff --git a/tests/dry_api_tests.py b/tests/dry_api_tests.py
index c6cddf8..6c61a33 100644
--- a/tests/dry_api_tests.py
+++ b/tests/dry_api_tests.py
@@ -10,7 +10,7 @@
 import datetime
 import pywikibot
 from pywikibot.data.api import CachedRequest, QueryGenerator
-from utils import unittest, NoSiteTestCase, SiteTestCase
+from utils import unittest, NoSiteTestCase, SiteTestCase, DummySiteinfo
 
 
 class DryCachedRequestTests(SiteTestCase):
@@ -68,6 +68,7 @@
             def __init__(self):
                 self._user = 'anon'
                 pywikibot.site.BaseSite.__init__(self, 'mock', MockFamily())
+                self._siteinfo = DummySiteinfo({'case': 'first-letter'})
 
             def version(self):
                 return '1.13'  # pre 1.14
@@ -84,9 +85,9 @@
             def encodings(self):
                 return []
 
-            def _getsiteinfo(self):
-                self._siteinfo = {'case': 'first-letter'}
-                return {}
+            @property
+            def siteinfo(self):
+                return self._siteinfo
 
             def __repr__(self):
                 return "MockSite()"
diff --git a/tests/dry_site_tests.py b/tests/dry_site_tests.py
index 709d569..ebea4ce 100644
--- a/tests/dry_site_tests.py
+++ b/tests/dry_site_tests.py
@@ -10,7 +10,7 @@
 import pywikibot
 from pywikibot.site import must_be
 
-from tests.utils import unittest, NoSiteTestCase
+from tests.utils import unittest, NoSiteTestCase, DummySiteinfo
 
 
 class DrySite(pywikibot.site.APISite):
@@ -20,6 +20,10 @@
     def userinfo(self):
         return self._userinfo
 
+    @property
+    def siteinfo(self):
+        return DummySiteinfo({})
+
 
 class TestDrySite(NoSiteTestCase):
     def test_logged_in(self):
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 171bc4e..621da4e 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -14,6 +14,8 @@
 from collections import Iterable
 import pywikibot
 from tests.utils import PywikibotTestCase, unittest
+from datetime import datetime
+import re
 
 import sys
 if sys.version_info[0] > 2:
@@ -196,12 +198,7 @@
         self.assertFalse('*' in mysite.mediawiki_messages(['*']))
 
         self.assertType(mysite.getcurrenttimestamp(), basestring)
-        self.assertType(mysite.siteinfo, dict)
-        self.assertType(mysite.case(), basestring)
-        ver = mysite.live_version()
-        self.assertType(ver, tuple)
-        self.assertTrue(all(isinstance(ver[i], int) for i in (0, 1)))
-        self.assertType(ver[2], basestring)
+        self.assertType(mysite.siteinfo, pywikibot.site.Siteinfo)
         self.assertType(mysite.months_names, list)
         self.assertEqual(mysite.months_names[4], (u'May', u'May'))
         self.assertEqual(mysite.list_to_text(('pywikibot',)), 'pywikibot')
@@ -980,8 +977,9 @@
     #       and the other following methods in site.py
 
     def testExtensions(self):
-        # test automatically getting _extensions
-        del mysite._extensions
+        # test automatically getting extensions cache
+        if 'extensions' in mysite.siteinfo:
+            del mysite.siteinfo._cache['extensions']
         self.assertTrue(mysite.hasExtension('Disambiguator'))
 
         # test case-sensitivity
@@ -990,7 +988,7 @@
         self.assertFalse(mysite.hasExtension('ThisExtensionDoesNotExist'))
 
         # test behavior for sites that do not report extensions
-        mysite._extensions = None
+        mysite.siteinfo._cache['extensions'] = (None, True)
         self.assertRaises(NotImplementedError, mysite.hasExtension, 
('anything'))
 
         class MyException(Exception):
@@ -999,7 +997,7 @@
 
         self.assertTrue(mysite.hasExtension('anything', True))
         self.assertFalse(mysite.hasExtension('anything', False))
-        del mysite._extensions
+        del mysite.siteinfo._cache['extensions']
 
     def test_API_limits_with_site_methods(self):
         # test step/total parameters for different sitemethods
@@ -1027,6 +1025,47 @@
         pickle.dumps(site)
         self.assertTrue(True)  # No exception thrown!
 
+    def testSiteinfo(self):
+        """Test the siteinfo property."""
+        # general entries
+        self.assertIsInstance(mysite.siteinfo['timeoffset'], (int, float))
+        self.assertTrue(-12 * 60 <= mysite.siteinfo['timeoffset'] <= +14 * 60)
+        self.assertEqual(mysite.siteinfo['timeoffset'] % 15, 0)
+        self.assertRegexpMatches(mysite.siteinfo['timezone'], 
"([A-Z]{3,4}|[A-Z][a-z]+/[A-Z][a-z]+)")
+        self.assertType(datetime.strptime(mysite.siteinfo['time'], 
"%Y-%m-%dT%H:%M:%SZ"), datetime)
+        self.assertTrue(mysite.siteinfo['maxuploadsize'] > 0)
+        self.assertIn(mysite.case(), ["first-letter", "case-sensitive"])
+        self.assertEqual(re.findall("\$1", mysite.siteinfo['articlepath']), 
["$1"])
+        ver = mysite.live_version()
+        self.assertType(ver, tuple)
+        self.assertTrue(all(isinstance(ver[i], int) for i in (0, 1)))
+        self.assertType(ver[2], basestring)
+
+        def entered_loop(iterable):
+            for iterable_item in iterable:
+                return True
+            return False
+
+        self.assertType(mysite.siteinfo.get('restrictions'), dict)
+        self.assertTrue('restrictions' in mysite.siteinfo)
+        # the following line only works in 1.23+
+        self.assertTrue(mysite.siteinfo.is_recognised('restrictions'))
+        del mysite.siteinfo._cache['restrictions']
+        self.assertType(mysite.siteinfo.get('restrictions', cache=False), dict)
+        self.assertFalse('restrictions' in mysite.siteinfo)
+
+        not_exists = 'this-property-does-not-exist'
+        self.assertRaises(KeyError, mysite.siteinfo.__getitem__, not_exists)
+        self.assertFalse(not_exists in mysite.siteinfo)
+        self.assertEqual(len(mysite.siteinfo.get(not_exists)), 0)
+        self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists)))
+        
self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).iteritems()))
+        
self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).itervalues()))
+        
self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).iterkeys()))
+        self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).items()))
+        
self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).values()))
+        self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).keys()))
+
 
 class TestSiteLoadRevisions(PywikibotTestCase):
     """Test cases for Site.loadrevision() method."""
diff --git a/tests/utils.py b/tests/utils.py
index f767538..db30699 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -113,3 +113,33 @@
         unpatch_request()
 
 PywikibotTestCase = CachedTestCase
+
+
+class DummySiteinfo():
+
+    def __init__(self, cache):
+        self._cache = dict((key, (item, False)) for key, item in 
cache.iteritems())
+
+    def __getitem__(self, key):
+        return self.get(key, False)
+
+    def get(self, key, get_default=True, cache=True, force=False):
+        if not force and key in self._cache:
+            loaded = self._cache[key]
+            if not loaded[1] and not get_default:
+                raise KeyError(key)
+            else:
+                return loaded[0]
+        elif get_default:
+            default = pywikibot.site.Siteinfo._get_default(key)
+            if cache:
+                self._cache[key] = (default, True)
+            return default
+        else:
+            raise KeyError(key)
+
+    def __contains__(self, key):
+        return False
+
+    def is_recognised(self, key):
+        return None

-- 
To view, visit https://gerrit.wikimedia.org/r/154413
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: Ib5b6d84f0baf60376cdc25bc715d346c1579da1f
Gerrit-PatchSet: 12
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: XZise <[email protected]>
Gerrit-Reviewer: John Vandenberg <[email protected]>
Gerrit-Reviewer: Ladsgroup <[email protected]>
Gerrit-Reviewer: Merlijn van Deen <[email protected]>
Gerrit-Reviewer: Mpaa <[email protected]>
Gerrit-Reviewer: XZise <[email protected]>
Gerrit-Reviewer: jenkins-bot <>

_______________________________________________
Pywikibot-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/pywikibot-commits

Reply via email to