Merlijn van Deen has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/79561


Change subject: Tests for ItemPage.iterlinks
......................................................................

Tests for ItemPage.iterlinks

These tests use a fabricated sitelinks file, which makes testing easier as
we no longer depend on live Wikidata content.

ItemPage.get() was adapted: the parent class already prevents fetching data from
Wikidata when _content exists, and it also needs to parse _content. It's much
easier to mock data when we can just set _content and call .get().

Change-Id: I5cd0e235db5bb0792d39178ee0c9e097fb053823
---
M pywikibot/page.py
A tests/pages/Q60_only_sitelinks.wd
A tests/wikidata_tests.py
3 files changed, 76 insertions(+), 2 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/pywikibot/core 
refs/changes/61/79561/1

diff --git a/pywikibot/page.py b/pywikibot/page.py
index 81daf92..815bcf4 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -2510,8 +2510,7 @@
         force will override caching
         args are the values of props
         """
-        if force or not hasattr(self, '_content'):
-            super(ItemPage, self).get(force=force, *args)
+        super(ItemPage, self).get(force=force, *args)
 
         #claims
         self.claims = {}
diff --git a/tests/pages/Q60_only_sitelinks.wd 
b/tests/pages/Q60_only_sitelinks.wd
new file mode 100644
index 0000000..03b15bc
--- /dev/null
+++ b/tests/pages/Q60_only_sitelinks.wd
@@ -0,0 +1,30 @@
+{
+    "lastrevid": 66097354,
+    "modified": "2013-08-17T17:17:36Z",
+    "ns": 0,
+    "pageid": 186,
+    "sitelinks": {
+        "afwiki": {
+            "site": "afwiki",
+            "title": "New York Stad"
+        },
+        "enwiki": {
+            "site": "enwiki",
+            "title": "New York City"
+        },
+        "enwikivoyage": {
+            "site": "enwikivoyage",
+            "title": "New York City"
+        },
+        "eswiki": {
+            "site": "eswiki",
+            "title": "Nueva York"
+        },
+        "eswikivoyage": {
+            "site": "eswikivoyage",
+            "title": "Ciudad de Nueva York"
+        }
+   },
+    "title": "Q60",
+    "type": "item"
+}
diff --git a/tests/wikidata_tests.py b/tests/wikidata_tests.py
new file mode 100644
index 0000000..2e59e0f
--- /dev/null
+++ b/tests/wikidata_tests.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8  -*-
+"""
+Tests for the WikiData parts of the page module.
+"""
+#
+# (C) Pywikipedia bot team, 2008-2013
+#
+# Distributed under the terms of the MIT license.
+#
+__version__ = '$Id$'
+
+import os
+import unittest
+import pywikibot
+import pywikibot.page
+import json
+site = pywikibot.Site('en', 'wikipedia')
+wikidata = site.data_repository()
+
+class TestLinks(unittest.TestCase):
+    """Test cases to test links stored in wikidata"""
+    wdp = pywikibot.page.ItemPage(wikidata, 'Q60')
+    wdp.id = 'q60'
+    wdp._content = json.load(open(os.path.join(os.path.split(__file__)[0], 
'pages', 'Q60_only_sitelinks.wd')))
+    wdp.get()
+
+    def test_iterlinks_page_object(self):
+        page = [pg for pg in self.wdp.iterlinks() if pg.site.language() == 
'af'][0]
+        self.assertEquals(page, pywikibot.Page(pywikibot.getSite('af', 
'wikipedia'), u'New York Stad'))
+
+    def test_iterlinks_filtering(self):
+        wikilinks = list(self.wdp.iterlinks('wikipedia'))
+        wvlinks = list(self.wdp.iterlinks('wikivoyage'))
+
+        self.assertEquals(len(wikilinks), 3)
+        self.assertEquals(len(wvlinks), 2)
+
+if __name__ == '__main__':
+    try:
+        try:
+            unittest.main()
+        except SystemExit:
+            pass
+    finally:
+        pywikibot.stopme()

-- 
To view, visit https://gerrit.wikimedia.org/r/79561
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I5cd0e235db5bb0792d39178ee0c9e097fb053823
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Merlijn van Deen <[email protected]>

_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits

Reply via email to