[Python-modules-commits] [python-wikipedia] 01/02: Import Upstream version 1.4.0

Ethan Ward ethanward-guest at moszumanska.debian.org
Fri Aug 11 20:48:04 UTC 2017


This is an automated email from the git hooks/post-receive script.

ethanward-guest pushed a commit to branch master
in repository python-wikipedia.

commit d66b7985dd1c2a7a76a530a7a275f9c29aee9040
Author: Ethan Ward <ethan.ward at mycroft.ai>
Date:   Fri Aug 11 15:46:41 2017 -0500

    Import Upstream version 1.4.0
---
 LICENSE                                 |  19 +
 MANIFEST.in                             |   1 +
 PKG-INFO                                | 122 ++++++
 README.rst                              | 106 +++++
 requirements.txt                        |   2 +
 setup.cfg                               |   5 +
 setup.py                                |  45 ++
 tests/__init__.py                       |   0
 tests/page_test.py                      | 161 +++++++
 tests/request_mock_data.py              | 188 ++++++++
 tests/search_test.py                    |  44 ++
 wikipedia.egg-info/PKG-INFO             | 122 ++++++
 wikipedia.egg-info/SOURCES.txt          |  18 +
 wikipedia.egg-info/dependency_links.txt |   1 +
 wikipedia.egg-info/requires.txt         |   2 +
 wikipedia.egg-info/top_level.txt        |   1 +
 wikipedia/__init__.py                   |   4 +
 wikipedia/exceptions.py                 |  80 ++++
 wikipedia/util.py                       |  41 ++
 wikipedia/wikipedia.py                  | 742 ++++++++++++++++++++++++++++++++
 20 files changed, 1704 insertions(+)

diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..44acf52
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,19 @@
+Copyright 2013 Jonathan Goldsmith
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..738616d
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1 @@
+include README.rst LICENSE requirements.txt
\ No newline at end of file
diff --git a/PKG-INFO b/PKG-INFO
new file mode 100644
index 0000000..743d1de
--- /dev/null
+++ b/PKG-INFO
@@ -0,0 +1,122 @@
+Metadata-Version: 1.1
+Name: wikipedia
+Version: 1.4.0
+Summary: Wikipedia API for Python
+Home-page: https://github.com/goldsmith/Wikipedia
+Author: Jonathan Goldsmith
+Author-email: jhghank@gmail.com
+License: MIT
+Description: Wikipedia
+        =========
+        
+        .. image:: https://travis-ci.org/goldsmith/Wikipedia.png?branch=master
+          :target: https://travis-ci.org/goldsmith/Wikipedia
+        .. image:: https://pypip.in/d/wikipedia/badge.png
+          :target: https://crate.io/packages/wikipedia
+        .. image:: https://pypip.in/v/wikipedia/badge.png
+          :target: https://crate.io/packages/wikipedia
+        .. image:: https://pypip.in/license/wikipedia/badge.png
+            :target: https://pypi.python.org/pypi/wikipedia/
+            :alt: License
+        
+        **Wikipedia** is a Python library that makes it easy to access and parse
+        data from Wikipedia.
+        
+        Search Wikipedia, get article summaries, get data like links and images
+        from a page, and more. Wikipedia wraps the `MediaWiki
+        API <https://www.mediawiki.org/wiki/API>`__ so you can focus on using
+        Wikipedia data, not getting it.
+        
+        .. code:: python
+        
+          >>> import wikipedia
+          >>> print(wikipedia.summary("Wikipedia"))
+          # Wikipedia (/ˌwɪkɨˈpiːdiə/ or /ˌwɪkiˈpiːdiə/ WIK-i-PEE-dee-ə) is a collaboratively edited, multilingual, free Internet encyclopedia supported by the non-profit Wikimedia Foundation...
+        
+          >>> wikipedia.search("Barack")
+          # [u'Barak (given name)', u'Barack Obama', u'Barack (brandy)', u'Presidency of Barack Obama', u'Family of Barack Obama', u'First inauguration of Barack Obama', u'Barack Obama presidential campaign, 2008', u'Barack Obama, Sr.', u'Barack Obama citizenship conspiracy theories', u'Presidential transition of Barack Obama']
+        
+          >>> ny = wikipedia.page("New York")
+          >>> ny.title
+          # u'New York'
+          >>> ny.url
+          # u'http://en.wikipedia.org/wiki/New_York'
+          >>> ny.content
+          # u'New York is a state in the Northeastern region of the United States. New York is the 27th-most exten'...
+          >>> ny.links[0]
+          # u'1790 United States Census'
+        
+          >>> wikipedia.set_lang("fr")
+          >>> wikipedia.summary("Facebook", sentences=1)
+          # Facebook est un service de réseautage social en ligne sur Internet permettant d'y publier des informations (photographies, liens, textes, etc.) en contrôlant leur visibilité par différentes catégories de personnes.
+        
+        Note: this library was designed for ease of use and simplicity, not for advanced use. If you plan on doing serious scraping or automated requests, please use `Pywikipediabot <http://www.mediawiki.org/wiki/Manual:Pywikipediabot>`__ (or one of the other more advanced `Python MediaWiki API wrappers <http://en.wikipedia.org/wiki/Wikipedia:Creating_a_bot#Python>`__), which have larger APIs, rate limiting, and other features that help us be considerate of the MediaWiki infrastructure.
+        
+        Installation
+        ------------
+        
+        To install Wikipedia, simply run:
+        
+        ::
+        
+          $ pip install wikipedia
+        
+        Wikipedia is compatible with Python 2.6+ (2.7+ to run unittest discover) and Python 3.3+.
+        
+        Documentation
+        -------------
+        
+        Read the docs at https://wikipedia.readthedocs.org/en/latest/.
+        
+        -  `Quickstart <https://wikipedia.readthedocs.org/en/latest/quickstart.html>`__
+        -  `Full API <https://wikipedia.readthedocs.org/en/latest/code.html>`__
+        
+        To run tests, clone the `repository on GitHub <https://github.com/goldsmith/Wikipedia>`__, then run:
+        
+        ::
+        
+          $ pip install -r requirements.txt
+          $ bash runtests  # will run tests for python and python3
+          $ python -m unittest discover tests/ '*test.py'  # manual style
+        
+        in the root project directory.
+        
+        To build the documentation yourself, after installing the dependencies in requirements.txt, run:
+        
+        ::
+        
+          $ pip install sphinx
+          $ cd docs/
+          $ make html
+        
+        License
+        -------
+        
+        MIT licensed. See the `LICENSE
+        file <https://github.com/goldsmith/Wikipedia/blob/master/LICENSE>`__ for
+        full details.
+        
+        Credits
+        -------
+        
+        -  `wiki-api <https://github.com/richardasaurus/wiki-api>`__ by
+           @richardasaurus for inspiration
+        -  @nmoroze and @themichaelyang for feedback and suggestions
+        -  The `Wikimedia
+           Foundation <http://wikimediafoundation.org/wiki/Home>`__ for giving
+           the world free access to data
+        
+        
+        
+        .. image:: https://d2weczhvl823v0.cloudfront.net/goldsmith/wikipedia/trend.png
+           :alt: Bitdeli badge
+           :target: https://bitdeli.com/free
+        
+        
+Keywords: python wikipedia API
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Topic :: Software Development :: Libraries
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..4f49c48
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,106 @@
+Wikipedia
+=========
+
+.. image:: https://travis-ci.org/goldsmith/Wikipedia.png?branch=master
+  :target: https://travis-ci.org/goldsmith/Wikipedia
+.. image:: https://pypip.in/d/wikipedia/badge.png
+  :target: https://crate.io/packages/wikipedia
+.. image:: https://pypip.in/v/wikipedia/badge.png
+  :target: https://crate.io/packages/wikipedia
+.. image:: https://pypip.in/license/wikipedia/badge.png
+    :target: https://pypi.python.org/pypi/wikipedia/
+    :alt: License
+
+**Wikipedia** is a Python library that makes it easy to access and parse
+data from Wikipedia.
+
+Search Wikipedia, get article summaries, get data like links and images
+from a page, and more. Wikipedia wraps the `MediaWiki
+API <https://www.mediawiki.org/wiki/API>`__ so you can focus on using
+Wikipedia data, not getting it.
+
+.. code:: python
+
+  >>> import wikipedia
+  >>> print(wikipedia.summary("Wikipedia"))
+  # Wikipedia (/ˌwɪkɨˈpiːdiə/ or /ˌwɪkiˈpiːdiə/ WIK-i-PEE-dee-ə) is a collaboratively edited, multilingual, free Internet encyclopedia supported by the non-profit Wikimedia Foundation...
+
+  >>> wikipedia.search("Barack")
+  # [u'Barak (given name)', u'Barack Obama', u'Barack (brandy)', u'Presidency of Barack Obama', u'Family of Barack Obama', u'First inauguration of Barack Obama', u'Barack Obama presidential campaign, 2008', u'Barack Obama, Sr.', u'Barack Obama citizenship conspiracy theories', u'Presidential transition of Barack Obama']
+
+  >>> ny = wikipedia.page("New York")
+  >>> ny.title
+  # u'New York'
+  >>> ny.url
+  # u'http://en.wikipedia.org/wiki/New_York'
+  >>> ny.content
+  # u'New York is a state in the Northeastern region of the United States. New York is the 27th-most exten'...
+  >>> ny.links[0]
+  # u'1790 United States Census'
+
+  >>> wikipedia.set_lang("fr")
+  >>> wikipedia.summary("Facebook", sentences=1)
+  # Facebook est un service de réseautage social en ligne sur Internet permettant d'y publier des informations (photographies, liens, textes, etc.) en contrôlant leur visibilité par différentes catégories de personnes.
+
+Note: this library was designed for ease of use and simplicity, not for advanced use. If you plan on doing serious scraping or automated requests, please use `Pywikipediabot <http://www.mediawiki.org/wiki/Manual:Pywikipediabot>`__ (or one of the other more advanced `Python MediaWiki API wrappers <http://en.wikipedia.org/wiki/Wikipedia:Creating_a_bot#Python>`__), which have larger APIs, rate limiting, and other features that help us be considerate of the MediaWiki infrastructure.
+
+Installation
+------------
+
+To install Wikipedia, simply run:
+
+::
+
+  $ pip install wikipedia
+
+Wikipedia is compatible with Python 2.6+ (2.7+ to run unittest discover) and Python 3.3+.
+
+Documentation
+-------------
+
+Read the docs at https://wikipedia.readthedocs.org/en/latest/.
+
+-  `Quickstart <https://wikipedia.readthedocs.org/en/latest/quickstart.html>`__
+-  `Full API <https://wikipedia.readthedocs.org/en/latest/code.html>`__
+
+To run tests, clone the `repository on GitHub <https://github.com/goldsmith/Wikipedia>`__, then run:
+
+::
+
+  $ pip install -r requirements.txt
+  $ bash runtests  # will run tests for python and python3
+  $ python -m unittest discover tests/ '*test.py'  # manual style
+
+in the root project directory.
+
+To build the documentation yourself, after installing the dependencies in requirements.txt, run:
+
+::
+
+  $ pip install sphinx
+  $ cd docs/
+  $ make html
+
+License
+-------
+
+MIT licensed. See the `LICENSE
+file <https://github.com/goldsmith/Wikipedia/blob/master/LICENSE>`__ for
+full details.
+
+Credits
+-------
+
+-  `wiki-api <https://github.com/richardasaurus/wiki-api>`__ by
+   @richardasaurus for inspiration
+-  @nmoroze and @themichaelyang for feedback and suggestions
+-  The `Wikimedia
+   Foundation <http://wikimediafoundation.org/wiki/Home>`__ for giving
+   the world free access to data
+
+
+
+.. image:: https://d2weczhvl823v0.cloudfront.net/goldsmith/wikipedia/trend.png
+   :alt: Bitdeli badge
+   :target: https://bitdeli.com/free
+
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..130b5ae
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,2 @@
+beautifulsoup4
+requests>=2.0.0,<3.0.0
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..861a9f5
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build = 
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..62a8f8c
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+import codecs
+import os
+import re
+import setuptools
+
+
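+# Open a file that sits next to this script as UTF-8, regardless of cwd.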
+def local_file(file):
+  return codecs.open(
+    os.path.join(os.path.dirname(__file__), file), 'r', 'utf-8'
+  )
+
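+# Build install_requires from requirements.txt, skipping blank lines.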
+install_reqs = [
+  line.strip()
+  for line in local_file('requirements.txt').readlines()
+  if line.strip() != ''
+]
+
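+# Parse the version tuple, e.g. __version__ = (1, 4, 0), out of
+# wikipedia/__init__.py without importing the package (importing
+# would require its dependencies to be installed already).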
+version = re.search(
+  "^__version__ = \((\d+), (\d+), (\d+)\)$",
+  local_file('wikipedia/__init__.py').read(),
+  re.MULTILINE
+).groups()
+
+
+setuptools.setup(
+  name = "wikipedia",
+  version = '.'.join(version),
+  author = "Jonathan Goldsmith",
+  author_email = "jhghank@gmail.com",
+  description = "Wikipedia API for Python",
+  license = "MIT",
+  keywords = "python wikipedia API",
+  url = "https://github.com/goldsmith/Wikipedia",
+  install_requires = install_reqs,
+  packages = ['wikipedia'],
+  long_description = local_file('README.rst').read(),
+  classifiers = [
+    'Development Status :: 4 - Beta',
+    'Topic :: Software Development :: Libraries',
+    'License :: OSI Approved :: MIT License',
+    'Programming Language :: Python',
+    'Programming Language :: Python :: 3'
+  ]
+)
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/page_test.py b/tests/page_test.py
new file mode 100644
index 0000000..7b69da4
--- /dev/null
+++ b/tests/page_test.py
@@ -0,0 +1,161 @@
+# -*- coding: utf-8 -*-
+from decimal import Decimal
+import unittest
+
+from wikipedia import wikipedia
+from request_mock_data import mock_data
+
+
+# mock out _wiki_request
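+# Replace the network layer with canned fixtures keyed by the sorted request
+# params, so these tests run offline and deterministically.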
+def _wiki_request(params):
+  return mock_data["_wiki_request calls"][tuple(sorted(params.items()))]
+wikipedia._wiki_request = _wiki_request
+
+
+class TestPageSetUp(unittest.TestCase):
+  """Test the functionality of wikipedia.page's __init__ and load functions."""
+
+  def test_missing(self):
+    """Test that page raises a PageError for a nonexistant page."""
+    # Callicarpa?
+    purpleberry = lambda: wikipedia.page("purpleberry", auto_suggest=False)
+    self.assertRaises(wikipedia.PageError, purpleberry)
+
+  def test_redirect_true(self):
+    """Test that a page successfully redirects a query."""
+    # no error should be raised when redirect is True (the default)
+    mp = wikipedia.page("Menlo Park, New Jersey")
+
+    self.assertEqual(mp.title, "Edison, New Jersey")
+    self.assertEqual(mp.url, "http://en.wikipedia.org/wiki/Edison,_New_Jersey")
+
+  def test_redirect_false(self):
+    """Test that page raises an error on a redirect when redirect == False."""
+    mp = lambda: wikipedia.page("Menlo Park, New Jersey", auto_suggest=False, redirect=False)
+    self.assertRaises(wikipedia.RedirectError, mp)
+
+  def test_redirect_no_normalization(self):
+    """Test that a page with redirects but no normalization query loads correctly"""
+    the_party = wikipedia.page("Communist Party", auto_suggest=False)
+    self.assertIsInstance(the_party, wikipedia.WikipediaPage)
+    self.assertEqual(the_party.title, "Communist party")
+
+  def test_redirect_with_normalization(self):
+    """Test that a page redirect with a normalized query loads correctly"""
+    the_party = wikipedia.page("communist Party", auto_suggest=False)
+    self.assertIsInstance(the_party, wikipedia.WikipediaPage)
+    self.assertEqual(the_party.title, "Communist party")
+
+  def test_redirect_normalization(self):
+    """Test that a page redirect loads correctly with or without a query normalization"""
+    capital_party = wikipedia.page("Communist Party", auto_suggest=False)
+    lower_party = wikipedia.page("communist Party", auto_suggest=False)
+
+    self.assertIsInstance(capital_party, wikipedia.WikipediaPage)
+    self.assertIsInstance(lower_party, wikipedia.WikipediaPage)
+    self.assertEqual(capital_party.title, "Communist party")
+    self.assertEqual(capital_party, lower_party)
+
+  def test_disambiguate(self):
+    """Test that page raises an error when a disambiguation page is reached."""
+    try:
+      ram = wikipedia.page("Dodge Ram (disambiguation)", auto_suggest=False, redirect=False)
+      error_raised = False
+    except wikipedia.DisambiguationError as e:
+      error_raised = True
+      options = e.options
+
+    self.assertTrue(error_raised)
+    self.assertEqual(options, [u'Dodge Ramcharger', u'Dodge Ram Van', u'Dodge Mini Ram', u'Dodge Caravan C/V', u'Dodge Caravan C/V', u'Ram C/V', u'Dodge Ram 50', u'Dodge D-Series', u'Dodge Rampage', u'Ram (brand)'])
+
+  def test_auto_suggest(self):
+    """Test that auto_suggest properly corrects a typo."""
+    # yum, butter.
+    butterfly = wikipedia.page("butteryfly")
+
+    self.assertEqual(butterfly.title, "Butterfly")
+    self.assertEqual(butterfly.url, "http://en.wikipedia.org/wiki/Butterfly")
+
+
+class TestPage(unittest.TestCase):
+  """Test the functionality of the rest of wikipedia.page."""
+
+  def setUp(self):
+    # shortest Wikipedia articles with images and sections
+    self.celtuce = wikipedia.page("Celtuce")
+    self.cyclone = wikipedia.page("Tropical Depression Ten (2005)")
+    self.great_wall_of_china = wikipedia.page("Great Wall of China")
+
+  def test_from_page_id(self):
+    """Test loading from a page id"""
+    self.assertEqual(self.celtuce, wikipedia.page(pageid=1868108))
+
+  def test_title(self):
+    """Test the title."""
+    self.assertEqual(self.celtuce.title, "Celtuce")
+    self.assertEqual(self.cyclone.title, "Tropical Depression Ten (2005)")
+
+  def test_url(self):
+    """Test the url."""
+    self.assertEqual(self.celtuce.url, "http://en.wikipedia.org/wiki/Celtuce")
+    self.assertEqual(self.cyclone.url, "http://en.wikipedia.org/wiki/Tropical_Depression_Ten_(2005)")
+
+  def test_content(self):
+    """Test the plain text content."""
+    self.assertEqual(self.celtuce.content, mock_data['data']["celtuce.content"])
+    self.assertEqual(self.cyclone.content, mock_data['data']["cyclone.content"])
+
+  def test_revision_id(self):
+    """Test the revision id."""
+    self.assertEqual(self.celtuce.revision_id, mock_data['data']["celtuce.revid"])
+    self.assertEqual(self.cyclone.revision_id, mock_data['data']["cyclone.revid"])
+
+  def test_parent_id(self):
+    """Test the parent id."""
+    self.assertEqual(self.celtuce.parent_id, mock_data['data']["celtuce.parentid"])
+    self.assertEqual(self.cyclone.parent_id, mock_data['data']["cyclone.parentid"])
+
+  def test_summary(self):
+    """Test the summary."""
+    self.assertEqual(self.celtuce.summary, mock_data['data']["celtuce.summary"])
+    self.assertEqual(self.cyclone.summary, mock_data['data']["cyclone.summary"])
+
+  def test_images(self):
+    """Test the list of image URLs."""
+    self.assertEqual(sorted(self.celtuce.images), mock_data['data']["celtuce.images"])
+    self.assertEqual(sorted(self.cyclone.images), mock_data['data']["cyclone.images"])
+
+  def test_references(self):
+    """Test the list of reference URLs."""
+    self.assertEqual(self.celtuce.references, mock_data['data']["celtuce.references"])
+    self.assertEqual(self.cyclone.references, mock_data['data']["cyclone.references"])
+
+  def test_links(self):
+    """Test the list of titles of links to Wikipedia pages."""
+    self.assertEqual(self.celtuce.links, mock_data['data']["celtuce.links"])
+    self.assertEqual(self.cyclone.links, mock_data['data']["cyclone.links"])
+
+  def test_categories(self):
+    """Test the list of categories of Wikipedia pages."""
+    self.assertEqual(self.celtuce.categories, mock_data['data']["celtuce.categories"])
+    self.assertEqual(self.cyclone.categories, mock_data['data']["cyclone.categories"])
+
+  def test_html(self):
+    """Test the full HTML method."""
+    self.assertEqual(self.celtuce.html(), mock_data['data']["celtuce.html"])
+
+  def test_sections(self):
+    """Test the list of section titles."""
+    self.assertEqual(sorted(self.cyclone.sections), mock_data['data']["cyclone.sections"])
+
+  def test_section(self):
+    """Test text content of a single section."""
+    self.assertEqual(self.cyclone.section("Impact"), mock_data['data']["cyclone.section.impact"])
+    self.assertEqual(self.cyclone.section("History"), None)
+
+  def test_coordinates(self):
+    """Test geo coordinates of a page"""
+    lat, lon = self.great_wall_of_china.coordinates
+    self.assertEqual(str(lat.quantize(Decimal('1.000'))), mock_data['data']['great_wall_of_china.coordinates.lat'])
+    self.assertEqual(str(lon.quantize(Decimal('1.000'))), mock_data['data']['great_wall_of_china.coordinates.lon'])
diff --git a/tests/request_mock_data.py b/tests/request_mock_data.py
new file mode 100644
index 0000000..cea34eb
--- /dev/null
+++ b/tests/request_mock_data.py
@@ -0,0 +1,188 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+mock_data = {
+  "_wiki_request calls": {
+
+    (('explaintext', ''), ('prop', 'extracts|revisions'), ('rvprop', 'ids'), ('titles', 'Celtuce')):
+    {'query': {'pages': {'1868108': {'extract': 'Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the la [...]
+
+    (('explaintext', ''), ('prop', 'extracts|revisions'), ('rvprop', 'ids'), ('titles', 'Tropical Depression Ten (2005)')):
+    {'query': {'pages': {'21196082': {'extract': 'Tropical Depression Ten was the tenth tropical cyclone of the record-breaking 2005 Atlantic hurricane season. It formed on August 13 from a tropical wave that emerged from the west coast of Africa on August 8. As a result of strong wind shear, the depression remained weak and did not strengthen beyond tropical depression status. The cyclone degenerated on August 14, although its remnants partially contributed to the formation of Tropical  [...]
+
+    (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'purpleberry')):
+    {'query': {'normalized': [{'to': 'Purpleberry', 'from': 'purpleberry'}], 'pages': {'-1': {'missing': '', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Purpleberry&action=edit', 'title': 'Purpleberry', 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Purpleberry'}}}},
+
+    (('limit', 1), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 1), ('srprop', ''), ('srsearch', 'Menlo Park, New Jersey')):
+    {'query-continue': {'search': {'sroffset': 1}}, 'query': {'search': [{'ns': 0, 'title': 'Edison, New Jersey'}]}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}},
+
+    (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Menlo Park, New Jersey')):
+    {'query': {'redirects': [{'to': 'Edison, New Jersey', 'from': 'Menlo Park, New Jersey'}], 'pages': {'125414': {'lastrevid': 607768264, 'pageid': 125414, 'title': 'Edison, New Jersey', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Edison,_New_Jersey&action=edit', 'counter': '', 'length': 85175, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2014-05-14T17:10:49Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Edison,_New_Jersey'}}}},
+
+    (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Communist Party')):
+    {'query': {'redirects': [{'to': 'Communist party', 'from': 'Communist Party'}], 'pages': {'37008': {'lastrevid': 608086859, 'pageid': 37008, 'title': 'Communist party', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Communist_party&action=edit', 'counter': '', 'length': 7868, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2014-05-26T01:19:01Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Communist_party'}}}},
+
+    (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'communist Party')):
+    {'query': {'redirects': [{'to': 'Communist party', 'from': 'Communist Party'}], 'normalized': [{'to': 'Communist Party', 'from': 'communist Party'}], 'pages': {'37008': {'lastrevid': 608086859, 'pageid': 37008, 'title': 'Communist party', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Communist_party&action=edit', 'counter': '', 'length': 7868, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2014-05-26T01:19:01Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki [...]
+
+    (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Communist party')):
+    {'query': {'pages': {'37008': {'lastrevid': 608086859, 'pageid': 37008, 'title': 'Communist party', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Communist_party&action=edit', 'counter': '', 'length': 7868, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2014-05-26T01:19:01Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Communist_party'}}}},
+
+    (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Edison, New Jersey')):
+    {'query': {'pages': {'125414': {'lastrevid': 607768264, 'pageid': 125414, 'title': 'Edison, New Jersey', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Edison,_New_Jersey&action=edit', 'counter': '', 'length': 85175, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2014-05-14T17:10:49Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Edison,_New_Jersey'}}}},
+
+    (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Dodge Ram (disambiguation)')):
+    {'query': {'pages': {'18803364': {'lastrevid': 567152802, 'pageid': 18803364, 'title': 'Dodge Ram (disambiguation)', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Dodge_Ram_(disambiguation)&action=edit', 'counter': '', 'length': 702, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2013-08-08T15:12:27Z', 'ns': 0, 'pageprops': {'disambiguation': ''}, 'fullurl': 'http://en.wikipedia.org/wiki/Dodge_Ram_(disambiguation)'}}}},
+
+    (('prop', 'revisions'), ('rvlimit', 1), ('rvparse', ''), ('rvprop', 'content'), ('titles', 'Dodge Ram (disambiguation)')):
+    {'query-continue': {'revisions': {'rvcontinue': 556603298}}, 'query': {'pages': {'18803364': {'ns': 0, 'pageid': 18803364, 'revisions': [{'*': '<p><b><a href="/wiki/Dodge_Ram" title="Dodge Ram">Dodge Ram</a></b> is a collective nameplate for light trucks made by <a href="/wiki/Dodge" title="Dodge">Dodge</a>\n</p>\n<ul><li><a href="/wiki/Dodge_Ramcharger" title="Dodge Ramcharger">Dodge Ramcharger</a> - full-size SUV based on the Ram chassis (first vehicle to use the Ram name)\n</li><l [...]
+
+    (('limit', 1), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 1), ('srprop', ''), ('srsearch', 'butteryfly')):
+    {'query-continue': {'search': {'sroffset': 1}}, 'query': {'searchinfo': {'suggestion': 'butterfly'}, 'search': [{'ns': 0, 'title': "Butterfly's Tongue"}]}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}},
+
+    (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'butterfly')):
+    {'query': {'normalized': [{'to': 'Butterfly', 'from': 'butterfly'}], 'pages': {'48338': {'lastrevid': 566847704, 'pageid': 48338, 'title': 'Butterfly', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Butterfly&action=edit', 'counter': '', 'length': 60572, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2013-08-07T11:15:37Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Butterfly'}}}},
+
+    (('limit', 1), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 1), ('srprop', ''), ('srsearch', 'Celtuce')):
+    {'query-continue': {'search': {'sroffset': 1}}, 'query': {'search': [{'ns': 0, 'title': 'Celtuce'}]}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}},
+
+    (('limit', 1), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 1), ('srprop', ''), ('srsearch', 'Tropical Depression Ten (2005)')):
+    {'query-continue': {'search': {'sroffset': 1}}, 'query': {'search': [{'ns': 0, 'title': 'Tropical Depression Ten (2005)'}]}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}},
+
+    (('limit', 1), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 1), ('srprop', ''), ('srsearch', 'Great Wall of China')):
+    {'query-continue': {'search': {'sroffset': 1}}, 'query': {'search': [{'ns': 0, 'title': 'Great Wall of China'}]}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}},
+
+    (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Celtuce')):
+    {'query': {'pages': {'1868108': {'lastrevid': 562756085, 'pageid': 1868108, 'title': 'Celtuce', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Celtuce&action=edit', 'counter': '', 'length': 1662, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2013-08-17T03:30:23Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Celtuce'}}}},
+
+    (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Tropical Depression Ten (2005)')):
+    {'query': {'pages': {'21196082': {'lastrevid': 572715399, 'pageid': 21196082, 'title': 'Tropical Depression Ten (2005)', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Tropical_Depression_Ten_(2005)&action=edit', 'counter': '', 'length': 8543, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2013-09-18T13:45:33Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Tropical_Depression_Ten_(2005)'}}}},
+
+    (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Great Wall of China')):
+    {'query': {'pages': {'5094570': {'lastrevid': 604138653, 'pageid': 5094570, 'title': 'Great Wall of China', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Great_Wall_of_China&action=edit', 'counter': '', 'length': 23895, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2013-08-17T03:30:23Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Great_Wall_of_China'}}}},
+
+    (('explaintext', ''), ('prop', 'extracts'), ('titles', 'Celtuce')):
+    {'query': {'pages': {'1868108': {'extract': 'Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the la [...]
+
+    (('exintro', ''), ('explaintext', ''), ('prop', 'extracts'), ('titles', 'Celtuce')):
+    {'query': {'pages': {'1868108': {'extract': 'Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the la [...]
+
+    (('exintro', ''), ('explaintext', ''), ('prop', 'extracts'), ('titles', 'Tropical Depression Ten (2005)')):
+    {'query': {'pages': {'21196082': {'extract': 'Tropical Depression Ten was the tenth tropical cyclone of the record-breaking 2005 Atlantic hurricane season. It formed on August 13 from a tropical wave that emerged from the west coast of Africa on August 8. As a result of strong wind shear, the depression remained weak and did not strengthen beyond tropical depression status. The cyclone degenerated on August 14, although its remnants partially contributed to the formation of Tropical  [...]
+
+    (('generator', 'images'), ('gimlimit', 'max'), ('iiprop', 'url'), ('prop', 'imageinfo'), ('titles', 'Celtuce')):
+    {'query': {'pages': {'22263385': {'imagerepository': 'local', 'ns': 6, 'pageid': 22263385, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/en/9/99/Question_book-new.svg', 'descriptionurl': 'http://en.wikipedia.org/wiki/File:Question_book-new.svg'}], 'title': 'File:Question book-new.svg'}, '-1': {'imagerepository': 'shared', 'ns': 6, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/commons/8/87/Celtuce.jpg', 'descriptionurl': 'http://commons.wikimedia.org/wiki [...]
+
+    (('generator', 'images'), ('gimlimit', 'max'), ('iiprop', 'url'), ('prop', 'imageinfo'), ('titles', 'Tropical Depression Ten (2005)')):
+    {'query': {'pages': {'33285577': {'imagerepository': 'local', 'ns': 6, 'pageid': 33285577, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/en/4/4a/Commons-logo.svg', 'descriptionurl': 'http://en.wikipedia.org/wiki/File:Commons-logo.svg'}], 'title': 'File:Commons-logo.svg'}, '23473511': {'imagerepository': 'local', 'ns': 6, 'pageid': 23473511, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/en/4/48/Folder_Hexagonal_Icon.svg', 'descriptionurl': 'http://en.wiki [...]
+
+    (('ellimit', 'max'), ('prop', 'extlinks'), ('titles', 'Celtuce')):
+    {'query': {'pages': {'1868108': {'extlinks': [{'*': 'http://ndb.nal.usda.gov/ndb/search/list'}, {'*': 'http://ndb.nal.usda.gov/ndb/search/list?qlookup=11145&format=Full'}], 'ns': 0, 'pageid': 1868108, 'title': 'Celtuce'}}}, 'limits': {'extlinks': 500}},
+
+    (('ellimit', 'max'), ('prop', 'extlinks'), ('titles', 'Tropical Depression Ten (2005)')):
+    {'query': {'pages': {'21196082': {'extlinks': [{'*': 'http://books.google.com/?id=-a8DRl1HuwoC&q=%22tropical+depression+ten%22+2005&dq=%22tropical+depression+ten%22+2005'}, {'*': 'http://facstaff.unca.edu/chennon/research/documents/erb_ncur2006_preprint.pdf'}, {'*': 'http://www.nhc.noaa.gov/archive/2005/TEN.shtml?'}, {'*': 'http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.001.shtml?'}, {'*': 'http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.002.shtml?'}, {'*': 'http:/ [...]
+
+    (('pllimit', 'max'), ('plnamespace', 0), ('prop', 'links'), ('titles', 'Celtuce')):
+    {'query': {'pages': {'1868108': {'ns': 0, 'pageid': 1868108, 'links': [{'ns': 0, 'title': 'Calcium'}, {'ns': 0, 'title': 'Carbohydrate'}, {'ns': 0, 'title': 'Chinese language'}, {'ns': 0, 'title': 'Dietary Reference Intake'}, {'ns': 0, 'title': 'Dietary fiber'}, {'ns': 0, 'title': 'Fat'}, {'ns': 0, 'title': 'Folate'}, {'ns': 0, 'title': 'Food energy'}, {'ns': 0, 'title': 'Iron'}, {'ns': 0, 'title': 'Lettuce'}, {'ns': 0, 'title': 'Lhasa'}, {'ns': 0, 'title': 'Magnesium in biology'}, { [...]
+
+    (('pllimit', 'max'), ('plnamespace', 0), ('prop', 'links'), ('titles', 'Tropical Depression Ten (2005)')):
+    {'query': {'pages': {'21196082': {'ns': 0, 'pageid': 21196082, 'links': [{'ns': 0, 'title': '2005 Atlantic hurricane season'}, {'ns': 0, 'title': '2005 Azores subtropical storm'}, {'ns': 0, 'title': 'Atlantic Ocean'}, {'ns': 0, 'title': 'Atmospheric circulation'}, {'ns': 0, 'title': 'Atmospheric convection'}, {'ns': 0, 'title': 'Azores'}, {'ns': 0, 'title': 'Bahamas'}, {'ns': 0, 'title': 'Bar (unit)'}, {'ns': 0, 'title': 'Barbados'}, {'ns': 0, 'title': 'Bermuda'}, {'ns': 0, 'title':  [...]
+
+    (('cllimit', 'max'), ('prop', 'categories'), ('titles', 'Celtuce')):
+    {"query":{"pages":{"1868108":{"pageid":1868108,"ns":0,"title":"Celtuce","categories":[{"ns":14,"title":"All articles lacking sources"},{"ns":14,"title":"All stub articles"},{"ns":14,"title":"Articles containing Chinese-language text"},{"ns":14,"title":"Articles lacking sources from December 2009"},{"ns":14,"title":"Stem vegetables"},{"ns":14,"title":"Vegetable stubs"}]}}},"limits":{"categories":500}},
+
+    (('cllimit', 'max'), ('prop', 'categories'), ('titles', 'Tropical Depression Ten (2005)')):
+    {"query":{"pages":{"21196082":{"pageid":21196082,"ns":0,"title":"Tropical Depression Ten (2005)","categories":[{"ns":14,"title":"2005 Atlantic hurricane season"},{"ns":14,"title":"Articles with hAudio microformats"},{"ns":14,"title":"Atlantic tropical depressions"},{"ns":14,"title":"CS1 errors: dates"},{"ns":14,"title":"Commons category with local link same as on Wikidata"},{"ns":14,"title":"Featured articles"},{"ns":14,"title":"Hurricane Katrina"},{"ns":14,"title":"Spoken articles"} [...]
+
+    (('prop', 'revisions'), ('rvlimit', 1), ('rvparse', ''), ('rvprop', 'content'), ('titles', 'Celtuce')):
+    {'query-continue': {'revisions': {'rvcontinue': 547842204}}, 'query': {'pages': {'1868108': {'ns': 0, 'pageid': 1868108, 'revisions': [{'*': '<table class="metadata plainlinks ambox ambox-content ambox-Unreferenced" style="" role="presentation">\n<tr><td class="mbox-image"><div style="width: 52px;"><a href="/wiki/File:Question_book-new.svg" class="image"><img alt="Question book-new.svg" src="//upload.wikimedia.org/wikipedia/en/thumb/9/99/Question_book-new.svg/50px-Question_book-new.s [...]
+
+    (('action', 'parse'), ('prop', 'sections'), ('titles', 'Tropical Depression Ten (2005)')):
+    {'parse': {'sections': [{'index': '1', 'level': '2', 'fromtitle': 'Tropical_Depression_Ten_(2005)', 'toclevel': 1, 'number': '1', 'byteoffset': 1369, 'line': 'Meteorological history', 'anchor': 'Meteorological_history'}, {'index': '2', 'level': '2', 'fromtitle': 'Tropical_Depression_Ten_(2005)', 'toclevel': 1, 'number': '2', 'byteoffset': 6248, 'line': 'Impact', 'anchor': 'Impact'}, {'index': '3', 'level': '2', 'fromtitle': 'Tropical_Depression_Ten_(2005)', 'toclevel': 1, 'number': ' [...]
+
+    (('limit', 10), ('list', 'search'), ('srlimit', 10), ('srprop', ''), ('srsearch', 'Barack Obama')):
+    {'query-continue': {'search': {'sroffset': 10}}, 'query': {'searchinfo': {'totalhits': 12987}, 'search': [{'ns': 0, 'title': 'Barack Obama'}, {'ns': 0, 'title': 'Barack Obama, Sr.'}, {'ns': 0, 'title': 'Presidency of Barack Obama'}, {'ns': 0, 'title': 'Barack Obama presidential campaign, 2008'}, {'ns': 0, 'title': 'List of federal judges appointed by Barack Obama'}, {'ns': 0, 'title': 'Barack Obama in comics'}, {'ns': 0, 'title': 'Political positions of Barack Obama'}, {'ns': 0, 'tit [...]
+
+    (('limit', 3), ('list', 'search'), ('srlimit', 3), ('srprop', ''), ('srsearch', 'Porsche')):
+    {'query-continue': {'search': {'sroffset': 3}}, 'query': {'searchinfo': {'totalhits': 5335}, 'search': [{'ns': 0, 'title': 'Porsche'}, {'ns': 0, 'title': 'Porsche in motorsport'}, {'ns': 0, 'title': 'Porsche 911 GT3'}]}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}},
+
+    (('limit', 10), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 10), ('srprop', ''), ('srsearch', 'hallelulejah')):
+    {'query': {'searchinfo': {'suggestion': 'hallelujah'}, 'search': []}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}},
+
+    (('limit', 10), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 10), ('srprop', ''), ('srsearch', 'qmxjsudek')):
+    {'query': {'search': []}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}},
+
+    (('inprop', 'url'), ('pageids', 1868108), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', '')):
+    {'query': {'pages': {'1868108': {'lastrevid': 575687826, 'pageid': 1868108, 'title': 'Celtuce', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Celtuce&action=edit', 'counter': '', 'length': 1960, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2014-01-12T09:30:00Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Celtuce'}}}},
+
+    (('colimit', 'max'), ('prop', 'coordinates'), ('titles', 'Great Wall of China')):
+    {'query': {'pages': {'5094570': {'ns': 0, 'pageid': 5094570, 'coordinates': [{'lat': 40.6769, 'globe': 'earth', 'lon': 117.232, 'primary': ''}], 'title': 'Great Wall of China'}}}, 'limits': {'extlinks': 500}},
+
+    (('gscoord', '40.67693|117.23193'), ('gslimit', 10), ('gsradius', 1000), ('list', 'geosearch')):
+    {'query': {'geosearch': [{'pageid': 5094570, 'title': 'Great Wall of China', 'lon': 117.232, 'primary': '', 'lat': 40.6769, 'dist': 6.8, 'ns': 0}]}},
+
+    (('gscoord', '40.67693|117.23193'), ('gslimit', 10), ('gsradius', 10000), ('list', 'geosearch')):
+    {'query': {'geosearch': [{'pageid': 5094570, 'title': 'Great Wall of China', 'lon': 117.232, 'primary': '', 'lat': 40.6769, 'dist': 6.8, 'ns': 0}, {'pageid': 10135375, 'title': 'Jinshanling', 'lon': 117.244, 'primary': '', 'lat': 40.6764, 'dist': 1019.6, 'ns': 0}]}},
+
+    (('gscoord', '40.67693|117.23193'), ('gslimit', 10), ('gsradius', 1000), ('list', 'geosearch'), ('titles', 'Great Wall of China')):
+    {'query': {'geosearch': [{'pageid': 5094570, 'title': 'Great Wall of China', 'lon': 117.232, 'primary': '', 'lat': 40.6769, 'dist': 6.8, 'ns': 0}]}},
+
+    (('gscoord', '40.67693|117.23193'), ('gslimit', 10), ('gsradius', 1000), ('list', 'geosearch'), ('titles', 'Test')):
+    {'query': {'geosearch': []}},
+  },
+
+  "data": {
+    "celtuce.content": 'Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be use [...]
+
+    "celtuce.parentid": 574302108,
+
+    "celtuce.revid": 575687826,
+
+    "celtuce.summary": "Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be use [...]
+
+    "celtuce.images": ['http://upload.wikimedia.org/wikipedia/commons/7/79/VegCorn.jpg', 'http://upload.wikimedia.org/wikipedia/commons/8/87/Celtuce.jpg', 'http://upload.wikimedia.org/wikipedia/commons/d/dc/The_farmer%27s_market_near_the_Potala_in_Lhasa.jpg', 'http://upload.wikimedia.org/wikipedia/en/9/99/Question_book-new.svg'],
+
+    "celtuce.references": ['http://ndb.nal.usda.gov/ndb/search/list', 'http://ndb.nal.usda.gov/ndb/search/list?qlookup=11145&format=Full'],
+
+    "celtuce.links": ['Calcium', 'Carbohydrate', 'Chinese language', 'Dietary Reference Intake', 'Dietary fiber', 'Fat', 'Folate', 'Food energy', 'Iron', 'Lettuce', 'Lhasa', 'Magnesium in biology', 'Manganese', 'Niacin', 'Pantothenic acid', 'Phosphorus', 'Pinyin', 'Plant stem', 'Potassium', 'Protein (nutrient)', 'Riboflavin', 'Sodium', 'Stir frying', 'Thiamine', 'Vegetable', 'Vitamin A', 'Vitamin B6', 'Vitamin C', 'Zinc'],
+
+    "celtuce.categories": ['All articles lacking sources', 'All stub articles', 'Articles containing Chinese-language text', 'Articles lacking sources from December 2009', 'Stem vegetables', 'Vegetable stubs'],
+
+    "celtuce.html": '<table class="metadata plainlinks ambox ambox-content ambox-Unreferenced" style="" role="presentation">\n<tr><td class="mbox-image"><div style="width: 52px;"><a href="/wiki/File:Question_book-new.svg" class="image"><img alt="Question book-new.svg" src="//upload.wikimedia.org/wikipedia/en/thumb/9/99/Question_book-new.svg/50px-Question_book-new.svg.png" width="50" height="39" srcset="//upload.wikimedia.org/wikipedia/en/thumb/9/99/Question_book-new.svg/75px-Question_boo [...]
+
+    "cyclone.content": 'Tropical Depression Ten was the tenth tropical cyclone of the record-breaking 2005 Atlantic hurricane season. It formed on August 13 from a tropical wave that emerged from the west coast of Africa on August 8. As a result of strong wind shear, the depression remained weak and did not strengthen beyond tropical depression status. The cyclone degenerated on August 14, although its remnants partially contributed to the formation of Tropical Depression Twelve, which e [...]
+
+    "cyclone.revid": 572715399,
+
+    "cyclone.parentid": 539367750,
+
+    "cyclone.summary": 'Tropical Depression Ten was the tenth tropical cyclone of the record-breaking 2005 Atlantic hurricane season. It formed on August 13 from a tropical wave that emerged from the west coast of Africa on August 8. As a result of strong wind shear, the depression remained weak and did not strengthen beyond tropical depression status. The cyclone degenerated on August 14, although its remnants partially contributed to the formation of Tropical Depression Twelve, which e [...]
+
+    "cyclone.images": ['http://upload.wikimedia.org/wikipedia/commons/3/33/Tropical_Depression_Ten_%282005%29.ogg', 'http://upload.wikimedia.org/wikipedia/commons/3/37/People_icon.svg', 'http://upload.wikimedia.org/wikipedia/commons/4/47/Sound-icon.svg', 'http://upload.wikimedia.org/wikipedia/commons/4/4a/TD_10_August_13%2C_2005.jpg', 'http://upload.wikimedia.org/wikipedia/commons/7/7d/Tropical_Depression_10_%282005%29.png', 'http://upload.wikimedia.org/wikipedia/commons/8/89/Cyclone_Cat [...]
+
+    "cyclone.references": ['http://books.google.com/?id=-a8DRl1HuwoC&q=%22tropical+depression+ten%22+2005&dq=%22tropical+depression+ten%22+2005', 'http://facstaff.unca.edu/chennon/research/documents/erb_ncur2006_preprint.pdf', 'http://www.nhc.noaa.gov/archive/2005/TEN.shtml?', 'http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.001.shtml?', 'http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.002.shtml?', 'http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.003.shtml?', ' [...]
+
+    "cyclone.links": ['2005 Atlantic hurricane season', '2005 Azores subtropical storm', 'Atlantic Ocean', 'Atmospheric circulation', 'Atmospheric convection', 'Azores', 'Bahamas', 'Bar (unit)', 'Barbados', 'Bermuda', 'High pressure area', 'Hurricane Beta', 'Hurricane Cindy (2005)', 'Hurricane Dennis', 'Hurricane Emily (2005)', 'Hurricane Epsilon', 'Hurricane Irene (2005)', 'Hurricane Katrina', 'Hurricane Maria (2005)', 'Hurricane Nate (2005)', 'Hurricane Ophelia (2005)', 'Hurricane Phil [...]
+
+    "cyclone.categories": ['2005 Atlantic hurricane season', 'Articles with hAudio microformats', 'Atlantic tropical depressions', 'CS1 errors: dates', 'Commons category with local link same as on Wikidata', 'Featured articles', 'Hurricane Katrina', 'Spoken articles'],
+
+    "cyclone.sections": ['External links', 'Impact', 'Meteorological history', 'References', 'See also'],
+
+    "cyclone.section.impact": 'Because Tropical Depression Ten never approached land as a tropical cyclone, no tropical cyclone watches and warnings were issued for any land masses. No effects, damages, or fatalities were reported, and no ships reported tropical storm-force winds in association with the depression. The system did not attain tropical storm status; as such, it was not given a name by the National Hurricane Center. The storm partially contributed to the formation of Hurrica [...]
+
+    "barack.search": ['Barack Obama', 'Barack Obama, Sr.', 'Presidency of Barack Obama', 'Barack Obama presidential campaign, 2008', 'List of federal judges appointed by Barack Obama', 'Barack Obama in comics', 'Political positions of Barack Obama', 'Barack Obama on social media', 'List of Batman: The Brave and the Bold characters', 'Family of Barack Obama'],
+
+    "porsche.search": ['Porsche', 'Porsche in motorsport', 'Porsche 911 GT3'],
+
+    "great_wall_of_china.coordinates.lat": '40.677',
+    "great_wall_of_china.coordinates.lon": '117.232',
+
+    "great_wall_of_china.geo_seach": ['Great Wall of China'],
+
+    "great_wall_of_china.geo_seach_with_radius": ['Great Wall of China', 'Jinshanling'],
+
+    "great_wall_of_china.geo_seach_with_existing_article_name": ['Great Wall of China'],
+
+    "great_wall_of_china.geo_seach_with_non_existing_article_name": [],
+  }
+}
diff --git a/tests/search_test.py b/tests/search_test.py
new file mode 100644
index 0000000..a021138
--- /dev/null
+++ b/tests/search_test.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+import unittest
+
+from collections import defaultdict
+
+from wikipedia import wikipedia
+from request_mock_data import mock_data
+
+
+# mock out _wiki_request
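+# This callable mock also counts how many times each parameter set is
+# requested before returning the canned fixture for those params.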
+class _wiki_request(object):
+
+  calls = defaultdict(int)
+
+  @classmethod
+  def __call__(cls, params):
+    cls.calls[str(params)] += 1
+    return mock_data["_wiki_request calls"][tuple(sorted(params.items()))]
+
+wikipedia._wiki_request = _wiki_request()
+
+
+class TestSearch(unittest.TestCase):
+  """Test the functionality of wikipedia.search."""
+
+  def test_search(self):
+    """Test parsing a Wikipedia request result."""
+    self.assertEqual(wikipedia.search("Barack Obama"), mock_data['data']["barack.search"])
+
+  def test_limit(self):
+    """Test limiting a request results."""
+    self.assertEqual(wikipedia.search("Porsche", results=3), mock_data['data']["porsche.search"])
+
+  def test_suggestion(self):
+    """Test getting a suggestion as well as search results."""
+    search, suggestion = wikipedia.search("hallelulejah", suggestion=True)
+    self.assertEqual(search, [])
+    self.assertEqual(suggestion, u'hallelujah')
+
+  def test_suggestion_none(self):
+    """Test getting a suggestion when there is no suggestion."""
+    search, suggestion = wikipedia.search("qmxjsudek", suggestion=True)
+    self.assertEqual(search, [])
+    self.assertEqual(suggestion, None)
diff --git a/wikipedia.egg-info/PKG-INFO b/wikipedia.egg-info/PKG-INFO
new file mode 100644
index 0000000..743d1de
--- /dev/null
+++ b/wikipedia.egg-info/PKG-INFO
@@ -0,0 +1,122 @@
+Metadata-Version: 1.1
+Name: wikipedia
+Version: 1.4.0
+Summary: Wikipedia API for Python
+Home-page: https://github.com/goldsmith/Wikipedia
+Author: Jonathan Goldsmith
+Author-email: jhghank@gmail.com
+License: MIT
+Description: Wikipedia
+        =========
+        
+        .. image:: https://travis-ci.org/goldsmith/Wikipedia.png?branch=master
+          :target: https://travis-ci.org/goldsmith/Wikipedia
+        .. image:: https://pypip.in/d/wikipedia/badge.png
+          :target: https://crate.io/packages/wikipedia
+        .. image:: https://pypip.in/v/wikipedia/badge.png
+          :target: https://crate.io/packages/wikipedia
+        .. image:: https://pypip.in/license/wikipedia/badge.png
+            :target: https://pypi.python.org/pypi/wikipedia/
+            :alt: License
+        
+        **Wikipedia** is a Python library that makes it easy to access and parse
+        data from Wikipedia.
+        
+        Search Wikipedia, get article summaries, get data like links and images
+        from a page, and more. Wikipedia wraps the `MediaWiki
+        API <https://www.mediawiki.org/wiki/API>`__ so you can focus on using
+        Wikipedia data, not getting it.
+        
+        .. code:: python
+        
+          >>> import wikipedia
+          >>> print(wikipedia.summary("Wikipedia"))
+          # Wikipedia (/ˌwɪkɨˈpiːdiə/ or /ˌwɪkiˈpiːdiə/ WIK-i-PEE-dee-ə) is a collaboratively edited, multilingual, free Internet encyclopedia supported by the non-profit Wikimedia Foundation...
+        
+          >>> wikipedia.search("Barack")
+          # [u'Barak (given name)', u'Barack Obama', u'Barack (brandy)', u'Presidency of Barack Obama', u'Family of Barack Obama', u'First inauguration of Barack Obama', u'Barack Obama presidential campaign, 2008', u'Barack Obama, Sr.', u'Barack Obama citizenship conspiracy theories', u'Presidential transition of Barack Obama']
+        
+          >>> ny = wikipedia.page("New York")
+          >>> ny.title
+          # u'New York'
+          >>> ny.url
+          # u'http://en.wikipedia.org/wiki/New_York'
+          >>> ny.content
+          # u'New York is a state in the Northeastern region of the United States. New York is the 27th-most exten'...
+          >>> ny.links[0]
+          # u'1790 United States Census'
+        
+          >>> wikipedia.set_lang("fr")
+          >>> wikipedia.summary("Facebook", sentences=1)
+          # Facebook est un service de réseautage social en ligne sur Internet permettant d'y publier des informations (photographies, liens, textes, etc.) en contrôlant leur visibilité par différentes catégories de personnes.
+        
+        Note: this library was designed for ease of use and simplicity, not for advanced use. If you plan on doing serious scraping or automated requests, please use `Pywikipediabot <http://www.mediawiki.org/wiki/Manual:Pywikipediabot>`__ (or one of the other more advanced `Python MediaWiki API wrappers <http://en.wikipedia.org/wiki/Wikipedia:Creating_a_bot#Python>`__), which have larger APIs, rate limiting, and other features that help us be considerate of the MediaWiki infrastructure.
+        
+        Installation
+        ------------
+        
+        To install Wikipedia, simply run:
+        
+        ::
+        
+          $ pip install wikipedia
+        
+        Wikipedia is compatible with Python 2.6+ (2.7+ to run unittest discover) and Python 3.3+.
+        
+        Documentation
+        -------------
+        
+        Read the docs at https://wikipedia.readthedocs.org/en/latest/.
+        
+        -  `Quickstart <https://wikipedia.readthedocs.org/en/latest/quickstart.html>`__
+        -  `Full API <https://wikipedia.readthedocs.org/en/latest/code.html>`__
+        
+        To run tests, clone the `repository on GitHub <https://github.com/goldsmith/Wikipedia>`__, then run:
+        
+        ::
+        
+          $ pip install -r requirements.txt
+          $ bash runtests  # will run tests for python and python3
+          $ python -m unittest discover tests/ '*test.py'  # manual style
+        
+        in the root project directory.
+        
+        To build the documentation yourself, after installing the dependencies in requirements.txt, run:
+        
+        ::
+        
+          $ pip install sphinx
+          $ cd docs/
+          $ make html
+        
+        License
+        -------
+        
+        MIT licensed. See the `LICENSE
+        file <https://github.com/goldsmith/Wikipedia/blob/master/LICENSE>`__ for
+        full details.
+        
+        Credits
+        -------
+        
+        -  `wiki-api <https://github.com/richardasaurus/wiki-api>`__ by
+           @richardasaurus for inspiration
+        -  @nmoroze and @themichaelyang for feedback and suggestions
+        -  The `Wikimedia
+           Foundation <http://wikimediafoundation.org/wiki/Home>`__ for giving
+           the world free access to data
+        
+        
+        
+        .. image:: https://d2weczhvl823v0.cloudfront.net/goldsmith/wikipedia/trend.png
+           :alt: Bitdeli badge
+           :target: https://bitdeli.com/free
+        
+        
+Keywords: python wikipedia API
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Topic :: Software Development :: Libraries
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
diff --git a/wikipedia.egg-info/SOURCES.txt b/wikipedia.egg-info/SOURCES.txt
new file mode 100644
index 0000000..8b98b1c
--- /dev/null
+++ b/wikipedia.egg-info/SOURCES.txt
@@ -0,0 +1,18 @@
+LICENSE
+MANIFEST.in
+README.rst
+requirements.txt
+setup.py
+tests/__init__.py
+tests/page_test.py
+tests/request_mock_data.py
+tests/search_test.py
+wikipedia/__init__.py
+wikipedia/exceptions.py
+wikipedia/util.py
+wikipedia/wikipedia.py
+wikipedia.egg-info/PKG-INFO
+wikipedia.egg-info/SOURCES.txt
+wikipedia.egg-info/dependency_links.txt
+wikipedia.egg-info/requires.txt
+wikipedia.egg-info/top_level.txt
\ No newline at end of file
diff --git a/wikipedia.egg-info/dependency_links.txt b/wikipedia.egg-info/dependency_links.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/wikipedia.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/wikipedia.egg-info/requires.txt b/wikipedia.egg-info/requires.txt
new file mode 100644
index 0000000..611c2bc
--- /dev/null
+++ b/wikipedia.egg-info/requires.txt
@@ -0,0 +1,2 @@
+beautifulsoup4
+requests>=2.0.0,<3.0.0
\ No newline at end of file
diff --git a/wikipedia.egg-info/top_level.txt b/wikipedia.egg-info/top_level.txt
new file mode 100644
index 0000000..e2589d2
--- /dev/null
+++ b/wikipedia.egg-info/top_level.txt
@@ -0,0 +1 @@
+wikipedia
diff --git a/wikipedia/__init__.py b/wikipedia/__init__.py
new file mode 100644
index 0000000..0a6ee25
--- /dev/null
+++ b/wikipedia/__init__.py
@@ -0,0 +1,4 @@
+from .wikipedia import *
+from .exceptions import *
+
+__version__ = (1, 4, 0)
diff --git a/wikipedia/exceptions.py b/wikipedia/exceptions.py
new file mode 100644
index 0000000..0295b1c
--- /dev/null
+++ b/wikipedia/exceptions.py
@@ -0,0 +1,80 @@
+"""
+Global wikipedia exception and warning classes.
+"""
+
+import sys
+
+
+ODD_ERROR_MESSAGE = "This shouldn't happen. Please report on GitHub: github.com/goldsmith/Wikipedia"
+
+
+class WikipediaException(Exception):
+  """Base Wikipedia exception class."""
+
+  def __init__(self, error):
+    self.error = error
+
+  def __unicode__(self):
+    return "An unknown error occured: \"{0}\". Please report it on GitHub!".format(self.error)
+
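+  # Python 2 uses __unicode__ directly; on Python 3, alias __str__ to it so
+  # str(exception) produces the same message.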
+  if sys.version_info > (3, 0):
+    def __str__(self):
+      return self.__unicode__()
... 853 lines suppressed ...

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/python-wikipedia.git


