[Python-modules-commits] [python-requests-cache] 01/07: Import python-requests-cache_0.4.10.orig.tar.gz

Sandro Tosi morph at moszumanska.debian.org
Mon Jan 18 00:42:00 UTC 2016


This is an automated email from the git hooks/post-receive script.

morph pushed a commit to branch master
in repository python-requests-cache.

commit 28c86001c1c741c25e3ba8108a2a3914e40a0e85
Author: Sandro Tosi <morph at debian.org>
Date:   Mon Jan 18 00:08:45 2016 +0000

    Import python-requests-cache_0.4.10.orig.tar.gz
---
 HISTORY.rst                               |  5 +++
 PKG-INFO                                  |  9 +++--
 README.rst                                |  2 +-
 requests_cache.egg-info/PKG-INFO          |  9 +++--
 requests_cache/__init__.py                |  2 +-
 requests_cache/backends/base.py           |  2 ++
 requests_cache/backends/storage/dbdict.py | 22 ++++--------
 requests_cache/core.py                    | 23 +++++++++---
 setup.py                                  |  2 +-
 tests/test_cache.py                       | 59 ++++++++++++++++++++-----------
 10 files changed, 88 insertions(+), 47 deletions(-)

diff --git a/HISTORY.rst b/HISTORY.rst
index 221a024..706d462 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -3,6 +3,11 @@
 History
 -------
 
+0.4.10 (2015-04-28)
++++++++++++++++++++
+* Better transactional handling in sqlite #50, thanks to @rgant
+* Compatibility with streaming in requests >= 2.6.x
+
 
 0.4.9 (2015-01-17)
 ++++++++++++++++++
diff --git a/PKG-INFO b/PKG-INFO
index 6b0cce3..0255666 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: requests-cache
-Version: 0.4.9
+Version: 0.4.10
 Summary: Persistent cache for requests library
 Home-page: https://github.com/reclosedev/requests-cache
 Author: Roman Haritonov
@@ -34,7 +34,7 @@ Description: requests-cache
         
         And all responses with headers and cookies will be transparently cached to
         `demo_cache.sqlite` database. For example, following code will take only
-        1-2 seconds instead 10, and will run instantly on next launch:
+        1-2 seconds instead of 10, and will run instantly on next launch:
         
         .. code-block:: python
         
@@ -70,6 +70,11 @@ Description: requests-cache
         History
         -------
         
+        0.4.10 (2015-04-28)
+        +++++++++++++++++++
+        * Better transactional handling in sqlite #50, thanks to @rgant
+        * Compatibility with streaming in requests >= 2.6.x
+        
         
         0.4.9 (2015-01-17)
         ++++++++++++++++++
diff --git a/README.rst b/README.rst
index eec4725..818a24f 100644
--- a/README.rst
+++ b/README.rst
@@ -26,7 +26,7 @@ Just write:
 
 And all responses with headers and cookies will be transparently cached to
 `demo_cache.sqlite` database. For example, following code will take only
-1-2 seconds instead 10, and will run instantly on next launch:
+1-2 seconds instead of 10, and will run instantly on next launch:
 
 .. code-block:: python
 
diff --git a/requests_cache.egg-info/PKG-INFO b/requests_cache.egg-info/PKG-INFO
index 6b0cce3..0255666 100644
--- a/requests_cache.egg-info/PKG-INFO
+++ b/requests_cache.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: requests-cache
-Version: 0.4.9
+Version: 0.4.10
 Summary: Persistent cache for requests library
 Home-page: https://github.com/reclosedev/requests-cache
 Author: Roman Haritonov
@@ -34,7 +34,7 @@ Description: requests-cache
         
         And all responses with headers and cookies will be transparently cached to
         `demo_cache.sqlite` database. For example, following code will take only
-        1-2 seconds instead 10, and will run instantly on next launch:
+        1-2 seconds instead of 10, and will run instantly on next launch:
         
         .. code-block:: python
         
@@ -70,6 +70,11 @@ Description: requests-cache
         History
         -------
         
+        0.4.10 (2015-04-28)
+        +++++++++++++++++++
+        * Better transactional handling in sqlite #50, thanks to @rgant
+        * Compatibility with streaming in requests >= 2.6.x
+        
         
         0.4.9 (2015-01-17)
         ++++++++++++++++++
diff --git a/requests_cache/__init__.py b/requests_cache/__init__.py
index 1742617..368842e 100644
--- a/requests_cache/__init__.py
+++ b/requests_cache/__init__.py
@@ -23,7 +23,7 @@
     :license: BSD, see LICENSE for more details.
 """
 __docformat__ = 'restructuredtext'
-__version__ = '0.4.9'
+__version__ = '0.4.10'
 
 from .core import(
     CachedSession, install_cache, uninstall_cache,
diff --git a/requests_cache/backends/base.py b/requests_cache/backends/base.py
index 60287c0..1a05037 100644
--- a/requests_cache/backends/base.py
+++ b/requests_cache/backends/base.py
@@ -148,6 +148,8 @@ class BaseCache(object):
             result = _RawStore()
             for field in self._raw_response_attrs:
                 setattr(result, field, getattr(value, field, None))
+            if result._original_response is not None:
+                setattr(result._original_response, "fp", None)  # _io.BufferedReader is not picklable
             value = result
         return value
 
diff --git a/requests_cache/backends/storage/dbdict.py b/requests_cache/backends/storage/dbdict.py
index ef0f516..0d6ce59 100644
--- a/requests_cache/backends/storage/dbdict.py
+++ b/requests_cache/backends/storage/dbdict.py
@@ -10,7 +10,7 @@ from collections import MutableMapping
 import sqlite3 as sqlite
 from contextlib import contextmanager
 try:
-   import threading
+    import threading
 except ImportError:
     import dummy_threading as threading
 try:
@@ -21,7 +21,6 @@ except ImportError:
 from requests_cache.compat import bytes
 
 
-
 class DbDict(MutableMapping):
     """ DbDict - a dictionary-like object for saving large datasets to `sqlite` database
 
@@ -122,21 +121,14 @@ class DbDict(MutableMapping):
 
     def __setitem__(self, key, item):
         with self.connection(True) as con:
-            if con.execute("select key from `%s` where key=?" %
-                           self.table_name, (key,)).fetchone():
-                con.execute("update `%s` set value=? where key=?" %
-                            self.table_name, (item, key))
-            else:
-                con.execute("insert into `%s` (key,value) values (?,?)" %
-                            self.table_name, (key, item))
+            con.execute("insert or replace into `%s` (key,value) values (?,?)" %
+                        self.table_name, (key, item))
 
     def __delitem__(self, key):
         with self.connection(True) as con:
-            if con.execute("select key from `%s` where key=?" %
-                           self.table_name, (key,)).fetchone():
-                con.execute("delete from `%s` where key=?" %
-                            self.table_name, (key,))
-            else:
+            cur = con.execute("delete from `%s` where key=?" %
+                              self.table_name, (key,))
+            if not cur.rowcount:
                 raise KeyError
 
     def __iter__(self):
@@ -148,7 +140,7 @@ class DbDict(MutableMapping):
     def __len__(self):
         with self.connection() as con:
             return con.execute("select count(key) from `%s`" %
-                                self.table_name).fetchone()[0]
+                               self.table_name).fetchone()[0]
 
     def clear(self):
         with self.connection(True) as con:
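
The dbdict.py hunks above fold the old select-then-update/insert and
select-then-delete patterns into single statements. A minimal standalone
sqlite3 sketch of the same insert-or-replace and rowcount idioms, assuming a
hypothetical `responses` table rather than DbDict's configurable table name:

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.execute("create table `responses` (key primary key, value)")

    def put(key, value):
        with con:  # implicit transaction, committed on success
            con.execute("insert or replace into `responses` (key, value) values (?, ?)",
                        (key, value))

    def delete(key):
        with con:
            cur = con.execute("delete from `responses` where key=?", (key,))
            if not cur.rowcount:  # mirrors the rowcount check in __delitem__ above
                raise KeyError(key)

    put("a", "first")
    put("a", "second")  # replaces the existing row in a single statement
    print(con.execute("select value from `responses` where key=?", ("a",)).fetchone())
    delete("a")
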
diff --git a/requests_cache/core.py b/requests_cache/core.py
index 89149e0..36e4d59 100644
--- a/requests_cache/core.py
+++ b/requests_cache/core.py
@@ -15,7 +15,7 @@ from requests import Session as OriginalSession
 from requests.hooks import dispatch_hook
 
 from requests_cache import backends
-from requests_cache.compat import str, basestring
+from requests_cache.compat import basestring
 
 try:
     ver = tuple(map(int, requests.__version__.split(".")))
@@ -34,6 +34,7 @@ class CachedSession(OriginalSession):
 
     def __init__(self, cache_name='cache', backend=None, expire_after=None,
                  allowable_codes=(200,), allowable_methods=('GET',),
+                 old_data_on_error=False,
                  **backend_options):
         """
         :param cache_name: for ``sqlite`` backend: cache file will start with this prefix,
@@ -60,6 +61,7 @@ class CachedSession(OriginalSession):
         :param include_get_headers: If `True` headers will be part of cache key.
                                     E.g. after get('some_link', headers={'Accept':'application/json'})
                                     get('some_link', headers={'Accept':'application/xml'}) is not from cache.
+        :param old_data_on_error: If `True` it will return expired cached response if update fails
         """
         if backend is None or isinstance(backend, basestring):
             self.cache = backends.create_backend(backend, cache_name,
@@ -74,6 +76,7 @@ class CachedSession(OriginalSession):
 
         self._cache_allowable_codes = allowable_codes
         self._cache_allowable_methods = allowable_methods
+        self._return_old_data_on_error = old_data_on_error
         self._is_cache_disabled = False
         super(CachedSession, self).__init__()
 
@@ -98,10 +101,20 @@ class CachedSession(OriginalSession):
             return send_request_and_cache_response()
 
         if self._cache_expire_after is not None:
-            difference = datetime.utcnow() - timestamp
-            if difference > self._cache_expire_after:
-                self.cache.delete(cache_key)
-                return send_request_and_cache_response()
+            is_expired = datetime.utcnow() - timestamp > self._cache_expire_after
+            if is_expired:
+                if not self._return_old_data_on_error:
+                    self.cache.delete(cache_key)
+                    return send_request_and_cache_response()
+                try:
+                    new_response = send_request_and_cache_response()
+                except Exception:
+                    return response
+                else:
+                    if new_response.status_code not in self._cache_allowable_codes:
+                        return response
+                    return new_response
+
         # dispatch hook here, because we've removed it before pickling
         response.from_cache = True
         response = dispatch_hook('response', request.hooks, response, **kwargs)
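
The core.py hunk above adds the old_data_on_error option: when a cached entry
has expired and the refresh attempt raises, or returns a status code outside
allowable_codes, the stale cached response is returned instead. A minimal
usage sketch with illustrative values (the new test below passes expire_after
as an integer number of seconds):

    import requests_cache

    session = requests_cache.CachedSession(
        'demo_cache',            # sqlite file prefix, as in the README example
        backend='sqlite',
        expire_after=300,        # seconds before a cached entry is considered stale
        old_data_on_error=True,  # new in 0.4.10: serve stale data if the refresh fails
    )

    response = session.get('http://httpbin.org/get')
    # Cached responses are flagged via response.from_cache (set in the hunk above);
    # a freshly fetched response may not carry the attribute, hence getattr().
    print(getattr(response, 'from_cache', False))
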
diff --git a/setup.py b/setup.py
index 8a57e71..78801dd 100644
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,7 @@ setup(
     packages=['requests_cache',
               'requests_cache.backends',
               'requests_cache.backends.storage'],
-    version='0.4.9',
+    version='0.4.10',
     description='Persistent cache for requests library',
     author='Roman Haritonov',
     author_email='reclosedev at gmail.com',
diff --git a/tests/test_cache.py b/tests/test_cache.py
index 2ed2b7f..56a2858 100644
--- a/tests/test_cache.py
+++ b/tests/test_cache.py
@@ -12,7 +12,9 @@ except ImportError:
 import time
 import json
 from collections import defaultdict
+from datetime import datetime, timedelta
 
+import mock
 import requests
 from requests import Request
 
@@ -68,26 +70,6 @@ class CacheTestCase(unittest.TestCase):
         with self.assertRaises(ValueError):
             CachedSession(CACHE_NAME, backend='nonexistent')
 
-#    def test_async_compatibility(self):
-#        try:
-#            import grequests
-#        except Exception:
-#            self.skipTest('gevent is not installed')
-#        n = 3
-#        def long_running():
-#            t = time.time()
-#            rs = [grequests.get(httpbin('delay/%s' % i)) for i in range(n + 1)]
-#            grequests.map(rs)
-#            return time.time() - t
-#        # cache it
-#        delta = long_running()
-#        self.assertGreaterEqual(delta, n)
-#        # fast from cache
-#        delta = 0
-#        for i in range(n):
-#            delta += long_running()
-#        self.assertLessEqual(delta, 1)
-
     def test_hooks(self):
         state = defaultdict(int)
         for hook in ('response',):  # TODO it's only one hook here
@@ -324,6 +306,43 @@ class CacheTestCase(unittest.TestCase):
         self.assertIn(CACHE_NAME, s)
         self.assertIn("10", s)
 
+    @mock.patch("requests_cache.core.datetime")
+    def test_return_old_data_on_error(self, datetime_mock):
+        datetime_mock.utcnow.return_value = datetime.utcnow()
+        expire_after = 100
+        url = httpbin("get")
+        s = CachedSession(CACHE_NAME, CACHE_BACKEND, old_data_on_error=True, expire_after=expire_after)
+        header = "X-Tst"
+
+        def get(n):
+            return s.get(url, headers={header: n}).json()["headers"][header]
+
+        get("expired")
+        self.assertEquals(get("2"), "expired")
+        datetime_mock.utcnow.return_value = datetime.utcnow() + timedelta(seconds=expire_after * 2)
+
+        with mock.patch.object(s.cache, "save_response", side_effect=Exception):
+            self.assertEquals(get("3"), "expired")
+
+        with mock.patch("requests_cache.core.OriginalSession.send") as send_mock:
+            resp_mock = requests.Response()
+            request = requests.Request("GET", url)
+            resp_mock.request = request.prepare()
+            resp_mock.status_code = 400
+            resp_mock._content = '{"other": "content"}'
+            send_mock.return_value = resp_mock
+            self.assertEquals(get("3"), "expired")
+
+            resp_mock.status_code = 200
+            self.assertIs(s.get(url).content, resp_mock.content)
+
+        # default behaviour
+        datetime_mock.return_value = datetime.utcnow() + timedelta(seconds=expire_after * 2)
+        s = CachedSession(CACHE_NAME, CACHE_BACKEND, old_data_on_error=False, expire_after=100)
+        with mock.patch.object(s.cache, "save_response", side_effect=Exception):
+            with self.assertRaises(Exception):
+                s.get(url)
+
 
 if __name__ == '__main__':
     unittest.main()

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/python-requests-cache.git


