[Python-modules-commits] r30963 - in packages/urlgrabber/trunk/debian (7 files)

piotr at users.alioth.debian.org
Thu Oct 9 09:13:37 UTC 2014


    Date: Thursday, October 9, 2014 @ 09:13:36
  Author: piotr
Revision: 30963

  [ Tristan Hill ]
* New upstream release
* Drop all patches now applied upstream
* Add libexec patch that moves urlgrabber-ext-down script to /usr/share
* debian/control
  + Switch to dh_python2
  + Bump standards version to 3.9.5 (no changes needed)
  + Update package description per lintian warning

Added:
  packages/urlgrabber/trunk/debian/patches/libexec.diff
Modified:
  packages/urlgrabber/trunk/debian/changelog
  packages/urlgrabber/trunk/debian/control
  packages/urlgrabber/trunk/debian/patches/series
  packages/urlgrabber/trunk/debian/rules
Deleted:
  packages/urlgrabber/trunk/debian/patches/grabber_fix.diff
  packages/urlgrabber/trunk/debian/patches/progress_fix.diff

Modified: packages/urlgrabber/trunk/debian/changelog
===================================================================
--- packages/urlgrabber/trunk/debian/changelog	2014-10-09 04:59:27 UTC (rev 30962)
+++ packages/urlgrabber/trunk/debian/changelog	2014-10-09 09:13:36 UTC (rev 30963)
@@ -1,9 +1,19 @@
-urlgrabber (3.9.1-5) UNRELEASED; urgency=low
+urlgrabber (3.10.1-1) UNRELEASED; urgency=low
 
+  [ Jakub Wilk ]
   * Use canonical URIs for Vcs-* fields.
 
- -- Jakub Wilk <jwilk at debian.org>  Sun, 05 May 2013 16:04:15 +0200
+  [ Tristan Hill ]
+  * New upstream release
+  * Drop all patches now applied upstream
+  * Add libexec patch that moves urlgrabber-ext-down script to /usr/share
+  * debian/control
+    + Switch to dh_python2
+    + Bump standards version to 3.9.5 (no changes needed)
+    + Update package description per lintian warning
 
+ -- Tristan Hill <tristan at saticed.me.uk>  Wed, 08 Oct 2014 16:39:48 +0100
+
 urlgrabber (3.9.1-4) unstable; urgency=low
 
   * Add two patches created from upstream development version. Closes: #587575. 

Modified: packages/urlgrabber/trunk/debian/control
===================================================================
--- packages/urlgrabber/trunk/debian/control	2014-10-09 04:59:27 UTC (rev 30962)
+++ packages/urlgrabber/trunk/debian/control	2014-10-09 09:13:36 UTC (rev 30963)
@@ -3,8 +3,8 @@
 Priority: optional
 Maintainer: Kevin Coyner <kcoyner at debian.org>
 Uploaders: Debian Python Modules Team <python-modules-team at lists.alioth.debian.org>
-Build-Depends: debhelper (>= 7.4~), python-all, python-support (>= 0.6), python-pycurl, quilt (>= 0.46-7~)
-Standards-Version: 3.8.4
+Build-Depends: debhelper (>= 7.4~), dh-python, python-all (>= 2.6.6-3~), python-pycurl, quilt (>= 0.46-7~)
+Standards-Version: 3.9.5
 Homepage: http://urlgrabber.baseurl.org/
 Vcs-Svn: svn://anonscm.debian.org/python-modules/packages/urlgrabber/trunk/
 Vcs-Browser: http://anonscm.debian.org/viewvc/python-modules/packages/urlgrabber/trunk/
@@ -12,9 +12,9 @@
 
 Package: python-urlgrabber
 Architecture: all
-Depends: ${shlibs:Depends}, ${misc:Depends}, ${python:Depends}, python-pycurl
+Depends: ${misc:Depends}, ${python:Depends}, python-pycurl
 Provides: ${python:Provides}
-Description: A high-level cross-protocol url-grabber
+Description: high-level URL transfer library
  urlgrabber dramatically simplifies the fetching of files. It is designed to
  be used in programs that need common (but not necessarily simple)
  url-fetching features. This package provides both a binary and a module, both

Deleted: packages/urlgrabber/trunk/debian/patches/grabber_fix.diff
===================================================================
--- packages/urlgrabber/trunk/debian/patches/grabber_fix.diff	2014-10-09 04:59:27 UTC (rev 30962)
+++ packages/urlgrabber/trunk/debian/patches/grabber_fix.diff	2014-10-09 09:13:36 UTC (rev 30963)
@@ -1,236 +0,0 @@
---- urlgrabber-3.9.1/urlgrabber/grabber.py.orig	2010-07-02 21:24:12.000000000 -0400
-+++ urlgrabber-3.9.1/urlgrabber/grabber.py	2010-07-02 20:30:25.000000000 -0400
-@@ -68,14 +68,14 @@
-     (which can be set on default_grabber.throttle) is used. See
-     BANDWIDTH THROTTLING for more information.
- 
--  timeout = None
-+  timeout = 300
- 
--    a positive float expressing the number of seconds to wait for socket
--    operations. If the value is None or 0.0, socket operations will block
--    forever. Setting this option causes urlgrabber to call the settimeout
--    method on the Socket object used for the request. See the Python
--    documentation on settimeout for more information.
--    http://www.python.org/doc/current/lib/socket-objects.html
-+    a positive integer expressing the number of seconds to wait before
-+    timing out attempts to connect to a server. If the value is None
-+    or 0, connection attempts will not time out. The timeout is passed
-+    to the underlying pycurl object as its CONNECTTIMEOUT option, see
-+    the curl documentation on CURLOPT_CONNECTTIMEOUT for more information.
-+    http://curl.haxx.se/libcurl/c/curl_easy_setopt.html#CURLOPTCONNECTTIMEOUT
- 
-   bandwidth = 0
- 
-@@ -439,6 +439,12 @@
- except:
-     __version__ = '???'
- 
-+try:
-+    # this part isn't going to do much - need to talk to gettext
-+    from i18n import _
-+except ImportError, msg:
-+    def _(st): return st
-+    
- ########################################################################
- # functions for debugging output.  These functions are here because they
- # are also part of the module initialization.
-@@ -808,7 +814,7 @@
-         self.prefix = None
-         self.opener = None
-         self.cache_openers = True
--        self.timeout = None
-+        self.timeout = 300
-         self.text = None
-         self.http_headers = None
-         self.ftp_headers = None
-@@ -1052,9 +1058,15 @@
-         self._reget_length = 0
-         self._prog_running = False
-         self._error = (None, None)
--        self.size = None
-+        self.size = 0
-+        self._hdr_ended = False
-         self._do_open()
-         
-+
-+    def geturl(self):
-+        """ Provide the geturl() method, used to be got from
-+            urllib.addinfourl, via. urllib.URLopener.* """
-+        return self.url
-         
-     def __getattr__(self, name):
-         """This effectively allows us to wrap at the instance level.
-@@ -1085,9 +1097,14 @@
-             return -1
-             
-     def _hdr_retrieve(self, buf):
-+        if self._hdr_ended:
-+            self._hdr_dump = ''
-+            self.size = 0
-+            self._hdr_ended = False
-+
-         if self._over_max_size(cur=len(self._hdr_dump), 
-                                max_size=self.opts.max_header_size):
--            return -1            
-+            return -1
-         try:
-             self._hdr_dump += buf
-             # we have to get the size before we do the progress obj start
-@@ -1104,7 +1121,17 @@
-                     s = parse150(buf)
-                 if s:
-                     self.size = int(s)
--            
-+                    
-+            if buf.lower().find('location') != -1:
-+                location = ':'.join(buf.split(':')[1:])
-+                location = location.strip()
-+                self.scheme = urlparse.urlsplit(location)[0]
-+                self.url = location
-+                
-+            if len(self._hdr_dump) != 0 and buf == '\r\n':
-+                self._hdr_ended = True
-+                if DEBUG: DEBUG.info('header ended:')
-+                
-             return len(buf)
-         except KeyboardInterrupt:
-             return pycurl.READFUNC_ABORT
-@@ -1113,8 +1140,10 @@
-         if self._parsed_hdr:
-             return self._parsed_hdr
-         statusend = self._hdr_dump.find('\n')
-+        statusend += 1 # ridiculous as it may seem.
-         hdrfp = StringIO()
-         hdrfp.write(self._hdr_dump[statusend:])
-+        hdrfp.seek(0)
-         self._parsed_hdr =  mimetools.Message(hdrfp)
-         return self._parsed_hdr
-     
-@@ -1136,6 +1165,7 @@
-         self.curl_obj.setopt(pycurl.PROGRESSFUNCTION, self._progress_update)
-         self.curl_obj.setopt(pycurl.FAILONERROR, True)
-         self.curl_obj.setopt(pycurl.OPT_FILETIME, True)
-+        self.curl_obj.setopt(pycurl.FOLLOWLOCATION, True)
-         
-         if DEBUG:
-             self.curl_obj.setopt(pycurl.VERBOSE, True)
-@@ -1148,9 +1178,11 @@
-         
-         # timeouts
-         timeout = 300
--        if opts.timeout:
--            timeout = int(opts.timeout)
--            self.curl_obj.setopt(pycurl.CONNECTTIMEOUT, timeout)
-+        if hasattr(opts, 'timeout'):
-+            timeout = int(opts.timeout or 0)
-+        self.curl_obj.setopt(pycurl.CONNECTTIMEOUT, timeout)
-+        self.curl_obj.setopt(pycurl.LOW_SPEED_LIMIT, 1)
-+        self.curl_obj.setopt(pycurl.LOW_SPEED_TIME, timeout)
- 
-         # ssl options
-         if self.scheme == 'https':
-@@ -1276,7 +1308,7 @@
-                 raise err
- 
-             elif errcode == 60:
--                msg = _("client cert cannot be verified or client cert incorrect")
-+                msg = _("Peer cert cannot be verified or peer cert invalid")
-                 err = URLGrabError(14, msg)
-                 err.url = self.url
-                 raise err
-@@ -1291,7 +1323,12 @@
-                 raise err
-                     
-             elif str(e.args[1]) == '' and self.http_code != 0: # fake it until you make it
--                msg = 'HTTP Error %s : %s ' % (self.http_code, self.url)
-+                if self.scheme in ['http', 'https']:
-+                    msg = 'HTTP Error %s : %s ' % (self.http_code, self.url)
-+                elif self.scheme in ['ftp']:
-+                    msg = 'FTP Error %s : %s ' % (self.http_code, self.url)
-+                else:
-+                    msg = "Unknown Error: URL=%s , scheme=%s" % (self.url, self.scheme)
-             else:
-                 msg = 'PYCURL ERROR %s - "%s"' % (errcode, str(e.args[1]))
-                 code = errcode
-@@ -1299,6 +1336,12 @@
-             err.code = code
-             err.exception = e
-             raise err
-+        else:
-+            if self._error[1]:
-+                msg = self._error[1]
-+                err = URLGRabError(14, msg)
-+                err.url = self.url
-+                raise err
- 
-     def _do_open(self):
-         self.curl_obj = _curl_cache
-@@ -1446,9 +1489,23 @@
-             # set the time
-             mod_time = self.curl_obj.getinfo(pycurl.INFO_FILETIME)
-             if mod_time != -1:
--                os.utime(self.filename, (mod_time, mod_time))
-+                try:
-+                    os.utime(self.filename, (mod_time, mod_time))
-+                except OSError, e:
-+                    err = URLGrabError(16, _(\
-+                      'error setting timestamp on file %s from %s, OSError: %s') 
-+                              % (self.filenameself.url, e))
-+                    err.url = self.url
-+                    raise err
-             # re open it
--            self.fo = open(self.filename, 'r')
-+            try:
-+                self.fo = open(self.filename, 'r')
-+            except IOError, e:
-+                err = URLGrabError(16, _(\
-+                  'error opening file from %s, IOError: %s') % (self.url, e))
-+                err.url = self.url
-+                raise err
-+                
-         else:
-             #self.fo = open(self._temp_name, 'r')
-             self.fo.seek(0)
-@@ -1532,11 +1589,14 @@
-     def _over_max_size(self, cur, max_size=None):
- 
-         if not max_size:
--            max_size = self.size
--        if self.opts.size: # if we set an opts size use that, no matter what
--            max_size = self.opts.size
-+            if not self.opts.size:
-+                max_size = self.size
-+            else:
-+                max_size = self.opts.size
-+
-         if not max_size: return False # if we have None for all of the Max then this is dumb
--        if cur > max_size + max_size*.10:
-+
-+        if cur > int(float(max_size) * 1.10):
- 
-             msg = _("Downloaded more than max size for %s: %s > %s") \
-                         % (self.url, cur, max_size)
-@@ -1582,9 +1642,21 @@
-             self.opts.progress_obj.end(self._amount_read)
-         self.fo.close()
-         
--
-+    def geturl(self):
-+        """ Provide the geturl() method, used to be got from
-+            urllib.addinfourl, via. urllib.URLopener.* """
-+        return self.url
-+        
- _curl_cache = pycurl.Curl() # make one and reuse it over and over and over
- 
-+def reset_curl_obj():
-+    """To make sure curl has reread the network/dns info we force a reload"""
-+    global _curl_cache
-+    _curl_cache.close()
-+    _curl_cache = pycurl.Curl()
-+
-+
-+    
- 
- #####################################################################
- # DEPRECATED FUNCTIONS

Added: packages/urlgrabber/trunk/debian/patches/libexec.diff
===================================================================
--- packages/urlgrabber/trunk/debian/patches/libexec.diff	                        (rev 0)
+++ packages/urlgrabber/trunk/debian/patches/libexec.diff	2014-10-09 09:13:36 UTC (rev 30963)
@@ -0,0 +1,26 @@
+Index: urlgrabber-3.10.1.obsolete.0.712795512591587/setup.py
+===================================================================
+--- urlgrabber-3.10.1.obsolete.0.712795512591587.orig/setup.py
++++ urlgrabber-3.10.1.obsolete.0.712795512591587/setup.py
+@@ -17,7 +17,7 @@ package_dir = {'urlgrabber':'urlgrabber'
+ scripts = ['scripts/urlgrabber']
+ data_files = [
+     ('share/doc/' + name + '-' + version, ['README','LICENSE', 'TODO', 'ChangeLog']),
+-    ('libexec', ['scripts/urlgrabber-ext-down']),
++    ('share/python-urlgrabber', ['scripts/urlgrabber-ext-down']),
+ ]
+ options = { 'clean' : { 'all' : 1 } }
+ classifiers = [
+Index: urlgrabber-3.10.1.obsolete.0.712795512591587/urlgrabber/grabber.py
+===================================================================
+--- urlgrabber-3.10.1.obsolete.0.712795512591587.orig/urlgrabber/grabber.py
++++ urlgrabber-3.10.1.obsolete.0.712795512591587/urlgrabber/grabber.py
+@@ -2042,7 +2042,7 @@ import subprocess
+ class _ExternalDownloader:
+     def __init__(self):
+         self.popen = subprocess.Popen(
+-            '/usr/libexec/urlgrabber-ext-down',
++            '/usr/share/python-urlgrabber/urlgrabber-ext-down',
+             stdin = subprocess.PIPE,
+             stdout = subprocess.PIPE,
+         )

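The relocation matters because grabber.py invokes the helper by absolute
path: as the second hunk shows, _ExternalDownloader spawns
urlgrabber-ext-down and talks to it over stdin/stdout pipes. Debian's
filesystem layout at the time did not include /usr/libexec, hence the move
to /usr/share/python-urlgrabber. A minimal sketch of that spawn pattern,
with a placeholder payload rather than urlgrabber's actual wire format:

import subprocess

# Path set by libexec.diff; upstream installs to /usr/libexec instead.
HELPER = '/usr/share/python-urlgrabber/urlgrabber-ext-down'

downloader = subprocess.Popen(
    HELPER,
    stdin=subprocess.PIPE,
    stdout=subprocess.PIPE,
)
downloader.stdin.write(b'...download request...')  # placeholder payload
downloader.stdin.flush()
reply = downloader.stdout.readline()               # placeholder reply read
downloader.stdin.close()
downloader.wait()
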
Deleted: packages/urlgrabber/trunk/debian/patches/progress_fix.diff
===================================================================
--- packages/urlgrabber/trunk/debian/patches/progress_fix.diff	2014-10-09 04:59:27 UTC (rev 30962)
+++ packages/urlgrabber/trunk/debian/patches/progress_fix.diff	2014-10-09 09:13:36 UTC (rev 30963)
@@ -1,11 +0,0 @@
---- urlgrabber-3.9.1/urlgrabber/progress.py.orig	2010-07-02 21:25:51.000000000 -0400
-+++ urlgrabber-3.9.1/urlgrabber/progress.py	2010-07-02 20:30:25.000000000 -0400
-@@ -658,6 +658,8 @@
-     if seconds is None or seconds < 0:
-         if use_hours: return '--:--:--'
-         else:         return '--:--'
-+    elif seconds == float('inf'):
-+        return 'Infinite'
-     else:
-         seconds = int(seconds)
-         minutes = seconds / 60

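This fix is likewise part of 3.10.1. Without the guard, an infinite ETA
(a transfer rate of zero makes remaining/rate infinite) reaches
int(seconds), which raises OverflowError on float('inf'). A standalone
reconstruction of the guarded helper follows; the exact output formatting
is assumed from the surrounding progress.py context, not copied verbatim:

def format_time(seconds, use_hours=False):
    """Render an ETA as MM:SS or HH:MM:SS, guarding unknown and infinite values."""
    if seconds is None or seconds < 0:
        return '--:--:--' if use_hours else '--:--'
    elif seconds == float('inf'):
        return 'Infinite'
    seconds = int(seconds)
    minutes = seconds // 60
    seconds = seconds % 60
    if use_hours:
        hours = minutes // 60
        minutes = minutes % 60
        return '%02i:%02i:%02i' % (hours, minutes, seconds)
    return '%02i:%02i' % (minutes, seconds)
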
Modified: packages/urlgrabber/trunk/debian/patches/series
===================================================================
--- packages/urlgrabber/trunk/debian/patches/series	2014-10-09 04:59:27 UTC (rev 30962)
+++ packages/urlgrabber/trunk/debian/patches/series	2014-10-09 09:13:36 UTC (rev 30963)
@@ -1,2 +1 @@
-grabber_fix.diff
-progress_fix.diff
+libexec.diff

Modified: packages/urlgrabber/trunk/debian/rules
===================================================================
--- packages/urlgrabber/trunk/debian/rules	2014-10-09 04:59:27 UTC (rev 30962)
+++ packages/urlgrabber/trunk/debian/rules	2014-10-09 09:13:36 UTC (rev 30963)
@@ -64,7 +64,7 @@
 	dh_strip
 	dh_compress
 	dh_fixperms
-	dh_pysupport
+	dh_python2
 	dh_installdeb
 	dh_shlibdeps
 	dh_gencontrol

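The rules change completes the move off the deprecated python-support
helper: dh_python2, from the new dh-python build-dependency, now computes
${python:Depends} and handles byte-compilation. In a short-form debhelper
rules file the equivalent would be the sketch below (illustrative only;
this package keeps its long-form rules):

#!/usr/bin/make -f
# The python2 addon runs dh_python2 at the right point in the dh
# sequence, replacing the old dh_pysupport call.
%:
	dh $@ --with python2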