Bug#935455: python-xarray: autopkgtest regression: test_dask fails

Paul Gevers elbrus at debian.org
Thu Aug 22 19:41:48 BST 2019


Source: python-xarray
Version: 0.12.3-1
X-Debbugs-CC: debian-ci at lists.debian.org
User: debian-ci at lists.debian.org
Usertags: regression

Dear maintainers,

With a recent upload of python-xarray, the autopkgtest of python-xarray
fails in testing when it is run with the binary packages of
python-xarray from unstable. It passes when run with only packages from
testing. In tabular form:
                       pass            fail
python-xarray          from testing    0.12.3-1
all others             from testing    from testing

I copied some of the output at the bottom of this report.

Currently this regression is blocking the migration to testing [1]. Can
you please investigate the situation and fix it? If needed, please
change the bug's severity.

More information about this bug and the reason for filing it can be found on
https://wiki.debian.org/ContinuousIntegration/RegressionEmailInformation

Paul

[1] https://qa.debian.org/excuses.php?package=python-xarray

https://ci.debian.net/data/autopkgtest/testing/amd64/p/python-xarray/2790734/log.gz
=================================== FAILURES ===================================
__________________________________ test_dask ___________________________________

    @requires_dask
    def test_dask():
        import dask.array as da
>       arr = da.from_array(['a', 'b', 'c'])
E       TypeError: from_array() missing 1 required positional argument: 'chunks'

/usr/lib/python3/dist-packages/xarray/tests/test_accessor_str.py:58: TypeError
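
For reference, the TypeError indicates that the dask version used in the
test run still requires the positional "chunks" argument to
dask.array.from_array, while the new upstream test calls it without one.
Below is a minimal sketch of a call that should work with both the old
and the new dask API; wrapping the list in a NumPy array and the
chunks=3 value are illustrative assumptions, not taken from the upstream
test:

    # Sketch only -- not part of the xarray test suite.
    import dask.array as da
    import numpy as np

    # The new upstream test does: da.from_array(['a', 'b', 'c'])
    # which fails on older dask because 'chunks' is required there.
    # Passing an explicit chunk size keeps both versions happy:
    arr = da.from_array(np.array(['a', 'b', 'c']), chunks=3)
    print(arr.compute())   # ['a' 'b' 'c']
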
__________________ TestLoadDataset.test_download_from_github ___________________

self = <urllib.request.HTTPSHandler object at 0x7fbe35ee1f10>
http_class = <class 'http.client.HTTPSConnection'>
req = <urllib.request.Request object at 0x7fbe367d9b10>
http_conn_args = {'check_hostname': None, 'context': None}, host = 'github.com'
h = <http.client.HTTPSConnection object at 0x7fbe36329690>

    def do_open(self, http_class, req, **http_conn_args):
        """Return an HTTPResponse object for the request, using http_class.

        http_class must implement the HTTPConnection API from http.client.
        """
        host = req.host
        if not host:
            raise URLError('no host given')

        # will parse host:port
        h = http_class(host, timeout=req.timeout, **http_conn_args)
        h.set_debuglevel(self._debuglevel)

        headers = dict(req.unredirected_hdrs)
        headers.update({k: v for k, v in req.headers.items()
                        if k not in headers})

        # TODO(jhylton): Should this be redesigned to handle
        # persistent connections?

        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = {name.title(): val for name, val in headers.items()}

        if req._tunnel_host:
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                # Proxy-Authorization should not be sent to origin
                # server.
                del headers[proxy_auth_hdr]
            h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            try:
                h.request(req.get_method(), req.selector, req.data, headers,
>                         encode_chunked=req.has_header('Transfer-encoding'))

/usr/lib/python3.7/urllib/request.py:1317:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <http.client.HTTPSConnection object at 0x7fbe36329690>, method = 'GET'
url = '/pydata/xarray-data/raw/master/tiny.nc', body = None
headers = {'Connection': 'close', 'Host': 'github.com', 'User-Agent': 'Python-urllib/3.7'}

    def request(self, method, url, body=None, headers={}, *,
                encode_chunked=False):
        """Send a complete request to the server."""
>       self._send_request(method, url, body, headers, encode_chunked)

/usr/lib/python3.7/http/client.py:1244:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <http.client.HTTPSConnection object at 0x7fbe36329690>, method = 'GET'
url = '/pydata/xarray-data/raw/master/tiny.nc', body = None
headers = {'Connection': 'close', 'Host': 'github.com', 'User-Agent': 'Python-urllib/3.7'}
encode_chunked = False

    def _send_request(self, method, url, body, headers, encode_chunked):
        # Honor explicitly requested Host: and Accept-Encoding: headers.
        header_names = frozenset(k.lower() for k in headers)
        skips = {}
        if 'host' in header_names:
            skips['skip_host'] = 1
        if 'accept-encoding' in header_names:
            skips['skip_accept_encoding'] = 1

        self.putrequest(method, url, **skips)

        # chunked encoding will happen if HTTP/1.1 is used and either
        # the caller passes encode_chunked=True or the following
        # conditions hold:
        # 1. content-length has not been explicitly set
        # 2. the body is a file or iterable, but not a str or bytes-like
        # 3. Transfer-Encoding has NOT been explicitly set by the caller

        if 'content-length' not in header_names:
            # only chunk body if not explicitly set for backwards
            # compatibility, assuming the client code is already handling the
            # chunking
            if 'transfer-encoding' not in header_names:
                # if content-length cannot be automatically determined, fall
                # back to chunked encoding
                encode_chunked = False
                content_length = self._get_content_length(body, method)
                if content_length is None:
                    if body is not None:
                        if self.debuglevel > 0:
                            print('Unable to determine size of %r' % body)
                        encode_chunked = True
                        self.putheader('Transfer-Encoding', 'chunked')
                else:
                    self.putheader('Content-Length', str(content_length))
        else:
            encode_chunked = False

        for hdr, value in headers.items():
            self.putheader(hdr, value)
        if isinstance(body, str):
            # RFC 2616 Section 3.7.1 says that text default has a
            # default charset of iso-8859-1.
            body = _encode(body, 'body')
>       self.endheaders(body, encode_chunked=encode_chunked)

/usr/lib/python3.7/http/client.py:1290:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <http.client.HTTPSConnection object at 0x7fbe36329690>
message_body = None

    def endheaders(self, message_body=None, *, encode_chunked=False):
        """Indicate that the last header line has been sent to the server.

        This method sends the request to the server.  The optional message_body
        argument can be used to pass a message body associated with the
        request.
        """
        if self.__state == _CS_REQ_STARTED:
            self.__state = _CS_REQ_SENT
        else:
            raise CannotSendHeader()
>       self._send_output(message_body, encode_chunked=encode_chunked)

/usr/lib/python3.7/http/client.py:1239:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <http.client.HTTPSConnection object at 0x7fbe36329690>
message_body = None, encode_chunked = False

    def _send_output(self, message_body=None, encode_chunked=False):
        """Send the currently buffered request and clear the buffer.

        Appends an extra \\r\\n to the buffer.
        A message_body may be specified, to be appended to the request.
        """
        self._buffer.extend((b"", b""))
        msg = b"\r\n".join(self._buffer)
        del self._buffer[:]
>       self.send(msg)

/usr/lib/python3.7/http/client.py:1026:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <http.client.HTTPSConnection object at 0x7fbe36329690>
data = b'GET /pydata/xarray-data/raw/master/tiny.nc HTTP/1.1\r\nAccept-Encoding: identity\r\nHost: github.com\r\nUser-Agent: Python-urllib/3.7\r\nConnection: close\r\n\r\n'

    def send(self, data):
        """Send `data' to the server.
        ``data`` can be a string object, a bytes object, an array object, a
        file-like object that supports a .read() method, or an iterable object.
        """

        if self.sock is None:
            if self.auto_open:
>               self.connect()

/usr/lib/python3.7/http/client.py:966:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <http.client.HTTPSConnection object at 0x7fbe36329690>

    def connect(self):
        "Connect to a host on a given (SSL) port."

        super().connect()

        if self._tunnel_host:
            server_hostname = self._tunnel_host
        else:
            server_hostname = self.host

        self.sock = self._context.wrap_socket(self.sock,
>                                              server_hostname=server_hostname)

/usr/lib/python3.7/http/client.py:1414:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <ssl.SSLContext object at 0x7fbe372a8d00>
sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = 'github.com', session = None

    def wrap_socket(self, sock, server_side=False,
                    do_handshake_on_connect=True,
                    suppress_ragged_eofs=True,
                    server_hostname=None, session=None):
        # SSLSocket class handles server_hostname encoding before it calls
        # ctx._wrap_socket()
        return self.sslsocket_class._create(
            sock=sock,
            server_side=server_side,
            do_handshake_on_connect=do_handshake_on_connect,
            suppress_ragged_eofs=suppress_ragged_eofs,
            server_hostname=server_hostname,
            context=self,
>           session=session
        )

/usr/lib/python3.7/ssl.py:423:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

cls = <class 'ssl.SSLSocket'>
sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = 'github.com'
context = <ssl.SSLContext object at 0x7fbe372a8d00>, session = None

    @classmethod
    def _create(cls, sock, server_side=False, do_handshake_on_connect=True,
                suppress_ragged_eofs=True, server_hostname=None,
                context=None, session=None):
        if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM:
            raise NotImplementedError("only stream sockets are supported")
        if server_side:
            if server_hostname:
                raise ValueError("server_hostname can only be specified "
                                 "in client mode")
            if session is not None:
                raise ValueError("session can only be specified in "
                                 "client mode")
        if context.check_hostname and not server_hostname:
            raise ValueError("check_hostname requires server_hostname")

        kwargs = dict(
            family=sock.family, type=sock.type, proto=sock.proto,
            fileno=sock.fileno()
        )
        self = cls.__new__(cls, **kwargs)
        super(SSLSocket, self).__init__(**kwargs)
        self.settimeout(sock.gettimeout())
        sock.detach()

        self._context = context
        self._session = session
        self._closed = False
        self._sslobj = None
        self.server_side = server_side
        self.server_hostname = context._encode_hostname(server_hostname)
        self.do_handshake_on_connect = do_handshake_on_connect
        self.suppress_ragged_eofs = suppress_ragged_eofs

        # See if we are connected
        try:
            self.getpeername()
        except OSError as e:
            if e.errno != errno.ENOTCONN:
                raise
            connected = False
        else:
            connected = True

        self._connected = connected
        if connected:
            # create the SSL object
            try:
                self._sslobj = self._context._wrap_socket(
                    self, server_side, self.server_hostname,
                    owner=self, session=self._session,
                )
                if do_handshake_on_connect:
                    timeout = self.gettimeout()
                    if timeout == 0.0:
                        # non-blocking
                        raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
>                   self.do_handshake()

/usr/lib/python3.7/ssl.py:870:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <ssl.SSLSocket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
block = False

    @_sslcopydoc
    def do_handshake(self, block=False):
        self._check_connected()
        timeout = self.gettimeout()
        try:
            if timeout == 0.0 and block:
                self.settimeout(None)
>           self._sslobj.do_handshake()
E           ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1076)

/usr/lib/python3.7/ssl.py:1139: SSLCertVerificationError

During handling of the above exception, another exception occurred:

self = <xarray.tests.test_tutorial.TestLoadDataset object at 0x7fbe35eac290>

    def test_download_from_github(self):
>       ds = tutorial.open_dataset(self.testfile).load()

/usr/lib/python3/dist-packages/xarray/tests/test_tutorial.py:24:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _
/usr/lib/python3/dist-packages/xarray/tutorial.py:68: in open_dataset
    urlretrieve(url, localfile)
/usr/lib/python3.7/urllib/request.py:247: in urlretrieve
    with contextlib.closing(urlopen(url, data)) as fp:
/usr/lib/python3.7/urllib/request.py:222: in urlopen
    return opener.open(url, data, timeout)
/usr/lib/python3.7/urllib/request.py:525: in open
    response = self._open(req, data)
/usr/lib/python3.7/urllib/request.py:543: in _open
    '_open', req)
/usr/lib/python3.7/urllib/request.py:503: in _call_chain
    result = func(*args)
/usr/lib/python3.7/urllib/request.py:1360: in https_open
    context=self._context, check_hostname=self._check_hostname)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <urllib.request.HTTPSHandler object at 0x7fbe35ee1f10>
http_class = <class 'http.client.HTTPSConnection'>
req = <urllib.request.Request object at 0x7fbe367d9b10>
http_conn_args = {'check_hostname': None, 'context': None}, host = 'github.com'
h = <http.client.HTTPSConnection object at 0x7fbe36329690>

    def do_open(self, http_class, req, **http_conn_args):
        """Return an HTTPResponse object for the request, using http_class.

        http_class must implement the HTTPConnection API from http.client.
        """
        host = req.host
        if not host:
            raise URLError('no host given')

        # will parse host:port
        h = http_class(host, timeout=req.timeout, **http_conn_args)
        h.set_debuglevel(self._debuglevel)

        headers = dict(req.unredirected_hdrs)
        headers.update({k: v for k, v in req.headers.items()
                        if k not in headers})

        # TODO(jhylton): Should this be redesigned to handle
        # persistent connections?

        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = {name.title(): val for name, val in headers.items()}

        if req._tunnel_host:
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                # Proxy-Authorization should not be sent to origin
                # server.
                del headers[proxy_auth_hdr]
            h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            try:
                h.request(req.get_method(), req.selector, req.data, headers,
                          encode_chunked=req.has_header('Transfer-encoding'))
            except OSError as err: # timeout error
>               raise URLError(err)
E               urllib.error.URLError: <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1076)>

/usr/lib/python3.7/urllib/request.py:1319: URLError
_________ TestLoadDataset.test_download_from_github_load_without_cache _________

self = <urllib.request.HTTPSHandler object at 0x7fbe35ee1f10>
http_class = <class 'http.client.HTTPSConnection'>
req = <urllib.request.Request object at 0x7fbe353e96d0>
http_conn_args = {'check_hostname': None, 'context': None}, host = 'github.com'
h = <http.client.HTTPSConnection object at 0x7fbe353e9990>

    def do_open(self, http_class, req, **http_conn_args):
        """Return an HTTPResponse object for the request, using http_class.

        http_class must implement the HTTPConnection API from http.client.
        """
        host = req.host
        if not host:
            raise URLError('no host given')

        # will parse host:port
        h = http_class(host, timeout=req.timeout, **http_conn_args)
        h.set_debuglevel(self._debuglevel)

        headers = dict(req.unredirected_hdrs)
        headers.update({k: v for k, v in req.headers.items()
                        if k not in headers})

        # TODO(jhylton): Should this be redesigned to handle
        # persistent connections?

        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = {name.title(): val for name, val in headers.items()}

        if req._tunnel_host:
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                # Proxy-Authorization should not be sent to origin
                # server.
                del headers[proxy_auth_hdr]
            h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            try:
                h.request(req.get_method(), req.selector, req.data, headers,
>                         encode_chunked=req.has_header('Transfer-encoding'))

/usr/lib/python3.7/urllib/request.py:1317:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <http.client.HTTPSConnection object at 0x7fbe353e9990>, method = 'GET'
url = '/pydata/xarray-data/raw/master/tiny.nc', body = None
headers = {'Connection': 'close', 'Host': 'github.com', 'User-Agent': 'Python-urllib/3.7'}

    def request(self, method, url, body=None, headers={}, *,
                encode_chunked=False):
        """Send a complete request to the server."""
>       self._send_request(method, url, body, headers, encode_chunked)

/usr/lib/python3.7/http/client.py:1244:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <http.client.HTTPSConnection object at 0x7fbe353e9990>, method = 'GET'
url = '/pydata/xarray-data/raw/master/tiny.nc', body = None
headers = {'Connection': 'close', 'Host': 'github.com', 'User-Agent': 'Python-urllib/3.7'}
encode_chunked = False

    def _send_request(self, method, url, body, headers, encode_chunked):
        # Honor explicitly requested Host: and Accept-Encoding: headers.
        header_names = frozenset(k.lower() for k in headers)
        skips = {}
        if 'host' in header_names:
            skips['skip_host'] = 1
        if 'accept-encoding' in header_names:
            skips['skip_accept_encoding'] = 1

        self.putrequest(method, url, **skips)

        # chunked encoding will happen if HTTP/1.1 is used and either
        # the caller passes encode_chunked=True or the following
        # conditions hold:
        # 1. content-length has not been explicitly set
        # 2. the body is a file or iterable, but not a str or bytes-like
        # 3. Transfer-Encoding has NOT been explicitly set by the caller

        if 'content-length' not in header_names:
            # only chunk body if not explicitly set for backwards
            # compatibility, assuming the client code is already handling the
            # chunking
            if 'transfer-encoding' not in header_names:
                # if content-length cannot be automatically determined, fall
                # back to chunked encoding
                encode_chunked = False
                content_length = self._get_content_length(body, method)
                if content_length is None:
                    if body is not None:
                        if self.debuglevel > 0:
                            print('Unable to determine size of %r' % body)
                        encode_chunked = True
                        self.putheader('Transfer-Encoding', 'chunked')
                else:
                    self.putheader('Content-Length', str(content_length))
        else:
            encode_chunked = False

        for hdr, value in headers.items():
            self.putheader(hdr, value)
        if isinstance(body, str):
            # RFC 2616 Section 3.7.1 says that text default has a
            # default charset of iso-8859-1.
            body = _encode(body, 'body')
>       self.endheaders(body, encode_chunked=encode_chunked)

/usr/lib/python3.7/http/client.py:1290:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <http.client.HTTPSConnection object at 0x7fbe353e9990>
message_body = None

    def endheaders(self, message_body=None, *, encode_chunked=False):
        """Indicate that the last header line has been sent to the server.

        This method sends the request to the server.  The optional message_body
        argument can be used to pass a message body associated with the
        request.
        """
        if self.__state == _CS_REQ_STARTED:
            self.__state = _CS_REQ_SENT
        else:
            raise CannotSendHeader()
>       self._send_output(message_body, encode_chunked=encode_chunked)

/usr/lib/python3.7/http/client.py:1239:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <http.client.HTTPSConnection object at 0x7fbe353e9990>
message_body = None, encode_chunked = False

    def _send_output(self, message_body=None, encode_chunked=False):
        """Send the currently buffered request and clear the buffer.

        Appends an extra \\r\\n to the buffer.
        A message_body may be specified, to be appended to the request.
        """
        self._buffer.extend((b"", b""))
        msg = b"\r\n".join(self._buffer)
        del self._buffer[:]
>       self.send(msg)

/usr/lib/python3.7/http/client.py:1026:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <http.client.HTTPSConnection object at 0x7fbe353e9990>
data = b'GET /pydata/xarray-data/raw/master/tiny.nc HTTP/1.1\r\nAccept-Encoding: identity\r\nHost: github.com\r\nUser-Agent: Python-urllib/3.7\r\nConnection: close\r\n\r\n'

    def send(self, data):
        """Send `data' to the server.
        ``data`` can be a string object, a bytes object, an array object, a
        file-like object that supports a .read() method, or an iterable object.
        """

        if self.sock is None:
            if self.auto_open:
>               self.connect()

/usr/lib/python3.7/http/client.py:966:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <http.client.HTTPSConnection object at 0x7fbe353e9990>

    def connect(self):
        "Connect to a host on a given (SSL) port."

        super().connect()

        if self._tunnel_host:
            server_hostname = self._tunnel_host
        else:
            server_hostname = self.host

        self.sock = self._context.wrap_socket(self.sock,
>                                              server_hostname=server_hostname)

/usr/lib/python3.7/http/client.py:1414:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <ssl.SSLContext object at 0x7fbe353127c0>
sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = 'github.com', session = None

    def wrap_socket(self, sock, server_side=False,
                    do_handshake_on_connect=True,
                    suppress_ragged_eofs=True,
                    server_hostname=None, session=None):
        # SSLSocket class handles server_hostname encoding before it calls
        # ctx._wrap_socket()
        return self.sslsocket_class._create(
            sock=sock,
            server_side=server_side,
            do_handshake_on_connect=do_handshake_on_connect,
            suppress_ragged_eofs=suppress_ragged_eofs,
            server_hostname=server_hostname,
            context=self,
>           session=session
        )

/usr/lib/python3.7/ssl.py:423:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

cls = <class 'ssl.SSLSocket'>
sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = 'github.com'
context = <ssl.SSLContext object at 0x7fbe353127c0>, session = None

    @classmethod
    def _create(cls, sock, server_side=False, do_handshake_on_connect=True,
                suppress_ragged_eofs=True, server_hostname=None,
                context=None, session=None):
        if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM:
            raise NotImplementedError("only stream sockets are supported")
        if server_side:
            if server_hostname:
                raise ValueError("server_hostname can only be specified "
                                 "in client mode")
            if session is not None:
                raise ValueError("session can only be specified in "
                                 "client mode")
        if context.check_hostname and not server_hostname:
            raise ValueError("check_hostname requires server_hostname")

        kwargs = dict(
            family=sock.family, type=sock.type, proto=sock.proto,
            fileno=sock.fileno()
        )
        self = cls.__new__(cls, **kwargs)
        super(SSLSocket, self).__init__(**kwargs)
        self.settimeout(sock.gettimeout())
        sock.detach()

        self._context = context
        self._session = session
        self._closed = False
        self._sslobj = None
        self.server_side = server_side
        self.server_hostname = context._encode_hostname(server_hostname)
        self.do_handshake_on_connect = do_handshake_on_connect
        self.suppress_ragged_eofs = suppress_ragged_eofs

        # See if we are connected
        try:
            self.getpeername()
        except OSError as e:
            if e.errno != errno.ENOTCONN:
                raise
            connected = False
        else:
            connected = True

        self._connected = connected
        if connected:
            # create the SSL object
            try:
                self._sslobj = self._context._wrap_socket(
                    self, server_side, self.server_hostname,
                    owner=self, session=self._session,
                )
                if do_handshake_on_connect:
                    timeout = self.gettimeout()
                    if timeout == 0.0:
                        # non-blocking
                        raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
>                   self.do_handshake()

/usr/lib/python3.7/ssl.py:870:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <ssl.SSLSocket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
block = False

    @_sslcopydoc
    def do_handshake(self, block=False):
        self._check_connected()
        timeout = self.gettimeout()
        try:
            if timeout == 0.0 and block:
                self.settimeout(None)
>           self._sslobj.do_handshake()
E           ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1076)

/usr/lib/python3.7/ssl.py:1139: SSLCertVerificationError

During handling of the above exception, another exception occurred:

self = <xarray.tests.test_tutorial.TestLoadDataset object at 0x7fbe353e9510>

    def test_download_from_github_load_without_cache(self):
>       ds_nocache = tutorial.open_dataset(self.testfile, cache=False).load()

/usr/lib/python3/dist-packages/xarray/tests/test_tutorial.py:29:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _
/usr/lib/python3/dist-packages/xarray/tutorial.py:68: in open_dataset
    urlretrieve(url, localfile)
/usr/lib/python3.7/urllib/request.py:247: in urlretrieve
    with contextlib.closing(urlopen(url, data)) as fp:
/usr/lib/python3.7/urllib/request.py:222: in urlopen
    return opener.open(url, data, timeout)
/usr/lib/python3.7/urllib/request.py:525: in open
    response = self._open(req, data)
/usr/lib/python3.7/urllib/request.py:543: in _open
    '_open', req)
/usr/lib/python3.7/urllib/request.py:503: in _call_chain
    result = func(*args)
/usr/lib/python3.7/urllib/request.py:1360: in https_open
    context=self._context, check_hostname=self._check_hostname)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _

self = <urllib.request.HTTPSHandler object at 0x7fbe35ee1f10>
http_class = <class 'http.client.HTTPSConnection'>
req = <urllib.request.Request object at 0x7fbe353e96d0>
http_conn_args = {'check_hostname': None, 'context': None}, host = 'github.com'
h = <http.client.HTTPSConnection object at 0x7fbe353e9990>

    def do_open(self, http_class, req, **http_conn_args):
        """Return an HTTPResponse object for the request, using http_class.

        http_class must implement the HTTPConnection API from http.client.
        """
        host = req.host
        if not host:
            raise URLError('no host given')

        # will parse host:port
        h = http_class(host, timeout=req.timeout, **http_conn_args)
        h.set_debuglevel(self._debuglevel)

        headers = dict(req.unredirected_hdrs)
        headers.update({k: v for k, v in req.headers.items()
                        if k not in headers})

        # TODO(jhylton): Should this be redesigned to handle
        # persistent connections?

        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = {name.title(): val for name, val in headers.items()}

        if req._tunnel_host:
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                # Proxy-Authorization should not be sent to origin
                # server.
                del headers[proxy_auth_hdr]
            h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            try:
                h.request(req.get_method(), req.selector, req.data, headers,
                          encode_chunked=req.has_header('Transfer-encoding'))
            except OSError as err: # timeout error
>               raise URLError(err)
E               urllib.error.URLError: <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1076)>

/usr/lib/python3.7/urllib/request.py:1319: URLError
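
The two tutorial failures are of a different nature: both tests try to
download tiny.nc from github.com and fail during certificate
verification, which suggests a problem in the test environment (for
example missing CA certificates) rather than in xarray itself. Below is
a minimal sketch that reproduces just the download step outside the test
suite, using the URL visible in the request line of the log above; the
timeout value is an arbitrary choice for this sketch:

    # Sketch only -- reproduces the failing download step with plain urllib.
    import urllib.error
    import urllib.request

    url = "https://github.com/pydata/xarray-data/raw/master/tiny.nc"
    try:
        with urllib.request.urlopen(url, timeout=30) as resp:
            print(resp.status, len(resp.read()), "bytes")
    except urllib.error.URLError as exc:
        # On the failing autopkgtest host this prints the same
        # CERTIFICATE_VERIFY_FAILED error shown in the traceback.
        print("download failed:", exc)
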

