[Python-modules-commits] [ipykernel] 01/07: Import ipykernel_4.5.1.orig.tar.gz

Gordon Ball chronitis-guest at moszumanska.debian.org
Sat Nov 19 13:49:30 UTC 2016


This is an automated email from the git hooks/post-receive script.

chronitis-guest pushed a commit to branch master
in repository ipykernel.

commit e8de387ae770137ca132a448d9569d3962178ae9
Author: Gordon Ball <gordon at chronitis.net>
Date:   Sat Nov 19 14:11:30 2016 +0100

    Import ipykernel_4.5.1.orig.tar.gz
---
 PKG-INFO                                 |   2 +-
 docs/changelog.rst                       |  10 +++
 docs/conf.py                             |   4 +-
 docs/index.rst                           |   2 +-
 ipykernel/_version.py                    |   2 +-
 ipykernel/comm/comm.py                   |   4 +-
 ipykernel/inprocess/socket.py            |   7 +-
 ipykernel/inprocess/tests/test_kernel.py |   8 ++
 ipykernel/iostream.py                    | 136 +++++++++++++++++++++----------
 ipykernel/ipkernel.py                    |   2 -
 ipykernel/kernelbase.py                  |   6 +-
 ipykernel/tests/test_jsonutil.py         |  12 ++-
 ipykernel/zmqshell.py                    |   2 +-
 13 files changed, 141 insertions(+), 56 deletions(-)

diff --git a/PKG-INFO b/PKG-INFO
index dfe97c6..f6589cc 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: ipykernel
-Version: 4.5.0
+Version: 4.5.1
 Summary: IPython Kernel for Jupyter
 Home-page: http://ipython.org
 Author: IPython Development Team
diff --git a/docs/changelog.rst b/docs/changelog.rst
index e7bb08f..a9acb1f 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,16 @@ Changes in IPython kernel
 4.5
 ---
 
+4.5.1
+*****
+
+`4.5.1 on GitHub <https://github.com/ipython/ipykernel/milestones/4.5.1>`__
+
+- Add missing ``stream`` parameter to overridden :func:`getpass`
+- Remove locks from iopub thread, which could cause deadlocks during debugging
+- Fix regression where KeyboardInterrupt was treated as an aborted request, rather than an error
+- Allow instantiating Comms outside of the IPython kernel
+
 4.5.0
 *****
 
diff --git a/docs/conf.py b/docs/conf.py
index 83a317b..c126d82 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -298,6 +298,6 @@ texinfo_documents = [
 # Example configuration for intersphinx: refer to the Python standard library.
 intersphinx_mapping = {
     'https://docs.python.org/': None,
-    'ipython': ('https://ipython.readthedocs.org/en/latest', None),
-    'jupyter': ('https://jupyter.readthedocs.org/en/latest', None),
+    'ipython': ('https://ipython.readthedocs.io/en/latest', None),
+    'jupyter': ('https://jupyter.readthedocs.io/en/latest', None),
 }
diff --git a/docs/index.rst b/docs/index.rst
index 3dab6ce..770904d 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -4,7 +4,7 @@ IPython Kernel Docs
 ===================
 
 This contains minimal version-sensitive documentation for the IPython kernel package.
-Most IPython kernel documentation is in the `IPython documentation <http://ipython.readthedocs.org/en/latest/>`_.
+Most IPython kernel documentation is in the `IPython documentation <https://ipython.readthedocs.io/en/latest/>`_.
 
 Contents:
 
diff --git a/ipykernel/_version.py b/ipykernel/_version.py
index 00a46c9..22ed655 100644
--- a/ipykernel/_version.py
+++ b/ipykernel/_version.py
@@ -1,4 +1,4 @@
-version_info = (4, 5, 0)
+version_info = (4, 5, 1)
 __version__ = '.'.join(map(str, version_info))
 
 kernel_protocol_version_info = (5, 0)
diff --git a/ipykernel/comm/comm.py b/ipykernel/comm/comm.py
index 7656f94..9ffc061 100644
--- a/ipykernel/comm/comm.py
+++ b/ipykernel/comm/comm.py
@@ -14,7 +14,7 @@ from traitlets import Instance, Unicode, Bytes, Bool, Dict, Any, default
 
 class Comm(LoggingConfigurable):
     """Class for communicating between a Frontend and a Kernel"""
-    kernel = Instance('ipykernel.kernelbase.Kernel')
+    kernel = Instance('ipykernel.kernelbase.Kernel', allow_none=True)
 
     @default('kernel')
     def _default_kernel(self):
@@ -51,7 +51,7 @@ class Comm(LoggingConfigurable):
         if target_name:
             kwargs['target_name'] = target_name
         super(Comm, self).__init__(**kwargs)
-        if self.primary:
+        if self.kernel is not None and self.primary:
             # I am primary, open my peer.
             self.open(data=data, metadata=metadata, buffers=buffers)
         else:
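
These two hunks are what the changelog's "Allow instantiating Comms outside of the IPython kernel" refers to: the kernel trait may now be None, and open() is only attempted when a kernel actually exists. A minimal sketch of the resulting behaviour, assuming no Kernel has been initialized in the process (the target name here is arbitrary):

    from ipykernel.comm import Comm

    # Construction no longer requires a running kernel.
    comm = Comm(target_name='example_target')
    # The kernel default resolves to None when no kernel has been initialized.
    assert comm.kernel is None
    # open()/send() still need a kernel and a session, so such a Comm is mainly
    # useful for construction-time logic and tests.
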
diff --git a/ipykernel/inprocess/socket.py b/ipykernel/inprocess/socket.py
index 88e3751..3788489 100644
--- a/ipykernel/inprocess/socket.py
+++ b/ipykernel/inprocess/socket.py
@@ -31,7 +31,9 @@ class SocketABC(with_metaclass(abc.ABCMeta, object)):
     
     @classmethod
     def register(cls, other_cls):
-        warnings.warn("SocketABC is deprecated.", DeprecationWarning)
+        if other_cls is not DummySocket:
+            warnings.warn("SocketABC is deprecated since ipykernel version 4.5.0.",
+                    DeprecationWarning, stacklevel=2)
         abc.ABCMeta.register(cls, other_cls)
 
 #-----------------------------------------------------------------------------
@@ -43,6 +45,9 @@ class DummySocket(HasTraits):
 
     queue = Instance(Queue, ())
     message_sent = Int(0) # Should be an Event
+    context = Instance(zmq.Context)
+    def _context_default(self):
+        return zmq.Context.instance()
 
     #-------------------------------------------------------------------------
     # Socket interface
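
DummySocket now carries a context trait, presumably so that code treating it like a real socket (such as the reworked IOPubThread below, which reads socket.context when setting up its event pipe) keeps working; the default lazily resolves to the process-global zmq context. A quick illustrative sketch, assuming a default-constructed DummySocket:

    import zmq
    from ipykernel.inprocess.socket import DummySocket

    sock = DummySocket()
    # The new trait default resolves to the shared global context.
    assert sock.context is zmq.Context.instance()
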
diff --git a/ipykernel/inprocess/tests/test_kernel.py b/ipykernel/inprocess/tests/test_kernel.py
index 8f15376..0231c86 100644
--- a/ipykernel/inprocess/tests/test_kernel.py
+++ b/ipykernel/inprocess/tests/test_kernel.py
@@ -66,3 +66,11 @@ class InProcessKernelTestCase(unittest.TestCase):
         kc.execute('print("bar")')
         out, err = assemble_output(kc.iopub_channel)
         self.assertEqual(out, 'bar\n')
+
+    def test_getpass_stream(self):
+        "Tests that kernel getpass accepts the stream parameter"
+        kernel = InProcessKernel()
+        kernel._allow_stdin = True
+        kernel._input_request = lambda *args, **kwargs : None
+
+        kernel.getpass(stream='non empty')
diff --git a/ipykernel/iostream.py b/ipykernel/iostream.py
index 5c7f96e..4f0c7c4 100644
--- a/ipykernel/iostream.py
+++ b/ipykernel/iostream.py
@@ -6,10 +6,10 @@
 
 from __future__ import print_function
 import atexit
+from binascii import b2a_hex
 import os
 import sys
 import threading
-import uuid
 import warnings
 from io import StringIO, UnsupportedOperation
 
@@ -35,14 +35,25 @@ CHILD = 1
 
 class IOPubThread(object):
     """An object for sending IOPub messages in a background thread
-    
-    prevents a blocking main thread
-    
+
+    Prevents a blocking main thread from delaying output from threads.
+
     IOPubThread(pub_socket).background_socket is a Socket-API-providing object
     whose IO is always run in a thread.
     """
 
     def __init__(self, socket, pipe=False):
+        """Create IOPub thread
+
+        Parameters
+        ----------
+
+        socket: zmq.PUB Socket
+            the socket on which messages will be sent.
+        pipe: bool
+            Whether this process should listen for IOPub messages
+            piped from subprocesses.
+        """
         self.socket = socket
         self.background_socket = BackgroundSocket(self)
         self._master_pid = os.getpid()
@@ -50,6 +61,9 @@ class IOPubThread(object):
         self.io_loop = IOLoop()
         if pipe:
             self._setup_pipe_in()
+        self._local = threading.local()
+        self._events = {}
+        self._setup_event_pipe()
         self.thread = threading.Thread(target=self._thread_main)
         self.thread.daemon = True
 
@@ -57,16 +71,51 @@ class IOPubThread(object):
         """The inner loop that's actually run in a thread"""
         self.io_loop.start()
         self.io_loop.close()
+        if hasattr(self._local, 'event_pipe'):
+            self._local.event_pipe.close()
+
+    def _setup_event_pipe(self):
+        """Create the PULL socket listening for events that should fire in this thread."""
+        ctx = self.socket.context
+        pipe_in = ctx.socket(zmq.PULL)
+        pipe_in.linger = 0
 
+        _uuid = b2a_hex(os.urandom(16)).decode('ascii')
+        iface = self._event_interface = 'inproc://%s' % _uuid
+        pipe_in.bind(iface)
+        self._event_puller = ZMQStream(pipe_in, self.io_loop)
+        self._event_puller.on_recv(self._handle_event)
+    
+    @property
+    def _event_pipe(self):
+        """thread-local event pipe for signaling events that should be processed in the thread"""
+        try:
+            event_pipe = self._local.event_pipe
+        except AttributeError:
+            # new thread, new event pipe
+            ctx = self.socket.context
+            event_pipe = ctx.socket(zmq.PUSH)
+            event_pipe.linger = 0
+            event_pipe.connect(self._event_interface)
+            self._local.event_pipe = event_pipe
+        return event_pipe
+
+    def _handle_event(self, msg):
+        """Handle an event on the event pipe"""
+        event_id = msg[0]
+        event_f = self._events.pop(event_id)
+        event_f()
+    
     def _setup_pipe_in(self):
-        """setup listening pipe for subprocesses"""
+        """setup listening pipe for IOPub from forked subprocesses"""
         ctx = self.socket.context
 
         # use UUID to authenticate pipe messages
-        self._pipe_uuid = uuid.uuid4().bytes
+        self._pipe_uuid = os.urandom(16)
 
         pipe_in = ctx.socket(zmq.PULL)
         pipe_in.linger = 0
+
         try:
             self._pipe_port = pipe_in.bind_to_random_port("tcp://127.0.0.1")
         except zmq.ZMQError as e:
@@ -127,17 +176,27 @@ class IOPubThread(object):
     @property
     def closed(self):
         return self.socket is None
-    
+
+    def schedule(self, f):
+        """Schedule a function to be called in our IO thread.
+
+        If the thread is not running, call immediately.
+        """
+        if self.thread.is_alive():
+            event_id = os.urandom(16)
+            while event_id in self._events:
+                event_id = os.urandom(16)
+            self._events[event_id] = f
+            self._event_pipe.send(event_id)
+        else:
+            f()
+
     def send_multipart(self, *args, **kwargs):
         """send_multipart schedules actual zmq send in my thread.
         
         If my thread isn't running (e.g. forked process), send immediately.
         """
-
-        if self.thread.is_alive():
-            self.io_loop.add_callback(lambda : self._really_send(*args, **kwargs))
-        else:
-            self._really_send(*args, **kwargs)
+        self.schedule(lambda : self._really_send(*args, **kwargs))
     
     def _really_send(self, msg, *args, **kwargs):
         """The callback that actually sends messages"""
@@ -219,10 +278,8 @@ class OutStream(object):
         self.topic = b'stream.' + py3compat.cast_bytes(name)
         self.parent_header = {}
         self._master_pid = os.getpid()
-        self._flush_lock = threading.Lock()
-        self._flush_timeout = None
+        self._flush_pending = False
         self._io_loop = pub_thread.io_loop
-        self._buffer_lock = threading.Lock()
         self._new_buffer()
 
     def _is_master_process(self):
@@ -243,18 +300,14 @@ class OutStream(object):
         
         call this on write, to indicate that flush should be called soon.
         """
-        with self._flush_lock:
-            if self._flush_timeout is not None:
-                return
-            # None indicates there's no flush scheduled.
-            # Use a non-None placeholder to indicate that a flush is scheduled
-            # to avoid races while we wait for _schedule_in_thread below to fire in the io thread.
-            self._flush_timeout = 'placeholder'
-        
-        # add_timeout has to be handed to the io thread with add_callback
+        if self._flush_pending:
+            return
+        self._flush_pending = True
+
+        # add_timeout has to be handed to the io thread via event pipe
         def _schedule_in_thread():
-            self._flush_timeout = self._io_loop.call_later(self.flush_interval, self._flush)
-        self._io_loop.add_callback(_schedule_in_thread)
+            self._io_loop.call_later(self.flush_interval, self._flush)
+        self.pub_thread.schedule(_schedule_in_thread)
 
     def flush(self):
         """trigger actual zmq send
@@ -262,10 +315,10 @@ class OutStream(object):
         send will happen in the background thread
         """
         if self.pub_thread.thread.is_alive():
-            self._io_loop.add_callback(self._flush)
             # wait for flush to actually get through:
+            self.pub_thread.schedule(self._flush)
             evt = threading.Event()
-            self._io_loop.add_callback(evt.set)
+            self.pub_thread.schedule(evt.set)
             evt.wait()
         else:
             self._flush()
@@ -276,9 +329,8 @@ class OutStream(object):
         _flush should generally be called in the IO thread,
         unless the thread has been destroyed (e.g. forked subprocess).
         """
-        with self._flush_lock:
-            self._flush_timeout = None
-            data = self._flush_buffer()
+        self._flush_pending = False
+        data = self._flush_buffer()
         if data:
             # FIXME: this disables Session's fork-safe check,
             # since pub_thread is itself fork-safe.
@@ -315,8 +367,8 @@ class OutStream(object):
                 string = string.decode(self.encoding, 'replace')
 
             is_child = (not self._is_master_process())
-            with self._buffer_lock:
-                self._buffer.write(string)
+            # only touch the buffer in the IO thread to avoid races
+            self.pub_thread.schedule(lambda : self._buffer.write(string))
             if is_child:
                 # newlines imply flush in subprocesses
                 # mp.Pool cannot be trusted to flush promptly (or ever),
@@ -334,14 +386,16 @@ class OutStream(object):
                 self.write(string)
 
     def _flush_buffer(self):
-        """clear the current buffer and return the current buffer data"""
-        with self._buffer_lock:
-            data = u''
-            if self._buffer is not None:
-                buf = self._buffer
-                self._new_buffer()
-                data = buf.getvalue()
-                buf.close()
+        """clear the current buffer and return the current buffer data.
+        
+        This should only be called in the IO thread.
+        """
+        data = u''
+        if self._buffer is not None:
+            buf = self._buffer
+            self._new_buffer()
+            data = buf.getvalue()
+            buf.close()
         return data
 
     def _new_buffer(self):
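
The heart of the 4.5.1 iostream rework is schedule(): rather than guarding buffers and timeouts with locks, each callable is registered under a random id and that id is pushed over a thread-local inproc PUSH socket to a PULL socket owned by the IO thread, which pops the callable and runs it there. A rough usage sketch of the public surface (the PUB socket setup below is illustrative, not taken from ipykernel):

    import zmq
    from ipykernel.iostream import IOPubThread

    ctx = zmq.Context.instance()
    pub = ctx.socket(zmq.PUB)
    pub.bind_to_random_port('tcp://127.0.0.1')

    iopub = IOPubThread(pub)
    iopub.start()

    def hello():
        # delivered via the event pipe and executed on the IOPub thread
        print('scheduled on the IO thread')

    # schedule() replaces the old add_callback-plus-locks pattern; if the
    # thread is not alive (e.g. in a forked child), the callable runs inline.
    iopub.schedule(hello)

    iopub.stop()
    iopub.close()
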
diff --git a/ipykernel/ipkernel.py b/ipykernel/ipkernel.py
index 8d726a4..d28302c 100644
--- a/ipykernel/ipkernel.py
+++ b/ipykernel/ipkernel.py
@@ -204,8 +204,6 @@ class IPythonKernel(KernelBase):
 
         if res.success:
             reply_content[u'status'] = u'ok'
-        elif isinstance(err, KeyboardInterrupt):
-            reply_content[u'status'] = u'aborted'
         else:
             reply_content[u'status'] = u'error'
 
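
Removing this branch is the changelog's KeyboardInterrupt fix: an interrupted cell now comes back with status 'error' (and the usual ename/evalue/traceback) instead of being mis-reported as an aborted request. The surviving logic is simply (paraphrased from the hunk above):

    def reply_status(res):
        # res is the ExecutionResult from run_cell; only success matters here --
        # a KeyboardInterrupt is treated like any other error.
        return u'ok' if res.success else u'error'
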
diff --git a/ipykernel/kernelbase.py b/ipykernel/kernelbase.py
index eb24b74..64e6737 100644
--- a/ipykernel/kernelbase.py
+++ b/ipykernel/kernelbase.py
@@ -656,7 +656,7 @@ class Kernel(SingletonConfigurable):
         raise StdinNotImplementedError("raw_input was called, but this "
                                        "frontend does not support stdin.")
 
-    def getpass(self, prompt=''):
+    def getpass(self, prompt='', stream=None):
         """Forward getpass to frontends
 
         Raises
@@ -667,6 +667,10 @@ class Kernel(SingletonConfigurable):
             raise StdinNotImplementedError(
                 "getpass was called, but this frontend does not support input requests."
             )
+        if stream is not None:
+            import warnings
+            warnings.warn("The `stream` parameter of `getpass.getpass` will have no effect when using ipykernel",
+                    UserWarning, stacklevel=2)
         return self._input_request(prompt,
             self._parent_ident,
             self._parent_header,
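
Because ipykernel forwards the prompt to the frontend, a stream argument cannot be honoured; it is now accepted and ignored with a UserWarning instead of failing with a TypeError. A sketch mirroring the new in-process test above (the import path is assumed, and _input_request is stubbed out to skip the frontend round-trip):

    import sys
    import warnings
    from ipykernel.inprocess.ipkernel import InProcessKernel   # assumed location

    kernel = InProcessKernel()
    kernel._allow_stdin = True
    kernel._input_request = lambda *args, **kwargs: None   # no real frontend

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        kernel.getpass(prompt='Password: ', stream=sys.stderr)   # no TypeError
    assert any('no effect' in str(w.message) for w in caught)
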
diff --git a/ipykernel/tests/test_jsonutil.py b/ipykernel/tests/test_jsonutil.py
index 2e4901e..794ff6c 100644
--- a/ipykernel/tests/test_jsonutil.py
+++ b/ipykernel/tests/test_jsonutil.py
@@ -5,7 +5,13 @@
 # Distributed under the terms of the Modified BSD License.
 
 import json
-from base64 import decodestring
+import sys
+
+if sys.version_info < (3,):
+    from base64 import decodestring as decodebytes
+else:
+    from base64 import decodebytes
+
 from datetime import datetime
 import numbers
 
@@ -73,7 +79,7 @@ def test_encode_images():
     encoded = encode_images(fmt)
     for key, value in iteritems(fmt):
         # encoded has unicode, want bytes
-        decoded = decodestring(encoded[key].encode('ascii'))
+        decoded = decodebytes(encoded[key].encode('ascii'))
         nt.assert_equal(decoded, value)
     encoded2 = encode_images(encoded)
     nt.assert_equal(encoded, encoded2)
@@ -85,7 +91,7 @@ def test_encode_images():
     nt.assert_equal(encoded3, b64_str)
     for key, value in iteritems(fmt):
         # encoded3 has str, want bytes
-        decoded = decodestring(str_to_bytes(encoded3[key]))
+        decoded = decodebytes(str_to_bytes(encoded3[key]))
         nt.assert_equal(decoded, value)
 
 def test_lambda():
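
The conditional import is a plain Python 2/3 shim: base64.decodestring was renamed to decodebytes in Python 3 and the old name is deprecated, so the tests pick whichever exists. The same shim, shown round-tripping a payload (illustrative only):

    import sys
    from base64 import b64encode

    if sys.version_info < (3,):
        from base64 import decodestring as decodebytes
    else:
        from base64 import decodebytes

    payload = b'\x89PNG\r\n\x1a\n'   # arbitrary bytes, e.g. a PNG signature
    assert decodebytes(b64encode(payload)) == payload
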
diff --git a/ipykernel/zmqshell.py b/ipykernel/zmqshell.py
index cb17c70..9d59e99 100644
--- a/ipykernel/zmqshell.py
+++ b/ipykernel/zmqshell.py
@@ -563,7 +563,7 @@ class ZMQInteractiveShell(InteractiveShell):
         # Overridden not to do virtualenv detection, because it's probably
         # not appropriate in a kernel. To use a kernel in a virtualenv, install
         # it inside the virtualenv.
-        # http://ipython.readthedocs.org/en/latest/install/kernel_install.html
+        # https://ipython.readthedocs.io/en/latest/install/kernel_install.html
         pass
 
 

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/ipykernel.git


