commit e7ff6259a3ced9b3d7a8f86c07f9e27c0445203b Author: David Wilson Date: Fri Sep 15 11:54:41 2017 +0530 Initial commit. diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..21301a71 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +docs/_build +*.egg-info diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..aec458ea --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,71 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " coverage to run coverage check of the documentation (if enabled)" + +.PHONY: clean +clean: + rm -rf $(BUILDDIR)/* + +.PHONY: html +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +.PHONY: dirhtml +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +.PHONY: changes +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +.PHONY: linkcheck +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +.PHONY: doctest +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +.PHONY: coverage +coverage: + $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage + @echo "Testing of coverage in the sources finished, look at the " \ + "results in $(BUILDDIR)/coverage/python.txt." 
diff --git a/docs/_static/style.css b/docs/_static/style.css new file mode 100644 index 00000000..d42f3c0d --- /dev/null +++ b/docs/_static/style.css @@ -0,0 +1,4 @@ + +div.figure { + padding: 0; +} diff --git a/docs/_templates/github.html b/docs/_templates/github.html new file mode 100644 index 00000000..f26b0c86 --- /dev/null +++ b/docs/_templates/github.html @@ -0,0 +1,4 @@ +

+
+GitHub Repository +

diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html new file mode 100644 index 00000000..42efa77e --- /dev/null +++ b/docs/_templates/layout.html @@ -0,0 +1,2 @@ +{% extends "!layout.html" %} +{% set css_files = css_files + ['_static/style.css'] %} diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 00000000..81533558 --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,92 @@ + +API Reference +************* + + +Package Layout +============== + + +mitogen Package +--------------- + +.. automodule:: mitogen + +.. autodata:: mitogen.slave +.. autodata:: mitogen.context_id +.. autodata:: mitogen.parent_id + + +mitogen.core +------------ + +.. automodule:: mitogen.core + + +mitogen.master +-------------- + +.. automodule:: mitogen.master + + +mitogen.fakessh +--------------- + +.. automodule:: mitogen.fakessh + +.. autofunction:: mitogen.fakessh.run + + +Router Class +============ + +.. autoclass:: mitogen.master.Router + :members: + :inherited-members: + + +Broker Class +============ + +.. autoclass:: mitogen.master.Broker + :members: + :inherited-members: + + +Context Class +============= + +.. autoclass:: mitogen.master.Context + :members: + :inherited-members: + + +Channel Class +------------- + +.. autoclass:: mitogen.core.Channel + :members: + + +Context Class +------------- + +.. autoclass:: mitogen.master.Context + :members: + + +Utility Functions +================= + +.. automodule:: mitogen.utils + :members: + + +Exceptions +========== + +.. autoclass:: mitogen.core.Error +.. autoclass:: mitogen.core.CallError +.. autoclass:: mitogen.core.ChannelError +.. autoclass:: mitogen.core.StreamError +.. 
autoclass:: mitogen.core.TimeoutError diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 00000000..59a46065 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,23 @@ +import sys +sys.path.append('..') + +author = u'David Wilson' +copyright = u'2016, David Wilson' +exclude_patterns = ['_build'] +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'] +html_show_sourcelink = False +html_show_sphinx = False +html_sidebars = {'**': ['globaltoc.html', 'github.html']} +html_static_path = ['_static'] +html_theme = 'alabaster' +htmlhelp_basename = 'mitogendoc' +intersphinx_mapping = {'python': ('https://docs.python.org/2', None)} +language = None +master_doc = 'toc' +project = u'Mitogen' +pygments_style = 'sphinx' +release = u'master' +source_suffix = '.rst' +templates_path = ['_templates'] +todo_include_todos = False +version = u'master' diff --git a/docs/examples.rst b/docs/examples.rst new file mode 100644 index 00000000..b1d29589 --- /dev/null +++ b/docs/examples.rst @@ -0,0 +1,67 @@ + +Examples +======== + + +Recursively Nested Bootstrap +---------------------------- + +This demonstrates the library's ability to use slave contexts to recursively +proxy connections to additional slave contexts, with a uniform API to any +slave, and all features (function calls, import forwarding, stdio forwarding, +log forwarding) functioning transparently. + +This example uses a chain of local contexts for clarity, however SSH and sudo +contexts work identically. + +nested.py: + +.. code-block:: python + + import os + import mitogen.utils + + @mitogen.utils.run_with_router + def main(router): + mitogen.utils.log_to_file() + + context = None + for x in range(1, 11): + print 'Connect local%d via %s' % (x, context) + context = router.local(via=context, name='local%d' % x) + + context.call(os.system, 'pstree -s python -s mitogen') + + +Output: + +.. 
code-block:: shell + + $ python nested.py + Connect local1 via None + Connect local2 via Context(1, 'local1') + Connect local3 via Context(2, 'local2') + Connect local4 via Context(3, 'local3') + Connect local5 via Context(4, 'local4') + Connect local6 via Context(5, 'local5') + Connect local7 via Context(6, 'local6') + Connect local8 via Context(7, 'local7') + Connect local9 via Context(8, 'local8') + Connect local10 via Context(9, 'local9') + 18:14:07 I ctx.local10: stdout: -+= 00001 root /sbin/launchd + 18:14:07 I ctx.local10: stdout: \-+= 08126 dmw /Applications/iTerm.app/Contents/MacOS/iTerm2 + 18:14:07 I ctx.local10: stdout: \-+= 10638 dmw /Applications/iTerm.app/Contents/MacOS/iTerm2 --server bash --login + 18:14:07 I ctx.local10: stdout: \-+= 10639 dmw bash --login + 18:14:07 I ctx.local10: stdout: \-+= 13632 dmw python nested.py + 18:14:07 I ctx.local10: stdout: \-+- 13633 dmw mitogen:dmw@Eldil.local:13632 + 18:14:07 I ctx.local10: stdout: \-+- 13635 dmw mitogen:dmw@Eldil.local:13633 + 18:14:07 I ctx.local10: stdout: \-+- 13637 dmw mitogen:dmw@Eldil.local:13635 + 18:14:07 I ctx.local10: stdout: \-+- 13639 dmw mitogen:dmw@Eldil.local:13637 + 18:14:07 I ctx.local10: stdout: \-+- 13641 dmw mitogen:dmw@Eldil.local:13639 + 18:14:07 I ctx.local10: stdout: \-+- 13643 dmw mitogen:dmw@Eldil.local:13641 + 18:14:07 I ctx.local10: stdout: \-+- 13645 dmw mitogen:dmw@Eldil.local:13643 + 18:14:07 I ctx.local10: stdout: \-+- 13647 dmw mitogen:dmw@Eldil.local:13645 + 18:14:07 I ctx.local10: stdout: \-+- 13649 dmw mitogen:dmw@Eldil.local:13647 + 18:14:07 I ctx.local10: stdout: \-+- 13651 dmw mitogen:dmw@Eldil.local:13649 + 18:14:07 I ctx.local10: stdout: \-+- 13653 dmw pstree -s python -s mitogen + 18:14:07 I ctx.local10: stdout: \--- 13654 root ps -axwwo user,pid,ppid,pgid,command diff --git a/docs/getting_started.rst b/docs/getting_started.rst new file mode 100644 index 00000000..97258614 --- /dev/null +++ b/docs/getting_started.rst @@ -0,0 +1,5 @@ + +Getting Started 
+=============== + +xxx diff --git a/docs/howitworks.rst b/docs/howitworks.rst new file mode 100644 index 00000000..71a1c778 --- /dev/null +++ b/docs/howitworks.rst @@ -0,0 +1,511 @@ + +How Mitogen Works +================= + +Some effort is required to accomplish the seemingly magical feat of +bootstrapping a remote Python process without any software installed on the +remote machine. The steps involved are unlikely to be immediately obvious to +the casual reader, and they required several iterations to discover, so we +document them thoroughly below. + + +The UNIX First Stage +-------------------- + +To allow delivery of the bootstrap compressed using :py:mod:`zlib`, it is +necessary for something on the remote to be prepared to decompress the payload +and feed it to a Python interpreter. Since we would like to avoid writing an +error-prone shell fragment to implement this, and since we must avoid writing +to the remote machine's disk in case it is read-only, the Python process +started on the remote machine by Mitogen immediately forks in order to +implement the decompression. + + +Python Command Line +################### + +The Python command line sent to the host is a base64-encoded copy of the +:py:meth:`mitogen.master.LocalStream._first_stage` function, which has been +carefully optimized to reduce its size. Prior to base64 encoding, +``CONTEXT_NAME`` is replaced with the desired context name in the function's +source code. + +.. code:: + + python -c 'exec "xxx".decode("base64")' + +The command-line arranges for the Python interpreter to decode the base64'd +component and execute it as Python code. Base64 is used since the first stage +implementation contains newlines, and many special characters that may be +interpreted by the system shell in use. 
+ + +Forking The First Stage +####################### + +The first stage creates a UNIX pipe and saves a copy of the process's real +``stdin`` file descriptor (used for communication with the master) so that it +can be recovered by the bootstrapped process later. It then forks into a new +process. + +After fork, the parent half overwrites its ``stdin`` with the read end of the +pipe, and the child half writes the string ``EC0\n``, then begins reading the +:py:mod:`zlib`-compressed payload supplied on ``stdin`` by the master, and +writing the decompressed result to the write-end of the UNIX pipe. + +To allow recovery of ``stdin`` for reuse by the bootstrapped process for +master<->slave communication, it is necessary for the first stage to avoid +closing ``stdin`` or reading from it until EOF. Therefore, the master +sends the :py:mod:`zlib`-compressed payload prefixed with an integer size, +allowing reading by the first stage of exactly the required bytes. + + +Configuring argv[0] +################### + +Forking provides us with an excellent opportunity for tidying up the eventual +Python interpreter, in particular, restarting it using a fresh command-line to +get rid of the large base64-encoded first stage parameter, and to replace +**argv[0]** with something descriptive. + +After configuring its ``stdin`` to point to the read end of the pipe, the +parent half of the fork re-executes Python, with **argv[0]** taken from the +``CONTEXT_NAME`` variable earlier substituted into its source code. As no +arguments are provided to this new execution of Python, and since ``stdin`` is +connected to a pipe (whose write end is connected to the first stage), the +Python interpreter begins reading source code to execute from the pipe +connected to ``stdin``. + + +Bootstrap Preparation +##################### + +Now we have the mechanism in place to send a :py:mod:`zlib`-compressed script +to the remote Python interpreter, it is time to choose what to send. 
+ +The script sent is simply the source code for :py:mod:`mitogen.core`, with a +single line suffixed to trigger execution of the +:py:meth:`mitogen.core.ExternalContext.main` function. The encoded arguments +to the main function include some additional details, such as the logging package +level that was active in the parent process, and a random secret key that may +later be used to generate HMAC signatures over the data frames that will be +exchanged after bootstrap. + +After the script source code is prepared, it is passed through +:py:func:`mitogen.master.minimize_source` to strip it of docstrings and +comments, while preserving line numbers. This reduces the compressed payload +by around 20%. + + +Preserving The `mitogen.core` Source +#################################### + +One final trick is implemented in the first stage: after bootstrapping the new +slave, it writes a duplicate copy of the :py:mod:`mitogen.core` source it just +used to bootstrap it back into another pipe connected to the slave. The slave's +module importer cache is initialized with a copy of the source, so that +subsequent bootstraps of slave-of-slaves do not require the source to be +fetched from the master a second time. + + +Signalling Success +################## + +Once the first stage has signalled ``EC0\n``, the master knows it is ready to +receive the compressed bootstrap. After decompressing and writing the bootstrap +source to its parent Python interpreter, the first stage writes the string +``EC1\n`` to ``stdout`` before exiting. The master process waits for this +string before considering bootstrap successful and the child's ``stdio`` ready +to receive messages. + + +ExternalContext.main() +---------------------- + +.. 
automethod:: mitogen.core.ExternalContext.main + + +Generating A Synthetic `mitogen` Package +######################################## + +Since the bootstrap consists of the :py:mod:`mitogen.core` source code, and +this code is loaded by Python by way of its main script (``__main__`` module), +initially the module layout in the slave will be incorrect. + +The first step taken after bootstrap is to rearrange :py:data:`sys.modules` slightly +so that :py:mod:`mitogen.core` appears in the correct location, and all +classes defined in that module have their ``__module__`` attribute fixed up +such that :py:mod:`cPickle` correctly serializes instance module names. + +Once a synthetic :py:mod:`mitogen` package and :py:mod:`mitogen.core` module +have been generated, the bootstrap **deletes** `sys.modules['__main__']`, so +that any attempt to import it (by :py:mod:`cPickle`) will cause the import to +be satisfied by fetching the master's actual ``__main__`` module. This is +necessary to allow master programs to be written as a self-contained Python +script. + + +Reaping The First Stage +####################### + +After the bootstrap has called :py:func:`os.dup` on the copy of the ``stdin`` +file descriptor saved by the first stage, it is closed. + +Additionally, since the first stage was forked prior to re-executing the Python +interpreter, it will exist as a zombie process until the parent process reaps +it. Therefore the bootstrap must call :py:func:`os.wait` soon after startup. + + +Setup Logging +############# + +The slave's :py:mod:`logging` package root logger is configured to have the +same log level as the root logger in the master, and +:py:class:`mitogen.core.LogHandler` is installed to forward logs to the master +context's :py:data:`FORWARD_LOG ` handle. + +The log level is copied into the slave to avoid generating a potentially large +amount of network IO forwarding logs that will simply be filtered away once +they reach the master. 
+ + +The Module Importer +################### + +An instance of :py:class:`mitogen.core.Importer` is installed in +:py:data:`sys.meta_path`, where Python's ``import`` statement will execute it +before attempting to find a module locally. + + +Standard IO Redirection +####################### + +Two instances of :py:class:`mitogen.core.IoLogger` are created, one for +``stdout`` and one for ``stderr``. This class creates a UNIX pipe whose read +end is added to the IO multiplexer, and whose write end is used to overwrite +the handles inherited during process creation. + +Even without IO redirection, something must replace ``stdin`` and ``stdout``, +otherwise it is possible for the stream used for communication between the +master and slave to be accidentally corrupted by subprocesses run by user code. + +The inherited ``stdin`` is replaced by a file descriptor pointing to +``/dev/null``. + +Finally Python's :py:data:`sys.stdout` is reopened to ensure line buffering is +active, so that ``print`` statements and suchlike promptly appear in the logs. + + +Function Call Dispatch +###################### + +After all initialization is complete, the slave's main thread sits in a loop +reading from a :py:class:`Channel ` connected to the +:py:data:`CALL_FUNCTION ` handle. This handle is +written to by +:py:meth:`call_with_deadline() ` +and :py:meth:`call() `. + + +Shutdown +######## + +When the master signals the :py:data:`CALL_FUNCTION +` :py:class:`Channel ` is +closed, the slave calls :py:meth:`shutdown() ` +followed by :py:meth:`wait() ` on its own broker, +triggering graceful shutdown. + +During shutdown, the master will wait a few seconds for slaves to disconnect +gracefully before force disconnecting them, while the slaves will use that time +to call :py:meth:`socket.shutdown(SHUT_WR) ` on their +:py:class:`IoLogger ` socket's write ends before +draining any remaining data buffered on the read ends. 
+ +An alternative approach is to wait until the socket is completely closed, with +some hard timeout, but this necessitates greater discipline than is common in +infrastructure code (how often have you forgotten to redirect stderr to +``/dev/null``?), so needless irritating delays would often be experienced +during program termination. + +If the main thread (responsible for function call dispatch) fails to trigger +shutdown (because some user function is hanging), then the eventual force +disconnection by the master will cause the IO multiplexer thread to enter +shutdown by itself. + + +.. _stream-protocol: + +Stream Protocol +--------------- + +Once connected, a basic framing protocol is used to communicate between +master and slave: + ++--------------------+------+------------------------------------------------------+ +| Field | Size | Description | ++====================+======+======================================================+ +| ``dst_id`` | 2 | Integer target context ID. | ++--------------------+------+------------------------------------------------------+ +| ``src_id`` | 2 | Integer source context ID. | ++--------------------+------+------------------------------------------------------+ +| ``handle`` | 4 | Integer target handle in recipient. | ++--------------------+------+------------------------------------------------------+ +| ``reply_to`` | 4 | Integer response target ID. | ++--------------------+------+------------------------------------------------------+ +| ``length`` | 4 | Message length | ++--------------------+------+------------------------------------------------------+ +| ``data`` | n/a | Pickled message data. | ++--------------------+------+------------------------------------------------------+ + +Masters listen on the following handles: + +.. data:: mitogen.core.FORWARD_LOG + + Receives `(logger_name, level, msg)` 3-tuples and writes them to the + master's ``mitogen.ctx.`` logger. + +.. 
data:: mitogen.core.GET_MODULE + + Receives `(reply_to, fullname)` 2-tuples, looks up the source code for the + module named ``fullname``, and writes the source along with some metadata + back to the handle ``reply_to``. If lookup fails, ``None`` is sent instead. + + +Slaves listen on the following handles: + +.. data:: mitogen.core.CALL_FUNCTION + + Receives `(with_context, mod_name, class_name, func_name, args, kwargs)` + 6-tuples from + :py:meth:`call_with_deadline() `, + imports ``mod_name``, then attempts to execute + `class_name.func_name(\*args, \**kwargs)`. + + When this channel is closed (by way of sending ``_DEAD`` to it), the + slave's main thread begins graceful shutdown of its own `Broker` and + `Router`. Each slave is responsible for sending ``_DEAD`` to each of its + directly connected slaves in response to the master sending ``_DEAD`` to + it, and arranging for the connection to its parent context to be closed + shortly thereafter. + +.. data:: mitogen.core.ADD_ROUTE + + Receives `(target_id, via_id)` integer tuples, describing how messages + arriving at this context on any Stream should be forwarded on the stream + associated with the Context `via_id` such that they are eventually + delivered to the target Context. + + This message is necessary to inform intermediary contexts of the existence + of a downstream Context, as they do not otherwise parse traffic they are + forwarding to their downstream contexts that may cause new contexts to be + established. + + Given a chain `master -> ssh1 -> sudo1`, no `ADD_ROUTE` message is + necessary, since :py:class:`mitogen.core.Router` in the `ssh` context can + arrange to update its routes while setting up the new slave during + `proxy_connect()`. + + However, given a chain like `master -> ssh1 -> sudo1 -> ssh2 -> sudo2`, + `ssh1` requires an `ADD_ROUTE` for `ssh2`, and both `ssh1` and `sudo1` + require an `ADD_ROUTE` for `sudo2`, as neither directly dealt with its + establishment. 
+ + +Slaves that have ever been used to create a descendant child context also +listen on the following handles: + +.. data:: mitogen.core.GET_MODULE + + As with master's ``GET_MODULE``, except this implementation + (:py:class:`mitogen.master.ModuleForwarder`) serves responses using + :py:class:`mitogen.core.Importer`'s cache before forwarding the request to + its parent context. The response is cached by each context in turn before + being forwarded on to the slave context that originally made the request. + In this way, the master need never re-send a module it has already sent to + a direct descendant. + + +Additional handles are created to receive the result of every function call +triggered by :py:meth:`call_with_deadline() `. + + +Sentinel Value +############## + +.. autodata:: mitogen.core._DEAD + +The special value :py:data:`mitogen.core._DEAD` is used to signal +disconnection or closure of the remote end. It is used internally by +:py:class:`Channel ` and also passed to any function +still registered with :py:meth:`add_handler() +` during Broker shutdown. + + +Use of Pickle +############# + +The current implementation uses the Python :py:mod:`cPickle` module, with +mitigations to prevent untrusted slaves from triggering code execution in the +master. The primary reason for using :py:mod:`cPickle` is that it is +computationally efficient, and avoids including a potentially large body of +serialization code in the bootstrap. + +The pickler active in slave contexts will instantiate any class, however in the +master it is initially restricted to only permitting +:py:class:`CallError ` and :py:data:`_DEAD +`. While not recommended, it is possible to register more +using :py:meth:`mitogen.master.LocalStream.allow_class`. + +The choice of Pickle is one area to be revisited later. 
All accounts suggest it +cannot be used securely, however few of those accounts appear to be expert, and +none mention any additional attacks that would not be prevented by using a +restrictive class whitelist. + +.. note:: + + Since unpickling may trigger module loads, it is not possible to + deserialize data on the broker thread, as this will result in recursion + leading to a deadlock. Therefore any internal services (module loader, + logging forwarder, etc.) must rely on simple string formats, or only + perform serialization from within the broker thread. + + +The IO Multiplexer +------------------ + +Since we must include our IO multiplexer as part of the bootstrap, +off-the-shelf implementations are for the most part entirely inappropriate. For +example, a minimal copy of Twisted weighs in at around 440KiB and is composed +of approximately 115 files. Even if we could arrange for an entire Python +package to be transferred during bootstrap, this minimal configuration is +massive in comparison to Mitogen's solution, multiplies quickly in the +presence of many machines, and would require manually splitting up the parts of +Twisted that we would like to use. + + +Message Routing +--------------- + +Routing assumes it is impossible to construct a tree such that one of a +context's parents will not know the ID of a target the context is attempting to +communicate with. + +When :py:class:`mitogen.core.Router` receives a message, it checks the IDs +associated with its directly connected streams for a potential route. If any +stream matches, either because it directly connects to the target ID, or +because the master sent an ``ADD_ROUTE`` message associating it, then the +message will be forwarded down the tree using that stream. + +If the message does not match any ``ADD_ROUTE`` message or stream, instead it +is forwarded upwards to the immediate parent, and recursively by each parent in +turn until one is reached that knows how to forward the message down the tree. 
+ +When the master establishes a new context via an existing child context, it +sends corresponding ``ADD_ROUTE`` messages to each indirect parent between the +context and the root. + + +Example +####### + +.. image:: images/context-tree.png + +In the diagram, when ``master`` is creating the ``sudo:node12b:webapp`` +context, it must send ``ADD_ROUTE`` messages to ``rack12``, ``dc1``, +``bastion``, and itself; ``node12b`` does not require an ``ADD_ROUTE`` message +since it has a stream directly connected to the new context. + +When ``sudo:node22a:webapp`` wants to send a message to +``sudo:node12b:webapp``, the message will be routed as follows: + +``sudo:node22a:webapp -> node22a -> rack22 -> dc2 -> bastion -> dc1 -> rack12 -> node12b -> sudo:node12b:webapp`` + +.. image:: images/route.png + + +Future +###### + +The current routing approach is incomplete, since routes to downstream contexts +are not propagated upwards when a descendant of the master context establishes +a new child context, but that is okay for now, since child contexts cannot +currently allocate new context IDs anyway. + + +Differences Between Master And Slave Brokers +############################################ + +The main difference between :py:class:`mitogen.core.Broker` and +:py:class:`mitogen.master.Broker` is that when the stream connection to the +parent is lost in a slave, the broker will trigger its own shutdown. + + +The Module Importer +------------------- + +:py:class:`mitogen.core.Importer` is still a work in progress, as there +are a variety of approaches to implementing it, and the present implementation +is not perfectly efficient in every case. + +It operates by intercepting ``import`` statements via `sys.meta_path`, asking +Python if it can satisfy the import by itself, and if not, indicating to Python +that it is capable of loading the module. + +In :py:meth:`load_module() ` an RPC is +started to the parent context, requesting the module source code. 
Once the +source is fetched, the method builds a new module object using the best +practice documented in PEP-302. + + +Minimizing Roundtrips +##################### + +In Python 2.x where relative imports are the default, a large number of import +requests will be made for modules that do not exist. For example: + +.. code-block:: python + + # mypkg/__init__.py + + import sys + import os + +In Python 2.x, Python will first try to load ``mypkg.sys`` and ``mypkg.os``, +which do not exist, before falling back on :py:mod:`sys` and :py:mod:`os`. + +These negative imports present a challenge, as they introduce a large number of +pointless network roundtrips. Therefore in addition to the +:py:mod:`zlib`-compressed source, for packages the master sends along a list of +child modules known to exist. + +Before indicating it can satisfy an import request, +:py:class:`mitogen.core.Importer` first checks to see if the module belongs to +a package it has previously imported, and if so, ignores the request if the +module does not appear in the enumeration of child modules belonging to the +package. + + +Child Module Enumeration +######################## + +Package children are enumerated using :py:func:`pkgutil.iter_modules`. + + +Use Of Threads +-------------- + +The package always runs the IO multiplexer in a thread. This is so the +multiplexer retains control flow in order to shut down gracefully, say, if the +user's code has hung and the master context has disconnected. + +While it is possible for the IO multiplexer to recover control of a hung +function call on UNIX using for example :py:mod:`signal.SIGALRM `, this +mechanism is not portable to non-UNIX operating systems, and does not work in +every case, for example when Python blocks signals during a variety of +:py:mod:`threading` package operations. + +At some point it is likely Mitogen will be extended to support starting slaves +running on Windows. 
When that happens, it would be nice if the process model on +Windows and UNIX did not differ, and in fact the code used on both were +identical. diff --git a/docs/images/billing.graphml b/docs/images/billing.graphml new file mode 100644 index 00000000..1830135f --- /dev/null +++ b/docs/images/billing.graphml @@ -0,0 +1,155 @@ + + + + + + + + + + + + + + + + + + + + + + + + master + + + + + + + + + + + + + + + + + + ssh:bastion + + + + + + + + + + + + + + + + + + sudo + + + + + + + + + + + + + + + + + + ssh:billing0 + + + + + + + + + + + + + + + + + + run-nightly-billing.py + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/images/billing.png b/docs/images/billing.png new file mode 100644 index 00000000..a5dbf10d Binary files /dev/null and b/docs/images/billing.png differ diff --git a/docs/images/cell_division.png b/docs/images/cell_division.png new file mode 100644 index 00000000..067d5613 Binary files /dev/null and b/docs/images/cell_division.png differ diff --git a/docs/images/context-tree.graphml b/docs/images/context-tree.graphml new file mode 100644 index 00000000..3ec973e3 --- /dev/null +++ b/docs/images/context-tree.graphml @@ -0,0 +1,497 @@ + + + + + + + + + + + + + + + + + + + + + + + master + + + + + + + + + + + + + + + + + bastion + + + + + + + + + + + + + + + + + dc1 + + + + + + + + + + + + + + + + + dc2 + + + + + + + + + + + + + + + + + rack11 + + + + + + + + + + + + + + + + + rack12 + + + + + + + + + + + + + + + + + node11a + + + + + + + + + + + + + + + + + node11b + + + + + + + + + + + + + + + + + node12a + + + + + + + + + + + + + + + + + node12b + + + + + + + + + + + + + + + + + node21a + + + + + + + + + + + + + + + + + node21b + + + + + + + + + + + + + + + + + node22a + + + + + + + + + + + + + + + + + node22b + + + + + + + + + + + + + + + + + rack21 + + + + + + + + + + + + + + + + + rack22 + + + + + + + + + + + + + + + + + sudo:node12b:webapp + + + + + + + + + + + 
+ + + + + + sudo:node22a:webapp + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/images/context-tree.png b/docs/images/context-tree.png new file mode 100644 index 00000000..eb17ee8b Binary files /dev/null and b/docs/images/context-tree.png differ diff --git a/docs/images/fakessh.graphml b/docs/images/fakessh.graphml new file mode 100644 index 00000000..2eaa4c0d --- /dev/null +++ b/docs/images/fakessh.graphml @@ -0,0 +1,331 @@ + + + + + + + + + + + + + + + + + + + + + + + + bastion + + + + + + + + + + + + + + + + + + ssh.webserver + + + + + + + + + + + + + + + + + + sudo.webapp + + + + + + + + + + + + + + + + + + fakessh + + + + + + + + + + + + + + + + + + rsync + + + + + + + + + + + + + + + + + + rsync --server + + + + + + + + + + + + + + + + + + ssh.fileserver + + + + + + + + + + + + + + + + + + master + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/images/fakessh.png b/docs/images/fakessh.png new file mode 100644 index 00000000..d25b006b Binary files /dev/null and b/docs/images/fakessh.png differ diff --git a/docs/images/route.graphml b/docs/images/route.graphml new file mode 100644 index 00000000..4cf55d1f --- /dev/null +++ b/docs/images/route.graphml @@ -0,0 +1,637 @@ + + + + + + + + + + + + + + + + + + + + + + + + master + + + + + + + + + + + + + + + + + + bastion + + + + + + + + + + + + + + + + + + 
dc1 + + + + + + + + + + + + + + + + + + dc2 + + + + + + + + + + + + + + + + + + rack11 + + + + + + + + + + + + + + + + + + rack12 + + + + + + + + + + + + + + + + + + node11a + + + + + + + + + + + + + + + + + + node11b + + + + + + + + + + + + + + + + + + node12a + + + + + + + + + + + + + + + + + + node12b + + + + + + + + + + + + + + + + + + node21a + + + + + + + + + + + + + + + + + + node21b + + + + + + + + + + + + + + + + + + node22a + + + + + + + + + + + + + + + + + + node22b + + + + + + + + + + + + + + + + + + rack21 + + + + + + + + + + + + + + + + + + rack22 + + + + + + + + + + + + + + + + + + sudo:node12b:webapp + + + + + + + + + + + + + + + + + + sudo:node22a:webapp + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/images/route.png b/docs/images/route.png new file mode 100644 index 00000000..d535f847 Binary files /dev/null and b/docs/images/route.png differ diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 00000000..a1b3c6fd --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,357 @@ + +Mitogen +======= + +Mitogen is a Python library for writing distributed self-replicating programs. + +.. raw:: html + + + +.. warning:: + + This is alpha-quality code. 
If you intend to use it, be aware of how little + real world testing it has received, the total absence of any systematic + tests, and the nightmare-level difficulty of debugging hangs in a tree of + processes running identical code straddling multiple thread and machine + boundaries! ``router.enable_debug()`` is your friend. + + If you think you have a use for this software, please `drop me an e-mail`_ + so that expectations and bug fixes can be managed sensibly. + + .. _drop me an e-mail: dw@botanicus.net + +.. image:: images/cell_division.png + :align: right + +There is no requirement for installing packages, copying files around, writing +shell snippets, upfront configuration, or providing any secondary link to a +remote machine aside from an SSH connection. Due to its origins for use in +managing potentially damaged infrastructure, the **remote machine need not even +have free disk space or a writeable filesystem**. + +It is not intended as a generic RPC framework; the goal is to provide a robust +and efficient low-level API on which tools like `Salt`_, `Ansible`_, or +`Fabric`_ can be built, and while the API is quite friendly and comparable to +`Fabric`_, ultimately it is not intended for direct use by consumer software. + +.. _Salt: https://docs.saltstack.com/en/latest/ +.. _Ansible: http://docs.ansible.com/ +.. _Fabric: http://docs.fabfile.org/en/ + +The focus is to centralize and perfect the intricate dance required to run +Python code safely and efficiently on a remote machine, while **avoiding +temporary files or large chunks of error-prone shell scripts**, and supporting +common privilege escalation techniques like `sudo`, potentially in combination +with exotic connection methods such as WMI, `telnet`, or console-over-IPMI. 
+ + +Automatic Bootstrap +################### + +Mitogen's main feature is enabling your Python program to bootstrap and +communicate with new copies of itself under its control running on remote +machines, **using only an existing installed Python interpreter and SSH +client**, something that by default can be found on almost all contemporary +machines in the wild. To accomplish bootstrap, Mitogen uses a single 600 byte +SSH command line and 6KB of its own source code sent to stdin of the remote SSH +connection. + +.. code:: + + $ python preamble_size.py + SSH command size: 576 + Preamble size: 6360 (6.21KiB) + mitogen.master size: 4104 (4.01KiB) + mitogen.ssh size: 295 (0.29KiB) + mtiogen.sudo size: 1210 (1.18KiB) + +Once bootstrapped, the remote process is configured with a customizable +**argv[0]**, readily visible to system administrators of the remote machine +using the UNIX **ps** command: + +.. code:: + + 20051 ? Ss 0:00 \_ sshd: dmw [priv] + 20053 ? S 0:00 | \_ sshd: dmw@notty + 20054 ? Ssl 0:00 | \_ mitogen:dmw@Eldil.home:22476 + 20103 ? S 0:00 | \_ tar zxvf myapp.tar.gz + +The example context was started by UID ``dmw`` on host ``Eldil.home``, process +ID ``22476``. + + +IO Multiplexer +############## + +The bootstrap includes a compact IO multiplexer (like Twisted or asyncio) that +allows it to perform work in the background while executing your program's +code. For example, the remote context can be used to **connect to a new user on +the remote machine using sudo**, or as an intermediary for extending the +program's domain of control outward to other machines, enabling your program to +**manipulate machines behind a firewall**, or enable its **data plane to cohere +to your network topology**. + +.. image:: images/billing.png + :align: right + +.. 
code:: + + bastion_host = router.ssh( + hostname='jump-box.mycorp.com' + ) + + ssh_account = router.sudo( + via=bastion_host, + username='user_with_magic_ssh_key', + password='sudo password', + ) + + internal_box = router.ssh( + via=ssh_account, + hostname='billing0.internal.mycorp.com' + ) + + internal_box.call(os.system, './run-nightly-billing.py') + +The multiplexer also ensures the remote process is terminated if your Python +program crashes, communication is lost, or the application code running in the +context has hung. + + +Module Forwarder +################ + +In addition to an IO multiplexer, slaves are configured with a custom `PEP-302 +importer`_ that forwards requests for unknown Python modules back to the host +program. When your program asks a context to execute code from an unknown +module, all requisite modules are transferred automatically and imported +entirely in RAM without need for further configuration. + +.. _PEP-302 importer: https://www.python.org/dev/peps/pep-0302/ + +.. code-block:: python + + import myapp.mypkg.mymodule + + # myapp/__init__.py, myapp/mypkg/__init__.py, and myapp/mypkg/mymodule.py + # are transferred automatically. + print context.call(myapp.mymodule.my_function) + +As the forwarder reuses the import mechanism, it should integrate cleanly with +any tool such as `py2exe`_ that correctly implement the protocols in PEP-302, +allowing truly single file applications to run across multiple machines without +further effort. + +.. _py2exe: http://www.py2exe.org/ + + +SSH Client Emulation +#################### + +.. image:: images/fakessh.png + :align: right + +Support is included for starting subprocesses with a modified environment, that +cause their attempt to use SSH to be redirected back into the host program. 
In +this way tools like `rsync`, `sftp`, and `scp` can efficiently reuse the host +program's existing connection to the remote machine, including any +firewall/user account hopping in use, with no additional configuration. + +Scenarios that were not previously possible with these tools are enabled, such +as running `sftp` and `rsync` over a `sudo` session, to an account the user +cannot otherwise directly log into, including in restrictive environments that +for example enforce an interactive TTY and account password. + +.. code-block:: python + + bastion = router.ssh(hostname='bastion.mycorp.com') + webserver = router.ssh(via=bastion, hostname='webserver') + webapp = router.sudo(via=webserver, username='webapp') + fileserver = router.ssh(via=bastion, hostname='fileserver') + + # Transparently tunnelled over fileserver -> .. -> sudo.webapp link + fileserver.call(mitogen.fakessh.run, webapp, [ + 'rsync', 'appdata', 'appserver:appdata' + ]) + + +Inter-slave Message Routing +########################### + +.. image:: images/route.png + +Slaves may communicate autonomously without direct interaction with the master, +allowing a wide variety of complex data and control flows to be expressed using +the links between the processes. + + +Logging Forwarder +################# + +The bootstrap configures the remote process's Python logging package to forward +all logs back to the local process, enabling management of program logs in one +location. + +.. code:: + + 18:15:29 D mitogen.ctx.k3: mitogen: Importer.find_module('mitogen.zlib') + 18:15:29 D mitogen.ctx.k3: mitogen: _dispatch_calls((1002L, False, 'posix', None, 'system', ('ls -l /proc/self/fd',), {})) + + +Stdio Forwarder +############### + +To ease porting of crusty old infrastructure scripts to Python, the bootstrap +redirects stdio for itself and any child processes back into the logging +framework. 
This allows use of functions as basic as **os.system('hostname; +uptime')** without further need to capture or manage output. + +.. code:: + + 18:17:28 D mitogen.ctx.k3: mitogen: _dispatch_calls((1002L, False, 'posix', None, 'system', ('hostname; uptime',), {})) + 18:17:56 I mitogen.ctx.k3: stdout: k3 + 18:17:56 I mitogen.ctx.k3: stdout: 17:37:10 up 562 days, 2:25, 5 users, load average: 1.24, 1.13, 1.14 + + +Blocking Code Friendly +###################### + +Within each process, a private thread runs the I/O multiplexer, leaving the +main thread and any additional application threads free to perform useful work. + +While Mitogen is internally asynchronous, it hides this asynchrony from +consumer code. This is since writing asynchronous code is mostly a foreign +concept to the target application of managing infrastructure. It should be +possible to rewrite a shell script in Python without significant restructuring, +or mind-bending feats of comprehension to understand control flow. + +Before: + +.. code-block:: sh + + #!/bin/bash + # Install our application. + + tar zxvf app.tar.gz + +After: + +.. code-block:: python + + def install_app(): + """ + Install our application. + """ + os.system('tar zxvf app.tar.gz') + + context.call(install_app) + +Or even: + +.. code-block:: python + + context.call(os.system, 'tar zxvf app.tar.gz') + +Exceptions raised by function calls are propagated back to the parent program, +and timeouts can be configured to ensure failed calls do not block progress of +the parent. + + +Support For Single File Programs +################################ + +Programs that are self-contained within a single Python script are supported. +External contexts are configured such that any attempt to execute a function +from the main Python script will correctly cause that script to be imported as +usual into the slave process. + +.. code-block:: python + + #!/usr/bin/env python + """ + Install our application on a remote machine. 
+ + Usage: + install_app.py + + Where: + Hostname to install to. + """ + import os + import sys + + import mitogen + + + def install_app(): + os.system('tar zxvf my_app.tar.gz') + + + def main(broker): + if len(sys.argv) != 2: + print __doc__ + sys.exit(1) + + context = mitogen.ssh.connect(broker, sys.argv[1]) + context.call(install_app) + + if __name__ == '__main__' and mitogen.master: + import mitogen.utils + mitogen.utils.run_with_broker(main) + + +Event-driven IO +############### + +Code running in a remote context can be connected to a *Channel*. Channels are +used to send data asynchronously back to the parent, without further need for +the parent to poll for changes. This is useful for monitoring systems managing +a large fleet of machines, or to alert the parent of unexpected state changes. + +.. code-block:: python + + def tail_log_file(channel, path='/var/log/messages'): + """ + Forward new lines in a log file to the parent. + """ + size = os.path.getsize(path) + + while channel.open(): + new_size = os.path.getsize(path) + if new_size == size: + time.sleep(1) + continue + elif new_size < size: + size = 0 + + fp = file(path, 'r') + fp.seek(size) + channel.send(fp.read(new_size - size)) + fp.close() + size = new_size + + +Compatibility +############# + +The package is written using syntax compatible all the way back to **Python +2.4** released November 2004, making it suitable for managing a fleet of +potentially ancient corporate hardware. For example Mitogen can be used out of +the box against Red Hat Enterprise Linux 5, released in 2007. + +There is currently no support for Python 3, and no solid plan for supporting it +any time soon. Due to constraints on implementation size and desire for +compatibility with ancient Python versions, conventional porting methods such +as ``six.py`` are likely to be unsuitable. 
+ + +Zero Dependencies +################# + +Mitogen is implemented entirely using the standard library functionality and +interfaces that were available in Python 2.4. diff --git a/docs/internals.rst b/docs/internals.rst new file mode 100644 index 00000000..aec3f775 --- /dev/null +++ b/docs/internals.rst @@ -0,0 +1,61 @@ + +Internal API Reference +********************** + + +mitogen.core +============ + + +Side Class +---------- + +.. autoclass:: mitogen.core.Side + :members: + + +Stream Classes +-------------- + +.. autoclass:: mitogen.core.BasicStream + :members: + +.. autoclass:: mitogen.core.Stream + :members: + +.. autoclass:: mitogen.master.Stream + :members: + +.. autoclass:: mitogen.ssh.Stream + :members: + + +Other Stream Subclasses +----------------------- + +.. autoclass:: mitogen.core.IoLogger + :members: + +.. autoclass:: mitogen.core.Waker + :members: + + + +ExternalContext Class +--------------------- + +.. autoclass:: mitogen.core.ExternalContext + + +mitogen.master +=============== + +.. autoclass:: mitogen.master.ProcessMonitor + + +Helper Functions +---------------- + +.. autofunction:: mitogen.master.create_child +.. autofunction:: mitogen.master.get_child_modules +.. autofunction:: mitogen.master.minimize_source diff --git a/docs/toc.rst b/docs/toc.rst new file mode 100644 index 00000000..8b173669 --- /dev/null +++ b/docs/toc.rst @@ -0,0 +1,13 @@ + +Table Of Contents +================= + +.. toctree:: + :maxdepth: 2 + + index + howitworks + examples + getting_started + api + internals diff --git a/examples/ansible_demo.py b/examples/ansible_demo.py new file mode 100644 index 00000000..c1406703 --- /dev/null +++ b/examples/ansible_demo.py @@ -0,0 +1,110 @@ +""" +Minimal demo of running an Ansible module via mitogen. +""" + +import json +import logging +import time + +import mitogen +import mitogen.master +import mitogen.utils + +# Prevent accident import of an Ansible module from hanging on stdin read. 
+import ansible.module_utils.basic +ansible.module_utils.basic._ANSIBLE_ARGS = '{}' + + +class Exit(Exception): + """ + Raised when a module exits with success. + """ + def __init__(self, dct): + self.dct = dct + + +class ModuleError(Exception): + """ + Raised when a module voluntarily indicates failure via .fail_json(). + """ + def __init__(self, msg, dct): + Exception.__init__(self, msg) + self.dct = dct + + +def wtf_exit_json(self, **kwargs): + """ + Replace AnsibleModule.exit_json() with something that doesn't try to + suicide the process or JSON-encode the dictionary. Instead, cause Exit to + be raised, with a `dct` attribute containing the successful result + dictionary. + """ + self.add_path_info(kwargs) + kwargs.setdefault('changed', False) + kwargs.setdefault('invocation', { + 'module_args': self.params + }) + kwargs = ansible.module_utils.basic.remove_values(kwargs, self.no_log_values) + self.do_cleanup_files() + raise Exit(kwargs) + + +def wtf_fail_json(self, **kwargs): + """ + Replace AnsibleModule.fail_json() with something that raises ModuleError, + which includes a `dct` attribute. + """ + self.add_path_info(kwargs) + kwargs.setdefault('failed', True) + kwargs.setdefault('invocation', { + 'module_args': self.params + }) + kwargs = ansible.module_utils.basic.remove_values(kwargs, self.no_log_values) + self.do_cleanup_files() + raise ModuleError(kwargs.get('msg'), kwargs) + + +def run_module(module, raw_params=None, args=None): + """ + Set up the process environment in preparation for running an Ansible + module. The monkey-patches the Ansible libraries in various places to + prevent it from trying to kill the process on completion, and to prevent it + from reading sys.stdin. 
+ """ + if args is None: + args = {} + if raw_params is not None: + args['_raw_params'] = raw_params + + ansible.module_utils.basic.AnsibleModule.exit_json = wtf_exit_json + ansible.module_utils.basic.AnsibleModule.fail_json = wtf_fail_json + ansible.module_utils.basic._ANSIBLE_ARGS = json.dumps({ + 'ANSIBLE_MODULE_ARGS': args + }) + + try: + mod = __import__(module, {}, {}, ['']) + # Ansible modules begin execution on import, because they're crap from + # hell. Thus the above __import__ will cause either Exit or + # ModuleError to be raised. If we reach the line below, the module did + # not execute and must already have been imported for a previous + # invocation, so we need to invoke main explicitly. + mod.main() + except Exit, e: + return e.dct + + +def main(router): + fmt = '%(asctime)s %(levelname).1s %(name)s: %(message)s' + datefmt = '%H:%M:%S' + level = logging.DEBUG + level = logging.INFO + logging.basicConfig(level=level, format=fmt, datefmt=datefmt) + + context = mitogen.master.connect(broker) + print context.call(run_module, 'ansible.modules.core.system.setup') + for x in xrange(10): + print context.call(run_module, 'ansible.modules.core.commands.command', 'hostname') + +if __name__ == '__main__' and not mitogen.slave: + mitogen.utils.run_with_router(main) diff --git a/mitogen/__init__.py b/mitogen/__init__.py new file mode 100644 index 00000000..5324cd90 --- /dev/null +++ b/mitogen/__init__.py @@ -0,0 +1,33 @@ +""" +On the Mitogen master, this is imported from ``mitogen/__init__.py`` as would +be expected. On the slave, it is built dynamically during startup. +""" + +#: This is ``False`` in slave contexts. It is used in single-file Python +#: programs to avoid reexecuting the program's :py:func:`main` function in the +#: slave. For example: +#: +#: .. code-block:: python +#: +#: def do_work(): +#: os.system('hostname') +#: +#: def main(broker): +#: context = mitogen.master.connect(broker) +#: context.call(do_work) # Causes slave to import __main__. 
+#: +#: if __name__ == '__main__' and mitogen.master: +#: import mitogen.utils +#: mitogen.utils.run_with_broker(main) +#: +master = True + + +#: This is ``0`` in a master, otherwise it is a master-generated ID unique to +#: the slave context used for message routing. +context_id = 0 + + +#: This is ``None`` in a master, otherwise it is the master-generated ID unique +#: to the slave's parent context. +parent_id = None diff --git a/mitogen/ansible/__init__.py b/mitogen/ansible/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/mitogen/ansible/connection.py b/mitogen/ansible/connection.py new file mode 100644 index 00000000..4070707f --- /dev/null +++ b/mitogen/ansible/connection.py @@ -0,0 +1,64 @@ +""" +Basic Ansible connection plug-in mostly useful for testing functionality, +due to Ansible's use of the multiprocessing package a lot more work is required +to share the mitogen SSH connection across tasks. + +Enable it by: + + $ cat ansible.cfg + [defaults] + connection_plugins = plugins/connection + + $ mkdir -p plugins/connection + $ cat > plugins/connection/mitogen_conn.py <<-EOF + from mitogen.ansible.connection import Connection + EOF +""" + +import mitogen.master +import mitogen.ssh +import mitogen.utils +from mitogen.ansible import helpers + +import ansible.plugins.connection + + +class Connection(ansible.plugins.connection.ConnectionBase): + broker = None + context = None + + become_methods = [] + transport = 'mitogen' + + @property + def connected(self): + return self.broker is not None + + def _connect(self): + if self.connected: + return + self.broker = mitogen.master.Broker() + if self._play_context.remote_addr == 'localhost': + self.context = mitogen.master.connect(self.broker) + else: + self.context = mitogen.ssh.connect(broker, + self._play_context.remote_addr) + + def exec_command(self, cmd, in_data=None, sudoable=True): + super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable) + if in_data: + raise 
AnsibleError("does not support module pipelining") + + return self.context.call(helpers.exec_command, cmd, in_data) + + def fetch_file(self, in_path, out_path): + output = self.context.call(helpers.read_path, in_path) + helpers.write_path(out_path, output) + + def put_file(self, in_path, out_path): + self.context.call(helpers.write_path, out_path, + helpers.read_path(in_path)) + + def close(self): + self.broker.shutdown() + self.broker.join() diff --git a/mitogen/ansible/helpers.py b/mitogen/ansible/helpers.py new file mode 100644 index 00000000..4b70cde2 --- /dev/null +++ b/mitogen/ansible/helpers.py @@ -0,0 +1,28 @@ +""" +Ansible is so poorly layered that attempting to import anything under +ansible.plugins automatically triggers import of __main__, which causes +remote execution of the ansible command-line tool. :( + +So here we define helpers in some sanely layered package where the entirety of +Ansible won't be imported. +""" + +import subprocess + + +def exec_command(cmd, in_data=None): + proc = subprocess.Popen(cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + stdin=subprocess.PIPE, + shell=True) + stdout, stderr = proc.communicate(in_data) + return proc.returncode, stdout, stderr + + +def read_path(path): + return file(path, 'rb').read() + + +def write_path(path, s): + open(path, 'wb').write(s) diff --git a/mitogen/compat/__init__.py b/mitogen/compat/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/mitogen/compat/pkgutil.py b/mitogen/compat/pkgutil.py new file mode 100644 index 00000000..ce072ec9 --- /dev/null +++ b/mitogen/compat/pkgutil.py @@ -0,0 +1,591 @@ +"""Utilities to support packages.""" + +# NOTE: This module must remain compatible with Python 2.3, as it is shared +# by setuptools for distribution with Python 2.3 and up. 
+ +import os +import sys +import imp +import os.path +from types import ModuleType + +__all__ = [ + 'get_importer', 'iter_importers', 'get_loader', 'find_loader', + 'walk_packages', 'iter_modules', 'get_data', + 'ImpImporter', 'ImpLoader', 'read_code', 'extend_path', +] + +def read_code(stream): + # This helper is needed in order for the PEP 302 emulation to + # correctly handle compiled files + import marshal + + magic = stream.read(4) + if magic != imp.get_magic(): + return None + + stream.read(4) # Skip timestamp + return marshal.load(stream) + + +def simplegeneric(func): + """Make a trivial single-dispatch generic function""" + registry = {} + def wrapper(*args, **kw): + ob = args[0] + try: + cls = ob.__class__ + except AttributeError: + cls = type(ob) + try: + mro = cls.__mro__ + except AttributeError: + try: + class cls(cls, object): + pass + mro = cls.__mro__[1:] + except TypeError: + mro = object, # must be an ExtensionClass or some such :( + for t in mro: + if t in registry: + return registry[t](*args, **kw) + else: + return func(*args, **kw) + try: + wrapper.__name__ = func.__name__ + except (TypeError, AttributeError): + pass # Python 2.3 doesn't allow functions to be renamed + + def register(typ, func=None): + if func is None: + return lambda f: register(typ, f) + registry[typ] = func + return func + + wrapper.__dict__ = func.__dict__ + wrapper.__doc__ = func.__doc__ + wrapper.register = register + return wrapper + + +def walk_packages(path=None, prefix='', onerror=None): + """Yields (module_loader, name, ispkg) for all modules recursively + on path, or, if path is None, all accessible modules. + + 'path' should be either None or a list of paths to look for + modules in. + + 'prefix' is a string to output on the front of every module name + on output. + + Note that this function must import all *packages* (NOT all + modules!) on the given path, in order to access the __path__ + attribute to find submodules. 
+ + 'onerror' is a function which gets called with one argument (the + name of the package which was being imported) if any exception + occurs while trying to import a package. If no onerror function is + supplied, ImportErrors are caught and ignored, while all other + exceptions are propagated, terminating the search. + + Examples: + + # list all modules python can access + walk_packages() + + # list all submodules of ctypes + walk_packages(ctypes.__path__, ctypes.__name__+'.') + """ + + def seen(p, m={}): + if p in m: + return True + m[p] = True + + for importer, name, ispkg in iter_modules(path, prefix): + yield importer, name, ispkg + + if ispkg: + try: + __import__(name) + except ImportError: + if onerror is not None: + onerror(name) + except Exception: + if onerror is not None: + onerror(name) + else: + raise + else: + path = getattr(sys.modules[name], '__path__', None) or [] + + # don't traverse path items we've seen before + path = [p for p in path if not seen(p)] + + for item in walk_packages(path, name+'.', onerror): + yield item + + +def iter_modules(path=None, prefix=''): + """Yields (module_loader, name, ispkg) for all submodules on path, + or, if path is None, all top-level modules on sys.path. + + 'path' should be either None or a list of paths to look for + modules in. + + 'prefix' is a string to output on the front of every module name + on output. 
+ """ + + if path is None: + importers = iter_importers() + else: + importers = map(get_importer, path) + + yielded = {} + for i in importers: + for name, ispkg in iter_importer_modules(i, prefix): + if name not in yielded: + yielded[name] = 1 + yield i, name, ispkg + + +#@simplegeneric +def iter_importer_modules(importer, prefix=''): + if not hasattr(importer, 'iter_modules'): + return [] + return importer.iter_modules(prefix) + +iter_importer_modules = simplegeneric(iter_importer_modules) + + +class ImpImporter: + """PEP 302 Importer that wraps Python's "classic" import algorithm + + ImpImporter(dirname) produces a PEP 302 importer that searches that + directory. ImpImporter(None) produces a PEP 302 importer that searches + the current sys.path, plus any modules that are frozen or built-in. + + Note that ImpImporter does not currently support being used by placement + on sys.meta_path. + """ + + def __init__(self, path=None): + self.path = path + + def find_module(self, fullname, path=None): + # Note: we ignore 'path' argument since it is only used via meta_path + subname = fullname.split(".")[-1] + if subname != fullname and self.path is None: + return None + if self.path is None: + path = None + else: + path = [os.path.realpath(self.path)] + try: + file, filename, etc = imp.find_module(subname, path) + except ImportError: + return None + return ImpLoader(fullname, file, filename, etc) + + def iter_modules(self, prefix=''): + if self.path is None or not os.path.isdir(self.path): + return + + yielded = {} + import inspect + try: + filenames = os.listdir(self.path) + except OSError: + # ignore unreadable directories like import does + filenames = [] + filenames.sort() # handle packages before same-named modules + + for fn in filenames: + modname = inspect.getmodulename(fn) + if modname=='__init__' or modname in yielded: + continue + + path = os.path.join(self.path, fn) + ispkg = False + + if not modname and os.path.isdir(path) and '.' 
not in fn: + modname = fn + try: + dircontents = os.listdir(path) + except OSError: + # ignore unreadable directories like import does + dircontents = [] + for fn in dircontents: + subname = inspect.getmodulename(fn) + if subname=='__init__': + ispkg = True + break + else: + continue # not a package + + if modname and '.' not in modname: + yielded[modname] = 1 + yield prefix + modname, ispkg + + +class ImpLoader: + """PEP 302 Loader that wraps Python's "classic" import algorithm + """ + code = source = None + + def __init__(self, fullname, file, filename, etc): + self.file = file + self.filename = filename + self.fullname = fullname + self.etc = etc + + def load_module(self, fullname): + self._reopen() + try: + mod = imp.load_module(fullname, self.file, self.filename, self.etc) + finally: + if self.file: + self.file.close() + # Note: we don't set __loader__ because we want the module to look + # normal; i.e. this is just a wrapper for standard import machinery + return mod + + def get_data(self, pathname): + return open(pathname, "rb").read() + + def _reopen(self): + if self.file and self.file.closed: + mod_type = self.etc[2] + if mod_type==imp.PY_SOURCE: + self.file = open(self.filename, 'rU') + elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION): + self.file = open(self.filename, 'rb') + + def _fix_name(self, fullname): + if fullname is None: + fullname = self.fullname + elif fullname != self.fullname: + raise ImportError("Loader for module %s cannot handle " + "module %s" % (self.fullname, fullname)) + return fullname + + def is_package(self, fullname): + fullname = self._fix_name(fullname) + return self.etc[2]==imp.PKG_DIRECTORY + + def get_code(self, fullname=None): + fullname = self._fix_name(fullname) + if self.code is None: + mod_type = self.etc[2] + if mod_type==imp.PY_SOURCE: + source = self.get_source(fullname) + self.code = compile(source, self.filename, 'exec') + elif mod_type==imp.PY_COMPILED: + self._reopen() + try: + self.code = read_code(self.file) 
+ finally: + self.file.close() + elif mod_type==imp.PKG_DIRECTORY: + self.code = self._get_delegate().get_code() + return self.code + + def get_source(self, fullname=None): + fullname = self._fix_name(fullname) + if self.source is None: + mod_type = self.etc[2] + if mod_type==imp.PY_SOURCE: + self._reopen() + try: + self.source = self.file.read() + finally: + self.file.close() + elif mod_type==imp.PY_COMPILED: + if os.path.exists(self.filename[:-1]): + f = open(self.filename[:-1], 'rU') + self.source = f.read() + f.close() + elif mod_type==imp.PKG_DIRECTORY: + self.source = self._get_delegate().get_source() + return self.source + + + def _get_delegate(self): + return ImpImporter(self.filename).find_module('__init__') + + def get_filename(self, fullname=None): + fullname = self._fix_name(fullname) + mod_type = self.etc[2] + if self.etc[2]==imp.PKG_DIRECTORY: + return self._get_delegate().get_filename() + elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION): + return self.filename + return None + + +try: + import zipimport + from zipimport import zipimporter + + def iter_zipimport_modules(importer, prefix=''): + dirlist = zipimport._zip_directory_cache[importer.archive].keys() + dirlist.sort() + _prefix = importer.prefix + plen = len(_prefix) + yielded = {} + import inspect + for fn in dirlist: + if not fn.startswith(_prefix): + continue + + fn = fn[plen:].split(os.sep) + + if len(fn)==2 and fn[1].startswith('__init__.py'): + if fn[0] not in yielded: + yielded[fn[0]] = 1 + yield fn[0], True + + if len(fn)!=1: + continue + + modname = inspect.getmodulename(fn[0]) + if modname=='__init__': + continue + + if modname and '.' 
not in modname and modname not in yielded: + yielded[modname] = 1 + yield prefix + modname, False + + iter_importer_modules.register(zipimporter, iter_zipimport_modules) + +except ImportError: + pass + + +def get_importer(path_item): + """Retrieve a PEP 302 importer for the given path item + + The returned importer is cached in sys.path_importer_cache + if it was newly created by a path hook. + + If there is no importer, a wrapper around the basic import + machinery is returned. This wrapper is never inserted into + the importer cache (None is inserted instead). + + The cache (or part of it) can be cleared manually if a + rescan of sys.path_hooks is necessary. + """ + try: + importer = sys.path_importer_cache[path_item] + except KeyError: + for path_hook in sys.path_hooks: + try: + importer = path_hook(path_item) + break + except ImportError: + pass + else: + importer = None + sys.path_importer_cache.setdefault(path_item, importer) + + if importer is None: + try: + importer = ImpImporter(path_item) + except ImportError: + importer = None + return importer + + +def iter_importers(fullname=""): + """Yield PEP 302 importers for the given module name + + If fullname contains a '.', the importers will be for the package + containing fullname, otherwise they will be importers for sys.meta_path, + sys.path, and Python's "classic" import machinery, in that order. If + the named module is in a package, that package is imported as a side + effect of invoking this function. + + Non PEP 302 mechanisms (e.g. the Windows registry) used by the + standard import machinery to find files in alternative locations + are partially supported, but are searched AFTER sys.path. Normally, + these locations are searched BEFORE sys.path, preventing sys.path + entries from shadowing them. + + For this to cause a visible difference in behaviour, there must + be a module or package name that is accessible via both sys.path + and one of the non PEP 302 file system mechanisms. 
In this case, + the emulation will find the former version, while the builtin + import mechanism will find the latter. + + Items of the following types can be affected by this discrepancy: + imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY + """ + if fullname.startswith('.'): + raise ImportError("Relative module names not supported") + if '.' in fullname: + # Get the containing package's __path__ + pkg = '.'.join(fullname.split('.')[:-1]) + if pkg not in sys.modules: + __import__(pkg) + path = getattr(sys.modules[pkg], '__path__', None) or [] + else: + for importer in sys.meta_path: + yield importer + path = sys.path + for item in path: + yield get_importer(item) + if '.' not in fullname: + yield ImpImporter() + +def get_loader(module_or_name): + """Get a PEP 302 "loader" object for module_or_name + + If the module or package is accessible via the normal import + mechanism, a wrapper around the relevant part of that machinery + is returned. Returns None if the module cannot be found or imported. + If the named module is not already imported, its containing package + (if any) is imported, in order to establish the package __path__. + + This function uses iter_importers(), and is thus subject to the same + limitations regarding platform-specific special import locations such + as the Windows registry. + """ + if module_or_name in sys.modules: + module_or_name = sys.modules[module_or_name] + if isinstance(module_or_name, ModuleType): + module = module_or_name + loader = getattr(module, '__loader__', None) + if loader is not None: + return loader + fullname = module.__name__ + else: + fullname = module_or_name + return find_loader(fullname) + +def find_loader(fullname): + """Find a PEP 302 "loader" object for fullname + + If fullname contains dots, path must be the containing package's __path__. + Returns None if the module cannot be found or imported. 
This function uses + iter_importers(), and is thus subject to the same limitations regarding + platform-specific special import locations such as the Windows registry. + """ + for importer in iter_importers(fullname): + loader = importer.find_module(fullname) + if loader is not None: + return loader + + return None + + +def extend_path(path, name): + """Extend a package's path. + + Intended use is to place the following code in a package's __init__.py: + + from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) + + This will add to the package's __path__ all subdirectories of + directories on sys.path named after the package. This is useful + if one wants to distribute different parts of a single logical + package as multiple directories. + + It also looks for *.pkg files beginning where * matches the name + argument. This feature is similar to *.pth files (see site.py), + except that it doesn't special-case lines starting with 'import'. + A *.pkg file is trusted at face value: apart from checking for + duplicates, all entries found in a *.pkg file are added to the + path, regardless of whether they are exist the filesystem. (This + is a feature.) + + If the input path is not a list (as is the case for frozen + packages) it is returned unchanged. The input path is not + modified; an extended copy is returned. Items are only appended + to the copy at the end. + + It is assumed that sys.path is a sequence. Items of sys.path that + are not (unicode or 8-bit) strings referring to existing + directories are ignored. Unicode items of sys.path that cause + errors when used as filenames may cause this function to raise an + exception (in line with os.path.isdir() behavior). + """ + + if not isinstance(path, list): + # This could happen e.g. when this is called from inside a + # frozen package. Return the path unchanged in that case. 
+ return path + + pname = os.path.join(*name.split('.')) # Reconstitute as relative path + # Just in case os.extsep != '.' + sname = os.extsep.join(name.split('.')) + sname_pkg = sname + os.extsep + "pkg" + init_py = "__init__" + os.extsep + "py" + + path = path[:] # Start with a copy of the existing path + + for dir in sys.path: + if not isinstance(dir, basestring) or not os.path.isdir(dir): + continue + subdir = os.path.join(dir, pname) + # XXX This may still add duplicate entries to path on + # case-insensitive filesystems + initfile = os.path.join(subdir, init_py) + if subdir not in path and os.path.isfile(initfile): + path.append(subdir) + # XXX Is this the right thing for subpackages like zope.app? + # It looks for a file named "zope.app.pkg" + pkgfile = os.path.join(dir, sname_pkg) + if os.path.isfile(pkgfile): + try: + f = open(pkgfile) + except IOError, msg: + sys.stderr.write("Can't open %s: %s\n" % + (pkgfile, msg)) + else: + for line in f: + line = line.rstrip('\n') + if not line or line.startswith('#'): + continue + path.append(line) # Don't check for existence! + f.close() + + return path + +def get_data(package, resource): + """Get a resource from a package. + + This is a wrapper round the PEP 302 loader get_data API. The package + argument should be the name of a package, in standard module format + (foo.bar). The resource argument should be in the form of a relative + filename, using '/' as the path separator. The parent directory name '..' + is not allowed, and nor is a rooted name (starting with a '/'). + + The function returns a binary string, which is the contents of the + specified resource. + + For packages located in the filesystem, which have already been imported, + this is the rough equivalent of + + d = os.path.dirname(sys.modules[package].__file__) + data = open(os.path.join(d, resource), 'rb').read() + + If the package cannot be located or loaded, or it uses a PEP 302 loader + which does not support get_data(), then None is returned. 
+ """ + + loader = get_loader(package) + if loader is None or not hasattr(loader, 'get_data'): + return None + mod = sys.modules.get(package) or loader.load_module(package) + if mod is None or not hasattr(mod, '__file__'): + return None + + # Modify the resource name to be compatible with the loader.get_data + # signature - an os.path format "filename" starting with the dirname of + # the package's __file__ + parts = resource.split('/') + parts.insert(0, os.path.dirname(mod.__file__)) + resource_name = os.path.join(*parts) + return loader.get_data(resource_name) diff --git a/mitogen/core.py b/mitogen/core.py new file mode 100644 index 00000000..9db9f773 --- /dev/null +++ b/mitogen/core.py @@ -0,0 +1,1162 @@ +""" +This module implements most package functionality, but remains separate from +non-essential code in order to reduce its size, since it is also serves as the +bootstrap implementation sent to every new slave context. +""" + +import Queue +import cPickle +import cStringIO +import errno +import fcntl +import imp +import itertools +import logging +import os +import random +import select +import socket +import struct +import sys +import threading +import time +import traceback +import zlib + + +LOG = logging.getLogger('mitogen') +IOLOG = logging.getLogger('mitogen.io') +IOLOG.setLevel(logging.INFO) + +GET_MODULE = 100 +CALL_FUNCTION = 101 +FORWARD_LOG = 102 +ADD_ROUTE = 103 + +CHUNK_SIZE = 16384 + + +if __name__ == 'mitogen.core': + # When loaded using import mechanism, ExternalContext.main() will not have + # a chance to set the synthetic mitogen global, so just import it here. + import mitogen +else: + # When loaded as __main__, ensure classes and functions gain a __module__ + # attribute consistent with the host process, so that pickling succeeds. 
+ __name__ = 'mitogen.core' + + +class Error(Exception): + """Base for all exceptions raised by this module.""" + def __init__(self, fmt, *args): + if args: + fmt %= args + Exception.__init__(self, fmt % args) + + +class CallError(Error): + """Raised when :py:meth:`Context.call() ` + fails. A copy of the traceback from the external context is appended to the + exception message. + """ + def __init__(self, e): + s = '' + if not isinstance(e, basestring): + s += '%s.%s: ' % (type(e).__module__, type(e).__name__) + s += str(e) + tb = sys.exc_info()[2] + if tb: + s += '\n' + s += ''.join(traceback.format_tb(tb)) + Error.__init__(self, s) + + +class ChannelError(Error): + """Raised when a channel dies or has been closed.""" + + +class StreamError(Error): + """Raised when a stream cannot be established.""" + + +class TimeoutError(StreamError): + """Raised when a timeout occurs on a stream.""" + + +class Dead(object): + def __eq__(self, other): + return type(other) is Dead + + def __repr__(self): + return '' + + +#: Sentinel value used to represent :py:class:`Channel` disconnection. +_DEAD = Dead() + + +def listen(obj, name, func): + signals = vars(obj).setdefault('_signals', {}) + signals.setdefault(name, []).append(func) + + +def fire(obj, name, *args, **kwargs): + signals = vars(obj).get('_signals', {}) + return [func(*args, **kwargs) for func in signals.get(name, ())] + + +def set_cloexec(fd): + flags = fcntl.fcntl(fd, fcntl.F_GETFD) + fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC) + + +def io_op(func, *args): + """ + When connected over a TTY (i.e. sudo), disconnection of the remote end is + signalled by EIO, rather than an empty read like sockets or pipes. Ideally + this will be replaced later by a 'goodbye' message to avoid reading from a + disconnected endpoint, allowing for more robust error reporting. + + When connected over a socket (e.g. mitogen.master.create_child()), + ECONNRESET may be triggered by any read or write. 
+ """ + try: + return func(*args), False + except OSError, e: + IOLOG.debug('io_op(%r) -> OSError: %s', func, e) + if e.errno not in (errno.EIO, errno.ECONNRESET): + raise + return None, True + + +def enable_debug_logging(): + root = logging.getLogger() + root.setLevel(logging.DEBUG) + IOLOG.setLevel(logging.DEBUG) + fp = open('/tmp/mitogen.%s.log' % (os.getpid(),), 'w', 1) + set_cloexec(fp.fileno()) + handler = logging.StreamHandler(fp) + handler.formatter = logging.Formatter( + '%(asctime)s %(levelname).1s %(name)s: %(message)s', + '%H:%M:%S' + ) + root.handlers.insert(0, handler) + + +class Message(object): + dst_id = None + src_id = None + handle = None + reply_to = None + data = None + + def __init__(self, **kwargs): + self.src_id = mitogen.context_id + vars(self).update(kwargs) + + _find_global = None + + @classmethod + def pickled(cls, obj, **kwargs): + self = cls(**kwargs) + try: + self.data = cPickle.dumps(obj, protocol=2) + except cPickle.PicklingError, e: + self.data = cPickle.dumps(CallError(e), protocol=2) + return self + + def unpickle(self): + """Deserialize `data` into an object.""" + IOLOG.debug('%r.unpickle()', self) + fp = cStringIO.StringIO(self.data) + unpickler = cPickle.Unpickler(fp) + if self._find_global: + unpickler.find_global = self._find_global + try: + return unpickler.load() + except (TypeError, ValueError), ex: + raise StreamError('invalid message: %s', ex) + + def __repr__(self): + return 'Message(%r, %r, %r, %r, %r..)' % ( + self.dst_id, self.src_id, self.handle, self.reply_to, + (self.data or '')[:50] + ) + + +class Sender(object): + def __init__(self, context, dst_handle): + self.context = context + self.dst_handle = dst_handle + + def __repr__(self): + return 'Sender(%r, %r)' % (self.context, self.dst_handle) + + def close(self): + """Indicate this channel is closed to the remote side.""" + IOLOG.debug('%r.close()', self) + self.context.send( + Message.pickled( + _DEAD, + handle=self.dst_handle + ) + ) + + def put(self, data): + 
"""Send `data` to the remote.""" + IOLOG.debug('%r.send(%r)', self, data) + self.context.send( + Message.pickled( + data, + handle=self.dst_handle + ) + ) + + +class Receiver(object): + def __init__(self, router, handle=None): + self.router = router + self.handle = handle # Avoid __repr__ crash in add_handler() + self.handle = router.add_handler(self._on_receive, handle) + self._queue = Queue.Queue() + + def __repr__(self): + return 'Receiver(%r, %r)' % (self.router, self.handle) + + def _on_receive(self, msg): + """Callback from the Stream; appends data to the internal queue.""" + IOLOG.debug('%r._on_receive(%r)', self, msg) + self._queue.put(msg) + + def close(self): + self._queue.put(_DEAD) + + def get(self, timeout=None): + """Receive an object, or ``None`` if `timeout` is reached.""" + IOLOG.debug('%r.on_receive(timeout=%r)', self, timeout) + if timeout: + timeout += time.time() + + msg = None + while msg is None and (timeout is None or timeout < time.time()): + try: + msg = self._queue.get(True, 0.5) + except Queue.Empty: + continue + + if msg is None: + return + + IOLOG.debug('%r.on_receive() got %r', self, msg) + if msg == _DEAD: + raise ChannelError('Channel closed by local end.') + + # Must occur off the broker thread. + data = msg.unpickle() + if data == _DEAD: + raise ChannelError('Channel closed by remote end.') + + if isinstance(data, CallError): + raise data + + return msg, data + + def __iter__(self): + """Yield objects from this channel until it is closed.""" + while True: + try: + yield self.get() + except ChannelError: + return + + +class Channel(Sender, Receiver): + def __init__(self, router, context, dst_id, handle=None): + Sender.__init_(self, context, dst_id) + Receiver.__init__(self, router, handle) + + def __repr__(self): + return 'Channel(%s, %s)' % ( + Sender.__repr__(self), + Receiver.__repr__(self) + ) + + +class Importer(object): + """ + Import protocol implementation that fetches modules from the parent + process. 
+ + :param context: Context to communicate via. + """ + def __init__(self, context, core_src): + self._context = context + self._present = {'mitogen': [ + 'mitogen.ansible', + 'mitogen.compat', + 'mitogen.compat.pkgutil', + 'mitogen.fakessh', + 'mitogen.master', + 'mitogen.ssh', + 'mitogen.sudo', + 'mitogen.utils', + ]} + self.tls = threading.local() + self._cache = {} + if core_src: + self._cache['mitogen.core'] = ( + None, + 'mitogen/core.py', + zlib.compress(core_src), + ) + + def __repr__(self): + return 'Importer()' + + def find_module(self, fullname, path=None): + if hasattr(self.tls, 'running'): + return None + + self.tls.running = True + fullname = fullname.rstrip('.') + try: + pkgname, _, _ = fullname.rpartition('.') + LOG.debug('%r.find_module(%r)', self, fullname) + if fullname not in self._present.get(pkgname, (fullname,)): + LOG.debug('%r: master doesn\'t know %r', self, fullname) + return None + + pkg = sys.modules.get(pkgname) + if pkg and getattr(pkg, '__loader__', None) is not self: + LOG.debug('%r: %r is submodule of a package we did not load', + self, fullname) + return None + + try: + __import__(fullname, {}, {}, ['']) + LOG.debug('%r: %r is available locally', self, fullname) + except ImportError: + LOG.debug('find_module(%r) returning self', fullname) + return self + finally: + del self.tls.running + + def load_module(self, fullname): + LOG.debug('Importer.load_module(%r)', fullname) + try: + ret = self._cache[fullname] + except KeyError: + self._cache[fullname] = ret = ( + self._context.send_await( + Message(data=fullname, handle=GET_MODULE) + ).unpickle() + ) + + if ret is None: + raise ImportError('Master does not have %r' % (fullname,)) + + pkg_present = ret[0] + mod = sys.modules.setdefault(fullname, imp.new_module(fullname)) + mod.__file__ = self.get_filename(fullname) + mod.__loader__ = self + if pkg_present is not None: # it's a package. 
+ mod.__path__ = [] + mod.__package__ = fullname + self._present[fullname] = pkg_present + else: + mod.__package__ = fullname.rpartition('.')[0] or None + code = compile(self.get_source(fullname), mod.__file__, 'exec') + exec code in vars(mod) + return mod + + def get_filename(self, fullname): + if fullname in self._cache: + return 'master:' + self._cache[fullname][1] + + def get_source(self, fullname): + if fullname in self._cache: + return zlib.decompress(self._cache[fullname][2]) + + +class LogHandler(logging.Handler): + def __init__(self, context): + logging.Handler.__init__(self) + self.context = context + self.local = threading.local() + + def emit(self, rec): + if rec.name == 'mitogen.io' or \ + getattr(self.local, 'in_emit', False): + return + + self.local.in_emit = True + try: + msg = self.format(rec) + encoded = '%s\x00%s\x00%s' % (rec.name, rec.levelno, msg) + self.context.send(Message(data=encoded, handle=FORWARD_LOG)) + finally: + self.local.in_emit = False + + +class Side(object): + """ + Represent a single side of a :py:class:`BasicStream`. This exists to allow + streams implemented using unidirectional (e.g. UNIX pipe) and bidirectional + (e.g. UNIX socket) file descriptors to operate identically. + """ + def __init__(self, stream, fd, keep_alive=True): + #: The :py:class:`Stream` for which this is a read or write side. + self.stream = stream + #: Integer file descriptor to perform IO on. + self.fd = fd + #: If ``True``, causes presence of this side in :py:class:`Broker`'s + #: active reader set to defer shutdown until the side is disconnected. + self.keep_alive = keep_alive + + def __repr__(self): + return '' % (self.stream, self.fd) + + def fileno(self): + """Return :py:attr:`fd` if it is not ``None``, otherwise raise + ``StreamError``. 
This method is implemented so that :py:class:`Side` + can be used directly by :py:func:`select.select`.""" + if self.fd is None: + raise StreamError('%r.fileno() called but no FD set', self) + return self.fd + + def close(self): + """Call :py:func:`os.close` on :py:attr:`fd` if it is not ``None``, + then set it to ``None``.""" + if self.fd is not None: + IOLOG.debug('%r.close()', self) + os.close(self.fd) + self.fd = None + + def read(self, n=CHUNK_SIZE): + s, disconnected = io_op(os.read, self.fd, n) + if disconnected: + return '' + return s + + def write(self, s): + written, disconnected = io_op(os.write, self.fd, s[:CHUNK_SIZE]) + if disconnected: + return None + return written + + +class BasicStream(object): + """ + + .. method:: on_disconnect (broker) + + Called by :py:class:`Broker` to force disconnect the stream. The base + implementation simply closes :py:attr:`receive_side` and + :py:attr:`transmit_side` and unregisters the stream from the broker. + + .. method:: on_receive (broker) + + Called by :py:class:`Broker` when the stream's :py:attr:`receive_side` has + been marked readable using :py:meth:`Broker.start_receive` and the + broker has detected the associated file descriptor is ready for + reading. + + Subclasses must implement this method if + :py:meth:`Broker.start_receive` is ever called on them, and the method + must call :py:meth:`on_disconect` if reading produces an empty string. + + .. method:: on_transmit (broker) + + Called by :py:class:`Broker` when the stream's :py:attr:`transmit_side` + has been marked writeable using :py:meth:`Broker.start_transmit` and + the broker has detected the associated file descriptor is ready for + writing. + + Subclasses must implement this method if + :py:meth:`Broker.start_transmit` is ever called on them. + + .. method:: on_shutdown (broker) + + Called by :py:meth:`Broker.shutdown` to allow the stream time to + gracefully shutdown. The base implementation simply called + :py:meth:`on_disconnect`. 
+ + """ + #: A :py:class:`Side` representing the stream's receive file descriptor. + receive_side = None + + #: A :py:class:`Side` representing the stream's transmit file descriptor. + transmit_side = None + + def on_disconnect(self, broker): + LOG.debug('%r.on_disconnect()', self) + broker.stop_receive(self) + broker.stop_transmit(self) + self.receive_side.close() + self.transmit_side.close() + fire(self, 'disconnect') + + def on_shutdown(self, broker): + LOG.debug('%r.on_shutdown()', self) + fire(self, 'shutdown') + self.on_disconnect(broker) + + +class Stream(BasicStream): + """ + :py:class:`BasicStream` subclass implementing mitogen's :ref:`stream + protocol `. + """ + _input_buf = '' + _output_buf = '' + message_class = Message + + def __init__(self, router, remote_id, key, **kwargs): + self._router = router + self.remote_id = remote_id + self.key = key + self.name = 'default' + self.construct(**kwargs) + + def construct(self): + pass + + def on_receive(self, broker): + """Handle the next complete message on the stream. 
Raise + :py:class:`StreamError` on failure.""" + IOLOG.debug('%r.on_receive()', self) + + buf = self.receive_side.read() + if buf is None: + buf = '' + + self._input_buf += buf + while self._receive_one(broker): + pass + + if not buf: + return self.on_disconnect(broker) + + HEADER_FMT = '>hhLLL' + HEADER_LEN = struct.calcsize(HEADER_FMT) + + def _receive_one(self, broker): + if len(self._input_buf) < self.HEADER_LEN: + return False + + msg = Message() + (msg.dst_id, msg.src_id, + msg.handle, msg.reply_to, msg_len) = struct.unpack( + self.HEADER_FMT, + self._input_buf[:self.HEADER_LEN] + ) + + if (len(self._input_buf) - self.HEADER_LEN) < msg_len: + IOLOG.debug('%r: Input too short (want %d, got %d)', + self, msg_len, len(self._input_buf) - self.HEADER_LEN) + return False + + msg.data = self._input_buf[self.HEADER_LEN:self.HEADER_LEN+msg_len] + self._input_buf = self._input_buf[self.HEADER_LEN+msg_len:] + self._router._async_route(msg) + return True + + def on_transmit(self, broker): + """Transmit buffered messages.""" + IOLOG.debug('%r.on_transmit()', self) + written = self.transmit_side.write(self._output_buf) + if written is None: + LOG.debug('%r.on_transmit(): disconnection detected', self) + self.on_disconnect() + return + + IOLOG.debug('%r.on_transmit() -> len %d', self, written) + self._output_buf = self._output_buf[written:] + if not self._output_buf: + broker.stop_transmit(self) + + def send(self, msg): + """Send `data` to `handle`, and tell the broker we have output. 
May + be called from any thread.""" + IOLOG.debug('%r._send(%r)', self, msg) + pkt = struct.pack('>hhLLL', msg.dst_id, msg.src_id, + msg.handle, msg.reply_to or 0, len(msg.data) + ) + msg.data + self._output_buf += pkt + self._router.broker.start_transmit(self) + + def on_disconnect(self, broker): + super(Stream, self).on_disconnect(broker) + self._router.on_disconnect(self, broker) + + def on_shutdown(self, broker): + """Override BasicStream behaviour of immediately disconnecting.""" + LOG.debug('%r.on_shutdown(%r)', self, broker) + + def accept(self, rfd, wfd): + self.receive_side = Side(self, os.dup(rfd)) + self.transmit_side = Side(self, os.dup(wfd)) + set_cloexec(self.receive_side.fd) + set_cloexec(self.transmit_side.fd) + + def __repr__(self): + cls = type(self) + return '%s.%s(%r)' % (cls.__module__, cls.__name__, self.name) + + +class Context(object): + """ + Represent a remote context regardless of connection method. + """ + remote_name = None + + def __init__(self, router, context_id, name=None, key=None): + self.router = router + self.context_id = context_id + self.name = name + self.key = key or ('%016x' % random.getrandbits(128)) + + def on_disconnect(self, broker): + LOG.debug('Parent stream is gone, dying.') + fire(self, 'disconnect') + broker.shutdown() + + def on_shutdown(self, broker): + pass + + def send(self, msg): + """send `obj` to `handle`, and tell the broker we have output. 
May + be called from any thread.""" + msg.dst_id = self.context_id + if msg.src_id is None: + msg.src_id = mitogen.context_id + self.router.route(msg) + + def send_await(self, msg, deadline=None): + """Send `msg` and wait for a response with an optional timeout.""" + if self.router.broker._thread == threading.currentThread(): # TODO + raise SystemError('Cannot making blocking call on broker thread') + + queue = Queue.Queue() + msg.reply_to = self.router.add_handler(queue.put, + persist=False, + respondent=self) + LOG.debug('%r.send_await(%r)', self, msg) + + self.send(msg) + try: + msg = queue.get(True, deadline) + except Queue.Empty: + # self.broker.defer(self.stream.on_disconnect, self.broker) + raise TimeoutError('deadline exceeded.') + + if msg == _DEAD: + raise StreamError('lost connection during call.') + + IOLOG.debug('%r._send_await() -> %r', self, msg) + return msg + + def __repr__(self): + return 'Context(%s, %r)' % (self.context_id, self.name) + + +class Waker(BasicStream): + """ + :py:class:`BasicStream` subclass implementing the + `UNIX self-pipe trick`_. Used internally to wake the IO multiplexer when + some of its state has been changed by another thread. + + .. _UNIX self-pipe trick: https://cr.yp.to/docs/selfpipe.html + """ + def __init__(self, broker): + self._broker = broker + rfd, wfd = os.pipe() + set_cloexec(rfd) + set_cloexec(wfd) + self.receive_side = Side(self, rfd) + self.transmit_side = Side(self, wfd) + + def __repr__(self): + return 'Waker(%r)' % (self._broker,) + + def wake(self): + """ + Write a byte to the self-pipe, causing the IO multiplexer to wake up. + Nothing is written if the current thread is the IO multiplexer thread. + """ + if threading.currentThread() != self._broker._thread and \ + self.transmit_side.fd: + os.write(self.transmit_side.fd, ' ') + + def on_receive(self, broker): + """ + Read a byte from the self-pipe. 
+ """ + os.read(self.receive_side.fd, 256) + + +class IoLogger(BasicStream): + """ + :py:class:`BasicStream` subclass that sets up redirection of a standard + UNIX file descriptor back into the Python :py:mod:`logging` package. + """ + _buf = '' + + def __init__(self, broker, name, dest_fd): + self._broker = broker + self._name = name + self._log = logging.getLogger(name) + + self._rsock, self._wsock = socket.socketpair() + os.dup2(self._wsock.fileno(), dest_fd) + set_cloexec(self._rsock.fileno()) + set_cloexec(self._wsock.fileno()) + + self.receive_side = Side(self, self._rsock.fileno()) + self.transmit_side = Side(self, dest_fd) + self._broker.start_receive(self) + + def __repr__(self): + return '' % (self._name,) + + def _log_lines(self): + while self._buf.find('\n') != -1: + line, _, self._buf = self._buf.partition('\n') + self._log.info('%s', line.rstrip('\n')) + + def on_shutdown(self, broker): + """Shut down the write end of the logging socket.""" + LOG.debug('%r.on_shutdown()', self) + self._wsock.shutdown(socket.SHUT_WR) + self._wsock.close() + self.transmit_side.close() + + def on_receive(self, broker): + IOLOG.debug('%r.on_receive()', self) + buf = os.read(self.receive_side.fd, CHUNK_SIZE) + if not buf: + return self.on_disconnect(broker) + + self._buf += buf + self._log_lines() + + +class Router(object): + """ + Route messages between parent and child contexts, and invoke handlers + defined on our parent context. Router.route() straddles the Broker and user + threads, it is save to call from anywhere. + """ + def __init__(self, broker): + self.broker = broker + listen(broker, 'shutdown', self.on_broker_shutdown) + + #: context ID -> Stream + self._stream_by_id = {} + #: List of contexts to notify of shutdown. 
+ self._context_by_id = {} + self._last_handle = itertools.count(1000) + #: handle -> (persistent?, func(msg)) + self._handle_map = { + ADD_ROUTE: (True, self._on_add_route) + } + + def __repr__(self): + return 'Router(%r)' % (self.broker,) + + def on_disconnect(self, stream, broker): + """Invoked by Stream.on_disconnect().""" + for context in self._context_by_id.itervalues(): + stream_ = self._stream_by_id.get(context.context_id) + if stream_ is stream: + del self._stream_by_id[context.context_id] + context.on_disconnect(broker) + + def on_broker_shutdown(self): + for context in self._context_by_id.itervalues(): + context.on_shutdown(self.broker) + + def add_route(self, target_id, via_id): + LOG.debug('%r.add_route(%r, %r)', self, target_id, via_id) + try: + self._stream_by_id[target_id] = self._stream_by_id[via_id] + except KeyError: + LOG.error('%r: cant add route to %r via %r: no such stream', + self, target_id, via_id) + + def _on_add_route(self, msg): + if msg != _DEAD: + target_id, via_id = map(int, msg.data.split('\x00')) + self.add_route(target_id, via_id) + + def register(self, context, stream): + LOG.debug('register(%r, %r)', context, stream) + self._stream_by_id[context.context_id] = stream + self._context_by_id[context.context_id] = context + self.broker.start_receive(stream) + + def add_handler(self, fn, handle=None, persist=True, respondent=None): + """Invoke `fn(msg)` for each Message sent to `handle` from this + context. Unregister after one invocation if `persist` is ``False``. 
If + `handle` is ``None``, a new handle is allocated and returned.""" + handle = handle or self._last_handle.next() + IOLOG.debug('%r.add_handler(%r, %r, %r)', self, fn, handle, persist) + self._handle_map[handle] = persist, fn + + if respondent: + def on_disconnect(): + if handle in self._handle_map: + fn(_DEAD) + del self._handle_map[handle] + listen(respondent, 'disconnect', on_disconnect) + + return handle + + def on_shutdown(self, broker): + """Called during :py:meth:`Broker.shutdown`, informs callbacks + registered with :py:meth:`add_handle_cb` the connection is dead.""" + LOG.debug('%r.on_shutdown(%r)', self, broker) + fire(self, 'shutdown') + for handle, (persist, fn) in self._handle_map.iteritems(): + LOG.debug('%r.on_shutdown(): killing %r: %r', self, handle, fn) + fn(_DEAD) + + def _invoke(self, msg): + #IOLOG.debug('%r._invoke(%r)', self, msg) + try: + persist, fn = self._handle_map[msg.handle] + except KeyError: + LOG.error('%r: invalid handle: %r', self, msg) + return + + if not persist: + del self._handle_map[msg.handle] + + try: + fn(msg) + except Exception: + LOG.exception('%r._invoke(%r): %r crashed', self, msg, fn) + + def _async_route(self, msg): + IOLOG.debug('%r._async_route(%r)', self, msg) + if msg.dst_id == mitogen.context_id: + return self._invoke(msg) + + stream = self._stream_by_id.get(msg.dst_id) + if stream is None: + stream = self._stream_by_id.get(mitogen.parent_id) + + if stream is None: + LOG.error('%r: no route for %r, my ID is %r', + self, msg, mitogen.context_id) + return + + stream.send(msg) + + def route(self, msg): + """ + Arrange for the :py:class:`Message` `msg` to be delivered to its + destination using any relevant downstream context, or if none is found, + by forwarding the message upstream towards the master context. If `msg` + is destined for the local context, it is dispatched using the handles + registered with :py:meth:`add_handler`. 
+ """ + self.broker.defer(self._async_route, msg) + + +class Broker(object): + """ + Responsible for tracking contexts, their associated streams and I/O + multiplexing. + """ + _waker = None + _thread = None + + #: Seconds grace to allow :py:class:`Streams ` to shutdown + #: gracefully before force-disconnecting them during :py:meth:`shutdown`. + shutdown_timeout = 3.0 + + def __init__(self): + self.on_shutdown = [] + self._alive = True + self._queue = Queue.Queue() + self._readers = set() + self._writers = set() + self._waker = Waker(self) + self.start_receive(self._waker) + self._thread = threading.Thread(target=self._broker_main, + name='mitogen-broker') + self._thread.start() + + def defer(self, func, *args, **kwargs): + if threading.currentThread() == self._thread: + func(*args, **kwargs) + else: + self._queue.put((func, args, kwargs)) + self._waker.wake() + + def start_receive(self, stream): + """Mark the :py:attr:`receive_side ` on `stream` as + ready for reading. May be called from any thread. 
When the associated + file descriptor becomes ready for reading, + :py:meth:`BasicStream.on_transmit` will be called.""" + IOLOG.debug('%r.start_receive(%r)', self, stream) + assert stream.receive_side and stream.receive_side.fd is not None + self.defer(self._readers.add, stream.receive_side) + + def stop_receive(self, stream): + IOLOG.debug('%r.stop_receive(%r)', self, stream) + self.defer(self._readers.discard, stream.receive_side) + + def start_transmit(self, stream): + IOLOG.debug('%r.start_transmit(%r)', self, stream) + assert stream.transmit_side and stream.transmit_side.fd is not None + self.defer(self._writers.add, stream.transmit_side) + + def stop_transmit(self, stream): + IOLOG.debug('%r.stop_transmit(%r)', self, stream) + self.defer(self._writers.discard, stream.transmit_side) + + def _call(self, stream, func): + try: + func(self) + except Exception: + LOG.exception('%r crashed', stream) + stream.on_disconnect(self) + + def _run_defer(self): + while not self._queue.empty(): + func, args, kwargs = self._queue.get() + try: + func(*args, **kwargs) + except Exception: + LOG.exception('defer() crashed: %r(*%r, **%r)', + func, args, kwargs) + self.shutdown() + + def _loop_once(self, timeout=None): + IOLOG.debug('%r._loop_once(%r)', self, timeout) + self._run_defer() + + #IOLOG.debug('readers = %r', self._readers) + #IOLOG.debug('writers = %r', self._writers) + rsides, wsides, _ = select.select(self._readers, self._writers, + (), timeout) + for side in rsides: + IOLOG.debug('%r: POLLIN for %r', self, side) + self._call(side.stream, side.stream.on_receive) + + for side in wsides: + IOLOG.debug('%r: POLLOUT for %r', self, side) + self._call(side.stream, side.stream.on_transmit) + + def keep_alive(self): + """Return ``True`` if any reader's :py:attr:`Side.keep_alive` + attribute is ``True``, or any :py:class:`Context` is still registered + that is not the master. Used to delay shutdown while some important + work is in progress (e.g. 
log draining).""" + return sum((side.keep_alive for side in self._readers), 0) + + def _broker_main(self): + """Handle events until :py:meth:`shutdown`. On shutdown, invoke + :py:meth:`Stream.on_shutdown` for every active stream, then allow up to + :py:attr:`shutdown_timeout` seconds for the streams to unregister + themselves before forcefully calling + :py:meth:`Stream.on_disconnect`.""" + try: + while self._alive: + self._loop_once() + + fire(self, 'shutdown') + + for side in self._readers | self._writers: + self._call(side.stream, side.stream.on_shutdown) + + deadline = time.time() + self.shutdown_timeout + while self.keep_alive() and time.time() < deadline: + self._loop_once(max(0, deadline - time.time())) + + if self.keep_alive(): + LOG.error('%r: some streams did not close gracefully. ' + 'The most likely cause for this is one or ' + 'more child processes still connected to ' + 'our stdout/stderr pipes.', self) + + for side in self._readers | self._writers: + LOG.error('_broker_main() force disconnecting %r', side) + side.stream.on_disconnect(self) + except Exception: + LOG.exception('_broker_main() crashed') + + def shutdown(self): + """Request broker gracefully disconnect streams and stop.""" + LOG.debug('%r.shutdown()', self) + self._alive = False + self._waker.wake() + + def join(self): + """Wait for the broker to stop, expected to be called after + :py:meth:`shutdown`.""" + self._thread.join() + + def __repr__(self): + return 'Broker()' + + +class ExternalContext(object): + """ + External context implementation. + + .. attribute:: broker + + The :py:class:`mitogen.core.Broker` instance. + + .. attribute:: context + + The :py:class:`mitogen.core.Context` instance. + + .. attribute:: channel + + The :py:class:`mitogen.core.Channel` over which + :py:data:`CALL_FUNCTION` requests are received. + + .. attribute:: stdout_log + + The :py:class:`mitogen.core.IoLogger` connected to ``stdout``. + + .. 
attribute:: importer + + The :py:class:`mitogen.core.Importer` instance. + + .. attribute:: stdout_log + + The :py:class:`IoLogger` connected to ``stdout``. + + .. attribute:: stderr_log + + The :py:class:`IoLogger` connected to ``stderr``. + """ + def _on_broker_shutdown(self): + self.channel.close() + + def _setup_master(self, parent_id, context_id, key, in_fd, out_fd): + self.broker = Broker() + self.router = Router(self.broker) + self.master = Context(self.router, 0, 'master') + if parent_id == 0: + self.parent = self.master + else: + self.parent = Context(self.router, parent_id, 'parent') + + self.channel = Receiver(self.router, CALL_FUNCTION) + self.stream = Stream(self.router, parent_id, key) + self.stream.name = 'parent' + self.stream.accept(in_fd, out_fd) + self.stream.receive_side.keep_alive = False + + listen(self.broker, 'shutdown', self._on_broker_shutdown) + + os.close(in_fd) + try: + os.wait() # Reap first stage. + except OSError: + pass # No first stage exists (e.g. fakessh) + + def _setup_logging(self, debug, log_level): + root = logging.getLogger() + root.setLevel(log_level) + root.handlers = [LogHandler(self.master)] + if debug: + enable_debug_logging() + + def _setup_importer(self, core_src_fd): + if core_src_fd: + with os.fdopen(101, 'r', 1) as fp: + core_size = int(fp.readline()) + core_src = fp.read(core_size) + # Strip "ExternalContext.main()" call from last line. 
+ core_src = '\n'.join(core_src.splitlines()[:-1]) + fp.close() + else: + core_src = None + + self.importer = Importer(self.parent, core_src) + sys.meta_path.append(self.importer) + + def _setup_package(self, context_id, parent_id): + global mitogen + mitogen = imp.new_module('mitogen') + mitogen.__package__ = 'mitogen' + mitogen.__path__ = [] + mitogen.__loader__ = self.importer + mitogen.master = False + mitogen.context_id = context_id + mitogen.parent_id = parent_id + mitogen.core = sys.modules['__main__'] + mitogen.core.__file__ = 'x/mitogen/core.py' # For inspect.getsource() + mitogen.core.__loader__ = self.importer + sys.modules['mitogen'] = mitogen + sys.modules['mitogen.core'] = mitogen.core + del sys.modules['__main__'] + + def _setup_stdio(self): + self.stdout_log = IoLogger(self.broker, 'stdout', 1) + self.stderr_log = IoLogger(self.broker, 'stderr', 2) + # Reopen with line buffering. + sys.stdout = os.fdopen(1, 'w', 1) + + fp = file('/dev/null') + try: + os.dup2(fp.fileno(), 0) + finally: + fp.close() + + def _dispatch_calls(self): + for msg, data in self.channel: + LOG.debug('_dispatch_calls(%r)', data) + with_context, modname, klass, func, args, kwargs = data + if with_context: + args = (self,) + args + + try: + obj = __import__(modname, {}, {}, ['']) + if klass: + obj = getattr(obj, klass) + fn = getattr(obj, func) + ret = fn(*args, **kwargs) + self.router.route( + Message.pickled(ret, dst_id=msg.src_id, handle=msg.reply_to) + ) + except Exception, e: + LOG.debug('_dispatch_calls: %s', e) + e = CallError(e) + self.router.route( + Message.pickled(e, dst_id=msg.src_id, handle=msg.reply_to) + ) + + def main(self, parent_id, context_id, key, debug, log_level, + in_fd=100, out_fd=1, core_src_fd=101, setup_stdio=True): + self._setup_master(parent_id, context_id, key, in_fd, out_fd) + try: + try: + self._setup_logging(debug, log_level) + self._setup_importer(core_src_fd) + self._setup_package(context_id, parent_id) + if setup_stdio: + self._setup_stdio() + 
+ self.router.register(self.parent, self.stream) + + sys.executable, = eval(os.environ.pop('ARGV0')) + LOG.debug('Connected to %s; my ID is %r, PID is %r', + self.parent, context_id, os.getpid()) + LOG.debug('Recovered sys.executable: %r', sys.executable) + + self._dispatch_calls() + LOG.debug('ExternalContext.main() normal exit') + except BaseException: + LOG.exception('ExternalContext.main() crashed') + raise + finally: + self.broker.shutdown() + self.broker.join() diff --git a/mitogen/fakessh.py b/mitogen/fakessh.py new file mode 100644 index 00000000..d313477d --- /dev/null +++ b/mitogen/fakessh.py @@ -0,0 +1,377 @@ +""" +fakessh is a stream implementation that starts a local subprocess with its +environment modified such that ``PATH`` searches for `ssh` return an mitogen +implementation of the SSH command. When invoked, this tool arranges for the +command line supplied by the calling program to be executed in a context +already established by the master process, reusing the master's (possibly +proxied) connection to that context. + +This allows tools like `rsync` and `scp` to transparently reuse the connections +and tunnels already established by the host program to connect to a target +machine, without wasteful redundant SSH connection setup, 3-way handshakes, +or firewall hopping configurations, and enables these tools to be used in +impossible scenarios, such as over `sudo` with ``requiretty`` enabled. + +The fake `ssh` command source is written to a temporary file on disk, and +consists of a copy of the :py:mod:`mitogen.core` source code (just like any +other child context), with a line appended to cause it to connect back to the +host process over an FD it inherits. As there is no reliance on an existing +filesystem file, it is possible for child contexts to use fakessh. 
As a consequence of connecting back through an inherited FD, only one SSH
invocation is possible, which is fine for tools like `rsync`; however, in
future this restriction will be lifted.

Sequence:

    1. ``fakessh`` Context and Stream created by parent context. The stream's
       buffer has a `_fakessh_main()` ``CALL_FUNCTION`` enqueued.
    2. Target program (`rsync/scp/sftp`) invoked, which internally executes
       `ssh` from ``PATH``.
    3. :py:mod:`mitogen.core` bootstrap begins, recovers the stream FD
       inherited via the target program, establishes itself as the fakessh
       context.
    4. `_fakessh_main()` ``CALL_FUNCTION`` is read by fakessh context,
        a. sets up :py:class:`mitogen.fakessh.IoPump` for stdio, registers
           stdin_handle for local context.
        b. Enqueues ``CALL_FUNCTION`` for `_start_slave()` invoked in target context,
            i. the program from the `ssh` command line is started
            ii. sets up :py:class:`mitogen.fakessh.IoPump` for `ssh` command
                line process's stdio pipes
            iii. returns `(control_handle, stdin_handle)` to `_fakessh_main()`
    5. `_fakessh_main()` receives control/stdin handles from `_start_slave()`,
        a. registers remote's stdin_handle with local IoPump
        b. sends `("start", local_stdin_handle)` to remote's control_handle
        c. registers local IoPump with Broker
        d. loops waiting for 'local stdout closed && remote stdout closed'
    6. `_start_slave()` control channel receives `("start", stdin_handle)`,
        a. registers remote's stdin_handle with local IoPump
        b. registers local IoPump with Broker
        c.
           loops waiting for 'local stdout closed && remote stdout closed'
"""

import getopt
import inspect
import logging
import os
import shutil
import signal
import socket
import subprocess
import sys
import tempfile
import threading

import mitogen.core
import mitogen.master

from mitogen.core import LOG, IOLOG


#: getopt specification matching the OpenSSH client's option set, so fakessh
#: can consume whatever flags the calling tool passes to 'ssh'.
SSH_GETOPTS = (
    "1246ab:c:e:fgi:kl:m:no:p:qstvx"
    "ACD:E:F:I:KL:MNO:PQ:R:S:TVw:W:XYy"
)

#: Module-global ExternalContext reference used by exit() below.
_mitogen = None


class IoPump(mitogen.core.BasicStream):
    """Broker-driven pump shuttling bytes between a local fd pair and the
    router: reads fire 'receive' events, writes are buffered via write()."""
    _output_buf = ''
    _closed = False

    def __init__(self, process, broker, stdin_fd, stdout_fd):
        self.process = process
        self._broker = broker
        # We read the process's stdout and write its stdin.
        self.receive_side = mitogen.core.Side(self, stdout_fd)
        self.transmit_side = mitogen.core.Side(self, stdin_fd)

    def write(self, s):
        # Buffer locally; actual writes happen in on_transmit().
        self._output_buf += s
        self._broker.start_transmit(self)

    def close(self):
        self._closed = True
        # If local process hasn't exited yet, ensure its write buffer is
        # drained before lazily triggering disconnect in on_transmit.
        if self.transmit_side.fd is not None:
            self._broker.start_transmit(self)

    def on_shutdown(self, broker):
        self.close()

    def on_transmit(self, broker):
        # Flush as much buffered output as the pipe will accept.
        written = self.transmit_side.write(self._output_buf)
        IOLOG.debug('%r.on_transmit() -> len %r', self, written)
        if written is None:
            # Write failed: the peer is gone.
            self.on_disconnect(broker)
        else:
            self._output_buf = self._output_buf[written:]

        if not self._output_buf:
            broker.stop_transmit(self)
            if self._closed:
                # close() was requested and the buffer has drained.
                self.on_disconnect(broker)

    def on_receive(self, broker):
        s = self.receive_side.read()
        IOLOG.debug('%r.on_receive() -> len %r', self, len(s))
        if s:
            mitogen.core.fire(self, 'receive', s)
        else:
            # Zero-length read == EOF.
            self.on_disconnect(broker)

    def __repr__(self):
        return 'IoPump(%r)' % (
            self.process,
        )


class Process(object):
    """
    Manages the lifetime and pipe connections of the SSH command running in the
    slave.
    """
    def __init__(self, router, stdin_fd, stdout_fd, proc=None):
        self.router = router
        self.stdin_fd = stdin_fd
        self.stdout_fd = stdout_fd
        #: subprocess.Popen on the slave side; None on the master side.
        self.proc = proc
        self.control_handle = router.add_handler(self._on_control)
        self.stdin_handle = router.add_handler(self._on_stdin)
        self.pump = IoPump(self, router.broker, stdin_fd, stdout_fd)
        #: Senders towards the peer; assigned by _on_start()/start_master().
        self.stdin = None
        self.control = None
        self.wake_event = threading.Event()

        mitogen.core.listen(self.pump, 'disconnect', self._on_pump_disconnect)
        mitogen.core.listen(self.pump, 'receive', self._on_pump_receive)

        if proc:
            # Slave side: watch for the wrapped command exiting.
            pmon = mitogen.master.ProcessMonitor.instance()
            pmon.add(proc.pid, self._on_proc_exit)

    def __repr__(self):
        return 'Process(%r, %r)' % (self.stdin_fd, self.stdout_fd)

    def _on_proc_exit(self, status):
        # NOTE(review): assumes 'start' arrived before the child exited,
        # otherwise self.control is still None here — confirm.
        LOG.debug('%r._on_proc_exit(%r)', self, status)
        self.control.put(('exit', status))

    def _on_stdin(self, msg):
        # Peer wants to write to (or, with _DEAD payload, close) our
        # process's stdin.
        if msg == mitogen.core._DEAD:
            return

        data = msg.unpickle()
        IOLOG.debug('%r._on_stdin(%r)', self, data)

        if data == mitogen.core._DEAD:
            self.pump.close()
        else:
            self.pump.write(data)

    def _on_control(self, msg):
        # Dispatch ('command', arg) tuples to the matching _on_<command>().
        if msg != mitogen.core._DEAD:
            command, arg = msg.unpickle()
            LOG.debug('%r._on_control(%r, %s)', self, command, arg)

            func = getattr(self, '_on_%s' % (command,), None)
            if func:
                return func(msg, arg)

            LOG.warning('%r: unknown command %r', self, command)

    def _on_start(self, msg, arg):
        # Peer announced its (control_handle, stdin_handle); wire up senders
        # towards it and begin pumping I/O.
        dest = mitogen.core.Context(self.router, msg.src_id)
        self.control = mitogen.core.Sender(dest, arg[0])
        self.stdin = mitogen.core.Sender(dest, arg[1])
        self.router.broker.start_receive(self.pump)

    def _on_exit(self, msg, arg):
        LOG.debug('on_exit: proc = %r', self.proc)
        if self.proc:
            # Slave side: terminate the wrapped command.
            self.proc.terminate()
        else:
            # Master side: the fakessh context is finished; stop its broker.
            self.router.broker.shutdown()

    def _on_pump_receive(self, s):
        # Bytes read locally are forwarded to the peer's stdin handle.
        IOLOG.info('%r._on_pump_receive()', self)
        self.stdin.put(s)

    def _on_pump_disconnect(self):
        LOG.debug('%r._on_pump_disconnect()', self)
        mitogen.core.fire(self, 'disconnect')
        self.stdin.close()
        # Unblock wait().
        self.wake_event.set()

    def start_master(self, stdin, control):
        """Master-side startup: remember the senders towards the slave,
        announce our handles over the control channel, then begin pumping
        I/O."""
        self.stdin = stdin
        self.control = control
        control.put(('start', (self.control_handle, self.stdin_handle)))
        self.router.broker.start_receive(self.pump)

    def wait(self):
        """Block until the pump disconnects (wake_event is set)."""
        # NOTE(review): polls with a 0.1s timeout rather than wait(None) —
        # presumably to keep the thread interruptible on Python 2; confirm.
        while not self.wake_event.wait(0.1):
            pass


def _start_slave(mitogen_, src_id, args):
    """
    This runs in the target context, it is invoked by _fakessh_main running in
    the fakessh context immediately after startup. It starts the slave process
    (to the point where it has a stdin_handle for the target but no stdout
    channel to write to yet), and waits for _fakessh_main to send the 'start'
    command with its handles.
    """
    LOG.debug('_start_slave(%r, %r)', mitogen_, args)

    proc = subprocess.Popen(args,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
    )

    process = Process(mitogen_.router,
        proc.stdin.fileno(),
        proc.stdout.fileno(),
        proc,
    )

    # Returned to _fakessh_main(), which replies with ('start', handles).
    return process.control_handle, process.stdin_handle


#
# SSH client interface.
#


def exit():
    # Shut down the fakessh context's broker, ending the process.
    _mitogen.broker.shutdown()


def die(msg, *args):
    """Print a formatted error to stdout, mimicking ssh's CLI, then exit."""
    if args:
        msg %= args
    print msg
    exit()


def parse_args():
    """Parse sys.argv as an ssh-like command line, returning
    `(hostname, options, command_args)`."""
    hostname = None
    remain = sys.argv[1:]
    allopts = []
    restarted = 0

    # ssh permits options to follow the hostname; run getopt a second time
    # after extracting it.
    while remain and restarted < 2:
        opts, args = getopt.getopt(remain, SSH_GETOPTS)
        remain = remain[:]  # getopt bug!
        allopts += opts
        if not args:
            break

        if not hostname:
            hostname = args.pop(0)
            remain = remain[remain.index(hostname) + 1:]

        restarted += 1

    return hostname, allopts, args


def _fakessh_main(mitogen_, dest_context_id):
    """Entry point for the fakessh context: impersonate the ssh client,
    starting the requested command inside `dest_context_id` and pumping its
    stdio over the existing mitogen connection."""
    hostname, opts, args = parse_args()
    if not hostname:
        die('Missing hostname')

    for opt, optarg in opts:
        # NOTE(review): dead branch — '0 and ...' is never true, so -s
        # (subsystem) handling is currently disabled.
        if 0 and opt == '-s':
            subsystem = True
        else:
            LOG.debug('Warning option %s %s is ignored.', opt, optarg)

    LOG.debug('hostname: %r', hostname)
    LOG.debug('opts: %r', opts)
    LOG.debug('args: %r', args)

    dest = mitogen.master.Context(mitogen_.router, dest_context_id)
    # Start the slave in the target context; it reports back its handles.
    control_handle, stdin_handle = dest.call_with_deadline(None, True,
        _start_slave, mitogen.context_id, args)

    LOG.debug('_fakessh_main: received control_handle=%r, stdin_handle=%r',
              control_handle, stdin_handle)

    # fds 1/0: our own stdout/stdin form the local end of the pump.
    process = Process(mitogen_.router, 1, 0)
    process.start_master(
        stdin=mitogen.core.Sender(dest, stdin_handle),
        control=mitogen.core.Sender(dest, control_handle),
    )
    process.wait()
    process.control.put(('exit', None))


#
# Public API.
#

def run(dest, router, args, deadline=None):
    """
    Run the command specified by the argument vector `args` such that ``PATH``
    searches for SSH by the command will cause its attempt to use SSH to
    execute a remote program to be redirected to use mitogen to execute that
    program using the context `dest` instead.

    :param mitogen.core.Context dest:
        The destination context to execute the SSH command line in.

    :param mitogen.core.Router router:

    :param list[str] args:
        Command line arguments for local program, e.g.
``['rsync', '/tmp', 'remote:/tmp']`` + """ + context_id = router.context_id_counter.next() + fakessh = mitogen.master.Context(router, context_id) + fakessh.name = 'fakessh' + + sock1, sock2 = socket.socketpair() + mitogen.core.set_cloexec(sock1.fileno()) + + stream = mitogen.core.Stream(router, context_id, fakessh.key) + stream.name = 'fakessh' + stream.accept(sock1.fileno(), sock1.fileno()) + router.register(fakessh, stream) + + # Held in socket buffer until process is booted. + fakessh.call_async(True, _fakessh_main, dest.context_id) + + tmp_path = tempfile.mkdtemp(prefix='mitogen_fakessh') + try: + ssh_path = os.path.join(tmp_path, 'ssh') + fp = file(ssh_path, 'w') + try: + fp.write('#!/usr/bin/env python\n') + fp.write(inspect.getsource(mitogen.core)) + fp.write('\n') + fp.write('ExternalContext().main%r\n' % (( + mitogen.context_id, # parent_id + context_id, # context_id + fakessh.key, # key + router.debug, # debug + logging.getLogger().level, # log_level + sock2.fileno(), # in_fd + sock2.fileno(), # out_fd + None, # core_src_fd + False, # setup_stdio + ),)) + finally: + fp.close() + + os.chmod(ssh_path, 0755) + env = os.environ.copy() + env.update({ + 'PATH': '%s:%s' % (tmp_path, env.get('PATH', '')), + 'ARGV0': `[sys.executable]`, + 'SSH_PATH': ssh_path, + }) + + proc = subprocess.Popen(args, env=env) + proc.wait() + finally: + shutil.rmtree(tmp_path) diff --git a/mitogen/master.py b/mitogen/master.py new file mode 100644 index 00000000..237fc680 --- /dev/null +++ b/mitogen/master.py @@ -0,0 +1,642 @@ +""" +This module implements functionality required by master processes, such as +starting new contexts via SSH. Its size is also restricted, since it must be +sent to any context that will be used to establish additional child contexts. 
+""" + +import errno +import getpass +import imp +import inspect +import itertools +import logging +import os +import pkgutil +import re +import select +import signal +import socket +import sys +import textwrap +import time +import types +import zlib + +if not hasattr(pkgutil, 'find_loader'): + # find_loader() was new in >=2.5, but the modern pkgutil.py syntax has + # been kept intentionally 2.3 compatible so we can reuse it. + from mitogen.compat import pkgutil + +import mitogen.core + + +LOG = logging.getLogger('mitogen') +IOLOG = logging.getLogger('mitogen.io') +RLOG = logging.getLogger('ctx') + +DOCSTRING_RE = re.compile(r'""".+?"""', re.M | re.S) +COMMENT_RE = re.compile(r'^[ ]*#[^\n]*$', re.M) +IOLOG_RE = re.compile(r'^[ ]*IOLOG.debug\(.+?\)$', re.M) + +PERMITTED_CLASSES = set([ + ('mitogen.core', 'CallError'), + ('mitogen.core', 'Dead'), +]) + + +def minimize_source(source): + """Remove comments and docstrings from Python `source`, preserving line + numbers and syntax of empty blocks.""" + subber = lambda match: '""' + ('\n' * match.group(0).count('\n')) + source = DOCSTRING_RE.sub(subber, source) + source = COMMENT_RE.sub('', source) + return source.replace(' ', '\t') + + +def get_child_modules(path, fullname): + """Return the canonical names of all submodules of a package `module`.""" + it = pkgutil.iter_modules([os.path.dirname(path)]) + return ['%s.%s' % (fullname, name) for _, name, _ in it] + + +def create_child(*args): + """Create a child process whose stdin/stdout is connected to a socket, + returning `(pid, socket_obj)`.""" + parentfp, childfp = socket.socketpair() + pid = os.fork() + if not pid: + os.dup2(childfp.fileno(), 0) + os.dup2(childfp.fileno(), 1) + childfp.close() + parentfp.close() + os.execvp(args[0], args) + raise SystemExit + + childfp.close() + LOG.debug('create_child() child %d fd %d, parent %d, args %r', + pid, parentfp.fileno(), os.getpid(), args) + return pid, os.dup(parentfp.fileno()) + + +def write_all(fd, s): + written = 0 + 
while written < len(s): + rc = os.write(fd, buffer(s, written)) + if not rc: + raise IOError('short write') + written += rc + return written + + +def read_with_deadline(fd, size, deadline): + timeout = deadline - time.time() + if timeout > 0: + rfds, _, _ = select.select([fd], [], [], timeout) + if rfds: + return os.read(fd, size) + + raise mitogen.core.TimeoutError('read timed out') + + +def iter_read(fd, deadline): + if deadline is not None: + LOG.debug('Warning: iter_read(.., deadline=...) unimplemented') + + bits = [] + while True: + s, disconnected = mitogen.core.io_op(os.read, fd, 4096) + if disconnected: + s = '' + + if not s: + raise mitogen.core.StreamError( + 'EOF on stream; last 100 bytes received: %r' % + (''.join(bits)[-100:],) + ) + + bits.append(s) + yield s + + +def discard_until(fd, s, deadline): + for buf in iter_read(fd, deadline): + if buf.endswith(s): + return + + +class LogForwarder(object): + def __init__(self, router): + self._router = router + self._cache = {} + router.add_handler(self._on_forward_log, mitogen.core.FORWARD_LOG) + + def _on_forward_log(self, msg): + if msg == mitogen.core._DEAD: + return + + logger = self._cache.get(msg.src_id) + if logger is None: + context = self._router.context_by_id(msg.src_id) + if context is None: + LOG.error('FORWARD_LOG received from src_id %d', msg.src_id) + return + + name = '%s.%s' % (RLOG.name, context.name) + self._cache[msg.src_id] = logger = logging.getLogger(name) + + name, level_s, s = msg.data.split('\x00', 2) + logger.log(int(level_s), '%s: %s', name, s) + + def __repr__(self): + return 'LogForwarder(%r)' % (self._router,) + + +class ModuleResponder(object): + def __init__(self, router): + self._router = router + router.add_handler(self._on_get_module, mitogen.core.GET_MODULE) + + def __repr__(self): + return 'ModuleResponder(%r)' % (self._router,) + + def _get_module_via_pkgutil(self, fullname): + """Attempt to fetch source code via pkgutil. 
In an ideal world, this + would be the only required implementation of get_module().""" + loader = pkgutil.find_loader(fullname) + LOG.debug('pkgutil.find_loader(%r) -> %r', fullname, loader) + if not loader: + return + + try: + path = loader.get_filename(fullname) + source = loader.get_source(fullname) + if path is not None and source is not None: + return path, source, loader.is_package(fullname) + except AttributeError: + return + + def _get_module_via_sys_modules(self, fullname): + """Attempt to fetch source code via sys.modules. This is specifically + to support __main__, but it may catch a few more cases.""" + if fullname not in sys.modules: + LOG.debug('%r does not appear in sys.modules', fullname) + return + + is_pkg = hasattr(sys.modules[fullname], '__path__') + try: + source = inspect.getsource(sys.modules[fullname]) + except IOError: + # Work around inspect.getsourcelines() bug. + if not is_pkg: + raise + source = '\n' + + return (sys.modules[fullname].__file__.rstrip('co'), + source, + hasattr(sys.modules[fullname], '__path__')) + + def _get_module_via_parent_enumeration(self, fullname): + """Attempt to fetch source code by examining the module's (hopefully + less insane) parent package. 
Required for ansible.compat.six.""" + pkgname, _, modname = fullname.rpartition('.') + pkg = sys.modules.get(pkgname) + if pkg is None or not hasattr(pkg, '__file__'): + return + + pkg_path = os.path.dirname(pkg.__file__) + try: + fp, path, ext = imp.find_module(modname, [pkg_path]) + LOG.error('%r', (fp, path, ext)) + return path, fp.read(), False + except ImportError, e: + LOG.debug('imp.find_module(%r, %r) -> %s', modname, [pkg_path], e) + + get_module_methods = [_get_module_via_pkgutil, + _get_module_via_sys_modules, + _get_module_via_parent_enumeration] + + def _on_get_module(self, msg): + LOG.debug('%r.get_module(%r)', self, msg) + if msg == mitogen.core._DEAD: + return + + fullname = msg.data + try: + for method in self.get_module_methods: + tup = method(self, fullname) + if tup: + break + + try: + path, source, is_pkg = tup + except TypeError: + raise ImportError('could not find %r' % (fullname,)) + + LOG.debug('%s found %r: (%r, .., %r)', + method.__name__, fullname, path, is_pkg) + if is_pkg: + pkg_present = get_child_modules(path, fullname) + LOG.debug('get_child_modules(%r, %r) -> %r', + path, fullname, pkg_present) + else: + pkg_present = None + + compressed = zlib.compress(source) + self._router.route( + mitogen.core.Message.pickled( + (pkg_present, path, compressed), + dst_id=msg.src_id, + handle=msg.reply_to, + ) + ) + except Exception: + LOG.debug('While importing %r', fullname, exc_info=True) + self._router.route( + mitogen.core.Message.pickled( + None, + dst_id=msg.src_id, + handle=msg.reply_to, + ) + ) + + +class ModuleForwarder(object): + """ + Respond to GET_MODULE requests in a slave by forwarding the request to our + parent context, or satisfying the request from our local Importer cache. 
+ """ + def __init__(self, router, parent_context, importer): + self.router = router + self.parent_context = parent_context + self.importer = importer + router.add_handler(self._on_get_module, mitogen.core.GET_MODULE) + + def __repr__(self): + return 'ModuleForwarder(%r)' % (self.router,) + + def _on_get_module(self, msg): + LOG.debug('%r._on_get_module(%r)', self, msg) + if msg == mitogen.core._DEAD: + return + + fullname = msg.data + cached = self.importer._cache.get(fullname) + if cached: + LOG.debug('%r._on_get_module(): using cached %r', self, fullname) + self.router.route( + mitogen.core.Message.pickled( + cached, + dst_id=msg.src_id, + handle=msg.reply_to, + ) + ) + else: + LOG.debug('%r._on_get_module(): requesting %r', self, fullname) + self.parent_context.send( + mitogen.core.Message( + data=msg.data, + handle=mitogen.core.GET_MODULE, + reply_to=self.router.add_handler( + lambda m: self._on_got_source(m, msg), + persist=False + ) + ) + ) + + def _on_got_source(self, msg, original_msg): + LOG.debug('%r._on_got_source(%r, %r)', self, msg, original_msg) + fullname = original_msg.data + self.importer._cache[fullname] = msg.unpickle() + self.router.route( + mitogen.core.Message( + data=msg.data, + dst_id=original_msg.src_id, + handle=original_msg.reply_to, + ) + ) + + +class Message(mitogen.core.Message): + """ + Message subclass that controls unpickling. + """ + def _find_global(self, module_name, class_name): + """Return the class implementing `module_name.class_name` or raise + `StreamError` if the module is not whitelisted.""" + if (module_name, class_name) not in PERMITTED_CLASSES: + raise mitogen.core.StreamError( + '%r attempted to unpickle %r in module %r', + self._context, class_name, module_name) + return getattr(sys.modules[module_name], class_name) + + +class Stream(mitogen.core.Stream): + """ + Base for streams capable of starting new slaves. + """ + message_class = Message + + #: The path to the remote Python interpreter. 
    python_path = 'python2.7'

    #: True to cause context to write verbose /tmp/mitogen.<pid>.log.
    debug = False

    def construct(self, remote_name=None, python_path=None, debug=False, **kwargs):
        """Configure the stream: remember the remote interpreter path, a
        display name for the remote end (defaulting to ``user@host:pid``),
        and whether the child should write a debug log."""
        super(Stream, self).construct(**kwargs)
        if python_path:
            self.python_path = python_path

        if remote_name is None:
            remote_name = '%s@%s:%d'
            remote_name %= (getpass.getuser(), socket.gethostname(), os.getpid())
        self.remote_name = remote_name
        self.debug = debug

    def on_shutdown(self, broker):
        """Request the slave gracefully shut itself down."""
        LOG.debug('%r closing CALL_FUNCTION channel', self)
        # _DEAD on CALL_FUNCTION ends the child's _dispatch_calls() loop.
        self.send(
            mitogen.core.Message.pickled(
                mitogen.core._DEAD,
                src_id=mitogen.context_id,
                dst_id=self.remote_id,
                handle=mitogen.core.CALL_FUNCTION
            )
        )

    # base64'd and passed to 'python -c'. It forks, dups 0->100, creates a
    # pipe, then execs a new interpreter with a custom argv. 'CONTEXT_NAME' is
    # replaced with the context name. Optimized for size.
    # NOTE: never called in this process — get_boot_command() extracts its
    # source and runs it on the remote side.
    def _first_stage():
        import os,sys,zlib
        R,W=os.pipe()
        R2,W2=os.pipe()
        if os.fork():
            os.dup2(0,100)
            os.dup2(R,0)
            os.dup2(R2,101)
            for f in R,R2,W,W2: os.close(f)
            os.environ['ARGV0'] = `[sys.executable]`
            os.execv(sys.executable,['mitogen:CONTEXT_NAME'])
        else:
            os.write(1, 'EC0\n')
            C = zlib.decompress(sys.stdin.read(input()))
            os.fdopen(W,'w',0).write(C)
            os.fdopen(W2,'w',0).write('%s\n%s' % (len(C),C))
            os.write(1, 'EC1\n')
            sys.exit(0)

    def get_boot_command(self):
        """Return the argv used to start the remote interpreter: the first
        stage above, base64-encoded into a single '-c' argument."""
        source = inspect.getsource(self._first_stage)
        # Drop the 'def' line and dedent the body.
        source = textwrap.dedent('\n'.join(source.strip().split('\n')[1:]))
        # NOTE(review): whitespace in this literal may have been mangled in
        # transit; upstream swaps a multi-space indent for a tab to shrink
        # the command line — confirm the space count.
        source = source.replace(' ', '\t')
        source = source.replace('CONTEXT_NAME', self.remote_name)
        encoded = source.encode('base64').replace('\n', '')
        return [self.python_path, '-c',
                'exec("%s".decode("base64"))' % (encoded,)]

    def get_preamble(self):
        """Return the length-prefixed, zlib-compressed mitogen.core source
        plus its ExternalContext().main() bootstrap call, in the format the
        first stage consumes on stdin."""
        source = inspect.getsource(mitogen.core)
        source += '\nExternalContext().main%r\n' % ((
            mitogen.context_id,       # parent_id
            self.remote_id,           # context_id
            self.key,
            self.debug,
            LOG.level or logging.getLogger().level or logging.INFO,
        ),)

        compressed = zlib.compress(minimize_source(source))
        return str(len(compressed)) + '\n' + compressed

    create_child = staticmethod(create_child)

    def connect(self):
        """Start the child process and run the bootstrap handshake."""
        LOG.debug('%r.connect()', self)
        pid, fd = self.create_child(*self.get_boot_command())
        self.name = 'local.%s' % (pid,)
        self.receive_side = mitogen.core.Side(self, fd)
        # Separate dup so each side owns (and can close) its own fd.
        self.transmit_side = mitogen.core.Side(self, os.dup(fd))
        LOG.debug('%r.connect(): child process stdin/stdout=%r',
                  self, self.receive_side.fd)

        self._connect_bootstrap()

    def _ec0_received(self):
        # First stage is alive: feed it the compressed core and wait for the
        # second acknowledgement.
        LOG.debug('%r._ec0_received()', self)
        write_all(self.transmit_side.fd, self.get_preamble())
        discard_until(self.receive_side.fd, 'EC1\n', time.time() + 10.0)

    def _connect_bootstrap(self):
        # Wait for the first stage's hello before sending anything.
        discard_until(self.receive_side.fd, 'EC0\n', time.time() + 10.0)
        self._ec0_received()


class
Broker(mitogen.core.Broker): + shutdown_timeout = 5.0 + + +class Context(mitogen.core.Context): + via = None + + def on_disconnect(self, broker): + """ + Override base behaviour of triggering Broker shutdown on parent stream + disconnection. + """ + mitogen.core.fire(self, 'disconnect') + + def _discard_result(self, msg): + data = msg.unpickle() + if isinstance(data, Exception): + try: + raise data + except Exception: + LOG.exception('_discard_result') + else: + LOG.debug('_discard_result: %r', data) + + def call_async(self, with_context, fn, *args, **kwargs): + LOG.debug('%r.call_async(%r, %r, *%r, **%r)', + self, with_context, fn, args, kwargs) + + if isinstance(fn, types.MethodType) and \ + isinstance(fn.im_self, (type, types.ClassType)): + klass = fn.im_self.__name__ + else: + klass = None + + call = (with_context, fn.__module__, klass, fn.__name__, args, kwargs) + self.send( + mitogen.core.Message.pickled( + call, + handle=mitogen.core.CALL_FUNCTION, + reply_to=self.router.add_handler(self._discard_result), + ) + ) + + def call_with_deadline(self, deadline, with_context, fn, *args, **kwargs): + """Invoke `fn([context,] *args, **kwargs)` in the external context. + + If `with_context` is ``True``, pass its + :py:class:`ExternalContext ` instance as + the first parameter. + + If `deadline` is not ``None``, expire the call after `deadline` + seconds. 
If `deadline` is ``None``, the invocation may block + indefinitely.""" + LOG.debug('%r.call_with_deadline(%r, %r, %r, *%r, **%r)', + self, deadline, with_context, fn, args, kwargs) + + if isinstance(fn, types.MethodType) and \ + isinstance(fn.im_self, (type, types.ClassType)): + klass = fn.im_self.__name__ + else: + klass = None + + call = (with_context, fn.__module__, klass, fn.__name__, args, kwargs) + response = self.send_await( + mitogen.core.Message.pickled( + call, + handle=mitogen.core.CALL_FUNCTION + ), + deadline + ) + + decoded = response.unpickle() + if isinstance(decoded, mitogen.core.CallError): + raise decoded + return decoded + + def call(self, fn, *args, **kwargs): + """Invoke `fn(*args, **kwargs)` in the external context.""" + return self.call_with_deadline(None, False, fn, *args, **kwargs) + + +def _proxy_connect(mitogen, name, context_id, klass, kwargs): + if not isinstance(mitogen.router, Router): # TODO + mitogen.router.__class__ = Router # TODO + LOG.debug('_proxy_connect(): constructing ModuleForwarder') + ModuleForwarder(mitogen.router, mitogen.parent, mitogen.importer) + + context = mitogen.router._connect( + context_id, + klass, + name=name, + **kwargs + ) + return context.name + + +class Router(mitogen.core.Router): + context_id_counter = itertools.count(1) + + debug = False + + def __init__(self, *args, **kwargs): + super(Router, self).__init__(*args, **kwargs) + self.responder = ModuleResponder(self) + self.log_forwarder = LogForwarder(self) + + def enable_debug(self): + """ + Cause this context and any descendant child contexts to write debug + logs to /tmp/mitogen..log. 
+ """ + mitogen.core.enable_debug_logging() + self.debug = True + + def __enter__(self): + return self + + def __exit__(self, e_type, e_val, tb): + self.broker.shutdown() + self.broker.join() + + def context_by_id(self, context_id): + return self._context_by_id.get(context_id) + + def local(self, **kwargs): + return self.connect(Stream, **kwargs) + + def sudo(self, **kwargs): + import mitogen.sudo + return self.connect(mitogen.sudo.Stream, **kwargs) + + def ssh(self, **kwargs): + import mitogen.ssh + return self.connect(mitogen.ssh.Stream, **kwargs) + + def _connect(self, context_id, klass, name=None, **kwargs): + context = Context(self, context_id) + stream = klass(self, context.context_id, context.key, **kwargs) + if name is not None: + stream.name = name + stream.connect() + context.name = stream.name + self.register(context, stream) + return context + + def connect(self, klass, name=None, **kwargs): + kwargs.setdefault('debug', self.debug) + + via = kwargs.pop('via', None) + if via is not None: + return self.proxy_connect(via, klass, name=name, **kwargs) + + context_id = self.context_id_counter.next() + return self._connect(context_id, klass, name=name, **kwargs) + + def proxy_connect(self, via_context, klass, name=None, **kwargs): + context_id = self.context_id_counter.next() + # Must be added prior to _proxy_connect() to avoid a race. 
+ self.add_route(context_id, via_context.context_id) + name = via_context.call_with_deadline(None, True, + _proxy_connect, name, context_id, klass, kwargs + ) + # name = '%s.%s' % (via_context.name, name) + context = Context(self, context_id, name=name) + context.via = via_context + + child = via_context + parent = via_context.via + while parent is not None: + LOG.debug('Adding route to %r for %r via %r', parent, context, child) + parent.send( + mitogen.core.Message( + data='%s\x00%s' % (context_id, child.context_id), + handle=mitogen.core.ADD_ROUTE, + ) + ) + child = parent + parent = parent.via + + self._context_by_id[context.context_id] = context + return context + + +class ProcessMonitor(object): + def __init__(self): + # pid -> callback() + self.callback_by_pid = {} + signal.signal(signal.SIGCHLD, self._on_sigchld) + + def _on_sigchld(self, _signum, _frame): + for pid, callback in self.callback_by_pid.items(): + pid, status = os.waitpid(pid, os.WNOHANG) + if pid: + callback(status) + del self.callback_by_pid[pid] + + def add(self, pid, callback): + self.callback_by_pid[pid] = callback + + _instance = None + + @classmethod + def instance(cls): + if cls._instance is None: + cls._instance = cls() + return cls._instance diff --git a/mitogen/ssh.py b/mitogen/ssh.py new file mode 100644 index 00000000..250a13f1 --- /dev/null +++ b/mitogen/ssh.py @@ -0,0 +1,33 @@ +""" +Functionality to allow establishing new slave contexts over an SSH connection. +""" + +import commands + +import mitogen.master + + +class Stream(mitogen.master.Stream): + python_path = 'python' + + #: The path to the SSH binary. 
+ ssh_path = 'ssh' + + def construct(self, hostname, username=None, ssh_path=None, **kwargs): + super(Stream, self).construct(**kwargs) + self.hostname = hostname + self.username = username + if ssh_path: + self.ssh_path = ssh_path + + def get_boot_command(self): + bits = [self.ssh_path] + if self.username: + bits += ['-l', self.username] + bits.append(self.hostname) + base = super(Stream, self).get_boot_command() + return bits + map(commands.mkarg, base) + + def connect(self): + super(Stream, self).connect() + self.name = 'ssh.' + self.hostname diff --git a/mitogen/sudo.py b/mitogen/sudo.py new file mode 100644 index 00000000..51c59186 --- /dev/null +++ b/mitogen/sudo.py @@ -0,0 +1,155 @@ + +import logging +import os +import pty +import termios +import time + +import mitogen.core +import mitogen.master + + +LOG = logging.getLogger(__name__) +PASSWORD_PROMPT = 'password' + + +class PasswordError(mitogen.core.Error): + pass + + +def flags(names): + """Return the result of ORing a set of (space separated) :py:mod:`termios` + module constants together.""" + return sum(getattr(termios, name) for name in names.split()) + + +def cfmakeraw((iflag, oflag, cflag, lflag, ispeed, ospeed, cc)): + """Given a list returned by :py:func:`termios.tcgetattr`, return a list + that has been modified in the same manner as the `cfmakeraw()` C library + function.""" + iflag &= ~flags('IGNBRK BRKINT PARMRK ISTRIP INLCR IGNCR ICRNL IXON') + oflag &= ~flags('OPOST IXOFF') + lflag &= ~flags('ECHO ECHOE ECHONL ICANON ISIG IEXTEN') + cflag &= ~flags('CSIZE PARENB') + cflag |= flags('CS8') + + iflag = 0 + oflag = 0 + lflag = 0 + return [iflag, oflag, cflag, lflag, ispeed, ospeed, cc] + + +def disable_echo(fd): + old = termios.tcgetattr(fd) + new = cfmakeraw(old) + flags = ( + termios.TCSAFLUSH | + getattr(termios, 'TCSASOFT', 0) + ) + termios.tcsetattr(fd, flags, new) + + +def close_nonstandard_fds(): + for fd in xrange(3, 1024): + try: + os.close(fd) + except OSError: + pass + + +def 
tty_create_child(*args): + """ + Return a file descriptor connected to the master end of a pseudo-terminal, + whose slave end is connected to stdin/stdout/stderr of a new child process. + The child is created such that the pseudo-terminal becomes its controlling + TTY, ensuring access to /dev/tty returns a new file descriptor open on the + slave end. + + :param args: + execl() arguments. + """ + master_fd, slave_fd = os.openpty() + disable_echo(master_fd) + disable_echo(slave_fd) + + pid = os.fork() + if not pid: + os.dup2(slave_fd, 0) + os.dup2(slave_fd, 1) + os.dup2(slave_fd, 2) + close_nonstandard_fds() + os.setsid() + os.close(os.open(os.ttyname(1), os.O_RDWR)) + os.execvp(args[0], args) + raise SystemExit + + os.close(slave_fd) + LOG.debug('tty_create_child() child %d fd %d, parent %d, args %r', + pid, master_fd, os.getpid(), args) + return pid, master_fd + + +class Stream(mitogen.master.Stream): + create_child = staticmethod(tty_create_child) + sudo_path = 'sudo' + password = None + + def construct(self, username=None, sudo_path=None, password=None, **kwargs): + """ + Get the named sudo context, creating it if it does not exist. + + :param mitogen.core.Broker broker: + The broker that will own the context. + + :param str username: + Username to pass to sudo as the ``-u`` parameter, defaults to ``root``. + + :param str sudo_path: + Filename or complete path to the sudo binary. ``PATH`` will be searched + if given as a filename. Defaults to ``sudo``. + + :param str python_path: + Filename or complete path to the Python binary. ``PATH`` will be + searched if given as a filename. Defaults to :py:data:`sys.executable`. + + :param str password: + The password to use when authenticating to sudo. Depending on the sudo + configuration, this is either the current account password or the + target account password. :py:class:`mitogen.sudo.PasswordError` will + be raised if sudo requests a password but none is provided. 
+ + """ + super(Stream, self).construct(**kwargs) + self.username = username or 'root' + if sudo_path: + self.sudo_path = sudo_path + if password: + self.password = password + self.name = 'sudo.' + self.username + + def get_boot_command(self): + bits = [self.sudo_path, '-u', self.username] + bits = bits + super(Stream, self).get_boot_command() + LOG.debug('sudo command line: %r', bits) + return bits + + password_incorrect_msg = 'sudo password is incorrect' + password_required_msg = 'sudo password is required' + + def _connect_bootstrap(self): + password_sent = False + for buf in mitogen.master.iter_read(self.receive_side.fd, + time.time() + 10.0): + LOG.debug('%r: received %r', self, buf) + if buf.endswith('EC0\n'): + return self._ec0_received() + elif PASSWORD_PROMPT in buf.lower(): + if self.password is None: + raise PasswordError(self.password_required_msg) + if password_sent: + raise PasswordError(self.password_incorrect_msg) + LOG.debug('sending password') + os.write(self.transmit_side.fd, self.password + '\n') + password_sent = True + else: + raise mitogen.core.StreamError('bootstrap failed') diff --git a/mitogen/tcp.py b/mitogen/tcp.py new file mode 100644 index 00000000..b0b7c6d3 --- /dev/null +++ b/mitogen/tcp.py @@ -0,0 +1,42 @@ +""" +Functionality to allow a slave context to reconnect back to its master using a +plain TCP connection. 
+""" + +import socket + +import mitogen.core + + +class Listener(mitogen.core.BasicStream): + def __init__(self, broker, address=None, backlog=30): + self._broker = broker + self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self._sock.bind(address or ('0.0.0.0', 0)) + self._sock.listen(backlog) + mitogen.core.set_cloexec(self._sock.fileno()) + self.address = self._sock.getsockname() + self.receive_side = mitogen.core.Side(self, self._sock.fileno()) + broker.start_receive(self) + + def on_receive(self, broker): + sock, addr = self._sock.accept() + context = Context(self._broker, name=addr) + stream = mitogen.core.Stream(context) + stream.accept(sock.fileno(), sock.fileno()) + + +def listen(broker, address=None, backlog=30): + """Listen on `address` for connections from newly spawned contexts.""" + return Listener(broker, address, backlog) + + +def connect(context): + """Connect to a Broker at the address specified in our associated + Context.""" + LOG.debug('%s.connect()', __name__) + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.receive_side = mitogen.core.Side(self, sock.fileno()) + self.transmit_side = mitogen.core.Side(self, sock.fileno()) + sock.connect(self._context.parent_addr) + self.enqueue(0, self._context.name) diff --git a/mitogen/utils.py b/mitogen/utils.py new file mode 100644 index 00000000..51f9a3c7 --- /dev/null +++ b/mitogen/utils.py @@ -0,0 +1,78 @@ +""" +A random assortment of utility functions useful on masters and slaves. +""" + +import logging +import sys + +import mitogen +import mitogen.core +import mitogen.master + + +LOG = logging.getLogger('mitogen') + + +def disable_site_packages(): + """Remove all entries mentioning site-packages or Extras from the system + path. 
Used primarily for testing on OS X within a virtualenv, where OS X + bundles some ancient version of the 'six' module.""" + for entry in sys.path[:]: + if 'site-packages' in entry or 'Extras' in entry: + sys.path.remove(entry) + + +def log_to_tmp(): + import os + log_to_file(path='/tmp/mitogen.%s.log' % (os.getpid(),)) + + +def log_to_file(path=None, io=True, level=logging.INFO): + """Install a new :py:class:`logging.Handler` writing applications logs to + the filesystem. Useful when debugging slave IO problems.""" + log = logging.getLogger('') + if path: + fp = open(path, 'w', 1) + mitogen.core.set_cloexec(fp.fileno()) + else: + fp = sys.stderr + + log.setLevel(level) + if io: + logging.getLogger('mitogen.io').setLevel(level) + + fmt = '%(asctime)s %(levelname).1s %(name)s: %(message)s' + datefmt = '%H:%M:%S' + handler = logging.StreamHandler(fp) + handler.formatter = logging.Formatter(fmt, datefmt) + log.handlers.insert(0, handler) + + +def run_with_router(func, *args, **kwargs): + """Arrange for `func(broker, *args, **kwargs)` to run with a temporary + :py:class:`mitogen.master.Router`, ensuring the Router and Broker are + correctly shut down during normal or exceptional return.""" + broker = mitogen.master.Broker() + router = mitogen.master.Router(broker) + try: + return func(router, *args, **kwargs) + finally: + broker.shutdown() + broker.join() + + +def with_router(func): + """Decorator version of :py:func:`run_with_broker`. Example: + + .. code-block:: python + + @with_broker + def do_stuff(broker, arg): + pass + + do_stuff(blah, 123) + """ + def wrapper(*args, **kwargs): + return run_with_router(func, *args, **kwargs) + wrapper.func_name = func.func_name + return wrapper diff --git a/preamble_size.py b/preamble_size.py new file mode 100644 index 00000000..5bd38fa3 --- /dev/null +++ b/preamble_size.py @@ -0,0 +1,32 @@ +""" +Print the size of a typical SSH command line and the bootstrap code sent to new +contexts. 
+""" + +import inspect +import zlib + +import mitogen.master +import mitogen.ssh +import mitogen.sudo + +broker = mitogen.master.Broker() + +router = mitogen.core.Router(broker) +context = mitogen.master.Context(router, 0) +stream = mitogen.ssh.Stream(router, 0, context.key, hostname='foo') +broker.shutdown() + +print 'SSH command size: %s' % (len(' '.join(stream.get_boot_command())),) +print 'Preamble size: %s (%.2fKiB)' % ( + len(stream.get_preamble()), + len(stream.get_preamble()) / 1024.0, +) + +for mod in ( + mitogen.master, + mitogen.ssh, + mitogen.sudo, + ): + sz = len(zlib.compress(mitogen.master.minimize_source(inspect.getsource(mod)))) + print '%s size: %s (%.2fKiB)' % (mod.__name__, sz, sz / 1024.0) diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..0ce5f345 --- /dev/null +++ b/setup.py @@ -0,0 +1,13 @@ + +from distutils.core import setup + +setup( + name = 'mitogen', + version = '0.0.0', + description = 'Library for writing distributed self-replicating programs. THIS PACKAGE IS INCOMPLETE. 
IT IS BEING UPLOADED BECAUSE PYPI MAINTAINERS BROKE THE REGISTER COMMAND', + author = 'David Wilson', + license = 'OpenLDAP BSD', + url = 'http://github.com/dw/mitogen/', + py_packages = ['Mitogen'], + zip_safe = False +) diff --git a/tests/data/fakessh.py b/tests/data/fakessh.py new file mode 100755 index 00000000..4601d0bd --- /dev/null +++ b/tests/data/fakessh.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python + +import optparse +import os +import shlex +import sys + +parser = optparse.OptionParser() +parser.add_option('--user', '-l', action='store') +parser.disable_interspersed_args() + +opts, args = parser.parse_args(sys.argv[1:]) +args.pop(0) # hostname +args = [''.join(shlex.split(s)) for s in args] +print args +os.execvp(args[0], args) diff --git a/tests/data/plain_old_module.py b/tests/data/plain_old_module.py new file mode 100755 index 00000000..5e4943d4 --- /dev/null +++ b/tests/data/plain_old_module.py @@ -0,0 +1,10 @@ +""" +I am a plain old module with no interesting dependencies or import machinery +fiddlery. +""" + +import math + + +def pow(x, y): + return x ** y diff --git a/tests/data/self_contained_program.py b/tests/data/self_contained_program.py new file mode 100644 index 00000000..f3a82ce3 --- /dev/null +++ b/tests/data/self_contained_program.py @@ -0,0 +1,22 @@ +""" +I am a self-contained program! 
+""" + +import mitogen.master + + +def repr_stuff(): + return repr([__name__, 50]) + + +def main(): + broker = mitogen.master.Broker() + try: + context = mitogen.master.connect(broker) + print context.call(repr_stuff) + finally: + broker.shutdown() + broker.join() + +if __name__ == '__main__' and not mitogen.slave: + main() diff --git a/tests/data/simple_pkg/__init__.py b/tests/data/simple_pkg/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/data/simple_pkg/a.py b/tests/data/simple_pkg/a.py new file mode 100644 index 00000000..ce5f1887 --- /dev/null +++ b/tests/data/simple_pkg/a.py @@ -0,0 +1,6 @@ + +import simple_pkg.b + + +def subtract_one_add_two(n): + return simple_pkg.b.subtract_one(n) + 2 diff --git a/tests/data/simple_pkg/b.py b/tests/data/simple_pkg/b.py new file mode 100644 index 00000000..5e5d67b2 --- /dev/null +++ b/tests/data/simple_pkg/b.py @@ -0,0 +1,3 @@ + +def subtract_one(n): + return n - 1 diff --git a/tests/data/six_brokenpkg/__init__.py b/tests/data/six_brokenpkg/__init__.py new file mode 100644 index 00000000..e4b64348 --- /dev/null +++ b/tests/data/six_brokenpkg/__init__.py @@ -0,0 +1,56 @@ +# (c) 2014, Toshio Kuratomi +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +''' +Compat six library. 
RHEL7 has python-six 1.3.0 which is too old +''' +# The following makes it easier for us to script updates of the bundled code +_BUNDLED_METADATA = { "pypi_name": "six", "version": "1.10.0" } + +import os.path + +try: + import six as _system_six + print('unsocks', _system_six) +except ImportError, e: + print('cocks', e) + _system_six = None + +if _system_six: + # If we need some things from even newer versions of six, then we need to + # use our bundled copy instead + + if ( # Added in six-1.8.0 + not hasattr(_system_six.moves, 'shlex_quote') or + # Added in six-1.4.0 + not hasattr(_system_six, 'byte2int') or + not hasattr(_system_six, 'add_metaclass') or + not hasattr(_system_six.moves, 'urllib') + ): + + _system_six = False + +if _system_six: + six = _system_six +else: + from . import _six as six +six_py_file = '{0}.py'.format(os.path.splitext(six.__file__)[0]) +exec(open(six_py_file, 'rb').read()) diff --git a/tests/data/six_brokenpkg/_six.py b/tests/data/six_brokenpkg/_six.py new file mode 100644 index 00000000..190c0239 --- /dev/null +++ b/tests/data/six_brokenpkg/_six.py @@ -0,0 +1,868 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2015 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.10.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. 
+ try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." 
+ fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" 
if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + 
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", 
"moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + 
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = 
_urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + 
_meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, 
**kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = 
getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. 
+ class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. 
Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/tests/data/webproject/manage.py b/tests/data/webproject/manage.py new file mode 100755 index 00000000..92a4a08f --- /dev/null +++ b/tests/data/webproject/manage.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python +import os +import sys + +if __name__ == "__main__": + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webproject.settings") + + from django.core.management import execute_from_command_line + + execute_from_command_line(sys.argv) diff --git a/tests/data/webproject/serve_django_app.py b/tests/data/webproject/serve_django_app.py new file mode 100644 index 00000000..31eae9ad --- /dev/null +++ b/tests/data/webproject/serve_django_app.py @@ -0,0 +1,37 @@ + +import os +import sys + +import mitogen +import mitogen.master +import mitogen.utils + + +import sys +sys.path.insert(0, '..') + + +def serve_django_app(settings_name): + os.listdir = lambda path: [] + + os.environ['DJANGO_SETTINGS_MODULE'] = settings_name + import django + args = ['manage.py', 'runserver', '0:9191', '--noreload'] + from django.conf import settings + #settings.configure() + django.setup() + from django.core.management.commands import runserver + runserver.Command().run_from_argv(args) + #django.core.management.execute_from_command_line(args) + + +def main(broker): + import logging + mitogen.utils.log_to_file(level=logging.INFO, io=False) + context = mitogen.master.connect(broker) + context.call(os.chdir, '/') + #context.call(mitogen.utils.log_to_file, '/tmp/log') + context.call(serve_django_app, 'webproject.settings') + +if __name__ == '__main__' and not mitogen.slave: + 
mitogen.utils.run_with_broker(main) diff --git a/tests/data/webproject/webapp/__init__.py b/tests/data/webproject/webapp/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/data/webproject/webapp/admin.py b/tests/data/webproject/webapp/admin.py new file mode 100644 index 00000000..8c38f3f3 --- /dev/null +++ b/tests/data/webproject/webapp/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. diff --git a/tests/data/webproject/webapp/apps.py b/tests/data/webproject/webapp/apps.py new file mode 100644 index 00000000..62712810 --- /dev/null +++ b/tests/data/webproject/webapp/apps.py @@ -0,0 +1,7 @@ +from __future__ import unicode_literals + +from django.apps import AppConfig + + +class WebappConfig(AppConfig): + name = 'webapp' diff --git a/tests/data/webproject/webapp/migrations/__init__.py b/tests/data/webproject/webapp/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/data/webproject/webapp/models.py b/tests/data/webproject/webapp/models.py new file mode 100644 index 00000000..bd4b2abe --- /dev/null +++ b/tests/data/webproject/webapp/models.py @@ -0,0 +1,5 @@ +from __future__ import unicode_literals + +from django.db import models + +# Create your models here. diff --git a/tests/data/webproject/webapp/tests.py b/tests/data/webproject/webapp/tests.py new file mode 100644 index 00000000..7ce503c2 --- /dev/null +++ b/tests/data/webproject/webapp/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. 
diff --git a/tests/data/webproject/webapp/views.py b/tests/data/webproject/webapp/views.py new file mode 100644 index 00000000..d0f9cc49 --- /dev/null +++ b/tests/data/webproject/webapp/views.py @@ -0,0 +1,9 @@ +from django.http import HTTPResponse + + +def index(request): + return HTTPResponse('hello, world') + + +def shutdown(request): + raise KeyboardInterrupt diff --git a/tests/data/webproject/webproject/__init__.py b/tests/data/webproject/webproject/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/data/webproject/webproject/settings.py b/tests/data/webproject/webproject/settings.py new file mode 100644 index 00000000..18f1e725 --- /dev/null +++ b/tests/data/webproject/webproject/settings.py @@ -0,0 +1,122 @@ +""" +Django settings for webproject project. + +Generated by 'django-admin startproject' using Django 1.9.1. + +For more information on this file, see +https://docs.djangoproject.com/en/1.9/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/1.9/ref/settings/ +""" + +import os + +# Build paths inside the project like this: os.path.join(BASE_DIR, ...) +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = 'db)-3@sbmu5d91tpio#h*9=iew@12-n1bh-de!xbrb0e4la(9n' + +# SECURITY WARNING: don't run with debug turned on in production! 
+DEBUG = True + +ALLOWED_HOSTS = [] +AUTH_PASSWORD_VALIDATORS = ['cats'] + + +# Application definition + +INSTALLED_APPS = [ + 'django.contrib.admin', + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.staticfiles', +] + +MIDDLEWARE_CLASSES = [ + 'django.middleware.security.SecurityMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', +] + +ROOT_URLCONF = 'webproject.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] + +WSGI_APPLICATION = 'webproject.wsgi.application' + + +# Database +# https://docs.djangoproject.com/en/1.9/ref/settings/#databases + +DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': '' + } +} + + +# Password validation +# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + }, +] + + +# Internationalization +# https://docs.djangoproject.com/en/1.9/topics/i18n/ 
+ +LANGUAGE_CODE = 'en-us' + +TIME_ZONE = 'UTC' + +USE_I18N = True + +USE_L10N = True + +USE_TZ = True + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/1.9/howto/static-files/ + +STATIC_URL = '/static/' diff --git a/tests/data/webproject/webproject/urls.py b/tests/data/webproject/webproject/urls.py new file mode 100644 index 00000000..f4b8e560 --- /dev/null +++ b/tests/data/webproject/webproject/urls.py @@ -0,0 +1,24 @@ +"""webproject URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/1.9/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.conf.urls import url, include + 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) +""" +from django.conf.urls import url +from django.contrib import admin +import webapp.views + +urlpatterns = [ + url(r'^$', webapp.views.index), + url(r'^shutdown/$', webapp.views.shutdown), + url(r'^admin/', admin.site.urls), +] diff --git a/tests/data/webproject/webproject/wsgi.py b/tests/data/webproject/webproject/wsgi.py new file mode 100644 index 00000000..2f67662a --- /dev/null +++ b/tests/data/webproject/webproject/wsgi.py @@ -0,0 +1,16 @@ +""" +WSGI config for webproject project. + +It exposes the WSGI callable as a module-level variable named ``application``. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webproject.settings") + +application = get_wsgi_application() diff --git a/tests/importer_test.py b/tests/importer_test.py new file mode 100644 index 00000000..d4ca941d --- /dev/null +++ b/tests/importer_test.py @@ -0,0 +1,127 @@ + +import email.utils +import sys +import types +import unittest +import zlib + +import mock +import pytest + +import mitogen.core +import testlib + + +class ImporterMixin(object): + modname = None + + def setUp(self): + super(ImporterMixin, self).setUp() + self.context = mock.Mock() + self.importer = mitogen.core.Importer(self.context) + + def tearDown(self): + sys.modules.pop(self.modname, None) + super(ImporterMixin, self).tearDown() + + +class LoadModuleTest(ImporterMixin, unittest.TestCase): + data = zlib.compress("data = 1\n\n") + path = 'fake_module.py' + modname = 'fake_module' + response = (None, path, data) + + def test_no_such_module(self): + self.context.enqueue_await_reply.return_value = None + self.assertRaises(ImportError, + lambda: self.importer.load_module(self.modname)) + + def test_module_added_to_sys_modules(self): + self.context.enqueue_await_reply.return_value = self.response + mod = self.importer.load_module(self.modname) + self.assertTrue(sys.modules[self.modname] is mod) + self.assertTrue(isinstance(mod, types.ModuleType)) + + def test_module_file_set(self): + self.context.enqueue_await_reply.return_value = self.response + mod = self.importer.load_module(self.modname) + self.assertEquals(mod.__file__, 'master:' + self.path) + + def test_module_loader_set(self): + self.context.enqueue_await_reply.return_value = self.response + mod = self.importer.load_module(self.modname) + self.assertTrue(mod.__loader__ is self.importer) + + def test_module_package_unset(self): + 
self.context.enqueue_await_reply.return_value = self.response + mod = self.importer.load_module(self.modname) + self.assertTrue(mod.__package__ is None) + + +class LoadSubmoduleTest(ImporterMixin, unittest.TestCase): + data = zlib.compress("data = 1\n\n") + path = 'fake_module.py' + modname = 'mypkg.fake_module' + response = (None, path, data) + + def test_module_package_unset(self): + self.context.enqueue_await_reply.return_value = self.response + mod = self.importer.load_module(self.modname) + self.assertEquals(mod.__package__, 'mypkg') + + +class LoadModulePackageTest(ImporterMixin, unittest.TestCase): + data = zlib.compress("func = lambda: 1\n\n") + path = 'fake_pkg/__init__.py' + modname = 'fake_pkg' + response = ([], path, data) + + def test_module_file_set(self): + self.context.enqueue_await_reply.return_value = self.response + mod = self.importer.load_module(self.modname) + self.assertEquals(mod.__file__, 'master:' + self.path) + + def test_get_filename(self): + self.context.enqueue_await_reply.return_value = self.response + mod = self.importer.load_module(self.modname) + filename = mod.__loader__.get_filename(self.modname) + self.assertEquals('master:fake_pkg/__init__.py', filename) + + def test_get_source(self): + self.context.enqueue_await_reply.return_value = self.response + mod = self.importer.load_module(self.modname) + source = mod.__loader__.get_source(self.modname) + self.assertEquals(source, zlib.decompress(self.data)) + + def test_module_loader_set(self): + self.context.enqueue_await_reply.return_value = self.response + mod = self.importer.load_module(self.modname) + self.assertTrue(mod.__loader__ is self.importer) + + def test_module_path_present(self): + self.context.enqueue_await_reply.return_value = self.response + mod = self.importer.load_module(self.modname) + self.assertEquals(mod.__path__, []) + + def test_module_package_set(self): + self.context.enqueue_await_reply.return_value = self.response + mod = 
self.importer.load_module(self.modname) + self.assertEquals(mod.__package__, self.modname) + + def test_module_data(self): + self.context.enqueue_await_reply.return_value = self.response + mod = self.importer.load_module(self.modname) + self.assertTrue(isinstance(mod.func, types.FunctionType)) + self.assertEquals(mod.func.__module__, self.modname) + + +class EmailParseAddrSysTest(testlib.BrokerMixin, unittest.TestCase): + @pytest.fixture(autouse=True) + def initdir(self, caplog): + self.caplog = caplog + + def test_sys_module_not_fetched(self): + # An old version of core.Importer would request the email.sys module + # while executing email.utils.parseaddr(). Ensure this needless + # roundtrip has not reappeared. + pass diff --git a/tests/responder_test.py b/tests/responder_test.py new file mode 100644 index 00000000..6e6aede2 --- /dev/null +++ b/tests/responder_test.py @@ -0,0 +1,67 @@ + +import mock +import subprocess +import unittest +import sys + +import mitogen.master +import testlib + +import plain_old_module +import simple_pkg.a + + +class GoodModulesTest(testlib.BrokerMixin, unittest.TestCase): + def test_plain_old_module(self): + # The simplest case: a top-level module with no interesting imports or + # package machinery damage. + context = mitogen.master.connect(self.broker) + self.assertEquals(256, context.call(plain_old_module.pow, 2, 8)) + + def test_simple_pkg(self): + # Ensure success of a simple package containing two submodules, one of + # which imports the other. + context = mitogen.master.connect(self.broker) + self.assertEquals(3, + context.call(simple_pkg.a.subtract_one_add_two, 2)) + + def test_self_contained_program(self): + # Ensure a program composed of a single script can be imported + # successfully. 
+ args = [sys.executable, testlib.data_path('self_contained_program.py')] + output = subprocess.check_output(args) + self.assertEquals(output, "['__main__', 50]\n") + + +class BrokenModulesTest(unittest.TestCase): + def test_obviously_missing(self): + # Ensure we don't crash in the case of a module legitimately being + # unavailable. Should never happen in the real world. + + context = mock.Mock() + responder = mitogen.master.ModuleResponder(context) + responder.get_module((50, 'non_existent_module')) + self.assertEquals(1, len(context.enqueue.mock_calls)) + + call = context.enqueue.mock_calls[0] + reply_to, data = call[1] + self.assertEquals(50, reply_to) + self.assertTrue(data is None) + + def test_ansible_six_messed_up_path(self): + # The copy of six.py shipped with Ansible appears in a package whose + # __path__ subsequently ends up empty, which prevents pkgutil from + # finding its submodules. After ansible.compat.six is initialized in + # the parent, attempts to execute six/__init__.py on the slave will + # cause an attempt to request ansible.compat.six._six from the master. 
+ import six_brokenpkg + + context = mock.Mock() + responder = mitogen.master.ModuleResponder(context) + responder.get_module((50, 'six_brokenpkg._six')) + self.assertEquals(1, len(context.enqueue.mock_calls)) + + call = context.enqueue.mock_calls[0] + reply_to, data = call[1] + self.assertEquals(50, reply_to) + self.assertTrue(isinstance(data, tuple)) diff --git a/tests/ssh_test.py b/tests/ssh_test.py new file mode 100644 index 00000000..dac4bda0 --- /dev/null +++ b/tests/ssh_test.py @@ -0,0 +1,25 @@ + +import unittest + +import mitogen +import mitogen.master +import mitogen.ssh +import mitogen.utils + +import testlib + + +def add(x, y): + return x + y + + +class SshTest(unittest.TestCase): + def test_okay(self): + @mitogen.utils.run_with_broker + def test(broker): + context = mitogen.ssh.connect(broker, + hostname='hostname', + ssh_path=testlib.data_path('fakessh.py')) + context.call(mitogen.utils.log_to_file, '/tmp/log') + context.call(mitogen.utils.disable_site_packages) + self.assertEquals(3, context.call(add, 1, 2)) diff --git a/tests/testlib.py b/tests/testlib.py new file mode 100644 index 00000000..dcde32e4 --- /dev/null +++ b/tests/testlib.py @@ -0,0 +1,34 @@ + +import os +import sys +import unittest + +import mock + +import mitogen.master + + +DATA_DIR = os.path.join(os.path.dirname(__file__), 'data') +sys.path.append(DATA_DIR) + + +def set_debug(): + import logging + logging.getLogger('mitogen').setLevel(logging.DEBUG) + + +def data_path(suffix): + return os.path.join(DATA_DIR, suffix) + + +class BrokerMixin(object): + broker_class = mitogen.master.Broker + + def setUp(self): + super(BrokerMixin, self).setUp() + self.broker = self.broker_class() + + def tearDown(self): + self.broker.shutdown() + self.broker.join() + super(BrokerMixin, self).tearDown() diff --git a/tests/timing_test.py b/tests/timing_test.py new file mode 100644 index 00000000..36bb9da7 --- /dev/null +++ b/tests/timing_test.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python + +import socket 
+import time +import unittest + +import mitogen.master +import mitogen.utils + + +@mitogen.utils.with_broker +def do_stuff(broker): + context = mitogen.master.connect(broker) + t0 = time.time() + ncalls = 1000 + for x in xrange(ncalls): + context.call(socket.gethostname) + return (1e6 * (time.time() - t0)) / ncalls + + +class LocalContextTimingTest(unittest.TestCase): + def test_timing(self): + self.assertLess(do_stuff(), 1000) diff --git a/tests/utils_test.py b/tests/utils_test.py new file mode 100644 index 00000000..6fc25e8c --- /dev/null +++ b/tests/utils_test.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python + +import unittest + +import mitogen.master +import mitogen.utils + + +def func0(broker): + return broker + + +@mitogen.utils.with_broker +def func(broker): + return broker + + +class RunWithBrokerTest(unittest.TestCase): + # test_shutdown_on_exception + # test_shutdown_on_success + + def test_run_with_broker(self): + broker = mitogen.utils.run_with_broker(func0) + self.assertTrue(isinstance(broker, mitogen.master.Broker)) + self.assertFalse(broker._thread.isAlive()) + + +class WithBrokerTest(unittest.TestCase): + def test_with_broker(self): + broker = func() + self.assertTrue(isinstance(broker, mitogen.master.Broker)) + self.assertFalse(broker._thread.isAlive()) diff --git a/todo/six-module-import-error.py b/todo/six-module-import-error.py new file mode 100644 index 00000000..3e0e3b71 --- /dev/null +++ b/todo/six-module-import-error.py @@ -0,0 +1,332 @@ +[11:46:20 Eldil!8 mitogen] py.test tests/ssh_test.py +=============================================================================== test session starts ================================================================================ +platform darwin -- Python 2.7.10, pytest-2.8.6, py-1.4.31, pluggy-0.3.1 +rootdir: /Users/dmw/src/mitogen, inifile: +plugins: capturelog-0.7, timeout-1.0.0 +collected 1 items + +tests/ssh_test.py F + 
+===================================================================================== FAILURES ===================================================================================== +________________________________________________________________________________ SshTest.test_okay _________________________________________________________________________________ + +self = + + def test_okay(self): +> @mitogen.utils.run_with_broker + def test(broker): + +tests/ssh_test.py:18: +_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ +mitogen/utils.py:52: in run_with_broker + return func(broker, *args, **kwargs) +tests/ssh_test.py:25: in test + self.assertEquals(3, context.call(add, 1, 2)) +mitogen/master.py:319: in call + return self.call_with_deadline(None, False, fn, *args, **kwargs) +_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + +self = Context('hostname', 'hostname'), deadline = None, with_context = False, fn = , args = (1, 2), kwargs = {}, klass = None +call = (False, 'ssh_test', None, 'add', (1, 2), {}) +result = CallError('call failed: __builtin__.str: call failed: exceptions.KeyError: \'p...ern/__init__.py", line 43, in load_module\n mod = sys.modules[extant]\n\n',) + + def call_with_deadline(self, deadline, with_context, fn, *args, **kwargs): + """Invoke `fn([context,] *args, **kwargs)` in the external context. + + If `with_context` is ``True``, pass its + :py:class:`ExternalContext ` instance as + the first parameter. + + If `deadline` is not ``None``, expire the call after `deadline` + seconds. 
If `deadline` is ``None``, the invocation may block + indefinitely.""" + LOG.debug('%r.call_with_deadline(%r, %r, %r, *%r, **%r)', + self, deadline, with_context, fn, args, kwargs) + + if isinstance(fn, types.MethodType) and \ + isinstance(fn.im_self, (type, types.ClassType)): + klass = fn.im_self.__name__ + else: + klass = None + + call = (with_context, fn.__module__, klass, fn.__name__, args, kwargs) + result = self.enqueue_await_reply(mitogen.core.CALL_FUNCTION, + deadline, call) + if isinstance(result, mitogen.core.CallError): +> raise result +E CallError: call failed: __builtin__.str: call failed: exceptions.KeyError: 'pkg_resources._vendor.six.moves.' +E File "", line 862, in _dispatch_calls +E File "", line 220, in load_module +E File "master:/Users/dmw/src/mitogen/tests/ssh_test.py", line 9, in +E import testlib +E File "", line 220, in load_module +E File "master:/Users/dmw/src/mitogen/tests/testlib.py", line 6, in +E import mock +E File "", line 220, in load_module +E File "master:/Users/dmw/.venv/lib/python2.7/site-packages/mock/__init__.py", line 2, in +E import mock.mock as _mock +E File "", line 220, in load_module +E File "master:/Users/dmw/.venv/lib/python2.7/site-packages/mock/mock.py", line 69, in +E from pbr.version import VersionInfo +E File "", line 220, in load_module +E File "master:/Users/dmw/.venv/lib/python2.7/site-packages/pbr/version.py", line 25, in +E import pkg_resources +E File "", line 220, in load_module +E File "master:/Users/dmw/.venv/lib/python2.7/site-packages/pkg_resources/__init__.py", line 49, in +E from pkg_resources.extern.six.moves import urllib, map, filter +E File "", line 190, in find_module +E File "master:/Users/dmw/.venv/lib/python2.7/site-packages/pkg_resources/extern/__init__.py", line 43, in load_module +E mod = sys.modules[extant] + +mitogen/master.py:314: CallError +----------------------------------------------------------------------------------- Captured log 
----------------------------------------------------------------------------------- +master.py 265 DEBUG Stream(Context('hostname', 'hostname')).connect() +master.py 67 DEBUG create_child() child 41405 fd 12, parent 41402, args ('/Users/dmw/src/mitogen/tests/data/fakessh.py', 'hostname', " 'python'", " '-c'", ' \'exec("aW1wb3J0IG9zLHN5cyx6bGliClIsVz1vcy5waXBlKCkKaWYgb3MuZm9yaygpOgoJb3MuZHVwMigwLDEwMCkKCW9zLmR1cDIoUiwwKQoJb3MuY2xvc2UoUikKCW9zLmNsb3NlKFcpCglvcy5leGVjdihzeXMuZXhlY3V0YWJsZSxbJ2Vjb250ZXh0OmRtd0BFbGRpbC5ob21lOjQxNDAyJ10pCmVsc2U6Cglvcy5mZG9wZW4oVywnd2InLDApLndyaXRlKHpsaWIuZGVjb21wcmVzcyhzeXMuc3RkaW4ucmVhZChpbnB1dCgpKSkpCglwcmludCgnT0snKQoJc3lzLmV4aXQoMCk=".decode("base64"))\'') +master.py 270 DEBUG Stream(Context('hostname', 'hostname')).connect(): child process stdin/stdout=13 +core.py 679 DEBUG Broker().register(Context('hostname', 'hostname')) -> r= w= +master.py 302 DEBUG Context('hostname', 'hostname').call_with_deadline(None, False, , *('/tmp/log',), **{}) +core.py 516 DEBUG Context('hostname', 'hostname').enqueue_await_reply(101, None, (False, 'mitogen.utils', None, 'log_to_file', ('/tmp/log',), {})) -> reply handle 1000 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1000, 'mitogen.utils')) +master.py 100 DEBUG pkgutil.find_loader('mitogen.utils') -> +master.py 170 DEBUG _get_module_via_pkgutil found 'mitogen.utils': ('/Users/dmw/src/mitogen/mitogen/utils.py', .., False) +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1001, 'mitogen.master')) +master.py 100 DEBUG pkgutil.find_loader('mitogen.master') -> +master.py 170 DEBUG _get_module_via_pkgutil found 'mitogen.master': ('/Users/dmw/src/mitogen/mitogen/master.py', .., False) +master.py 302 DEBUG Context('hostname', 'hostname').call_with_deadline(None, False, , *(), **{}) +core.py 516 DEBUG Context('hostname', 'hostname').enqueue_await_reply(101, None, (False, 'mitogen.utils', None, 'disable_site_packages', (), {})) -> reply 
handle 1001 +master.py 84 DEBUG mitogen: _dispatch_calls((1001, False, 'mitogen.utils', None, 'disable_site_packages', (), {})) +master.py 302 DEBUG Context('hostname', 'hostname').call_with_deadline(None, False, , *(1, 2), **{}) +core.py 516 DEBUG Context('hostname', 'hostname').enqueue_await_reply(101, None, (False, 'ssh_test', None, 'add', (1, 2), {})) -> reply handle 1002 +master.py 84 DEBUG mitogen: _dispatch_calls((1002, False, 'ssh_test', None, 'add', (1, 2), {})) +master.py 84 DEBUG mitogen: Importer().find_module('ssh_test') +master.py 84 DEBUG mitogen: find_module('ssh_test') returning self +master.py 84 DEBUG mitogen: Importer.load_module('ssh_test') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('ssh_test',)) -> reply handle 1002 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1002, 'ssh_test')) +master.py 100 DEBUG pkgutil.find_loader('ssh_test') -> <_pytest.assertion.rewrite.AssertionRewritingHook object at 0x10222b5d0> +master.py 170 DEBUG _get_module_via_sys_modules found 'ssh_test': ('/Users/dmw/src/mitogen/tests/ssh_test.py', .., False) +master.py 84 DEBUG mitogen: Importer().find_module('unittest') +master.py 84 DEBUG mitogen: Importer(): 'unittest' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('mitogen.ssh') +master.py 84 DEBUG mitogen: find_module('mitogen.ssh') returning self +master.py 84 DEBUG mitogen: Importer.load_module('mitogen.ssh') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('mitogen.ssh',)) -> reply handle 1003 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1003, 'mitogen.ssh')) +master.py 100 DEBUG pkgutil.find_loader('mitogen.ssh') -> +master.py 170 DEBUG _get_module_via_pkgutil found 'mitogen.ssh': ('/Users/dmw/src/mitogen/mitogen/ssh.py', .., False) +master.py 84 DEBUG mitogen: Importer().find_module('mitogen.commands') +master.py 84 DEBUG mitogen: Importer(): master 
doesn't know 'mitogen.commands' +master.py 84 DEBUG mitogen: Importer().find_module('commands') +master.py 84 DEBUG mitogen: Importer(): 'commands' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('testlib') +master.py 84 DEBUG mitogen: find_module('testlib') returning self +master.py 84 DEBUG mitogen: Importer.load_module('testlib') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('testlib',)) -> reply handle 1004 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1004, 'testlib')) +master.py 100 DEBUG pkgutil.find_loader('testlib') -> +master.py 170 DEBUG _get_module_via_pkgutil found 'testlib': ('/Users/dmw/src/mitogen/tests/testlib.py', .., False) +master.py 84 DEBUG mitogen: Importer().find_module('mock') +master.py 84 DEBUG mitogen: find_module('mock') returning self +master.py 84 DEBUG mitogen: Importer.load_module('mock') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('mock',)) -> reply handle 1005 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1005, 'mock')) +master.py 100 DEBUG pkgutil.find_loader('mock') -> +master.py 170 DEBUG _get_module_via_pkgutil found 'mock': ('/Users/dmw/.venv/lib/python2.7/site-packages/mock/__init__.py', .., True) +master.py 174 DEBUG get_child_modules('/Users/dmw/.venv/lib/python2.7/site-packages/mock/__init__.py', 'mock') -> ['mock.mock', 'mock.tests'] +master.py 84 DEBUG mitogen: Importer().find_module('mock.mock') +master.py 84 DEBUG mitogen: find_module('mock.mock') returning self +master.py 84 DEBUG mitogen: Importer.load_module('mock.mock') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('mock.mock',)) -> reply handle 1006 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1006, 'mock.mock')) +master.py 100 DEBUG pkgutil.find_loader('mock.mock') -> +master.py 170 DEBUG _get_module_via_pkgutil found 'mock.mock': 
('/Users/dmw/.venv/lib/python2.7/site-packages/mock/mock.py', .., False) +master.py 84 DEBUG mitogen: Importer().find_module('builtins') +master.py 84 DEBUG mitogen: find_module('builtins') returning self +master.py 84 DEBUG mitogen: Importer.load_module('builtins') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('builtins',)) -> reply handle 1007 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1007, 'builtins')) +master.py 100 DEBUG pkgutil.find_loader('builtins') -> None +master.py 116 DEBUG 'builtins' does not appear in sys.modules +master.py 182 DEBUG While importing 'builtins' +Traceback (most recent call last): + File "/Users/dmw/src/mitogen/mitogen/master.py", line 167, in get_module + raise ImportError('could not find %r' % (fullname,)) +ImportError: could not find 'builtins' +master.py 84 DEBUG mitogen: Importer().find_module('six') +master.py 84 DEBUG mitogen: find_module('six') returning self +master.py 84 DEBUG mitogen: Importer.load_module('six') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('six',)) -> reply handle 1008 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1008, 'six')) +master.py 100 DEBUG pkgutil.find_loader('six') -> +master.py 170 DEBUG _get_module_via_pkgutil found 'six': ('/Users/dmw/.venv/lib/python2.7/site-packages/six.py', .., False) +master.py 84 DEBUG mitogen: Importer().find_module('pbr') +master.py 84 DEBUG mitogen: find_module('pbr') returning self +master.py 84 DEBUG mitogen: Importer.load_module('pbr') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('pbr',)) -> reply handle 1009 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1009, 'pbr')) +master.py 100 DEBUG pkgutil.find_loader('pbr') -> +master.py 170 DEBUG _get_module_via_pkgutil found 'pbr': ('/Users/dmw/.venv/lib/python2.7/site-packages/pbr/__init__.py', .., True) +master.py 
174 DEBUG get_child_modules('/Users/dmw/.venv/lib/python2.7/site-packages/pbr/__init__.py', 'pbr') -> ['pbr.builddoc', 'pbr.cmd', 'pbr.core', 'pbr.extra_files', 'pbr.find_package', 'pbr.git', 'pbr.hooks', 'pbr.options', 'pbr.packaging', 'pbr.pbr_json', 'pbr.testr_command', 'pbr.tests', 'pbr.util', 'pbr.version'] +master.py 84 DEBUG mitogen: Importer().find_module('pbr.version') +master.py 84 DEBUG mitogen: find_module('pbr.version') returning self +master.py 84 DEBUG mitogen: Importer.load_module('pbr.version') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('pbr.version',)) -> reply handle 1010 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1010, 'pbr.version')) +master.py 100 DEBUG pkgutil.find_loader('pbr.version') -> +master.py 170 DEBUG _get_module_via_pkgutil found 'pbr.version': ('/Users/dmw/.venv/lib/python2.7/site-packages/pbr/version.py', .., False) +master.py 84 DEBUG mitogen: Importer().find_module('pbr.itertools') +master.py 84 DEBUG mitogen: Importer(): master doesn't know 'pbr.itertools' +master.py 84 DEBUG mitogen: Importer().find_module('pbr.operator') +master.py 84 DEBUG mitogen: Importer(): master doesn't know 'pbr.operator' +master.py 84 DEBUG mitogen: Importer().find_module('pbr.sys') +master.py 84 DEBUG mitogen: Importer(): master doesn't know 'pbr.sys' +master.py 84 DEBUG mitogen: Importer().find_module('pbr.pkg_resources') +master.py 84 DEBUG mitogen: Importer(): master doesn't know 'pbr.pkg_resources' +master.py 84 DEBUG mitogen: Importer().find_module('pkg_resources') +master.py 84 DEBUG mitogen: find_module('pkg_resources') returning self +master.py 84 DEBUG mitogen: Importer.load_module('pkg_resources') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('pkg_resources',)) -> reply handle 1011 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1011, 'pkg_resources')) +master.py 100 DEBUG 
pkgutil.find_loader('pkg_resources') -> +master.py 170 DEBUG _get_module_via_pkgutil found 'pkg_resources': ('/Users/dmw/.venv/lib/python2.7/site-packages/pkg_resources/__init__.py', .., True) +master.py 174 DEBUG get_child_modules('/Users/dmw/.venv/lib/python2.7/site-packages/pkg_resources/__init__.py', 'pkg_resources') -> ['pkg_resources._vendor', 'pkg_resources.extern'] +master.py 84 DEBUG mitogen: Importer().find_module('io') +master.py 84 DEBUG mitogen: Importer(): 'io' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('zipfile') +master.py 84 DEBUG mitogen: Importer(): 'zipfile' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('symbol') +master.py 84 DEBUG mitogen: Importer(): 'symbol' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('platform') +master.py 84 DEBUG mitogen: Importer(): 'platform' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('plistlib') +master.py 84 DEBUG mitogen: Importer(): 'plistlib' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('email') +master.py 84 DEBUG mitogen: Importer(): 'email' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('email.parser') +master.py 84 DEBUG mitogen: Importer(): 'email.parser' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.warnings') +master.py 84 DEBUG mitogen: Importer(): 'email.warnings' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.cStringIO') +master.py 84 DEBUG mitogen: Importer(): 'email.cStringIO' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.feedparser') +master.py 84 DEBUG mitogen: Importer(): 'email.feedparser' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.re') +master.py 84 DEBUG mitogen: Importer(): 'email.re' is submodule of a package 
we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.errors') +master.py 84 DEBUG mitogen: Importer(): 'email.errors' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.message') +master.py 84 DEBUG mitogen: Importer(): 'email.message' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.uu') +master.py 84 DEBUG mitogen: Importer(): 'email.uu' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('uu') +master.py 84 DEBUG mitogen: Importer(): 'uu' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('email.binascii') +master.py 84 DEBUG mitogen: Importer(): 'email.binascii' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.charset') +master.py 84 DEBUG mitogen: Importer(): 'email.charset' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.codecs') +master.py 84 DEBUG mitogen: Importer(): 'email.codecs' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.base64mime') +master.py 84 DEBUG mitogen: Importer(): 'email.base64mime' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.utils') +master.py 84 DEBUG mitogen: Importer(): 'email.utils' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.os') +master.py 84 DEBUG mitogen: Importer(): 'email.os' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.time') +master.py 84 DEBUG mitogen: Importer(): 'email.time' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.base64') +master.py 84 DEBUG mitogen: Importer(): 'email.base64' is submodule of a package we did not load +master.py 84 DEBUG mitogen: 
Importer().find_module('base64') +master.py 84 DEBUG mitogen: Importer(): 'base64' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('email.random') +master.py 84 DEBUG mitogen: Importer(): 'email.random' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.socket') +master.py 84 DEBUG mitogen: Importer(): 'email.socket' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.urllib') +master.py 84 DEBUG mitogen: Importer(): 'email.urllib' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('urllib') +master.py 84 DEBUG mitogen: Importer(): 'urllib' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('email._parseaddr') +master.py 84 DEBUG mitogen: Importer(): 'email._parseaddr' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.calendar') +master.py 84 DEBUG mitogen: Importer(): 'email.calendar' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('calendar') +master.py 84 DEBUG mitogen: Importer(): 'calendar' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('email.quopri') +master.py 84 DEBUG mitogen: Importer(): 'email.quopri' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('quopri') +master.py 84 DEBUG mitogen: Importer(): 'quopri' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('email.encoders') +master.py 84 DEBUG mitogen: Importer(): 'email.encoders' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.quoprimime') +master.py 84 DEBUG mitogen: Importer(): 'email.quoprimime' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.string') +master.py 84 DEBUG mitogen: Importer(): 'email.string' is submodule of 
a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('email.iterators') +master.py 84 DEBUG mitogen: Importer(): 'email.iterators' is submodule of a package we did not load +master.py 84 DEBUG mitogen: Importer().find_module('tempfile') +master.py 84 DEBUG mitogen: Importer(): 'tempfile' is available locally +master.py 84 DEBUG mitogen: Importer().find_module('_imp') +master.py 84 DEBUG mitogen: find_module('_imp') returning self +master.py 84 DEBUG mitogen: Importer.load_module('_imp') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('_imp',)) -> reply handle 1012 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1012, '_imp')) +master.py 100 DEBUG pkgutil.find_loader('_imp') -> None +master.py 116 DEBUG '_imp' does not appear in sys.modules +master.py 182 DEBUG While importing '_imp' +Traceback (most recent call last): + File "/Users/dmw/src/mitogen/mitogen/master.py", line 167, in get_module + raise ImportError('could not find %r' % (fullname,)) +ImportError: could not find '_imp' +master.py 84 DEBUG mitogen: Importer().find_module('pkg_resources.extern') +master.py 84 DEBUG mitogen: find_module('pkg_resources.extern') returning self +master.py 84 DEBUG mitogen: Importer.load_module('pkg_resources.extern') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('pkg_resources.extern',)) -> reply handle 1013 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1013, 'pkg_resources.extern')) +master.py 100 DEBUG pkgutil.find_loader('pkg_resources.extern') -> +master.py 170 DEBUG _get_module_via_pkgutil found 'pkg_resources.extern': ('/Users/dmw/.venv/lib/python2.7/site-packages/pkg_resources/extern/__init__.py', .., True) +master.py 174 DEBUG get_child_modules('/Users/dmw/.venv/lib/python2.7/site-packages/pkg_resources/extern/__init__.py', 'pkg_resources.extern') -> [] +master.py 84 DEBUG mitogen: 
Importer().find_module('pkg_resources.extern.sys') +master.py 84 DEBUG mitogen: Importer(): master doesn't know 'pkg_resources.extern.sys' +master.py 84 DEBUG mitogen: Importer().find_module('pkg_resources.extern.six') +master.py 84 DEBUG mitogen: Importer(): master doesn't know 'pkg_resources.extern.six' +master.py 84 DEBUG mitogen: Importer().find_module('pkg_resources._vendor') +master.py 84 DEBUG mitogen: find_module('pkg_resources._vendor') returning self +master.py 84 DEBUG mitogen: Importer.load_module('pkg_resources._vendor') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('pkg_resources._vendor',)) -> reply handle 1014 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1014, 'pkg_resources._vendor')) +master.py 100 DEBUG pkgutil.find_loader('pkg_resources._vendor') -> +master.py 170 DEBUG _get_module_via_pkgutil found 'pkg_resources._vendor': ('/Users/dmw/.venv/lib/python2.7/site-packages/pkg_resources/_vendor/__init__.py', .., True) +master.py 174 DEBUG get_child_modules('/Users/dmw/.venv/lib/python2.7/site-packages/pkg_resources/_vendor/__init__.py', 'pkg_resources._vendor') -> ['pkg_resources._vendor.packaging', 'pkg_resources._vendor.six'] +master.py 84 DEBUG mitogen: Importer().find_module('pkg_resources._vendor.six') +master.py 84 DEBUG mitogen: find_module('pkg_resources._vendor.six') returning self +master.py 84 DEBUG mitogen: Importer.load_module('pkg_resources._vendor.six') +master.py 84 DEBUG mitogen: Context('master').enqueue_await_reply(100, None, ('pkg_resources._vendor.six',)) -> reply handle 1015 +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module((1015, 'pkg_resources._vendor.six')) +master.py 100 DEBUG pkgutil.find_loader('pkg_resources._vendor.six') -> +master.py 170 DEBUG _get_module_via_pkgutil found 'pkg_resources._vendor.six': ('/Users/dmw/.venv/lib/python2.7/site-packages/pkg_resources/_vendor/six.py', .., False) +master.py 84 DEBUG mitogen: 
Importer().find_module('pkg_resources.extern.six.moves') +core.py 760 DEBUG Broker().shutdown() +master.py 202 DEBUG Stream(Context('hostname', 'hostname')) closing CALL_FUNCTION channel +core.py 337 DEBUG Waker(Broker()).on_shutdown() +core.py 330 DEBUG Waker(Broker()).on_disconnect() +master.py 84 DEBUG mitogen: Waker(Broker()).on_shutdown() +master.py 84 DEBUG mitogen: Waker(Broker()).on_disconnect() +master.py 84 DEBUG mitogen: .on_shutdown() +master.py 84 DEBUG mitogen: .on_shutdown() +master.py 84 DEBUG mitogen: Stream(Context('master')).on_shutdown(Broker()) +master.py 84 DEBUG mitogen: ExternalContext.main() normal exit +master.py 84 DEBUG mitogen: Broker().shutdown() +master.py 84 DEBUG mitogen: .on_receive() +master.py 84 DEBUG mitogen: .on_disconnect() +master.py 84 DEBUG mitogen: .on_receive() +master.py 84 DEBUG mitogen: .on_disconnect() +core.py 330 DEBUG Stream(Context('hostname', 'hostname')).on_disconnect() +core.py 486 DEBUG Context('hostname', 'hostname').on_shutdown(Broker()) +core.py 488 DEBUG Context('hostname', 'hostname').on_disconnect(): killing 100: +master.py 153 DEBUG ModuleResponder(Context('hostname', 'hostname')).get_module() +core.py 488 DEBUG Context('hostname', 'hostname').on_disconnect(): killing 102: > +------------------------------------------------------------------------------ Captured stdout setup ------------------------------------------------------------------------------- +[] +=================================================================== 1 failed, 1 pytest-warnings in 0.57 seconds ==================================================================== +[22:43:16 Eldil!8 mitogen] n