From 65d5844d1ab5acde100ed7f2730a3ed2608837c3 Mon Sep 17 00:00:00 2001
From: David Wilson
Date: Thu, 7 Sep 2017 16:38:05 +0530
Subject: [PATCH] Tidy up docs.

---
 docs/api.rst         |  27 ++++++-----
 docs/howitworks.rst  |  10 ++---
 econtext/__init__.py |   2 +-
 econtext/core.py     |   7 +++
 econtext/fakessh.py  | 105 ++++++++++++++++++++++++++++---------------
 econtext/master.py   |   2 +-
 6 files changed, 99 insertions(+), 54 deletions(-)

diff --git a/docs/api.rst b/docs/api.rst
index accfe275..859ada97 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -12,6 +12,10 @@ econtext Package
 
 .. automodule:: econtext
 
+.. autodata:: econtext.slave
+.. autodata:: econtext.context_id
+.. autodata:: econtext.parent_id
+
 
 econtext.core
 -------------
@@ -25,13 +29,20 @@ econtext.master
 
 .. automodule:: econtext.master
 
+econtext.fakessh
+----------------
 
-Context Factories
-=================
+.. automodule:: econtext.fakessh
 
-.. autofunction:: econtext.master.connect
-.. autofunction:: econtext.ssh.connect
-.. autofunction:: econtext.sudo.connect
+.. autofunction:: econtext.fakessh.run_with_fake_ssh
+
+
+Router Class
+============
+
+.. autoclass:: econtext.master.Router
+    :members:
+    :inherited-members:
 
 
 Broker Class
@@ -64,12 +75,6 @@ Context Class
     :members:
 
 
-Detecting A Slave
-=================
-
-.. autodata:: econtext.slave
-
-
 Utility Functions
 =================
 
diff --git a/docs/howitworks.rst b/docs/howitworks.rst
index cd6d69b4..45bf3cb4 100644
--- a/docs/howitworks.rst
+++ b/docs/howitworks.rst
@@ -101,11 +101,11 @@ Preserving The `econtext.core` Source
 #####################################
 
 One final trick is implemented in the first stage: after bootstrapping the new
-slave, it writes a duplicate copy of the `econtext.core` source it just used to
-bootstrap it back into another pipe connected to the slave. The slave's module
-importer cache is initialized with a copy of the source, so that subsequent
-bootstraps of slave-of-slaves do not require the source to be fetched from the
-master a second time.
+slave, it writes a duplicate copy of the :py:mod:`econtext.core` source it just
+used to bootstrap it back into another pipe connected to the slave. The slave's
+module importer cache is initialized with a copy of the source, so that
+subsequent bootstraps of slave-of-slaves do not require the source to be
+fetched from the master a second time.
 
 
 Signalling Success
diff --git a/econtext/__init__.py b/econtext/__init__.py
index 517068e9..d9481b00 100644
--- a/econtext/__init__.py
+++ b/econtext/__init__.py
@@ -24,7 +24,7 @@ slave = False
 
 
 #: This is ``0`` in a master, otherwise it is a master-generated ID unique to
-#: the slave context.
+#: the slave context used for message routing.
 context_id = 0
 
 
diff --git a/econtext/core.py b/econtext/core.py
index bb51a099..c6ecefb5 100644
--- a/econtext/core.py
+++ b/econtext/core.py
@@ -835,6 +835,13 @@ class Router(object):
             stream.send(msg)
 
     def route(self, msg):
+        """
+        Arrange for the :py:class:`Message` `msg` to be delivered to its
+        destination using any relevant downstream context or, if none is found,
+        by forwarding the message upstream towards the master context. If `msg`
+        is destined for the local context, it is dispatched using the handles
+        registered with :py:meth:`add_handler`.
+        """
         self.broker.on_thread(self._route, msg)
 
 
diff --git a/econtext/fakessh.py b/econtext/fakessh.py
index 22e0a2b5..d89a901f 100644
--- a/econtext/fakessh.py
+++ b/econtext/fakessh.py
@@ -1,43 +1,61 @@
 """
-fakessh is a stream implementation that starts a local subprocess, substituting
-one of the user-supplied arguments with the name of a "fake SSH command". When
-invoked, this command passes its arguments back into the host context, and
-begins tunnelling stdio from the child back to the configured target host.
-
-This allows tools like rsync and scp to be invoked as subprocesses and reuse
-the connections and tunnels already established by the host program to connect
-to a target machine, without wasteful redundant SSH connection setup, 3-way
-handshakes, or firewall hopping configurations.
-
-The fake SSH command source is written to a temporary file on disk, and
-consists of a copy of the econtext.core source code (just like any other child
-context), with a line appended to cause it to connect back to the host process
-over an FD it inherits.
+fakessh is a stream implementation that starts a local subprocess with its
+environment modified such that ``PATH`` searches for `ssh` return an econtext
+implementation of the SSH command. When invoked, this tool arranges for the
+command line supplied by the calling program to be executed in a context
+already established by the master process, reusing the master's (possibly
+proxied) connection to that context.
+
+This allows tools like `rsync` and `scp` to transparently reuse the connections
+and tunnels already established by the host program to connect to a target
+machine, without wasteful redundant SSH connection setup, 3-way handshakes,
+or firewall hopping configurations, and enables these tools to be used in
+impossible scenarios, such as over `sudo` with ``requiretty`` enabled.
+
+The fake `ssh` command source is written to a temporary file on disk, and
+consists of a copy of the :py:mod:`econtext.core` source code (just like any
+other child context), with a line appended to cause it to connect back to the
+host process over an FD it inherits. As there is no reliance on an existing
+filesystem file, it is possible for child contexts to use fakessh.
 
 As a consequence of connecting back through an inherited FD, only one SSH
-invocation is possible, which is fine for tools like rsync.
-
-
-Start sequence:
-
-    1. fakessh invoked, captures command line.
-    2. _fakessh_main invoked by parent,
-        a. sets up IoPump for stdio, registers stdin_handle for local context
-        b. _start_slave_process invoked in target context, passing stdin_handle
-    3. _start_slave_process invoked in target context,
-        a. the program from the SSH command line is invoked
-        b. sets up IoPump for command line's pipes
-        c. returns (control_handle, stdin_handle) to fakessh_main
-    4. _fakessh_main receives (control_handle, stdin_handle),
+invocation is possible, which is fine for tools like `rsync`, though in future
+this restriction will be lifted.
+
+Sequence:
+
+    1. ``fakessh`` Context and Stream created by parent context. The stream's
+       buffer has a `_fakessh_main()` ``CALL_FUNCTION`` enqueued.
+    2. Target program (`rsync/scp/sftp`) invoked, which internally executes
+       `ssh` from ``PATH``.
+    3. :py:mod:`econtext.core` bootstrap begins, recovers the stream FD
+       inherited via the target program, establishes itself as the fakessh
+       context.
+    4. `_fakessh_main()` ``CALL_FUNCTION`` is read by fakessh context,
+        a. sets up :py:class:`econtext.fakessh.IoPump` for stdio, registers
+           stdin_handle for local context.
+        b. Enqueues ``CALL_FUNCTION`` for `_start_slave()` invoked in target context,
+            i. the program from the `ssh` command line is started
+            ii. sets up :py:class:`econtext.fakessh.IoPump` for `ssh` command
+                line process's stdio pipes
+            iii. returns `(control_handle, stdin_handle)` to `_fakessh_main()`
+    5. `_fakessh_main()` receives control/stdin handles from `_start_slave()`,
         a. registers remote's stdin_handle with local IoPump
-        b. sends ("start", local_stdin_handle) to control_handle
-        c. registers local IoPump with broker
+        b. sends `("start", local_stdin_handle)` to remote's control_handle
+        c. registers local IoPump with Broker
         d. loops waiting for 'local stdout closed && remote stdout closed'
-    5. _start_slave_process control channel receives ("start", stdin_handle),
+    6. `_start_slave()` control channel receives `("start", stdin_handle)`,
         a. registers remote's stdin_handle with local IoPump
-        b. registers local IoPump with broker
+        b. registers local IoPump with Broker
         c. loops waiting for 'local stdout closed && remote stdout closed'
 
+Future:
+
+1. Allow multiple invocations of fake SSH command.
+2. Name the fakessh context after its PID (dep: 1).
+3. Allow originating context to abort the pipeline gracefully.
+4. Investigate alternative approach of embedding econtext bootstrap command as
+   an explicit parameter to rsync/scp/sftp, allowing temp file to be avoided.
 """
 
 import getopt
@@ -170,14 +188,15 @@ class Process(object):
         import time
         time.sleep(3)
 
-def _start_slave_process(econtext_, src_id, args):
+
+def _start_slave(econtext_, src_id, args):
     """
     This runs in the target context, it is invoked by _fakessh_main running in
     the fakessh context immediately after startup. It starts the slave process
     (the the point where it has a stdin_handle to target but not stdout_chan to
-    write to), and waits for main to
+    write to), and waits for the 'start' message from _fakessh_main().
     """
-    LOG.debug('_start_slave_process(%r, %r)', econtext_, args)
+    LOG.debug('_start_slave(%r, %r)', econtext_, args)
 
     proc = subprocess.Popen(args,
                             stdin=subprocess.PIPE,
@@ -247,7 +266,7 @@ def _fakessh_main(econtext_, dest_context_id):
 
     dest = econtext.master.Context(econtext_.router, dest_context_id)
     control_handle, stdin_handle = dest.call_with_deadline(None, True,
-        _start_slave_process, econtext.context_id, args)
+        _start_slave, econtext.context_id, args)
 
     LOG.debug('_fakessh_main: received control_handle=%r, stdin_handle=%r',
               control_handle, stdin_handle)
@@ -266,6 +285,20 @@ def _fakessh_main(econtext_, dest_context_id):
 #
 
 def run_with_fake_ssh(dest, router, args, deadline=None):
+    """
+    Run the command specified by the argument vector `args` with ``PATH``
+    arranged so that its searches for `ssh` find a fake SSH command; any
+    attempt by the command to execute a program over SSH is instead redirected
+    via econtext to execute that program in the context `dest`.
+
+    :param econtext.core.Context dest:
+        The destination context to execute the SSH command line in.
+
+    :param econtext.core.Router router:
+        Router to construct the fakessh context on.
+    :param list[str] args:
+        Command line arguments for the local program, e.g. ``['rsync', '/tmp', 'remote:/tmp']``
+    """
     context_id = router.context_id_counter.next()
     fakessh = econtext.master.Context(router, context_id)
     fakessh.name = 'fakessh'
diff --git a/econtext/master.py b/econtext/master.py
index 61176b26..53d13681 100644
--- a/econtext/master.py
+++ b/econtext/master.py
@@ -293,7 +293,7 @@ class ModuleForwarder(object):
         econtext.core.Message(
             data=msg.data,
             handle=econtext.core.GET_MODULE,
-            reply_to=self.parent_context.add_handler(
+            reply_to=self.router.add_handler(
                 lambda m: self._on_got_source(m, msg),
                 persist=False
             )
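
A minimal usage sketch of the new public entry point follows, written against
only the `run_with_fake_ssh(dest, router, args, deadline=None)` signature and
docstring added above. The `router` and `dest` objects are assumed to already
exist (an `econtext.master.Router` and an established target context); how they
are constructed is outside the scope of this patch::

    import econtext.fakessh

    # `router` owns the connection to the target; `dest` is an already
    # established context for the target machine. rsync invokes `ssh`
    # internally, and fakessh redirects that invocation so the remote half of
    # the transfer executes inside `dest`, reusing the existing connection.
    econtext.fakessh.run_with_fake_ssh(dest, router,
                                       ['rsync', '/tmp', 'remote:/tmp'])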
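
For readers unfamiliar with the interception trick the module docstring
describes, the sketch below shows the general mechanism in isolation, with a
throwaway shell script standing in for the econtext bootstrap that fakessh
actually writes; nothing below is econtext API. A fake `ssh` is placed in a
temporary directory and prepended to ``PATH``, so the target program's own
`ssh` invocation resolves to it::

    import os
    import subprocess
    import tempfile

    # Write a placeholder fake `ssh` into a temporary directory. The real
    # implementation writes an econtext.core bootstrap here instead and has it
    # connect back over an inherited FD.
    tmpdir = tempfile.mkdtemp()
    fake_ssh = os.path.join(tmpdir, 'ssh')
    with open(fake_ssh, 'w') as fp:
        fp.write('#!/bin/sh\necho "ssh intercepted: $@" >&2\nexit 1\n')
    os.chmod(fake_ssh, 0o755)

    # Run the target program with PATH modified so its `ssh` lookup finds the
    # fake command first.
    env = dict(os.environ)
    env['PATH'] = tmpdir + os.pathsep + env['PATH']
    subprocess.call(['rsync', '/tmp', 'remote:/tmp'], env=env)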