Merge branch 'dmw'

- Build matrix simplification
- Ansible 2.6 support
- New 0-RTT temp dir scheme
- don't generate illegal default remote_name
- fix connection delegation config issues
- /etc/environment emulation
- fix LXD calling wrong commands
David Wilson 6 years ago
commit 9fe0a2fbd3

@ -17,11 +17,21 @@ install:
   - pip install -r dev_requirements.txt

 script:
-  - ${TRAVIS_BUILD_DIR}/.travis/${MODE}_tests.sh
+  - |
+    if [ -f "${TRAVIS_BUILD_DIR}/.travis/${MODE}_tests.sh" ]; then
+      ${TRAVIS_BUILD_DIR}/.travis/${MODE}_tests.sh;
+    else
+      ${TRAVIS_BUILD_DIR}/.travis/${MODE}_tests.py;
+    fi

 services:
   - docker

+# To avoid matrix explosion, just test against oldest->newest and
+# newest->oldest in various configurations.
 matrix:
   include:
     # Mitogen tests.
@ -34,85 +44,32 @@ matrix:
     # 2.6 -> 2.7
     - python: "2.6"
       env: MODE=mitogen DISTRO=centos7
-    # 2.6 -> 2.6
-    - python: "2.6"
-      env: MODE=mitogen DISTRO=centos6
-    # 3.6 -> 2.7
-    - python: "3.6"
-      env: MODE=mitogen DISTRO=debian
+    # 3.6 -> 2.6
+    - python: "3.6"
+      env: MODE=mitogen DISTRO=centos6

     # Debops tests.
-    # 2.4.3.0; 2.7 -> 2.7
-    - python: "2.7"
-      env: MODE=debops_common VER=2.4.3.0
-    # 2.5.5; 2.7 -> 2.7
-    - python: "2.7"
-      env: MODE=debops_common VER=2.6.1
-    # 2.5.5; 3.6 -> 2.7
-    - python: "3.6"
-      env: MODE=debops_common VER=2.6.1
+    # 2.4.6.0; 2.7 -> 2.7
+    - python: "2.7"
+      env: MODE=debops_common VER=2.4.6.0
+    # 2.5.7; 3.6 -> 2.7
+    - python: "3.6"
+      env: MODE=debops_common VER=2.6.2

     # ansible_mitogen tests.
-    # 2.4.3.0; Debian; 2.7 -> 2.7
-    - python: "2.7"
-      env: MODE=ansible VER=2.4.3.0 DISTRO=debian
-    # 2.5.5; Debian; 2.7 -> 2.7
-    - python: "2.7"
-      env: MODE=ansible VER=2.5.5 DISTRO=debian
-    # 2.6.0; Debian; 2.7 -> 2.7
-    - python: "2.7"
-      env: MODE=ansible VER=2.6.0 DISTRO=debian
-    # 2.6.1; Debian; 2.7 -> 2.7
-    - python: "2.7"
-      env: MODE=ansible VER=2.6.1 DISTRO=debian
-    # Centos 7 Python2
-    # Latest
-    - python: "2.6"
-      env: MODE=ansible VER=2.6.1 DISTRO=centos7
-    # Backward Compatibility
-    - python: "2.7"
-      env: MODE=ansible VER=2.5.5 DISTRO=centos7
-    - python: "2.7"
-      env: MODE=ansible VER=2.6.0 DISTRO=centos7
-    - python: "2.7"
-      env: MODE=ansible VER=2.6.1 DISTRO=centos7
-    # Centos 7 Python3
-    - python: "3.6"
-      env: MODE=ansible VER=2.5.5 DISTRO=centos7
-    - python: "3.6"
-      env: MODE=ansible VER=2.6.0 DISTRO=centos7
-    - python: "3.6"
-      env: MODE=ansible VER=2.6.1 DISTRO=centos7
-    # Centos 6 Python2
-    # Latest
-    - python: "2.6"
-      env: MODE=ansible VER=2.6.1 DISTRO=centos6
-    # Backward Compatibility
-    - python: "2.6"
-      env: MODE=ansible VER=2.5.5 DISTRO=centos6
-    - python: "2.6"
-      env: MODE=ansible VER=2.6.0 DISTRO=centos6
-    - python: "2.7"
-      env: MODE=ansible VER=2.6.1 DISTRO=centos6
-    # Centos 6 Python3
-    - python: "3.6"
-      env: MODE=ansible VER=2.5.5 DISTRO=centos6
-    - python: "3.6"
-      env: MODE=ansible VER=2.6.0 DISTRO=centos6
-    - python: "3.6"
-      env: MODE=ansible VER=2.6.1 DISTRO=centos6
+    # 2.6 -> {debian, centos6, centos7}
+    - python: "2.6"
+      env: MODE=ansible VER=2.4.6.0
+    - python: "2.6"
+      env: MODE=ansible VER=2.6.2
+    # 3.6 -> {debian, centos6, centos7}
+    - python: "3.6"
+      env: MODE=ansible VER=2.4.6.0
+    - python: "3.6"
+      env: MODE=ansible VER=2.6.2

-    # Sanity check our tests against vanilla Ansible, they should pass.
-    - python: "2.7"
-      env: MODE=ansible VER=2.5.5 DISTRO=debian STRATEGY=linear
-    - python: "2.7"
-      env: MODE=ansible VER=2.6.0 DISTRO=debian STRATEGY=linear
-    - python: "2.7"
-      env: MODE=ansible VER=2.6.1 DISTRO=debian STRATEGY=linear
+    # Sanity check against vanilla Ansible. One job suffices.
+    - python: "2.7"
+      env: MODE=ansible VER=2.6.2 DISTRO=debian STRATEGY=linear

@ -0,0 +1,65 @@
#!/usr/bin/env python
# Run tests/ansible/all.yml under Ansible and Ansible-Mitogen
import os
import sys
import ci_lib
from ci_lib import run
BASE_PORT = 2201
TESTS_DIR = os.path.join(ci_lib.GIT_ROOT, 'tests/ansible')
HOSTS_DIR = os.path.join(ci_lib.TMP, 'hosts')
with ci_lib.Fold('docker_setup'):
for i, distro in enumerate(ci_lib.DISTROS):
try:
run("docker rm -f target-%s", distro)
except: pass
run("""
docker run
--rm
--detach
--publish 0.0.0.0:%s:22/tcp
--name=target-%s
mitogen/%s-test
""", BASE_PORT + i, distro, distro,)
with ci_lib.Fold('job_setup'):
os.chdir(TESTS_DIR)
os.chmod('../data/docker/mitogen__has_sudo_pubkey.key', int('0600', 8))
# Don't set -U as that will upgrade Paramiko to a non-2.6 compatible version.
run("pip install -q ansible==%s", ci_lib.ANSIBLE_VERSION)
run("mkdir %s", HOSTS_DIR)
run("ln -s %s/common-hosts %s", TESTS_DIR, HOSTS_DIR)
with open(os.path.join(HOSTS_DIR, 'target'), 'w') as fp:
fp.write('[test-targets]\n')
for i, distro in enumerate(ci_lib.DISTROS):
fp.write("target-%s "
"ansible_host=%s "
"ansible_port=%s "
"ansible_user=mitogen__has_sudo_nopw "
"ansible_password=has_sudo_nopw_password"
"\n" % (
distro,
ci_lib.DOCKER_HOSTNAME,
BASE_PORT + i,
))
# Build the binaries.
run("make -C %s", TESTS_DIR)
if not ci_lib.exists_in_path('sshpass'):
run("sudo apt-get update")
run("sudo apt-get install -y sshpass")
with ci_lib.Fold('ansible'):
run('/usr/bin/time ./run_ansible_playbook.sh all.yml -i "%s" %s',
HOSTS_DIR, ' '.join(sys.argv[1:]))

@ -1,64 +0,0 @@
#!/bin/bash -ex
# Run tests/ansible/all.yml under Ansible and Ansible-Mitogen
TRAVIS_BUILD_DIR="${TRAVIS_BUILD_DIR:-`pwd`}"
TMPDIR="/tmp/ansible-tests-$$"
ANSIBLE_VERSION="${VER:-2.6.1}"
export ANSIBLE_STRATEGY="${STRATEGY:-mitogen_linear}"
DISTRO="${DISTRO:-debian}"
export PYTHONPATH="${PYTHONPATH}:${TRAVIS_BUILD_DIR}"
# SSH passes these through to the container when run interactively, causing
# stdout to get messed up with libc warnings.
unset LANG LC_ALL
function on_exit()
{
rm -rf "$TMPDIR"
docker kill target || true
}
trap on_exit EXIT
mkdir "$TMPDIR"
echo travis_fold:start:docker_setup
DOCKER_HOSTNAME="$(python ${TRAVIS_BUILD_DIR}/tests/show_docker_hostname.py)"
docker run \
--rm \
--detach \
--publish 0.0.0.0:2201:22/tcp \
--name=target \
mitogen/${DISTRO}-test
echo travis_fold:end:docker_setup
echo travis_fold:start:job_setup
pip install ansible=="${ANSIBLE_VERSION}"
cd ${TRAVIS_BUILD_DIR}/tests/ansible
chmod go= ${TRAVIS_BUILD_DIR}/tests/data/docker/mitogen__has_sudo_pubkey.key
echo '[test-targets]' > ${TMPDIR}/hosts
echo \
target \
ansible_host=$DOCKER_HOSTNAME \
ansible_port=2201 \
ansible_user=mitogen__has_sudo_nopw \
ansible_password=has_sudo_nopw_password \
>> ${TMPDIR}/hosts
# Build the binaries.
make -C ${TRAVIS_BUILD_DIR}/tests/ansible
[ ! "$(type -p sshpass)" ] && sudo apt install -y sshpass
echo travis_fold:end:job_setup
echo travis_fold:start:ansible
/usr/bin/time ./run_ansible_playbook.sh \
all.yml \
-i "${TMPDIR}/hosts" "$@"
echo travis_fold:end:ansible

@ -0,0 +1,100 @@
from __future__ import absolute_import
from __future__ import print_function
import atexit
import os
import subprocess
import sys
import shlex
import shutil
import tempfile
#
# check_output() monkeypatch cutpasted from testlib.py
#
def subprocess__check_output(*popenargs, **kwargs):
# Missing from 2.6.
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, _ = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise subprocess.CalledProcessError(retcode, cmd)
return output
if not hasattr(subprocess, 'check_output'):
subprocess.check_output = subprocess__check_output
# -----------------
def _argv(s, *args):
if args:
s %= args
return shlex.split(s)
def run(s, *args, **kwargs):
argv = _argv(s, *args)
print('Running: %s' % (argv,))
return subprocess.check_call(argv, **kwargs)
def get_output(s, *args, **kwargs):
argv = _argv(s, *args)
print('Running: %s' % (argv,))
return subprocess.check_output(argv, **kwargs)
def exists_in_path(progname):
return any(os.path.exists(os.path.join(dirname, progname))
for dirname in os.environ['PATH'].split(os.pathsep))
class TempDir(object):
def __init__(self):
self.path = tempfile.mkdtemp(prefix='mitogen_ci_lib')
atexit.register(self.destroy)
def destroy(self, rmtree=shutil.rmtree):
rmtree(self.path)
class Fold(object):
def __init__(self, name):
self.name = name
def __enter__(self):
print('travis_fold:start:%s' % (self.name))
def __exit__(self, _1, _2, _3):
print('')
print('travis_fold:end:%s' % (self.name))
os.environ.setdefault('ANSIBLE_STRATEGY',
os.environ.get('STRATEGY', 'mitogen_linear'))
ANSIBLE_VERSION = os.environ.get('VER', '2.6.2')
GIT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
DISTROS = os.environ.get('DISTROS', 'debian centos6 centos7').split()
TMP = TempDir().path
os.environ['PYTHONDONTWRITEBYTECODE'] = 'x'
os.environ['PYTHONPATH'] = '%s:%s' % (
os.environ.get('PYTHONPATH', ''),
GIT_ROOT
)
DOCKER_HOSTNAME = subprocess.check_output([
sys.executable,
os.path.join(GIT_ROOT, 'tests/show_docker_hostname.py'),
]).decode().strip()
# SSH passes these through to the container when run interactively, causing
# stdout to get messed up with libc warnings.
os.environ.pop('LANG', None)
os.environ.pop('LC_ALL', None)

@ -53,7 +53,28 @@ import ansible_mitogen.target
LOG = logging.getLogger(__name__)
def optional_secret(value):
"""
Wrap `value` in :class:`mitogen.core.Secret` if it is not :data:`None`,
otherwise return :data:`None`.
"""
if value is not None:
return mitogen.core.Secret(value)
def parse_python_path(s):
"""
Given the string set for ansible_python_interpreter, parse it using shell
syntax and return an appropriate argument vector.
"""
if s:
return ansible.utils.shlex.shlex_split(s)
 def _connect_local(spec):
+    """
+    Return ContextService arguments for a local connection.
+    """
     return {
         'method': 'local',
         'kwargs': {
@ -62,12 +83,10 @@ def _connect_local(spec):
     }

-def wrap_or_none(klass, value):
-    if value is not None:
-        return klass(value)

 def _connect_ssh(spec):
+    """
+    Return ContextService arguments for an SSH connection.
+    """
     if C.HOST_KEY_CHECKING:
         check_host_keys = 'enforce'
     else:
@ -79,7 +98,7 @@ def _connect_ssh(spec):
         'check_host_keys': check_host_keys,
         'hostname': spec['remote_addr'],
         'username': spec['remote_user'],
-        'password': wrap_or_none(mitogen.core.Secret, spec['password']),
+        'password': optional_secret(spec['password']),
         'port': spec['port'],
         'python_path': spec['python_path'],
         'identity_file': spec['private_key_file'],
@ -92,6 +111,9 @@ def _connect_ssh(spec):
 def _connect_docker(spec):
+    """
+    Return ContextService arguments for a Docker connection.
+    """
     return {
         'method': 'docker',
         'kwargs': {
@ -104,6 +126,9 @@ def _connect_docker(spec):
 def _connect_jail(spec):
+    """
+    Return ContextService arguments for a FreeBSD jail connection.
+    """
     return {
         'method': 'jail',
         'kwargs': {
@ -116,6 +141,9 @@ def _connect_jail(spec):
 def _connect_lxc(spec):
+    """
+    Return ContextService arguments for an LXC Classic container connection.
+    """
     return {
         'method': 'lxc',
         'kwargs': {
@ -126,11 +154,31 @@ def _connect_lxc(spec):
} }
def _connect_lxd(spec):
"""
Return ContextService arguments for an LXD container connection.
"""
return {
'method': 'lxd',
'kwargs': {
'container': spec['remote_addr'],
'python_path': spec['python_path'],
'connect_timeout': spec['ansible_ssh_timeout'] or spec['timeout'],
}
}
 def _connect_machinectl(spec):
+    """
+    Return ContextService arguments for a machinectl connection.
+    """
     return _connect_setns(dict(spec, mitogen_kind='machinectl'))


 def _connect_setns(spec):
+    """
+    Return ContextService arguments for a mitogen_setns connection.
+    """
     return {
         'method': 'setns',
         'kwargs': {
@ -146,12 +194,15 @@ def _connect_setns(spec):
 def _connect_su(spec):
+    """
+    Return ContextService arguments for su as a become method.
+    """
     return {
         'method': 'su',
         'enable_lru': True,
         'kwargs': {
             'username': spec['become_user'],
-            'password': wrap_or_none(mitogen.core.Secret, spec['become_pass']),
+            'password': optional_secret(spec['become_pass']),
             'python_path': spec['python_path'],
             'su_path': spec['become_exe'],
             'connect_timeout': spec['timeout'],
@ -160,12 +211,15 @@ def _connect_su(spec):
 def _connect_sudo(spec):
+    """
+    Return ContextService arguments for sudo as a become method.
+    """
     return {
         'method': 'sudo',
         'enable_lru': True,
         'kwargs': {
             'username': spec['become_user'],
-            'password': wrap_or_none(mitogen.core.Secret, spec['become_pass']),
+            'password': optional_secret(spec['become_pass']),
             'python_path': spec['python_path'],
             'sudo_path': spec['become_exe'],
             'connect_timeout': spec['timeout'],
@ -175,12 +229,15 @@ def _connect_sudo(spec):
 def _connect_doas(spec):
+    """
+    Return ContextService arguments for doas as a become method.
+    """
     return {
         'method': 'doas',
         'enable_lru': True,
         'kwargs': {
             'username': spec['become_user'],
-            'password': wrap_or_none(mitogen.core.Secret, spec['become_pass']),
+            'password': optional_secret(spec['become_pass']),
             'python_path': spec['python_path'],
             'doas_path': spec['become_exe'],
             'connect_timeout': spec['timeout'],
@ -189,12 +246,14 @@ def _connect_doas(spec):
 def _connect_mitogen_su(spec):
-    # su as a first-class proxied connection, not a become method.
+    """
+    Return ContextService arguments for su as a first class connection.
+    """
     return {
         'method': 'su',
         'kwargs': {
             'username': spec['remote_user'],
-            'password': wrap_or_none(mitogen.core.Secret, spec['password']),
+            'password': optional_secret(spec['password']),
             'python_path': spec['python_path'],
             'su_path': spec['become_exe'],
             'connect_timeout': spec['timeout'],
@ -203,12 +262,14 @@ def _connect_mitogen_su(spec):
 def _connect_mitogen_sudo(spec):
-    # sudo as a first-class proxied connection, not a become method.
+    """
+    Return ContextService arguments for sudo as a first class connection.
+    """
     return {
         'method': 'sudo',
         'kwargs': {
             'username': spec['remote_user'],
-            'password': wrap_or_none(mitogen.core.Secret, spec['password']),
+            'password': optional_secret(spec['password']),
             'python_path': spec['python_path'],
             'sudo_path': spec['become_exe'],
             'connect_timeout': spec['timeout'],
@ -218,12 +279,14 @@ def _connect_mitogen_sudo(spec):
 def _connect_mitogen_doas(spec):
-    # doas as a first-class proxied connection, not a become method.
+    """
+    Return ContextService arguments for doas as a first class connection.
+    """
     return {
         'method': 'doas',
         'kwargs': {
             'username': spec['remote_user'],
-            'password': wrap_or_none(mitogen.core.Secret, spec['password']),
+            'password': optional_secret(spec['password']),
             'python_path': spec['python_path'],
             'doas_path': spec['become_exe'],
             'connect_timeout': spec['timeout'],
@ -231,12 +294,15 @@ def _connect_mitogen_doas(spec):
     }

+#: Mapping of connection method names to functions invoked as `func(spec)`
+#: generating ContextService keyword arguments matching a connection
+#: specification.
 CONNECTION_METHOD = {
     'docker': _connect_docker,
     'jail': _connect_jail,
     'local': _connect_local,
     'lxc': _connect_lxc,
-    'lxd': _connect_lxc,
+    'lxd': _connect_lxd,
     'machinectl': _connect_machinectl,
     'setns': _connect_setns,
     'ssh': _connect_ssh,
@ -249,17 +315,6 @@ CONNECTION_METHOD = {
} }
def parse_python_path(s):
"""
Given the string set for ansible_python_interpeter, parse it using shell
syntax and return an appropriate argument vector.
"""
if not s:
return None
return ansible.utils.shlex.shlex_split(s)
 def config_from_play_context(transport, inventory_name, connection):
     """
     Return a dict representing all important connection configuration, allowing
@ -318,7 +373,7 @@ def config_from_hostvars(transport, inventory_name, connection,
     config = config_from_play_context(transport, inventory_name, connection)
     hostvars = dict(hostvars)
     return dict(config, **{
-        'remote_addr': hostvars.get('ansible_hostname', inventory_name),
+        'remote_addr': hostvars.get('ansible_host', inventory_name),
         'become': bool(become_user),
         'become_user': become_user,
         'become_pass': None,
@ -393,12 +448,17 @@ class Connection(ansible.plugins.connection.ConnectionBase):
     #: Set to 'hostvars' by on_action_run()
     host_vars = None

-    #: Set to '_loader.get_basedir()' by on_action_run().
+    #: Set to '_loader.get_basedir()' by on_action_run(). Used by mitogen_local
+    #: to change the working directory to that of the current playbook,
+    #: matching vanilla Ansible behaviour.
     loader_basedir = None

     #: Set after connection to the target context's home directory.
     home_dir = None

+    #: Set after connection to the target context's temporary directory.
+    _temp_dir = None
def __init__(self, play_context, new_stdin, **kwargs): def __init__(self, play_context, new_stdin, **kwargs):
assert ansible_mitogen.process.MuxProcess.unix_listener_path, ( assert ansible_mitogen.process.MuxProcess.unix_listener_path, (
'Mitogen connection types may only be instantiated ' 'Mitogen connection types may only be instantiated '
@ -415,11 +475,20 @@ class Connection(ansible.plugins.connection.ConnectionBase):
# https://github.com/dw/mitogen/issues/140 # https://github.com/dw/mitogen/issues/140
self.close() self.close()
-    def on_action_run(self, task_vars, loader_basedir):
+    def on_action_run(self, task_vars, delegate_to_hostname, loader_basedir):
         """
         Invoked by ActionModuleMixin to indicate a new task is about to start
         executing. We use the opportunity to grab relevant bits from the
         task-specific data.
+
+        :param dict task_vars:
+            Task variable dictionary.
+        :param str delegate_to_hostname:
+            :data:`None`, or the template-expanded inventory hostname this task
+            is being delegated to. A similar variable exists on PlayContext
+            when ``delegate_to:`` is active, however it is unexpanded.
+        :param str loader_basedir:
+            Loader base directory; see :attr:`loader_basedir`.
         """
self.ansible_ssh_timeout = task_vars.get('ansible_ssh_timeout', self.ansible_ssh_timeout = task_vars.get('ansible_ssh_timeout',
C.DEFAULT_TIMEOUT) C.DEFAULT_TIMEOUT)
@ -433,6 +502,7 @@ class Connection(ansible.plugins.connection.ConnectionBase):
         self.mitogen_ssh_debug_level = task_vars.get('mitogen_ssh_debug_level')
         self.inventory_hostname = task_vars['inventory_hostname']
         self.host_vars = task_vars['hostvars']
+        self.delegate_to_hostname = delegate_to_hostname
         self.loader_basedir = loader_basedir
         self.close(new_task=True)
@ -446,6 +516,10 @@ class Connection(ansible.plugins.connection.ConnectionBase):
return self.context is not None return self.context is not None
     def _config_from_via(self, via_spec):
+        """
+        Produce a dict connection specification given a string `via_spec`, of
+        the form `[become_user@]inventory_hostname`.
+        """
become_user, _, inventory_name = via_spec.rpartition('@') become_user, _, inventory_name = via_spec.rpartition('@')
via_vars = self.host_vars[inventory_name] via_vars = self.host_vars[inventory_name]
if isinstance(via_vars, jinja2.runtime.Undefined): if isinstance(via_vars, jinja2.runtime.Undefined):
@ -492,20 +566,11 @@ class Connection(ansible.plugins.connection.ConnectionBase):
return stack, seen_names return stack, seen_names
-    def _connect(self):
+    def _connect_broker(self):
         """
-        Establish a connection to the master process's UNIX listener socket,
-        constructing a mitogen.master.Router to communicate with the master,
-        and a mitogen.parent.Context to represent it.
-        Depending on the original transport we should emulate, trigger one of
-        the _connect_*() service calls defined above to cause the master
-        process to establish the real connection on our behalf, or return a
-        reference to the existing one.
+        Establish a reference to the Broker, Router and parent context used for
+        connections.
         """
-        if self.connected:
-            return
         if not self.broker:
             self.broker = mitogen.master.Broker()
             self.router, self.parent = mitogen.unix.connect(
@ -513,14 +578,47 @@ class Connection(ansible.plugins.connection.ConnectionBase):
                 broker=self.broker,
             )

-        stack, _ = self._stack_from_config(
-            config_from_play_context(
-                transport=self.transport,
-                inventory_name=self.inventory_hostname,
-                connection=self
-            )
-        )
+    def _config_from_direct_connection(self):
+        """
+        """
+        return config_from_play_context(
+            transport=self.transport,
+            inventory_name=self.inventory_hostname,
+            connection=self
+        )
+
+    def _config_from_delegate_to(self):
+        return config_from_hostvars(
+            transport=self._play_context.connection,
+            inventory_name=self.delegate_to_hostname,
+            connection=self,
+            hostvars=self.host_vars[self._play_context.delegate_to],
+            become_user=(self._play_context.become_user
+                         if self._play_context.become
+                         else None),
+        )
+
+    def _build_stack(self):
+        """
+        Construct a list of dictionaries representing the connection
+        configuration between the controller and the target. This is
+        additionally used by the integration tests "mitogen_get_stack" action
+        to fetch the would-be connection configuration.
+        """
+        if self.delegate_to_hostname is not None:
+            target_config = self._config_from_delegate_to()
+        else:
+            target_config = self._config_from_direct_connection()
+        stack, _ = self._stack_from_config(target_config)
+        return stack
+
+    def _connect_stack(self, stack):
+        """
+        Pass `stack` to ContextService, requesting a copy of the context object
+        representing the target. If no connection exists yet, ContextService
+        will establish it before returning it or throwing an error.
+        """
         dct = self.parent.call_service(
             service_name='ansible_mitogen.services.ContextService',
             method_name='get',
@ -540,6 +638,29 @@ class Connection(ansible.plugins.connection.ConnectionBase):
         self.fork_context = dct['init_child_result']['fork_context']
         self.home_dir = dct['init_child_result']['home_dir']
+        self._temp_dir = dct['init_child_result']['temp_dir']
def get_temp_dir(self):
self._connect()
return self._temp_dir
def _connect(self):
"""
Establish a connection to the master process's UNIX listener socket,
constructing a mitogen.master.Router to communicate with the master,
and a mitogen.parent.Context to represent it.
Depending on the original transport we should emulate, trigger one of
the _connect_*() service calls defined above to cause the master
process to establish the real connection on our behalf, or return a
reference to the existing one.
"""
if self.connected:
return
self._connect_broker()
stack = self._build_stack()
self._connect_stack(stack)
    def close(self, new_task=False):
        """

@ -37,10 +37,12 @@ try:
     from ansible.plugins.loader import connection_loader
     from ansible.plugins.loader import module_loader
     from ansible.plugins.loader import module_utils_loader
+    from ansible.plugins.loader import shell_loader
     from ansible.plugins.loader import strategy_loader
 except ImportError:  # Ansible <2.4
     from ansible.plugins import action_loader
     from ansible.plugins import connection_loader
     from ansible.plugins import module_loader
     from ansible.plugins import module_utils_loader
+    from ansible.plugins import shell_loader
     from ansible.plugins import strategy_loader

@ -110,6 +110,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
""" """
self._connection.on_action_run( self._connection.on_action_run(
task_vars=task_vars, task_vars=task_vars,
delegate_to_hostname=self._task.delegate_to,
loader_basedir=self._loader.get_basedir(), loader_basedir=self._loader.get_basedir(),
) )
return super(ActionModuleMixin, self).run(tmp, task_vars) return super(ActionModuleMixin, self).run(tmp, task_vars)
@ -179,47 +180,25 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
""" """
assert False, "_is_pipelining_enabled() should never be called." assert False, "_is_pipelining_enabled() should never be called."
def _get_remote_tmp(self):
"""
Mitogen-only: return the 'remote_tmp' setting.
"""
try:
s = self._connection._shell.get_option('remote_tmp')
except AttributeError:
s = ansible.constants.DEFAULT_REMOTE_TMP # <=2.4.x
return self._remote_expand_user(s, sudoable=False)
def _make_tmp_path(self, remote_user=None): def _make_tmp_path(self, remote_user=None):
""" """
Replace the base implementation's use of shell to implement mkdtemp() Return the temporary directory created by the persistent interpreter at
with an actual call to mkdtemp(). Like vanilla, the directory is always startup.
created in the login account context.
""" """
LOG.debug('_make_tmp_path(remote_user=%r)', remote_user) LOG.debug('_make_tmp_path(remote_user=%r)', remote_user)
# _make_tmp_path() is basically a global stashed away as Shell.tmpdir. # _make_tmp_path() is basically a global stashed away as Shell.tmpdir.
# The copy action plugin violates layering and grabs this attribute self._connection._shell.tmpdir = self._connection.get_temp_dir()
# directly.
self._connection._shell.tmpdir = self._connection.call(
ansible_mitogen.target.make_temp_directory,
base_dir=self._get_remote_tmp(),
use_login_context=True,
)
LOG.debug('Temporary directory: %r', self._connection._shell.tmpdir) LOG.debug('Temporary directory: %r', self._connection._shell.tmpdir)
self._cleanup_remote_tmp = True self._cleanup_remote_tmp = True
return self._connection._shell.tmpdir return self._connection._shell.tmpdir
def _remove_tmp_path(self, tmp_path): def _remove_tmp_path(self, tmp_path):
""" """
Replace the base implementation's invocation of rm -rf with a call to Stub out the base implementation's invocation of rm -rf, replacing it
shutil.rmtree(). with nothing, as the persistent interpreter automatically cleans up
after itself without introducing roundtrips.
""" """
LOG.debug('_remove_tmp_path(%r)', tmp_path) LOG.debug('_remove_tmp_path(%r)', tmp_path)
if tmp_path is None:
tmp_path = self._connection._shell.tmpdir
if self._should_remove_tmp_path(tmp_path):
self.call(shutil.rmtree, tmp_path)
self._connection._shell.tmpdir = None self._connection._shell.tmpdir = None
def _transfer_data(self, remote_path, data): def _transfer_data(self, remote_path, data):
@ -331,7 +310,15 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
         env = {}
         self._compute_environment_string(env)

+        # Always set _ansible_tmpdir regardless of whether _make_tmp_path()
+        # has ever been called. This short-circuits all the .tmpdir logic in
+        # module_common and ensures no second temporary directory or atexit
+        # handler is installed.
         self._connection._connect()
+        if ansible.__version__ > '2.5':
+            module_args['_ansible_tmpdir'] = self._connection.get_temp_dir()
+
         return ansible_mitogen.planner.invoke(
             ansible_mitogen.planner.Invocation(
                 action=self,

@ -55,6 +55,7 @@ import ansible_mitogen.target
 LOG = logging.getLogger(__name__)

 NO_METHOD_MSG = 'Mitogen: no invocation method found for: '
 NO_INTERPRETER_MSG = 'module (%s) is missing interpreter line'
+NO_MODULE_MSG = 'The module %s was not found in configured module paths.'
class Invocation(object): class Invocation(object):
@ -393,6 +394,9 @@ _planners = [
 def get_module_data(name):
     path = ansible_mitogen.loaders.module_loader.find_plugin(name, '')
+    if path is None:
+        raise ansible.errors.AnsibleError(NO_MODULE_MSG % (name,))
+
     with open(path, 'rb') as fp:
         source = fp.read()
     return mitogen.core.to_text(path), source

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -26,6 +26,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path import os.path
import sys import sys

@ -27,6 +27,7 @@
 # POSSIBILITY OF SUCH DAMAGE.

 from __future__ import absolute_import
+import atexit
 import errno
 import logging
 import os
@ -53,6 +54,22 @@ from mitogen.core import b
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
def clean_shutdown(sock):
"""
Shut the write end of `sock`, causing `recv` in the worker process to wake
up with a 0-byte read and initiate mux process exit, then wait for a 0-byte
read from the read end, which will occur after the child closes the
descriptor on exit.
This is done using :mod:`atexit` since Ansible lacks any more sensible hook
to run code during exit, and unless some synchronization exists with
MuxProcess, debug logs may appear on the user's terminal *after* the prompt
has been printed.
"""
sock.shutdown(socket.SHUT_WR)
sock.recv(1)
class MuxProcess(object):
    """
    Implement a subprocess forked from the Ansible top-level, as a safe place
@ -112,6 +129,7 @@ class MuxProcess(object):
         cls.unix_listener_path = mitogen.unix.make_socket_path()
         cls.worker_sock, cls.child_sock = socket.socketpair()
+        atexit.register(lambda: clean_shutdown(cls.worker_sock))
         mitogen.core.set_cloexec(cls.worker_sock.fileno())
         mitogen.core.set_cloexec(cls.child_sock.fileno())
@ -143,7 +161,6 @@ class MuxProcess(object):
         # Let the parent know our listening socket is ready.
         mitogen.core.io_op(self.child_sock.send, b('1'))
-        self.child_sock.send(b('1'))

         # Block until the socket is closed, which happens on parent exit.
         mitogen.core.io_op(self.child_sock.recv, 1)

@ -44,6 +44,7 @@ import imp
 import json
 import logging
 import os
+import shlex
 import sys
 import tempfile
 import types
@ -65,6 +66,9 @@ except ImportError:
# Prevent accidental import of an Ansible module from hanging on stdin read.
import ansible.module_utils.basic
ansible.module_utils.basic._ANSIBLE_ARGS = '{}'
ansible.module_utils.basic.get_module_path = lambda: (
ansible_mitogen.target.temp_dir
)
# For tasks that modify /etc/resolv.conf, non-Debian derivative glibcs cache # For tasks that modify /etc/resolv.conf, non-Debian derivative glibcs cache
# resolv.conf at startup and never implicitly reload it. Cope with that via an # resolv.conf at startup and never implicitly reload it. Cope with that via an
@ -82,6 +86,110 @@ iteritems = getattr(dict, 'iteritems', dict.items)
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
class EnvironmentFileWatcher(object):
"""
Usually Ansible edits to /etc/environment and ~/.pam_environment are
reflected in subsequent tasks if become:true or SSH multiplexing is
disabled, due to sudo and/or SSH reinvoking pam_env. Rather than emulate
existing semantics, do our best to ensure edits are always reflected.
This can't perfectly replicate the existing behaviour, but it can safely
update and remove keys that appear to originate in `path`, and that do not
conflict with any existing environment key inherited from elsewhere.
A more robust future approach may simply be to arrange for the persistent
interpreter to restart when a change is detected.
"""
def __init__(self, path):
self.path = os.path.expanduser(path)
#: Inode data at time of last check.
self._st = self._stat()
#: List of inherited keys appearing to originate from this file.
self._keys = [key for key, value in self._load()
if value == os.environ.get(key)]
LOG.debug('%r installed; existing keys: %r', self, self._keys)
def __repr__(self):
return 'EnvironmentFileWatcher(%r)' % (self.path,)
def _stat(self):
try:
return os.stat(self.path)
except OSError:
return None
def _load(self):
try:
with open(self.path, 'r') as fp:
return list(self._parse(fp))
except IOError:
return []
def _parse(self, fp):
"""
linux-pam-1.3.1/modules/pam_env/pam_env.c#L207
"""
for line in fp:
# ' #export foo=some var ' -> ['#export', 'foo=some var ']
bits = shlex.split(line, comments=True)
if (not bits) or bits[0].startswith('#'):
continue
if bits[0] == 'export':
bits.pop(0)
key, sep, value = (' '.join(bits)).partition('=')
if key and sep:
yield key, value
def _on_file_changed(self):
LOG.debug('%r: file changed, reloading', self)
for key, value in self._load():
if key in os.environ:
LOG.debug('%r: existing key %r=%r exists, not setting %r',
self, key, os.environ[key], value)
else:
LOG.debug('%r: setting key %r to %r', self, key, value)
self._keys.append(key)
os.environ[key] = value
def _remove_existing(self):
"""
When a change is detected, remove keys that existed in the old file.
"""
for key in self._keys:
if key in os.environ:
LOG.debug('%r: removing old key %r', self, key)
del os.environ[key]
self._keys = []
def check(self):
"""
Compare the :func:`os.stat` for the pam_env style environmnt file
`path` with the previous result `old_st`, which may be :data:`None` if
the previous stat attempt failed. Reload its contents if the file has
changed or appeared since last attempt.
:returns:
New :func:`os.stat` result. The new call to :func:`reload_env` should
pass it as the value of `old_st`.
"""
st = self._stat()
if self._st == st:
return
self._st = st
self._remove_existing()
if st is None:
LOG.debug('%r: file has disappeared', self)
else:
self._on_file_changed()
_pam_env_watcher = EnvironmentFileWatcher('~/.pam_environment')
_etc_env_watcher = EnvironmentFileWatcher('/etc/environment')
def utf8(s):
    """
    Coerce an object to bytes if it is Unicode.
@ -154,12 +262,25 @@ class Runner(object):
         from the parent, as :meth:`run` may detach prior to beginning
         execution. The base implementation simply prepares the environment.
         """
+        self._setup_cwd()
+        self._setup_environ()
+
+    def _setup_cwd(self):
+        """
+        For situations like sudo to a non-privileged account, CWD could be
+        $HOME of the old account, which could have mode go=, which means it is
+        impossible to restore the old directory, so don't even try.
+        """
         if self.cwd:
-            # For situations like sudo to another non-privileged account, the
-            # CWD could be $HOME of the old account, which could have mode go=,
-            # which means it is impossible to restore the old directory, so
-            # don't even bother.
             os.chdir(self.cwd)
def _setup_environ(self):
"""
Apply changes from /etc/environment files before creating a
TemporaryEnvironment to snapshot environment state prior to module run.
"""
_pam_env_watcher.check()
_etc_env_watcher.check()
        env = dict(self.extra_env or {})
        if self.env:
            env.update(self.env)
@ -548,6 +669,14 @@ class NewStyleRunner(ScriptRunner):
for fullname in self.module_map['builtin']: for fullname in self.module_map['builtin']:
mitogen.core.import_module(fullname) mitogen.core.import_module(fullname)
def _setup_excepthook(self):
"""
Starting with Ansible 2.6, some modules (file.py) install a
sys.excepthook and never clean it up. So we must preserve the original
excepthook and restore it after the run completes.
"""
self.original_excepthook = sys.excepthook
def setup(self): def setup(self):
super(NewStyleRunner, self).setup() super(NewStyleRunner, self).setup()
@ -561,12 +690,17 @@ class NewStyleRunner(ScriptRunner):
             module_utils=self.module_map['custom'],
         )
         self._setup_imports()
+        self._setup_excepthook()
         if libc__res_init:
             libc__res_init()
def _revert_excepthook(self):
sys.excepthook = self.original_excepthook
     def revert(self):
         self._argv.revert()
         self._stdio.revert()
+        self._revert_excepthook()
         super(NewStyleRunner, self).revert()

     def _get_program_filename(self):
@ -600,6 +734,20 @@ class NewStyleRunner(ScriptRunner):
else: else:
main_module_name = b'__main__' main_module_name = b'__main__'
def _handle_magic_exception(self, mod, exc):
"""
Beginning with Ansible >2.6, some modules (file.py) install a
sys.excepthook which is a closure over AnsibleModule, redirecting the
magical exception to AnsibleModule.fail_json().
For extra special needs bonus points, the class is not defined in
module_utils, but is defined in the module itself, meaning there is no
type for isinstance() that outlasts the invocation.
"""
klass = getattr(mod, 'AnsibleModuleError', None)
if klass and isinstance(exc, klass):
mod.module.fail_json(**exc.results)
def _run(self): def _run(self):
code = self._get_code() code = self._get_code()
@ -615,11 +763,15 @@ class NewStyleRunner(ScriptRunner):
         )

         exc = None
+        try:
             try:
                 if mitogen.core.PY3:
                     exec(code, vars(mod))
                 else:
                     exec('exec code in vars(mod)')
+            except Exception as e:
+                self._handle_magic_exception(mod, e)
+                raise
         except SystemExit as e:
             exc = e

@ -46,14 +46,23 @@ import os.path
 import sys
 import threading

+import ansible.constants
 import mitogen
 import mitogen.service
+import mitogen.utils

+import ansible_mitogen.loaders
 import ansible_mitogen.module_finder
 import ansible_mitogen.target

 LOG = logging.getLogger(__name__)
# Force load of plugin to ensure ConfigManager has definitions loaded. Done
# during module import to ensure a single-threaded environment; PluginLoader
# is not thread-safe.
ansible_mitogen.loaders.shell_loader.get('sh')
if sys.version_info[0] == 3: if sys.version_info[0] == 3:
def reraise(tp, value, tb): def reraise(tp, value, tb):
@ -69,6 +78,17 @@ else:
) )
def _get_candidate_temp_dirs():
options = ansible.constants.config.get_plugin_options('shell', 'sh')
# Pre 2.5 this came from ansible.constants.
remote_tmp = (options.get('remote_tmp') or
ansible.constants.DEFAULT_REMOTE_TMP)
dirs = list(options.get('system_tmpdirs', ('/var/tmp', '/tmp')))
dirs.insert(0, remote_tmp)
return mitogen.utils.cast(dirs)
class Error(Exception): class Error(Exception):
pass pass
@ -252,6 +272,18 @@ class ContextService(mitogen.service.Service):
for fullname in self.ALWAYS_PRELOAD: for fullname in self.ALWAYS_PRELOAD:
self.router.responder.forward_module(context, fullname) self.router.responder.forward_module(context, fullname)
_candidate_temp_dirs = None
def _get_candidate_temp_dirs(self):
"""
Return a list of locations to try to create the single temporary
directory used by the run. This simply caches the (expensive) plugin
load of :func:`_get_candidate_temp_dirs`.
"""
if self._candidate_temp_dirs is None:
self._candidate_temp_dirs = _get_candidate_temp_dirs()
return self._candidate_temp_dirs
def _connect(self, key, spec, via=None): def _connect(self, key, spec, via=None):
""" """
Actual connect implementation. Arranges for the Mitogen connection to Actual connect implementation. Arranges for the Mitogen connection to
@ -298,8 +330,11 @@ class ContextService(mitogen.service.Service):
lambda: self._on_stream_disconnect(stream)) lambda: self._on_stream_disconnect(stream))
self._send_module_forwards(context) self._send_module_forwards(context)
-        init_child_result = context.call(ansible_mitogen.target.init_child,
-            log_level=LOG.getEffectiveLevel())
+        init_child_result = context.call(
+            ansible_mitogen.target.init_child,
+            log_level=LOG.getEffectiveLevel(),
+            candidate_temp_dirs=self._get_candidate_temp_dirs(),
+        )
if os.environ.get('MITOGEN_DUMP_THREAD_STACKS'): if os.environ.get('MITOGEN_DUMP_THREAD_STACKS'):
from mitogen import debug from mitogen import debug

@ -69,14 +69,30 @@ import ansible_mitogen.runner
 LOG = logging.getLogger(__name__)

-#: Set by init_child() to the single temporary directory that will exist for
-#: the duration of the process.
-temp_dir = None
+MAKE_TEMP_FAILED_MSG = (
+    "Unable to find a usable temporary directory. This likely means no\n"
+    "system-supplied TMP directory can be written to, or all directories\n"
+    "were mounted on 'noexec' filesystems.\n"
+    "\n"
+    "The following paths were tried:\n"
+    "    %(paths)s\n"
+    "\n"
+    "Please check '-vvv' output for a log of individual path errors."
+)

 #: Initialized to an econtext.parent.Context pointing at a pristine fork of
 #: the target Python interpreter before it executes any code or imports.
 _fork_parent = None

+#: Set by init_child() to a list of candidate $variable-expanded and
+#: tilde-expanded directory paths that may be usable as a temporary directory.
+_candidate_temp_dirs = None
+
+#: Set by reset_temp_dir() to the single temporary directory that will exist
+#: for the duration of the process.
+temp_dir = None
def get_small_file(context, path): def get_small_file(context, path):
""" """
@ -190,6 +206,53 @@ def _on_broker_shutdown():
prune_tree(temp_dir) prune_tree(temp_dir)
def find_good_temp_dir():
"""
Given a list of candidate temp directories extracted from ``ansible.cfg``
and stored in _candidate_temp_dirs, combine it with the Python-builtin list
of candidate directories used by :mod:`tempfile`, then iteratively try each
in turn until one is found that is both writeable and executable.
"""
paths = [os.path.expandvars(os.path.expanduser(p))
for p in _candidate_temp_dirs]
paths.extend(tempfile._candidate_tempdir_list())
for path in paths:
try:
tmp = tempfile.NamedTemporaryFile(
prefix='ansible_mitogen_find_good_temp_dir',
dir=path,
)
except (OSError, IOError) as e:
LOG.debug('temp dir %r unusable: %s', path, e)
continue
try:
try:
os.chmod(tmp.name, int('0700', 8))
except OSError as e:
LOG.debug('temp dir %r unusable: chmod failed: %s', path, e)
continue
try:
# access(.., X_OK) is sufficient to detect noexec.
if not os.access(tmp.name, os.X_OK):
raise OSError('filesystem appears to be mounted noexec')
except OSError as e:
LOG.debug('temp dir %r unusable: %s', path, e)
continue
LOG.debug('Selected temp directory: %r (from %r)', path, paths)
return path
finally:
tmp.close()
raise IOError(MAKE_TEMP_FAILED_MSG % {
'paths': '\n '.join(paths),
})
@mitogen.core.takes_econtext
def reset_temp_dir(econtext):
    """
@ -204,7 +267,9 @@ def reset_temp_dir(econtext):
""" """
global temp_dir global temp_dir
# https://github.com/dw/mitogen/issues/239 # https://github.com/dw/mitogen/issues/239
temp_dir = tempfile.mkdtemp(prefix='ansible_mitogen_')
basedir = find_good_temp_dir()
temp_dir = tempfile.mkdtemp(prefix='ansible_mitogen_', dir=basedir)
# This must be reinstalled in forked children too, since the Broker # This must be reinstalled in forked children too, since the Broker
# instance from the parent process does not carry over to the new child. # instance from the parent process does not carry over to the new child.
@ -212,7 +277,7 @@ def reset_temp_dir(econtext):
 @mitogen.core.takes_econtext
-def init_child(econtext, log_level):
+def init_child(econtext, log_level, candidate_temp_dirs):
     """
     Called by ContextService immediately after connection; arranges for the
     (presently) spotless Python interpreter to be forked, where the newly
@ -225,6 +290,9 @@ def init_child(econtext, log_level):
:param int log_level: :param int log_level:
Logging package level active in the master. Logging package level active in the master.
:param list[str] candidate_temp_dirs:
List of $variable-expanded and tilde-expanded directory names to add to
candidate list of temporary directories.
:returns: :returns:
Dict like:: Dict like::
@ -238,6 +306,9 @@ def init_child(econtext, log_level):
the controller will use to start forked jobs, and `home_dir` is the the controller will use to start forked jobs, and `home_dir` is the
home directory for the active user account. home directory for the active user account.
""" """
global _candidate_temp_dirs
_candidate_temp_dirs = candidate_temp_dirs
global _fork_parent global _fork_parent
mitogen.parent.upgrade_router(econtext) mitogen.parent.upgrade_router(econtext)
_fork_parent = econtext.router.fork() _fork_parent = econtext.router.fork()
@ -252,6 +323,7 @@ def init_child(econtext, log_level):
     return {
         'fork_context': _fork_parent,
         'home_dir': mitogen.core.to_text(os.path.expanduser('~')),
+        'temp_dir': temp_dir,
     }
@ -416,27 +488,6 @@ def run_module_async(kwargs, job_id, timeout_secs, econtext):
arunner.run() arunner.run()
def make_temp_directory(base_dir):
"""
Handle creation of `base_dir` if it is absent, in addition to a unique
temporary directory within `base_dir`. This is the temporary directory that
becomes 'remote_tmp', not the one used by Ansiballz. It always uses the
system temporary directory.
:returns:
Newly created temporary directory.
"""
# issue #301: remote_tmp may contain $vars.
base_dir = os.path.expandvars(base_dir)
if not os.path.exists(base_dir):
os.makedirs(base_dir, mode=int('0700', 8))
return tempfile.mkdtemp(
dir=base_dir,
prefix='ansible-mitogen-tmp-',
)
def get_user_shell(): def get_user_shell():
""" """
For commands executed directly via an SSH command-line, SSH looks up the For commands executed directly via an SSH command-line, SSH looks up the

@ -271,8 +271,8 @@ command line, or as host and group variables.
 File Transfer
 ~~~~~~~~~~~~~

-Normally `sftp <https://linux.die.net/man/1/sftp>`_ or
-`scp <https://linux.die.net/man/1/scp>`_ are used to copy files by the
+Normally `sftp(1) <https://linux.die.net/man/1/sftp>`_ or
+`scp(1) <https://linux.die.net/man/1/scp>`_ are used to copy files by the
 `assemble <http://docs.ansible.com/ansible/latest/modules/assemble_module.html>`_,
 `copy <http://docs.ansible.com/ansible/latest/modules/copy_module.html>`_,
 `patch <http://docs.ansible.com/ansible/latest/modules/patch_module.html>`_,
@ -302,7 +302,7 @@ to rename over any existing file. This ensures the file remains consistent at
all times, in the event of a crash, or when overlapping `ansible-playbook` runs
deploy differing file contents.
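
As a purely illustrative sketch (not the extension's own implementation), an
atomic replacement of this kind can be written as::

    import os
    import tempfile

    def atomic_write(path, data, mode=0o644):
        # Stage the content in a temporary file in the destination directory,
        # then rename() it over the target so readers never see a partial file.
        dirname = os.path.dirname(path) or '.'
        fd, tmp_path = tempfile.mkstemp(dir=dirname)
        try:
            os.write(fd, data)
            os.fchmod(fd, mode)
        finally:
            os.close(fd)
        os.rename(tmp_path, path)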
-The `sftp <https://linux.die.net/man/1/sftp>`_ and `scp
+The `sftp(1) <https://linux.die.net/man/1/sftp>`_ and `scp(1)
 <https://linux.die.net/man/1/sftp>`_ tools may cause undetected data corruption
 in the form of truncated files, or files containing intermingled data segments
 from overlapping runs. As part of normal operation, both tools expose a window
@ -401,6 +401,129 @@ this precisely, to avoid breaking playbooks that expect text to appear in
specific variables with a particular linefeed style. specific variables with a particular linefeed style.
.. _ansible_tempfiles:
Temporary Files
~~~~~~~~~~~~~~~
Ansible creates a variety of temporary files and directories depending on its
operating mode.
In the best case when pipelining is enabled and no temporary uploads are
required, for each task Ansible will create one directory below a
system-supplied temporary directory returned by :func:`tempfile.mkdtemp`, owned
by the target account a new-style module will execute in.
In other cases depending on the task type, whether become is active, whether
the target become user is privileged, whether the associated action plugin
needs to upload files, and whether the associated module needs to store files,
Ansible may:
* Create a directory owned by the SSH user either under ``remote_tmp``, or a
system-default directory,
* Upload action dependencies such as non-new style modules or rendered
templates to that directory via `sftp(1) <https://linux.die.net/man/1/sftp>`_
or `scp(1) <https://linux.die.net/man/1/scp>`_.
* Attempt to modify the directory's access control list to grant access to the
target user using `setfacl(1) <https://linux.die.net/man/1/setfacl>`_,
requiring that tool to be installed and a supported filesystem to be in use,
or for the ``allow_world_readable_tmpfiles`` setting to be :data:`True`.
* Create a directory owned by the target user either under ``remote_tmp``, or
a system-default directory, if a new-style module needs a temporary directory
and one was not previously created for a supporting file earlier in the
invocation.
In summary, for each task Ansible may create one or more of:
* ``~ssh_user/<remote_tmp>/...`` owned by the login user,
* ``$TMPDIR/ansible-tmp-...`` owned by the login user,
* ``$TMPDIR/ansible-tmp-...`` owned by the login user with ACLs permitting
write access by the become user,
* ``~become_user/<remote_tmp>/...`` owned by the become user,
* ``$TMPDIR/ansible_<modname>_payload_.../`` owned by the become user,
* ``$TMPDIR/ansible-module-tmp-.../`` owned by the become user.
A directory must exist to maintain compatibility with Ansible, as many modules
introspect :data:`sys.argv` to find a directory where they may write files.
However, only one directory exists for the lifetime of each interpreter, its
location is consistent for each target account, and it is always privately
owned by that account.
The paths below are tried until one is found that is writeable and lives on a
filesystem with ``noexec`` disabled:
1. ``$variable`` and tilde-expanded ``remote_tmp`` setting from
``ansible.cfg``
2. ``$variable`` and tilde-expanded ``system_tmpdirs`` setting from
``ansible.cfg``
3. ``TMPDIR`` environment variable
4. ``TEMP`` environment variable
5. ``TMP`` environment variable
6. ``/tmp``
7. ``/var/tmp``
8. ``/usr/tmp``
9. Current working directory
As the directory is created once at startup, and its content is managed by code
running remotely, no additional network roundtrips are required to manage it
for each task requiring temporary storage.
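For illustration only, the selection logic is roughly equivalent to the
following sketch; the helper names and the executable-probe approach are
assumptions, not the extension's actual code:

.. code-block:: python

    import os
    import stat
    import subprocess
    import tempfile

    def is_good_temp_dir(path):
        # Reject candidates that are unwritable, or that live on a filesystem
        # mounted ``noexec``, by trying to execute a tiny probe script.
        if not os.access(path, os.W_OK):
            return False
        fd, probe = tempfile.mkstemp(prefix='ansible_mitogen_probe.', dir=path)
        try:
            os.write(fd, b'#!/bin/sh\n')
            os.fchmod(fd, stat.S_IRWXU)
            os.close(fd)
            return subprocess.call([probe]) == 0
        except OSError:
            return False
        finally:
            os.unlink(probe)

    def find_good_temp_dir(candidates):
        # Return the first usable candidate after variable and tilde expansion.
        for path in candidates:
            path = os.path.expandvars(os.path.expanduser(path))
            if os.path.isdir(path) and is_good_temp_dir(path):
                return path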
.. _ansible_process_env:
Process Environment Emulation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Since Ansible discards processes after each module invocation, follow-up tasks
usually (though not always) start in a fresh environment that reflects any
changes made by previous tasks. As such modifications are common, for
compatibility the extension emulates the existing behaviour as closely as
possible.
Some scenarios exist where emulation is impossible, for example, applying
``nsswitch.conf`` changes when ``nscd`` is not in use. If future scenarios
appear that cannot be solved through emulation, the extension will be updated
to automatically restart affected interpreters instead.
DNS Resolution
^^^^^^^^^^^^^^
Modifications to ``/etc/resolv.conf`` cause the glibc resolver configuration to
be reloaded via `res_init(3) <https://linux.die.net/man/3/res_init>`_. This
isn't necessary on some Linux distributions carrying glibc patches to
automatically check ``/etc/resolv.conf`` periodically; however, it is necessary
on at least Debian and BSD derivatives.
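One minimal way to picture the reload from Python is via :mod:`ctypes`; this is
an illustrative sketch rather than the extension's exact implementation:

.. code-block:: python

    import ctypes
    import ctypes.util

    def reload_resolver():
        # glibc exposes res_init(3) as ``__res_init``; fall back to the
        # documented name for other C libraries.
        name = ctypes.util.find_library('c')
        libc = ctypes.CDLL(name if name else None, use_errno=True)
        for symbol in ('__res_init', 'res_init'):
            func = getattr(libc, symbol, None)
            if func is not None:
                return func() == 0
        return False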
``/etc/environment``
^^^^^^^^^^^^^^^^^^^^
When ``become: true`` is active or SSH multiplexing is disabled, modifications
by previous tasks to ``/etc/environment`` and ``$HOME/.pam_environment`` are
normally reflected, since the content of those files is reapplied by `PAM
<https://en.wikipedia.org/wiki/Pluggable_authentication_module>`_ via `pam_env`
on each authentication of ``sudo`` or ``sshd``.
Both files are monitored for changes, and changes are applied where it appears
safe to do so:
* New keys are added if they did not otherwise exist in the inherited
environment, or previously had the same value as found in the file before it
changed.
* Given a key (such as ``http_proxy``) added to the file where no such key
exists in the environment, the key will be added.
* Given a key (such as ``PATH``) where an existing environment key exists with
a different value, the update or deletion will be ignored, as it is likely
the key was overridden elsewhere after `pam_env` ran, such as by
``/etc/profile``.
* Given a key removed from the file that had the same value as the existing
environment key, the key will be removed.
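Expressed as a sketch, the rules amount to the following; the helper name and
the parsed-dictionary inputs are assumptions for illustration:

.. code-block:: python

    def apply_environment_changes(environ, old, new):
        # ``old`` and ``new`` are the file parsed before and after the change;
        # ``environ`` is the interpreter's inherited environment.
        for key, value in new.items():
            if key not in environ or environ[key] == old.get(key):
                environ[key] = value    # new key, or inherited value unmodified
        for key, value in old.items():
            if key not in new and environ.get(key) == value:
                del environ[key]        # removed from file and never overridden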
How Modules Execute How Modules Execute
~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~
@ -569,10 +692,10 @@ additional differences exist that may break existing playbooks.
LXC LXC
~~~ ~~~
Like `lxc <https://docs.ansible.com/ansible/2.6/plugins/connection/lxc.html>`_ Connect to classic LXC containers, like `lxc
and `lxd <https://docs.ansible.com/ansible/2.6/plugins/connection/lxd.html>`_ <https://docs.ansible.com/ansible/2.6/plugins/connection/lxc.html>`_ except
except connection delegation is supported, and ``lxc-attach`` is always used connection delegation is supported, and ``lxc-attach`` is always used rather
rather than the LXC Python bindings, as is usual with ``lxc``. than the LXC Python bindings, as is usual with ``lxc``.
The ``lxc-attach`` command must be available on the host machine. The ``lxc-attach`` command must be available on the host machine.
@ -580,6 +703,20 @@ The ``lxc-attach`` command must be available on the host machine.
* ``ansible_host``: Name of LXC container (default: inventory hostname). * ``ansible_host``: Name of LXC container (default: inventory hostname).
.. _method-lxd:
LXD
~~~
Connect to modern LXD containers, like `lxd
<https://docs.ansible.com/ansible/2.6/plugins/connection/lxd.html>`_ except
connection delegation is supported. The ``lxc`` command must be available on
the host machine.
* ``ansible_python_interpreter``
* ``ansible_host``: Name of LXC container (default: inventory hostname).
.. _machinectl: .. _machinectl:
Machinectl Machinectl
@ -602,21 +739,23 @@ Setns
~~~~~ ~~~~~
The ``setns`` method connects to Linux containers via `setns(2) The ``setns`` method connects to Linux containers via `setns(2)
<https://linux.die.net/man/2/setns>`_. Unlike :ref:`method-docker` and <https://linux.die.net/man/2/setns>`_. Unlike :ref:`method-docker`,
:ref:`method-lxc` the namespace transition is handled internally, ensuring :ref:`method-lxc`, and :ref:`method-lxd` the namespace transition is handled
optimal throughput to the child. This is necessary for :ref:`machinectl` where internally, ensuring optimal throughput to the child. This is necessary for
only PTY channels are supported. :ref:`machinectl` where only PTY channels are supported.
A utility program must be installed to discover the PID of the container's root A utility program must be installed to discover the PID of the container's root
process. process.
* ``mitogen_kind``: one of ``docker``, ``lxc`` or ``machinectl``. * ``mitogen_kind``: one of ``docker``, ``lxc``, ``lxd`` or ``machinectl``.
* ``ansible_host``: Name of container as it is known to the corresponding tool * ``ansible_host``: Name of container as it is known to the corresponding tool
(default: inventory hostname). (default: inventory hostname).
* ``ansible_user``: Name of user within the container to execute as. * ``ansible_user``: Name of user within the container to execute as.
* ``mitogen_docker_path``: path to Docker if not available on the system path. * ``mitogen_docker_path``: path to Docker if not available on the system path.
* ``mitogen_lxc_info_path``: path to ``lxc-info`` command if not available as * ``mitogen_lxc_path``: path to LXD's ``lxc`` command if not available as
``/usr/bin/lxc-info``. ``lxc``.
* ``mitogen_lxc_info_path``: path to LXC classic's ``lxc-info`` command if not
available as ``lxc-info``.
* ``mitogen_machinectl_path``: path to ``machinectl`` command if not available * ``mitogen_machinectl_path``: path to ``machinectl`` command if not available
as ``/bin/machinectl``. as ``/bin/machinectl``.

@ -87,10 +87,10 @@ Message Class
.. class:: Message .. class:: Message
Messages are the fundamental unit of communication, comprising the fields Messages are the fundamental unit of communication, comprising fields from
from in the :ref:`stream-protocol` header, an optional reference to the the :ref:`stream-protocol` header, an optional reference to the receiving
receiving :class:`mitogen.core.Router` for ingress messages, and helper :class:`mitogen.core.Router` for ingress messages, and helper methods for
methods for deserialization and generating replies. deserialization and generating replies.
.. attribute:: router .. attribute:: router
@ -238,16 +238,16 @@ Router Class
.. method:: add_handler (fn, handle=None, persist=True, respondent=None, policy=None) .. method:: add_handler (fn, handle=None, persist=True, respondent=None, policy=None)
Invoke `fn(msg)` for each Message sent to `handle` from this context. Invoke `fn(msg)` for each Message sent to `handle` from this context.
Unregister after one invocation if `persist` is ``False``. If `handle` Unregister after one invocation if `persist` is :data:`False`. If
is ``None``, a new handle is allocated and returned. `handle` is :data:`None`, a new handle is allocated and returned.
:param int handle: :param int handle:
If not ``None``, an explicit handle to register, usually one of the If not :data:`None`, an explicit handle to register, usually one of
``mitogen.core.*`` constants. If unspecified, a new unused handle the ``mitogen.core.*`` constants. If unspecified, a new unused
will be allocated. handle will be allocated.
:param bool persist: :param bool persist:
If ``False``, the handler will be unregistered after a single If :data:`False`, the handler will be unregistered after a single
message has been received. message has been received.
:param mitogen.core.Context respondent: :param mitogen.core.Context respondent:
@ -281,7 +281,8 @@ Router Class
sender indicating refusal occurred. sender indicating refusal occurred.
:return: :return:
`handle`, or if `handle` was ``None``, the newly allocated handle. `handle`, or if `handle` was :data:`None`, the newly allocated
handle.
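For example, assuming ``router`` is an existing :class:`Router`; the handler
name is chosen for illustration:

.. code-block:: python

    def on_note(msg):
        print('received: %r' % (msg,))

    # Allocate an unused handle and invoke on_note() for every message sent
    # to it from this context.
    handle = router.add_handler(on_note)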
.. method:: del_handler (handle) .. method:: del_handler (handle)
@ -300,10 +301,10 @@ Router Class
called from the I/O multiplexer thread. called from the I/O multiplexer thread.
:param mitogen.core.Stream stream: :param mitogen.core.Stream stream:
If not ``None``, a reference to the stream the message arrived on. If not :data:`None`, a reference to the stream the message arrived
Used for performing source route verification, to ensure sensitive on. Used for performing source route verification, to ensure
messages such as ``CALL_FUNCTION`` arrive only from trusted sensitive messages such as ``CALL_FUNCTION`` arrive only from
contexts. trusted contexts.
.. method:: route(msg) .. method:: route(msg)
@ -515,8 +516,8 @@ Router Class
otherwise. otherwise.
:param mitogen.core.Context via: :param mitogen.core.Context via:
If not ``None``, arrange for construction to occur via RPCs made to If not :data:`None`, arrange for construction to occur via RPCs
the context `via`, and for :py:data:`ADD_ROUTE made to the context `via`, and for :py:data:`ADD_ROUTE
<mitogen.core.ADD_ROUTE>` messages to be generated as appropriate. <mitogen.core.ADD_ROUTE>` messages to be generated as appropriate.
.. code-block:: python .. code-block:: python
@ -567,7 +568,7 @@ Router Class
:data:`None`, which Docker interprets as ``root``. :data:`None`, which Docker interprets as ``root``.
:param str image: :param str image:
Image tag to use to construct a temporary container. Defaults to Image tag to use to construct a temporary container. Defaults to
``None``. :data:`None`.
:param str docker_path: :param str docker_path:
Filename or complete path to the Docker binary. ``PATH`` will be Filename or complete path to the Docker binary. ``PATH`` will be
searched if given as a filename. Defaults to ``docker``. searched if given as a filename. Defaults to ``docker``.
@ -590,18 +591,31 @@ Router Class
.. method:: lxc (container, lxc_attach_path=None, \**kwargs) .. method:: lxc (container, lxc_attach_path=None, \**kwargs)
Construct a context on the local machine within an LXC container using Construct a context on the local machine within an LXC classic
the ``lxc-attach`` program. container using the ``lxc-attach`` program.
Accepts all parameters accepted by :py:meth:`local`, in addition to: Accepts all parameters accepted by :py:meth:`local`, in addition to:
:param str container: :param str container:
Existing container to connect to. Defaults to ``None``. Existing container to connect to. Defaults to :data:`None`.
:param str lxc_attach_path: :param str lxc_attach_path:
Filename or complete path to the ``lxc-attach`` binary. ``PATH`` Filename or complete path to the ``lxc-attach`` binary. ``PATH``
will be searched if given as a filename. Defaults to will be searched if given as a filename. Defaults to
``lxc-attach``. ``lxc-attach``.
.. method:: lxd (container, lxc_path=None, \**kwargs)
Construct a context on the local machine within an LXD container using
the ``lxc`` program.
Accepts all parameters accepted by :py:meth:`local`, in addition to:
:param str container:
Existing container to connect to. Defaults to :data:`None`.
:param str lxc_path:
Filename or complete path to the ``lxc`` binary. ``PATH`` will be
searched if given as a filename. Defaults to ``lxc``.
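A short usage sketch; the container name is an assumption:

.. code-block:: python

    import os
    import mitogen.master

    broker = mitogen.master.Broker()
    router = mitogen.master.Router(broker)
    try:
        context = router.lxd(container='build1')
        print(context.call(os.getpid))
    finally:
        broker.shutdown()
        broker.join()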
.. method:: setns (container, kind, docker_path=None, lxc_info_path=None, machinectl_path=None, \**kwargs) .. method:: setns (container, kind, docker_path=None, lxc_info_path=None, machinectl_path=None, \**kwargs)
Construct a context in the style of :meth:`local`, but change the Construct a context in the style of :meth:`local`, but change the
@ -609,7 +623,8 @@ Router Class
executing Python. executing Python.
The namespaces to use, and the active root file system are taken from The namespaces to use, and the active root file system are taken from
the root PID of a running Docker, LXC, or systemd-nspawn container. the root PID of a running Docker, LXC, LXD, or systemd-nspawn
container.
A program is required only to find the root PID, after which management A program is required only to find the root PID, after which management
of the child Python interpreter is handled directly. of the child Python interpreter is handled directly.
@ -617,14 +632,16 @@ Router Class
:param str container: :param str container:
Container to connect to. Container to connect to.
:param str kind: :param str kind:
One of ``docker``, ``lxc`` or ``machinectl``. One of ``docker``, ``lxc``, ``lxd`` or ``machinectl``.
:param str docker_path: :param str docker_path:
Filename or complete path to the Docker binary. ``PATH`` will be Filename or complete path to the Docker binary. ``PATH`` will be
searched if given as a filename. Defaults to ``docker``. searched if given as a filename. Defaults to ``docker``.
:param str lxc_path:
Filename or complete path to the LXD ``lxc`` binary. ``PATH`` will
be searched if given as a filename. Defaults to ``lxc``.
:param str lxc_info_path: :param str lxc_info_path:
Filename or complete path to the ``lxc-info`` binary. ``PATH`` Filename or complete path to the LXC ``lxc-info`` binary. ``PATH``
will be searched if given as a filename. Defaults to will be searched if given as a filename. Defaults to ``lxc-info``.
``lxc-info``.
:param str machinectl_path: :param str machinectl_path:
Filename or complete path to the ``machinectl`` binary. ``PATH`` Filename or complete path to the ``machinectl`` binary. ``PATH``
will be searched if given as a filename. Defaults to will be searched if given as a filename. Defaults to
@ -774,7 +791,7 @@ Context Class
handle which is placed in the message's `reply_to`. handle which is placed in the message's `reply_to`.
:param bool persist: :param bool persist:
If ``False``, the handler will be unregistered after a single If :data:`False`, the handler will be unregistered after a single
message has been received. message has been received.
:param mitogen.core.Message msg: :param mitogen.core.Message msg:
@ -793,7 +810,7 @@ Context Class
The message. The message.
:param float deadline: :param float deadline:
If not ``None``, seconds before timing out waiting for a reply. If not :data:`None`, seconds before timing out waiting for a reply.
:raises mitogen.core.TimeoutError: :raises mitogen.core.TimeoutError:
No message was received and `deadline` passed. No message was received and `deadline` passed.
@ -915,8 +932,8 @@ Receiver Class
Router to register the handler on. Router to register the handler on.
:param int handle: :param int handle:
If not ``None``, an explicit handle to register, otherwise an unused If not :data:`None`, an explicit handle to register, otherwise an
handle is chosen. unused handle is chosen.
:param bool persist: :param bool persist:
If :data:`True`, do not unregister the receiver's handler after the If :data:`True`, do not unregister the receiver's handler after the
@ -924,13 +941,13 @@ Receiver Class
:param mitogen.core.Context respondent: :param mitogen.core.Context respondent:
Reference to the context this receiver is receiving from. If not Reference to the context this receiver is receiving from. If not
``None``, arranges for the receiver to receive a dead message if :data:`None`, arranges for the receiver to receive a dead message if
messages can no longer be routed to the context, due to disconnection messages can no longer be routed to the context, due to disconnection
or exit. or exit.
.. attribute:: notify = None .. attribute:: notify = None
If not ``None``, a reference to a function invoked as If not :data:`None`, a reference to a function invoked as
`notify(receiver)` when a new message is delivered to this receiver. `notify(receiver)` when a new message is delivered to this receiver.
Used by :py:class:`mitogen.select.Select` to implement waiting on Used by :py:class:`mitogen.select.Select` to implement waiting on
multiple receivers. multiple receivers.
@ -984,7 +1001,7 @@ Receiver Class
Sleep waiting for a message to arrive on this receiver. Sleep waiting for a message to arrive on this receiver.
:param float timeout: :param float timeout:
If not ``None``, specifies a timeout in seconds. If not :data:`None`, specifies a timeout in seconds.
:raises mitogen.core.ChannelError: :raises mitogen.core.ChannelError:
The remote end indicated the channel should be closed, or The remote end indicated the channel should be closed, or
@ -1167,10 +1184,10 @@ Select Class
message may be posted at any moment between :py:meth:`empty` and message may be posted at any moment between :py:meth:`empty` and
:py:meth:`get`. :py:meth:`get`.
:py:meth:`empty` may return ``False`` even when :py:meth:`get` would :py:meth:`empty` may return :data:`False` even when :py:meth:`get`
block if another thread has drained a receiver added to this select. would block if another thread has drained a receiver added to this
This can be avoided by only consuming each receiver from a single select. This can be avoided by only consuming each receiver from a
thread. single thread.
.. py:method:: __iter__ (self) .. py:method:: __iter__ (self)
@ -1354,8 +1371,8 @@ A random assortment of utility functions useful on masters and children.
variables. See :ref:`logging-env-vars`. variables. See :ref:`logging-env-vars`.
:param str path: :param str path:
If not ``None``, a filesystem path to write logs to. Otherwise, logs If not :data:`None`, a filesystem path to write logs to. Otherwise,
are written to :py:data:`sys.stderr`. logs are written to :py:data:`sys.stderr`.
:param bool io: :param bool io:
If :data:`True`, include extremely verbose IO logs in the output. If :data:`True`, include extremely verbose IO logs in the output.
@ -1395,29 +1412,9 @@ Exceptions
.. currentmodule:: mitogen.core .. currentmodule:: mitogen.core
.. class:: Error (fmt, \*args) .. autoclass:: Error
.. autoclass:: CallError
Base for all exceptions raised by Mitogen. .. autoclass:: ChannelError
.. autoclass:: LatchError
.. class:: CallError (e) .. autoclass:: StreamError
.. autoclass:: TimeoutError
Raised when :py:meth:`Context.call() <mitogen.parent.Context.call>` fails.
A copy of the traceback from the external context is appended to the
exception message.
.. class:: ChannelError (fmt, \*args)
Raised when a channel dies or has been closed.
.. class:: LatchError (fmt, \*args)
Raised when an attempt is made to use a :py:class:`mitogen.core.Latch` that
has been marked closed.
.. class:: StreamError (fmt, \*args)
Raised when a stream cannot be established.
.. class:: TimeoutError (fmt, \*args)
Raised when a timeout occurs on a stream.

@ -27,6 +27,99 @@ Release Notes
* Compatible with development versions of Ansible post https://github.com/ansible/ansible/pull/41749 * Compatible with development versions of Ansible post https://github.com/ansible/ansible/pull/41749
v0.2.3 (2018-08-??)
-------------------
Mitogen for Ansible
~~~~~~~~~~~~~~~~~~~
* `#251 <https://github.com/dw/mitogen/issues/251>`_,
`#340 <https://github.com/dw/mitogen/issues/340>`_: Connection Delegation
could establish connections to the wrong target when ``delegate_to:`` is
present.
* `#291 <https://github.com/dw/mitogen/issues/291>`_: when Mitogen had
previously been installed using ``pip`` or ``setuptools``, the globally
installed version could conflict with a newer version bundled with an
extension that had been installed using the documented steps. Now the bundled
library always overrides any system-installed copy.
* `#321 <https://github.com/dw/mitogen/issues/321>`_: temporary file handling
has been simplified and additional network roundtrips have been removed,
undoing earlier damage caused by compatibility fixes, and improving 2.6
compatibility. One directory is created at startup for each persistent
interpreter. See :ref:`ansible_tempfiles` for a complete description.
* `#324 <https://github.com/dw/mitogen/issues/324>`_: plays with a custom
``module_utils`` would fail due to fallout from the Python 3 port and related
tests being disabled.
* `#331 <https://github.com/dw/mitogen/issues/331>`_: fixed known issue: the
connection multiplexer subprocess always exits before the main Ansible
process exits, ensuring logs generated by it do not overwrite the user's
prompt when ``-vvv`` is enabled.
* `#332 <https://github.com/dw/mitogen/issues/332>`_: support a new
:data:`sys.excepthook`-based module exit mechanism added in Ansible 2.6.
* `#338 <https://github.com/dw/mitogen/issues/338>`_: compatibility: changes to
``/etc/environment`` and ``~/.pam_environment`` made by a task are reflected
in the runtime environment of subsequent tasks. See
:ref:`ansible_process_env` for a complete description.
* `#343 <https://github.com/dw/mitogen/issues/343>`_: the sudo ``--login``
option is supported.
* `#344 <https://github.com/dw/mitogen/issues/344>`_: connections no longer
fail when the parent machine's logged in username contains slashes.
* Due to missing synchronization, runs with many targets redundantly executed
the module dependency scanner, causing significant wasted computation in the
connection multiplexer subprocess. For one real-world playbook the scanner
runtime was reduced by 95%, which may manifest as shorter runs.
* A missing check caused an exception traceback to appear when using the
``ansible`` command-line tool with a missing or misspelled module name.
* Ansible versions newer than 2.6 began importing ``__main__`` from
``ansible.module_utils.basic``, causing an error during execution, since the
controller is configured to refuse network imports outside the
``ansible.*`` namespace. The target implementation now constructs a stub
``__main__`` module to satisfy the otherwise seemingly vestigial import.
Core Library
~~~~~~~~~~~~
* `#313 <https://github.com/dw/mitogen/issues/313>`_:
:meth:`mitogen.parent.Context.call` was documented as capable of accepting
static methods. While possible on Python 2.x the result is very ugly, and in
every case it should be trivially possible to replace with a class method.
The API docs were updated to remove mention of static methods.
* `#339 <https://github.com/dw/mitogen/issues/339>`_: the LXD connection method
was erroneously executing LXC Classic commands.
* Add a :func:`mitogen.fork.on_fork` function to allow non-Mitogen managed
process forks to clean up Mitogen resources in the forked child.
Thanks!
~~~~~~~
Mitogen would not be possible without the support of users. A huge thanks for
the bug reports in this release contributed by
`Alex Russu <https://github.com/alexrussu>`_,
`atoom <https://github.com/atoom>`_,
`Dan Quackenbush <https://github.com/danquack>`_,
`Jesse London <https://github.com/jesteria>`_,
`Luca Nunzi <https://github.com/0xlc>`_,
`Prateek Jain <https://github.com/prateekj201>`_,
`Pierre-Henry Muller <https://github.com/pierrehenrymuller>`_,
`Rick Box <https://github.com/boxrick>`_, and
`Timo Beckers <https://github.com/ti-mo>`_.
v0.2.2 (2018-07-26) v0.2.2 (2018-07-26)
------------------- -------------------
@ -204,12 +297,17 @@ Mitogen for Ansible
for Message(..., 102, ...), my ID is ...* may be visible. These are due to a for Message(..., 102, ...), my ID is ...* may be visible. These are due to a
minor race while initializing logging and can be ignored. minor race while initializing logging and can be ignored.
* When running with ``-vvv``, log messages will be printed to the console .. * When running with ``-vvv``, log messages will be printed to the console
*after* the Ansible run completes, as connection multiplexer shutdown only *after* the Ansible run completes, as connection multiplexer shutdown only
begins after Ansible exits. This is due to a lack of suitable shutdown hook begins after Ansible exits. This is due to a lack of suitable shutdown hook
in Ansible, and is fairly harmless, albeit cosmetically annoying. A future in Ansible, and is fairly harmless, albeit cosmetically annoying. A future
release may include a solution. release may include a solution.
.. * Configurations will break that rely on the `hashbang argument splitting
behaviour <https://github.com/ansible/ansible/issues/15635>`_ of the
``ansible_python_interpreter`` setting, contrary to the Ansible
documentation. This will be addressed in a future 0.2 release.
* Performance does not scale linearly with target count. This requires * Performance does not scale linearly with target count. This requires
significant additional work, as major bottlenecks exist in the surrounding significant additional work, as major bottlenecks exist in the surrounding
Ansible code. Performance-related bug reports for any scenario remain Ansible code. Performance-related bug reports for any scenario remain
@ -237,11 +335,6 @@ Mitogen for Ansible
actions, such as the ``synchronize`` module. This will be addressed in the actions, such as the ``synchronize`` module. This will be addressed in the
0.3 series. 0.3 series.
* Configurations will break that rely on the `hashbang argument splitting
behaviour <https://github.com/ansible/ansible/issues/15635>`_ of the
``ansible_python_interpreter`` setting, contrary to the Ansible
documentation. This will be addressed in a future 0.2 release.
Core Library Core Library
~~~~~~~~~~~~ ~~~~~~~~~~~~

@ -19,7 +19,7 @@ html_theme_options = {
'head_font_family': "Georgia, serif", 'head_font_family': "Georgia, serif",
} }
htmlhelp_basename = 'mitogendoc' htmlhelp_basename = 'mitogendoc'
intersphinx_mapping = {'python': ('https://docs.python.org/2', None)} intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}
language = None language = None
master_doc = 'toc' master_doc = 'toc'
project = u'Mitogen' project = u'Mitogen'

@ -332,7 +332,7 @@ Masters listen on the following handles:
Receives the name of a module to load `fullname`, locates the source code Receives the name of a module to load `fullname`, locates the source code
for `fullname`, and routes one or more :py:data:`LOAD_MODULE` messages back for `fullname`, and routes one or more :py:data:`LOAD_MODULE` messages back
towards the sender of the :py:data:`GET_MODULE` request. If lookup fails, towards the sender of the :py:data:`GET_MODULE` request. If lookup fails,
``None`` is sent instead. :data:`None` is sent instead.
See :ref:`import-preloading` for a deeper discussion of See :ref:`import-preloading` for a deeper discussion of
:py:data:`GET_MODULE`/:py:data:`LOAD_MODULE`. :py:data:`GET_MODULE`/:py:data:`LOAD_MODULE`.
@ -355,12 +355,13 @@ Children listen on the following handles:
Receives `(pkg_present, path, compressed, related)` tuples, composed of: Receives `(pkg_present, path, compressed, related)` tuples, composed of:
* **pkg_present**: Either ``None`` for a plain ``.py`` module, or a list of * **pkg_present**: Either :data:`None` for a plain ``.py`` module, or a
canonical names of submodules existing within this package. For example, a list of canonical names of submodules existing within this package. For
:py:data:`LOAD_MODULE` for the :py:mod:`mitogen` package would return a example, a :py:data:`LOAD_MODULE` for the :py:mod:`mitogen` package would
list like: `["mitogen.core", "mitogen.fakessh", "mitogen.master", ..]`. return a list like: `["mitogen.core", "mitogen.fakessh",
This list is used by children to avoid generating useless round-trips due "mitogen.master", ..]`. This list is used by children to avoid generating
to Python 2.x's :keyword:`import` statement behavior. useless round-trips due to Python 2.x's :keyword:`import` statement
behavior.
* **path**: Original filesystem path where the module was found on the master. * **path**: Original filesystem path where the module was found on the master.
* **compressed**: :py:mod:`zlib`-compressed module source code. * **compressed**: :py:mod:`zlib`-compressed module source code.
* **related**: list of canonical module names on which this module appears * **related**: list of canonical module names on which this module appears

@ -8,17 +8,27 @@ Internal API Reference
signals signals
mitogen.core Constants
============ =========
.. currentmodule:: mitogen.core
.. autodata:: CHUNK_SIZE
Latch Class Latch Class
----------- ===========
.. currentmodule:: mitogen.core .. currentmodule:: mitogen.core
.. autoclass:: Latch
:members:
.. autoclass:: Latch () PidfulStreamHandler Class
=========================
.. currentmodule:: mitogen.core
.. autoclass:: PidfulStreamHandler
:members:
Side Class Side Class
@ -50,24 +60,24 @@ Side Class
.. attribute:: fd .. attribute:: fd
Integer file descriptor to perform IO on, or ``None`` if Integer file descriptor to perform IO on, or :data:`None` if
:py:meth:`close` has been called. :py:meth:`close` has been called.
.. attribute:: keep_alive .. attribute:: keep_alive
If ``True``, causes presence of this side in :py:class:`Broker`'s If :data:`True`, causes presence of this side in :py:class:`Broker`'s
active reader set to defer shutdown until the side is disconnected. active reader set to defer shutdown until the side is disconnected.
.. method:: fileno .. method:: fileno
Return :py:attr:`fd` if it is not ``None``, otherwise raise Return :py:attr:`fd` if it is not :data:`None`, otherwise raise
:py:class:`StreamError`. This method is implemented so that :py:class:`StreamError`. This method is implemented so that
:py:class:`Side` can be used directly by :py:func:`select.select`. :py:class:`Side` can be used directly by :py:func:`select.select`.
.. method:: close .. method:: close
Call :py:func:`os.close` on :py:attr:`fd` if it is not ``None``, then Call :py:func:`os.close` on :py:attr:`fd` if it is not :data:`None`,
set it to ``None``. then set it to :data:`None`.
.. method:: read (n=CHUNK_SIZE) .. method:: read (n=CHUNK_SIZE)
@ -89,12 +99,9 @@ Side Class
wrapping the underlying :py:func:`os.write` call with :py:func:`io_op` wrapping the underlying :py:func:`os.write` call with :py:func:`io_op`
to trap common disconnection conditions. to trap common disconnection conditions.
:py:meth:`read` always behaves as if it is writing to a regular UNIX
file; socket, pipe, and TTY disconnection errors are masked and result
in a 0-sized write.
:returns: :returns:
Number of bytes written, or ``None`` if disconnection was detected. Number of bytes written, or :data:`None` if disconnection was
detected.
Stream Classes Stream Classes
@ -302,123 +309,47 @@ mitogen.master
Blocking I/O Functions Blocking I/O Functions
---------------------- ======================
These functions exist to support the blocking phase of setting up a new These functions exist to support the blocking phase of setting up a new
context. They will eventually be replaced with asynchronous equivalents. context. They will eventually be replaced with asynchronous equivalents.
.. currentmodule:: mitogen.master
.. function:: iter_read(fd, deadline=None)
Return a generator that arranges for up to 4096-byte chunks to be read at a
time from the file descriptor `fd` until the generator is destroyed.
:param fd:
File descriptor to read from.
:param deadline:
If not ``None``, an absolute UNIX timestamp after which timeout should
occur.
:raises mitogen.core.TimeoutError:
Attempt to read beyond deadline.
:raises mitogen.core.StreamError:
Attempt to read past end of file.
.. currentmodule:: mitogen.master
.. function:: write_all (fd, s, deadline=None)
Arrange for all of bytestring `s` to be written to the file descriptor
`fd`.
:param int fd:
File descriptor to write to.
:param bytes s:
Bytestring to write to file descriptor.
:param float deadline:
If not ``None``, an absolute UNIX timestamp after which timeout should
occur.
:raises mitogen.core.TimeoutError:
Bytestring could not be written entirely before deadline was exceeded.
:raises mitogen.core.StreamError:
File descriptor was disconnected before write could complete.
Helper Functions
----------------
.. currentmodule:: mitogen.core
.. function:: io_op (func, \*args)
Wrap a function that may raise :py:class:`OSError`, trapping common error
codes relating to disconnection events in various subsystems:
* When performing IO against a TTY, disconnection of the remote end is
signalled by :py:data:`errno.EIO`.
* When performing IO against a socket, disconnection of the remote end is
signalled by :py:data:`errno.ECONNRESET`.
* When performing IO against a pipe, disconnection of the remote end is
signalled by :py:data:`errno.EPIPE`.
:returns:
Tuple of `(return_value, disconnected)`, where `return_value` is the
return value of `func(\*args)`, and `disconnected` is ``True`` if
disconnection was detected, otherwise ``False``.
.. currentmodule:: mitogen.parent .. currentmodule:: mitogen.parent
.. autofunction:: discard_until
.. autofunction:: iter_read
.. autofunction:: write_all
.. autofunction:: create_child
Subprocess Creation Functions
=============================
.. currentmodule:: mitogen.parent .. currentmodule:: mitogen.parent
.. autofunction:: create_child
.. autofunction:: hybrid_tty_create_child
.. autofunction:: tty_create_child .. autofunction:: tty_create_child
.. currentmodule:: mitogen.parent Helper Functions
================
.. autofunction:: hybrid_tty_create_child .. currentmodule:: mitogen.core
.. autofunction:: to_text
.. autofunction:: has_parent_authority
.. autofunction:: set_cloexec
.. autofunction:: set_nonblock
.. autofunction:: set_block
.. autofunction:: io_op
.. currentmodule:: mitogen.parent
.. autofunction:: close_nonstandard_fds
.. autofunction:: create_socketpair
.. currentmodule:: mitogen.master .. currentmodule:: mitogen.master
.. autofunction:: get_child_modules
.. function:: get_child_modules (path)
Return the suffixes of submodules directly nested beneath the package
directory at `path`.
:param str path:
Path to the module's source code on disk, or some PEP-302-recognized
equivalent. Usually this is the module's ``__file__`` attribute, but
is specified explicitly to avoid loading the module.
:return:
List of submodule name suffixes.
.. currentmodule:: mitogen.minify .. currentmodule:: mitogen.minify
.. autofunction:: minimize_source
.. autofunction:: minimize_source (source)
Remove comments and docstrings from Python `source`, preserving line
numbers and syntax of empty blocks.
:param str source:
The source to minimize.
:returns str:
The minimized source.
Signals Signals

@ -19,16 +19,10 @@ functions registered to receive it will be called back.
Functions Functions
--------- ---------
.. function:: mitogen.core.listen (obj, name, func) .. currentmodule:: mitogen.core
Arrange for `func(\*args, \*\*kwargs)` to be invoked when the named signal
is fired by `obj`.
.. function:: mitogen.core.fire (obj, name, \*args, \*\*kwargs)
Arrange for `func(\*args, \*\*kwargs)` to be invoked for every function
registered for the named signal on `obj`.
.. autofunction:: listen
.. autofunction:: fire
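A minimal usage sketch; the class and handler names are chosen for
illustration:

.. code-block:: python

    import mitogen.core

    class Connection(object):
        pass

    def on_disconnect():
        print('disconnected!')

    conn = Connection()
    mitogen.core.listen(conn, 'disconnect', on_disconnect)
    mitogen.core.fire(conn, 'disconnect')   # invokes on_disconnect()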
List List

@ -116,7 +116,34 @@ AnyTextType = (BytesType, UnicodeType)
if sys.version_info < (2, 5): if sys.version_info < (2, 5):
next = lambda it: it.next() next = lambda it: it.next()
#: Default size for calls to :meth:`Side.read` or :meth:`Side.write`, and the
#: size of buffers configured by :func:`mitogen.parent.create_socketpair`. This
#: value has many performance implications, 128KiB seems to be a sweet spot.
#:
#: * When set low, large messages cause many :class:`Broker` IO loop
#: iterations, burning CPU and reducing throughput.
#: * When set high, excessive RAM is reserved by the OS for socket buffers (2x
#: per child), and an identically sized temporary userspace buffer is
#: allocated on each read that requires zeroing, and over a particular size
#: may require two system calls to allocate/deallocate.
#:
#: Care must be taken to ensure the underlying kernel object and receiving
#: program support the desired size. For example,
#:
#: * Most UNIXes have TTYs with fixed 2KiB-4KiB buffers, making them unsuitable
#: for efficient IO.
#: * Different UNIXes have varying presets for pipes, which may not be
#: configurable. On recent Linux the default pipe buffer size is 64KiB, but
#: under memory pressure may be as low as 4KiB for unprivileged processes.
#: * When communication is via an intermediary process, its internal buffers
#: affect the speed at which OS buffers drain. For example OpenSSH uses 64KiB
#: reads.
#:
#: An ideal :class:`Message` has a size that is a multiple of
#: :data:`CHUNK_SIZE` inclusive of headers, to avoid wasting IO loop iterations
#: writing small trailer chunks.
CHUNK_SIZE = 131072 CHUNK_SIZE = 131072
_tls = threading.local() _tls = threading.local()
@ -131,6 +158,13 @@ else:
class Error(Exception): class Error(Exception):
"""Base for all exceptions raised by Mitogen.
:param str fmt:
Exception text, or format string if `args` is non-empty.
:param tuple args:
Format string arguments.
"""
def __init__(self, fmt=None, *args): def __init__(self, fmt=None, *args):
if args: if args:
fmt %= args fmt %= args
@ -140,10 +174,14 @@ class Error(Exception):
class LatchError(Error): class LatchError(Error):
"""Raised when an attempt is made to use a :py:class:`mitogen.core.Latch`
that has been marked closed."""
pass pass
class Blob(BytesType): class Blob(BytesType):
"""A serializable bytes subclass whose content is summarized in repr()
output, making it suitable for logging binary data."""
def __repr__(self): def __repr__(self):
return '[blob: %d bytes]' % len(self) return '[blob: %d bytes]' % len(self)
@ -152,6 +190,8 @@ class Blob(BytesType):
class Secret(UnicodeType): class Secret(UnicodeType):
"""A serializable unicode subclass whose content is masked in repr()
output, making it suitable for logging passwords."""
def __repr__(self): def __repr__(self):
return '[secret]' return '[secret]'
@ -165,6 +205,10 @@ class Secret(UnicodeType):
class Kwargs(dict): class Kwargs(dict):
"""A serializable dict subclass that indicates the contained keys should be
coerced to Unicode on Python 3 as required. Python 2 produces keyword
argument dicts whose keys are bytestrings, requiring a helper to ensure
compatibility with Python 3."""
if PY3: if PY3:
def __init__(self, dct): def __init__(self, dct):
for k, v in dct.items(): for k, v in dct.items():
@ -181,6 +225,10 @@ class Kwargs(dict):
class CallError(Error): class CallError(Error):
"""Serializable :class:`Error` subclass raised when
:py:meth:`Context.call() <mitogen.parent.Context.call>` fails. A copy of
the traceback from the external context is appended to the exception
message."""
def __init__(self, fmt=None, *args): def __init__(self, fmt=None, *args):
if not isinstance(fmt, BaseException): if not isinstance(fmt, BaseException):
Error.__init__(self, fmt, *args) Error.__init__(self, fmt, *args)
@ -207,37 +255,54 @@ def _unpickle_call_error(s):
class ChannelError(Error): class ChannelError(Error):
"""Raised when a channel dies or has been closed."""
remote_msg = 'Channel closed by remote end.' remote_msg = 'Channel closed by remote end.'
local_msg = 'Channel closed by local end.' local_msg = 'Channel closed by local end.'
class StreamError(Error): class StreamError(Error):
"""Raised when a stream cannot be established."""
pass pass
class TimeoutError(Error): class TimeoutError(Error):
"""Raised when a timeout occurs on a stream."""
pass pass
def to_text(o): def to_text(o):
if isinstance(o, UnicodeType): """Coerce `o` to Unicode by decoding it from UTF-8 if it is an instance of
return UnicodeType(o) :class:`bytes`, otherwise pass it to the :class:`str` constructor. The
returned object is always a plain :class:`str`; any subclass is removed."""
if isinstance(o, BytesType): if isinstance(o, BytesType):
return o.decode('utf-8') return o.decode('utf-8')
return UnicodeType(o) return UnicodeType(o)
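# Illustrative behaviour (not part of the library source):
#   to_text(b'ping')  -> u'ping'   (bytes are decoded as UTF-8)
#   to_text(123)      -> u'123'    (anything else goes through the str constructor)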
def has_parent_authority(msg, _stream=None): def has_parent_authority(msg, _stream=None):
"""Policy function for use with :class:`Receiver` and
:meth:`Router.add_handler` that requires incoming messages to originate
from a parent context, or on a :class:`Stream` whose :attr:`auth_id
<Stream.auth_id>` has been set to that of a parent context or the current
context."""
return (msg.auth_id == mitogen.context_id or return (msg.auth_id == mitogen.context_id or
msg.auth_id in mitogen.parent_ids) msg.auth_id in mitogen.parent_ids)
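# Illustrative usage (sketch; ``on_note`` is an assumed handler name): restrict
# a handler to messages originating from a parent context:
#   router.add_handler(on_note, policy=has_parent_authority)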
def listen(obj, name, func): def listen(obj, name, func):
"""
Arrange for `func(*args, **kwargs)` to be invoked when the named signal is
fired by `obj`.
"""
signals = vars(obj).setdefault('_signals', {}) signals = vars(obj).setdefault('_signals', {})
signals.setdefault(name, []).append(func) signals.setdefault(name, []).append(func)
def fire(obj, name, *args, **kwargs): def fire(obj, name, *args, **kwargs):
"""
Arrange for `func(*args, **kwargs)` to be invoked for every function
registered for the named signal on `obj`.
"""
signals = vars(obj).get('_signals', {}) signals = vars(obj).get('_signals', {})
return [func(*args, **kwargs) for func in signals.get(name, ())] return [func(*args, **kwargs) for func in signals.get(name, ())]
@ -253,7 +318,8 @@ def takes_router(func):
def is_blacklisted_import(importer, fullname): def is_blacklisted_import(importer, fullname):
"""Return ``True`` if `fullname` is part of a blacklisted package, or if """
Return :data:`True` if `fullname` is part of a blacklisted package, or if
any packages have been whitelisted and `fullname` is not part of one. any packages have been whitelisted and `fullname` is not part of one.
NB: NB:
@ -266,22 +332,51 @@ def is_blacklisted_import(importer, fullname):
def set_cloexec(fd): def set_cloexec(fd):
"""Set the file descriptor `fd` to automatically close on
:func:`os.execve`. This has no effect on file descriptors inherited across
:func:`os.fork`, they must be explicitly closed through some other means,
such as :func:`mitogen.fork.on_fork`."""
flags = fcntl.fcntl(fd, fcntl.F_GETFD) flags = fcntl.fcntl(fd, fcntl.F_GETFD)
assert fd > 2 assert fd > 2
fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC) fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)
def set_nonblock(fd): def set_nonblock(fd):
"""Set the file descriptor `fd` to non-blocking mode. For most underlying
file types, this causes :func:`os.read` or :func:`os.write` to raise
:class:`OSError` with :data:`errno.EAGAIN` rather than block the thread
when the underlying kernel buffer is exhausted."""
flags = fcntl.fcntl(fd, fcntl.F_GETFL) flags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
def set_block(fd): def set_block(fd):
"""Inverse of :func:`set_nonblock`, i.e. cause `fd` to block the thread
when the underlying kernel buffer is exhausted."""
flags = fcntl.fcntl(fd, fcntl.F_GETFL) flags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, flags & ~os.O_NONBLOCK) fcntl.fcntl(fd, fcntl.F_SETFL, flags & ~os.O_NONBLOCK)
def io_op(func, *args): def io_op(func, *args):
"""Wrap `func(*args)` that may raise :class:`select.error`,
:class:`IOError`, or :class:`OSError`, trapping UNIX error codes relating
to disconnection and retry events in various subsystems:
* When a signal is delivered to the process on Python 2, system call retry
is signalled through :data:`errno.EINTR`. The invocation is automatically
restarted.
* When performing IO against a TTY, disconnection of the remote end is
signalled by :data:`errno.EIO`.
* When performing IO against a socket, disconnection of the remote end is
signalled by :data:`errno.ECONNRESET`.
* When performing IO against a pipe, disconnection of the remote end is
signalled by :data:`errno.EPIPE`.
:returns:
Tuple of `(return_value, disconnected)`, where `return_value` is the
return value of `func(\*args)`, and `disconnected` is :data:`True` if
disconnection was detected, otherwise :data:`False`.
"""
while True: while True:
try: try:
return func(*args), False return func(*args), False
@ -296,7 +391,19 @@ def io_op(func, *args):
class PidfulStreamHandler(logging.StreamHandler): class PidfulStreamHandler(logging.StreamHandler):
"""A :class:`logging.StreamHandler` subclass used when
:meth:`Router.enable_debug() <mitogen.master.Router.enable_debug>` has been
called, or the `debug` parameter was specified during context construction.
Verifies the process ID has not changed on each call to :meth:`emit`,
reopening the associated log file when a change is detected.
This ensures logging to the per-process output files happens correctly even
when uncooperative third party components call :func:`os.fork`.
"""
#: PID that last opened the log file.
open_pid = None open_pid = None
#: Output path template.
template = '/tmp/mitogen.%s.%s.log' template = '/tmp/mitogen.%s.%s.log'
def _reopen(self): def _reopen(self):
@ -614,6 +721,7 @@ class Importer(object):
'fork', 'fork',
'jail', 'jail',
'lxc', 'lxc',
'lxd',
'master', 'master',
'minify', 'minify',
'parent', 'parent',
@ -935,7 +1043,7 @@ class Stream(BasicStream):
:py:class:`BasicStream` subclass implementing mitogen's :ref:`stream :py:class:`BasicStream` subclass implementing mitogen's :ref:`stream
protocol <stream-protocol>`. protocol <stream-protocol>`.
""" """
#: If not ``None``, :py:class:`Router` stamps this into #: If not :data:`None`, :py:class:`Router` stamps this into
#: :py:attr:`Message.auth_id` of every message received on this stream. #: :py:attr:`Message.auth_id` of every message received on this stream.
auth_id = None auth_id = None
@ -958,6 +1066,16 @@ class Stream(BasicStream):
def construct(self): def construct(self):
pass pass
def _internal_receive(self, broker, buf):
if self._input_buf and self._input_buf_len < 128:
self._input_buf[0] += buf
else:
self._input_buf.append(buf)
self._input_buf_len += len(buf)
while self._receive_one(broker):
pass
def on_receive(self, broker): def on_receive(self, broker):
"""Handle the next complete message on the stream. Raise """Handle the next complete message on the stream. Raise
:py:class:`StreamError` on failure.""" :py:class:`StreamError` on failure."""
@ -967,14 +1085,7 @@ class Stream(BasicStream):
if not buf: if not buf:
return self.on_disconnect(broker) return self.on_disconnect(broker)
if self._input_buf and self._input_buf_len < 128: self._internal_receive(broker, buf)
self._input_buf[0] += buf
else:
self._input_buf.append(buf)
self._input_buf_len += len(buf)
while self._receive_one(broker):
pass
HEADER_FMT = '>LLLLLL' HEADER_FMT = '>LLLLLL'
HEADER_LEN = struct.calcsize(HEADER_FMT) HEADER_LEN = struct.calcsize(HEADER_FMT)

@ -43,6 +43,11 @@ class Stream(mitogen.parent.Stream):
username = None username = None
docker_path = 'docker' docker_path = 'docker'
# TODO: better way of capturing errors such as "No such container."
create_child_args = {
'merge_stdio': True
}
def construct(self, container=None, image=None, def construct(self, container=None, image=None,
docker_path=None, username=None, docker_path=None, username=None,
**kwargs): **kwargs):

@ -52,7 +52,7 @@ class Stream(mitogen.parent.Stream):
super(Stream, self).construct(**kwargs) super(Stream, self).construct(**kwargs)
self.container = container self.container = container
if lxc_attach_path: if lxc_attach_path:
self.lxc_attach_path = lxc_attach_apth self.lxc_attach_path = lxc_attach_path
def connect(self): def connect(self):
super(Stream, self).connect() super(Stream, self).connect()

@ -0,0 +1,70 @@
# Copyright 2017, David Wilson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import logging
import mitogen.core
import mitogen.parent
LOG = logging.getLogger(__name__)
class Stream(mitogen.parent.Stream):
child_is_immediate_subprocess = False
create_child_args = {
# If lxc finds any of stdin, stdout, stderr connected to a TTY, to
# prevent input injection it creates a proxy pty, forcing all IO to be
# buffered in <4KiB chunks. So ensure stderr is also routed to the
# socketpair.
'merge_stdio': True
}
container = None
lxc_path = 'lxc'
python_path = 'python'
def construct(self, container, lxc_path=None, **kwargs):
super(Stream, self).construct(**kwargs)
self.container = container
if lxc_path:
self.lxc_path = lxc_path
def connect(self):
super(Stream, self).connect()
self.name = u'lxd.' + self.container
def get_boot_command(self):
bits = [
self.lxc_path,
'exec',
'--force-noninteractive',
self.container,
'--',
]
return bits + super(Stream, self).get_boot_command()
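# Illustrative sketch: for a container named 'web1' the boot command begins
#   ['lxc', 'exec', '--force-noninteractive', 'web1', '--', 'python', ...]
# with the remainder supplied by mitogen.parent.Stream.get_boot_command().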

@ -84,6 +84,17 @@ def _stdlib_paths():
def get_child_modules(path): def get_child_modules(path):
"""Return the suffixes of submodules directly neated beneath of the package
directory at `path`.
:param str path:
Path to the module's source code on disk, or some PEP-302-recognized
equivalent. Usually this is the module's ``__file__`` attribute, but
is specified explicitly to avoid loading the module.
:return:
List of submodule name suffixes.
"""
it = pkgutil.iter_modules([os.path.dirname(path)]) it = pkgutil.iter_modules([os.path.dirname(path)])
return [to_text(name) for _, name, _ in it] return [to_text(name) for _, name, _ in it]
@ -276,7 +287,7 @@ def is_stdlib_path(path):
def is_stdlib_name(modname): def is_stdlib_name(modname):
"""Return ``True`` if `modname` appears to come from the standard """Return :data:`True` if `modname` appears to come from the standard
library.""" library."""
if imp.is_builtin(modname) != 0: if imp.is_builtin(modname) != 0:
return True return True
@ -412,8 +423,8 @@ class ModuleFinder(object):
source code. source code.
:returns: :returns:
Tuple of `(module path, source text, is package?)`, or ``None`` if Tuple of `(module path, source text, is package?)`, or :data:`None`
the source cannot be found. if the source cannot be found.
""" """
tup = self._found_cache.get(fullname) tup = self._found_cache.get(fullname)
if tup: if tup:

@ -48,10 +48,16 @@ except ImportError:
@lru_cache() @lru_cache()
def minimize_source(source): def minimize_source(source):
"""Remove most comments and docstrings from Python source code. """Remove comments and docstrings from Python `source`, preserving line
numbers and syntax of empty blocks.
:param str source:
The source to minimize.
:returns str:
The minimized source.
""" """
if not isinstance(source, mitogen.core.UnicodeType): source = mitogen.core.to_text(source)
source = source.decode('utf-8')
tokens = tokenize.generate_tokens(StringIO(source).readline) tokens = tokenize.generate_tokens(StringIO(source).readline)
tokens = strip_comments(tokens) tokens = strip_comments(tokens)
tokens = strip_docstrings(tokens) tokens = strip_docstrings(tokens)

@ -93,6 +93,19 @@ def get_core_source():
return inspect.getsource(mitogen.core) return inspect.getsource(mitogen.core)
def get_default_remote_name():
"""
Return the default name appearing in argv[0] of remote machines.
"""
s = u'%s@%s:%d'
s %= (getpass.getuser(), socket.gethostname(), os.getpid())
# In mixed UNIX/Windows environments, the username may contain slashes.
return s.translate({
ord(u'\\'): ord(u'_'),
ord(u'/'): ord(u'_')
})
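# Illustrative results: user 'deploy' on host 'bastion' running as PID 1234
# yields u'deploy@bastion:1234', while a Windows-style 'EXAMPLE\\deploy' login
# becomes u'EXAMPLE_deploy@bastion:1234', keeping remote argv[0] free of slashes.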
def is_immediate_child(msg, stream): def is_immediate_child(msg, stream):
""" """
Handler policy that requires messages to arrive only from immediately Handler policy that requires messages to arrive only from immediately
@ -144,6 +157,14 @@ def close_nonstandard_fds():
def create_socketpair(): def create_socketpair():
"""
Create a :func:`socket.socketpair` to use for use as a child process's UNIX
stdio channels. As socket pairs are bidirectional, they are economical on
file descriptor usage as the same descriptor can be used for ``stdin`` and
``stdout``. As they are sockets their buffers are tunable, allowing large
buffers to be configured in order to improve throughput for file transfers
and reduce :class:`mitogen.core.Broker` IO loop iterations.
"""
parentfp, childfp = socket.socketpair() parentfp, childfp = socket.socketpair()
parentfp.setsockopt(socket.SOL_SOCKET, parentfp.setsockopt(socket.SOL_SOCKET,
socket.SO_SNDBUF, socket.SO_SNDBUF,
@ -284,6 +305,22 @@ def hybrid_tty_create_child(args):
def write_all(fd, s, deadline=None): def write_all(fd, s, deadline=None):
"""Arrange for all of bytestring `s` to be written to the file descriptor
`fd`.
:param int fd:
File descriptor to write to.
:param bytes s:
Bytestring to write to file descriptor.
:param float deadline:
If not :data:`None`, absolute UNIX timestamp after which timeout should
occur.
:raises mitogen.core.TimeoutError:
Bytestring could not be written entirely before deadline was exceeded.
:raises mitogen.core.StreamError:
File descriptor was disconnected before write could complete.
"""
timeout = None timeout = None
written = 0 written = 0
poller = PREFERRED_POLLER() poller = PREFERRED_POLLER()
@ -312,6 +349,20 @@ def write_all(fd, s, deadline=None):
def iter_read(fds, deadline=None): def iter_read(fds, deadline=None):
"""Return a generator that arranges for up to 4096-byte chunks to be read
at a time from the file descriptors in `fds` until the generator is destroyed.
:param list fds:
File descriptors to read from.
:param float deadline:
If not :data:`None`, an absolute UNIX timestamp after which timeout
should occur.
:raises mitogen.core.TimeoutError:
Attempt to read beyond deadline.
:raises mitogen.core.StreamError:
Attempt to read past end of file.
"""
poller = PREFERRED_POLLER() poller = PREFERRED_POLLER()
for fd in fds: for fd in fds:
poller.start_receive(fd) poller.start_receive(fd)
@ -346,6 +397,24 @@ def iter_read(fds, deadline=None):
def discard_until(fd, s, deadline): def discard_until(fd, s, deadline):
"""Read chunks from `fd` until one is encountered that ends with `s`. This
is used to skip output produced by ``/etc/profile``, ``/etc/motd`` and
mandatory SSH banners while waiting for :attr:`Stream.EC0_MARKER` to
appear, indicating the first stage is ready to receive the compressed
:mod:`mitogen.core` source.
:param int fd:
File descriptor to read from.
:param bytes s:
Marker string to discard until encountered.
:param float deadline:
Absolute UNIX timestamp after which timeout should occur.
:raises mitogen.core.TimeoutError:
Attempt to read beyond deadline.
:raises mitogen.core.StreamError:
Attempt to read past end of file.
"""
for buf in iter_read([fd], deadline): for buf in iter_read([fd], deadline):
if IOLOG.level == logging.DEBUG: if IOLOG.level == logging.DEBUG:
for line in buf.splitlines(): for line in buf.splitlines():
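A hypothetical sketch of the call, feeding a canned banner followed by the marker:

```
import os
import time
import mitogen.parent

# Sketch only: banner noise is discarded until a chunk ending with the marker
# arrives; TimeoutError would be raised if it never appeared before the deadline.
rfd, wfd = os.pipe()
os.write(wfd, b'Welcome to example-host\nMITO000\n')
mitogen.parent.discard_until(rfd, b'MITO000\n', time.time() + 5)
```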
@ -765,8 +834,7 @@ class Stream(mitogen.core.Stream):
if connect_timeout: if connect_timeout:
self.connect_timeout = connect_timeout self.connect_timeout = connect_timeout
if remote_name is None: if remote_name is None:
remote_name = '%s@%s:%d' remote_name = get_default_remote_name()
remote_name %= (getpass.getuser(), socket.gethostname(), os.getpid())
if '/' in remote_name or '\\' in remote_name: if '/' in remote_name or '\\' in remote_name:
raise ValueError('remote_name= cannot contain slashes') raise ValueError('remote_name= cannot contain slashes')
self.remote_name = remote_name self.remote_name = remote_name
@ -968,7 +1036,9 @@ class Stream(mitogen.core.Stream):
self._reap_child() self._reap_child()
raise raise
#: For ssh.py, this must be at least max(len('password'), len('debug1:')) #: Sentinel value emitted by the first stage to indicate it is ready to
#: receive the compressed bootstrap. For :mod:`mitogen.ssh` this must have
#: a length of at least `max(len('password'), len('debug1:'))`.
EC0_MARKER = mitogen.core.b('MITO000\n') EC0_MARKER = mitogen.core.b('MITO000\n')
EC1_MARKER = mitogen.core.b('MITO001\n') EC1_MARKER = mitogen.core.b('MITO001\n')
@ -1288,6 +1358,9 @@ class Router(mitogen.core.Router):
def lxc(self, **kwargs): def lxc(self, **kwargs):
return self.connect(u'lxc', **kwargs) return self.connect(u'lxc', **kwargs)
def lxd(self, **kwargs):
return self.connect(u'lxd', **kwargs)
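A minimal usage sketch for the new connection method, assuming an LXD container named 'web1' reachable through the local `lxc` client:

```
import os
import mitogen.master

# Sketch only: connect into an LXD container and run a function there.
broker = mitogen.master.Broker()
router = mitogen.master.Router(broker)
try:
    web1 = router.lxd(container='web1')
    print(web1.call(os.getpid))   # PID of the interpreter inside the container
finally:
    broker.shutdown()
```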
def setns(self, **kwargs): def setns(self, **kwargs):
return self.connect(u'setns', **kwargs) return self.connect(u'setns', **kwargs)

@ -372,8 +372,9 @@ class DeduplicatingInvoker(Invoker):
class Service(object): class Service(object):
#: Sentinel object to suppress reply generation, since returning ``None`` #: Sentinel object to suppress reply generation, since returning
#: will trigger a response message containing the pickled ``None``. #: :data:`None` will trigger a response message containing the pickled
#: :data:`None`.
NO_REPLY = object() NO_REPLY = object()
invoker_class = Invoker invoker_class = Invoker
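A hypothetical sketch of the sentinel in use; the method-exposure and registration machinery is not shown in this hunk and is assumed:

```
import mitogen.service

# Sketch only: a method that performs a side effect and suppresses its reply.
class AuditService(mitogen.service.Service):
    def record(self, line):
        with open('/tmp/audit.log', 'a') as fp:   # illustrative path
            fp.write(line + '\n')
        return self.NO_REPLY   # no response message is sent to the caller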

@ -94,6 +94,16 @@ def get_lxc_pid(path, name):
raise Error("could not find PID from lxc-info output.\n%s", output) raise Error("could not find PID from lxc-info output.\n%s", output)
def get_lxd_pid(path, name):
output = _run_command([path, 'info', name])
for line in output.splitlines():
bits = line.split()
if bits and bits[0] == 'Pid:':
return int(bits[1])
raise Error("could not find PID from lxc output.\n%s", output)
def get_machinectl_pid(path, name): def get_machinectl_pid(path, name):
output = _run_command([path, 'status', name]) output = _run_command([path, 'status', name])
for line in output.splitlines(): for line in output.splitlines():
@ -110,18 +120,22 @@ class Stream(mitogen.parent.Stream):
container = None container = None
username = None username = None
kind = None kind = None
python_path = 'python'
docker_path = 'docker' docker_path = 'docker'
lxc_path = 'lxc'
lxc_info_path = 'lxc-info' lxc_info_path = 'lxc-info'
machinectl_path = 'machinectl' machinectl_path = 'machinectl'
GET_LEADER_BY_KIND = { GET_LEADER_BY_KIND = {
'docker': ('docker_path', get_docker_pid), 'docker': ('docker_path', get_docker_pid),
'lxc': ('lxc_info_path', get_lxc_pid), 'lxc': ('lxc_info_path', get_lxc_pid),
'lxd': ('lxc_path', get_lxd_pid),
'machinectl': ('machinectl_path', get_machinectl_pid), 'machinectl': ('machinectl_path', get_machinectl_pid),
} }
def construct(self, container, kind, username=None, docker_path=None, def construct(self, container, kind, username=None, docker_path=None,
lxc_info_path=None, machinectl_path=None, **kwargs): lxc_path=None, lxc_info_path=None, machinectl_path=None,
**kwargs):
super(Stream, self).construct(**kwargs) super(Stream, self).construct(**kwargs)
if kind not in self.GET_LEADER_BY_KIND: if kind not in self.GET_LEADER_BY_KIND:
raise Error('unsupported container kind: %r', kind) raise Error('unsupported container kind: %r', kind)
@ -132,6 +146,8 @@ class Stream(mitogen.parent.Stream):
self.username = username self.username = username
if docker_path: if docker_path:
self.docker_path = docker_path self.docker_path = docker_path
if lxc_path:
self.lxc_path = lxc_path
if lxc_info_path: if lxc_info_path:
self.lxc_info_path = lxc_info_path self.lxc_info_path = lxc_info_path
if machinectl_path: if machinectl_path:

@ -49,7 +49,7 @@ SUDO_OPTIONS = [
#(False, 'str', '--group', '-g') #(False, 'str', '--group', '-g')
(True, 'bool', '--set-home', '-H'), (True, 'bool', '--set-home', '-H'),
#(False, 'str', '--host', '-h') #(False, 'str', '--host', '-h')
#(False, 'bool', '--login', '-i') (False, 'bool', '--login', '-i'),
#(False, 'bool', '--remove-timestamp', '-K') #(False, 'bool', '--remove-timestamp', '-K')
#(False, 'bool', '--reset-timestamp', '-k') #(False, 'bool', '--reset-timestamp', '-k')
#(False, 'bool', '--list', '-l') #(False, 'bool', '--list', '-l')
@ -116,10 +116,11 @@ class Stream(mitogen.parent.Stream):
password = None password = None
preserve_env = False preserve_env = False
set_home = False set_home = False
login = False
def construct(self, username=None, sudo_path=None, password=None, def construct(self, username=None, sudo_path=None, password=None,
preserve_env=None, set_home=None, sudo_args=None, preserve_env=None, set_home=None, sudo_args=None,
**kwargs): login=None, **kwargs):
super(Stream, self).construct(**kwargs) super(Stream, self).construct(**kwargs)
opts = parse_sudo_flags(sudo_args or []) opts = parse_sudo_flags(sudo_args or [])
@ -133,6 +134,8 @@ class Stream(mitogen.parent.Stream):
self.preserve_env = preserve_env or opts.preserve_env self.preserve_env = preserve_env or opts.preserve_env
if (set_home or opts.set_home) is not None: if (set_home or opts.set_home) is not None:
self.set_home = set_home or opts.set_home self.set_home = set_home or opts.set_home
if (login or opts.login) is not None:
self.login = True
def connect(self): def connect(self):
super(Stream, self).connect() super(Stream, self).connect()
@ -144,13 +147,16 @@ class Stream(mitogen.parent.Stream):
def get_boot_command(self): def get_boot_command(self):
# Note: sudo did not introduce long-format option processing until July # Note: sudo did not introduce long-format option processing until July
# 2013, so even though we parse long-format options, we always supply # 2013, so even though we parse long-format options, supply short-form
# short-form to the sudo command. # to the sudo command.
bits = [self.sudo_path, '-u', self.username] bits = [self.sudo_path, '-u', self.username]
if self.preserve_env: if self.preserve_env:
bits += ['-E'] bits += ['-E']
if self.set_home: if self.set_home:
bits += ['-H'] bits += ['-H']
if self.login:
bits += ['-i']
bits = bits + super(Stream, self).get_boot_command() bits = bits + super(Stream, self).get_boot_command()
LOG.debug('sudo command line: %r', bits) LOG.debug('sudo command line: %r', bits)
return bits return bits
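A quick sketch of how the newly recognised flag flows through: `parse_sudo_flags` (used by `construct()` above) now reports `login`, and `get_boot_command()` re-emits it as the short-form `-i`:

```
import mitogen.sudo

# Sketch only: long or short --login/-i in sudo_args enables a login shell.
opts = mitogen.sudo.parse_sudo_flags(['--login', '-H'])
print(opts.login, opts.set_home)   # expected: True True (names per construct() above)
```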

@ -4,6 +4,7 @@ contexts.
""" """
import inspect import inspect
import sys
import zlib import zlib
import mitogen.fakessh import mitogen.fakessh
@ -24,6 +25,10 @@ print('Preamble size: %s (%.2fKiB)' % (
len(stream.get_preamble()), len(stream.get_preamble()),
len(stream.get_preamble()) / 1024.0, len(stream.get_preamble()) / 1024.0,
)) ))
if '--dump' in sys.argv:
print(zlib.decompress(stream.get_preamble()))
exit()
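Usage note (invocation assumed, since the script's name is not visible in this hunk): running the preamble-size report with `--dump`, e.g. `python preamble_size.py --dump | less`, now prints the decompressed bootstrap source rather than only its size.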
print( print(
' ' ' '

@ -73,13 +73,16 @@ also by Ansible's `osx_setup.yml`.
used to target this account, the parent session requires a TTY and the used to target this account, the parent session requires a TTY and the
account password must be entered. account password must be entered.
`mitogen__user1` .. `mitogen__user21` `mitogen__user1` .. `mitogen__user5`
These accounts do not have passwords set. They exist to test the Ansible These accounts do not have passwords set. They exist to test the Ansible
interpreter recycling logic. interpreter recycling logic.
`mitogen__sudo1` .. `mitogen__sudo4`
May passwordless sudo to any account.
`mitogen__webapp` `mitogen__webapp`
A plain old account with no sudo access, used as the target for fakessh A plain old account with no sudo access, used as the target for fakessh
tddests. tests.
# Ansible Integration Test Environment # Ansible Integration Test Environment

@ -1,10 +1,14 @@
all: \ TARGETS+=lib/modules/custom_binary_producing_junk
lib/modules/custom_binary_producing_junk \ TARGETS+=lib/modules/custom_binary_producing_json
lib/modules/custom_binary_producing_json
all: clean $(TARGETS)
lib/modules/custom_binary_producing_junk: lib/modules.src/custom_binary_producing_junk.c lib/modules/custom_binary_producing_junk: lib/modules.src/custom_binary_producing_junk.c
$(CC) -o $@ $< $(CC) -o $@ $<
lib/modules/custom_binary_producing_json: lib/modules.src/custom_binary_producing_json.c lib/modules/custom_binary_producing_json: lib/modules.src/custom_binary_producing_json.c
$(CC) -o $@ $< $(CC) -o $@ $<
clean:
rm -f $(TARGETS)

@ -1,5 +1,5 @@
# ``tests/ansible`` Directory # `tests/ansible` Directory
This is an organically growing collection of integration and regression This is an organically growing collection of integration and regression
tests used for development and end-user bug reports. tests used for development and end-user bug reports.
@ -10,10 +10,10 @@ demonstrator for what does and doesn't work.
## Preparation ## Preparation
For OS X, run the ``osx_setup.yml`` script to create a bunch of users. See `../image_prep/README.md`.
## ``run_ansible_playbook.sh`` ## `run_ansible_playbook.sh`
This is necessary to set some environment variables used by future tests, as This is necessary to set some environment variables used by future tests, as
there appears to be no better way to inject them into the top-level process there appears to be no better way to inject them into the top-level process
@ -22,6 +22,19 @@ environment before the Mitogen connection process forks.
## Running Everything ## Running Everything
``` `ANSIBLE_STRATEGY=mitogen_linear ./run_ansible_playbook.sh all.yml`
ANSIBLE_STRATEGY=mitogen_linear ./run_ansible_playbook.sh all.yml
```
## `hosts/` and `common-hosts`
To support running the tests against a dev machine that has the requisite user
accounts, the default inventory is a directory containing a 'localhost'
file that defines 'localhost' to be named 'target' in Ansible inventory, and a
symlink to 'common-hosts', which defines additional targets that all derive
from 'target'.
This allows `ansible_tests.sh` to reuse the common-hosts definitions while
replacing localhost as the test target by creating a new directory that
similarly symlinks in common-hosts.
There may be a better solution for this, but it works fine for now.

@ -17,10 +17,6 @@ timeout = 10
# On Travis, paramiko check fails due to host key checking enabled. # On Travis, paramiko check fails due to host key checking enabled.
host_key_checking = False host_key_checking = False
# "mitogen-tests" required by integration/runner/remote_tmp.yml
# "$HOME" required by integration/action/make_tmp_path.yml
remote_tmp = $HOME/.ansible/mitogen-tests/
[ssh_connection] [ssh_connection]
ssh_args = -o ForwardAgent=yes -o ControlMaster=auto -o ControlPersist=60s ssh_args = -o ForwardAgent=yes -o ControlMaster=auto -o ControlPersist=60s
pipelining = True pipelining = True

@ -0,0 +1,10 @@
# Execute 'hostname' 100 times in a loop. Loops execute within TaskExecutor
# within a single WorkerProcess, so each iteration is a fair approximation of the
# non-controller overhead involved in executing a task.
#
# See also: loop-100-tasks.yml
#
- hosts: all
tasks:
- command: hostname
with_sequence: start=1 end=100

@ -0,0 +1,112 @@
# Execute 'hostname' 100 times, using 100 individual tasks. Each task causes a
# new WorkerProcess to be forked, along with get_vars() calculation, and in the
# Mitogen extension, reestablishment of the UNIX socket connection to the
# multiplexer process.
#
# Among other things, it does not measure module dependency scanning (cached
# after the first iteration).
#
# See also: loop-100-items.yml
#
- hosts: all
tasks:
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname
- command: hostname

@ -0,0 +1,51 @@
# vim: syntax=dosini
[connection-delegation-test]
cd-bastion
cd-rack11 mitogen_via=ssh-user@cd-bastion
cd-rack11a mitogen_via=root@cd-rack11
cd-rack11a-docker mitogen_via=docker-admin@cd-rack11a ansible_connection=docker
[connection-delegation-cycle]
# Create cycle with Docker container.
cdc-bastion mitogen_via=cdc-rack11a-docker
cdc-rack11 mitogen_via=ssh-user@cdc-bastion
cdc-rack11a mitogen_via=root@cdc-rack11
cdc-rack11a-docker mitogen_via=docker-admin@cdc-rack11a ansible_connection=docker
[conn-delegation]
cd-user1 ansible_user=mitogen__user1 ansible_connection=mitogen_sudo mitogen_via=target
# Connection delegation scenarios. It's impossible to connect to them, but
# you can inspect the would-be config via "mitogen_get_stack" action.
[cd-no-connect]
# Normal inventory host, no aliasing.
cd-normal ansible_connection=mitogen_doas ansible_user=normal-user
# Inventory host that is really a different host.
cd-alias ansible_connection=ssh ansible_user=alias-user ansible_host=alias-host
# Via one normal host.
cd-normal-normal mitogen_via=cd-normal
# Via one aliased host.
cd-normal-alias mitogen_via=cd-alias
# newuser@host via host with explicit username.
cd-newuser-normal-normal mitogen_via=cd-normal ansible_user=newuser-normal-normal-user
# doas:newuser via host.
cd-newuser-doas-normal mitogen_via=cd-normal ansible_connection=mitogen_doas ansible_user=newuser-doas-normal-user
# Connection Delegation issue #340 reproduction.
# Path to jails is SSH to H -> mitogen_sudo to root -> jail to J
[issue340]
# 'target' plays the role of the normal host machine H.
# 'mitogen__sudo1' plays the role of root@H via mitogen_sudo.
# 'mitogen__user1' plays the role of root@J via mitogen__user1.
# 'mitogen__user2' plays the role of E, the delegate_to target for certs.
i340-root ansible_user=mitogen__sudo1 ansible_connection=mitogen_sudo mitogen_via=target
i340-jail ansible_user=mitogen__user1 ansible_connection=mitogen_sudo mitogen_via=i340-root
i340-certs ansible_user=mitogen__user2 ansible_connection=mitogen_sudo mitogen_via=target

@ -1,5 +1,9 @@
- hosts: controller - hosts: controller
vars:
git_username: '{{ lookup("pipe", "git config --global user.name") }}'
git_email: '{{ lookup("pipe", "git config --global user.email") }}'
tasks: tasks:
- lineinfile: - lineinfile:
line: "net.ipv4.ip_forward=1" line: "net.ipv4.ip_forward=1"
@ -32,6 +36,11 @@
- shell: "rsync -a ~/.ssh {{inventory_hostname}}:" - shell: "rsync -a ~/.ssh {{inventory_hostname}}:"
connection: local connection: local
- shell: |
git config --global user.email "{{git_email}}"
git config --global user.name "{{git_username}}"
name: set_git_config
- git: - git:
dest: ~/mitogen dest: ~/mitogen
repo: https://github.com/dw/mitogen.git repo: https://github.com/dw/mitogen.git

@ -1,16 +0,0 @@
[test-targets]
localhost
[connection-delegation-test]
cd-bastion
cd-rack11 mitogen_via=ssh-user@cd-bastion
cd-rack11a mitogen_via=root@cd-rack11
cd-rack11a-docker mitogen_via=docker-admin@cd-rack11a ansible_connection=docker
[connection-delegation-cycle]
# Create cycle with Docker container.
cdc-bastion mitogen_via=cdc-rack11a-docker
cdc-rack11 mitogen_via=ssh-user@cdc-bastion
cdc-rack11a mitogen_via=root@cdc-rack11
cdc-rack11a-docker mitogen_via=docker-admin@cdc-rack11a ansible_connection=docker

@ -0,0 +1,2 @@
[test-targets]
target ansible_host=localhost

@ -1,63 +1,127 @@
#
# Ensure _make_tmp_path returns the same result across invocations for a single
# user account, and that the path returned cleans itself up on connection
# termination.
#
# Related bugs prior to the new-style handling:
# https://github.com/dw/mitogen/issues/239
# https://github.com/dw/mitogen/issues/301
- name: integration/action/make_tmp_path.yml - name: integration/action/make_tmp_path.yml
hosts: test-targets hosts: test-targets
any_errors_fatal: true any_errors_fatal: true
tasks: tasks:
- name: "Find out root's homedir." - meta: end_play
# Runs first because it blats regular Ansible facts with junk, so when: not is_mitogen
# non-become run fixes that up.
setup: gather_subset=min
become: true
register: root_facts
- name: "Find regular homedir"
setup: gather_subset=min
register: user_facts
# #
# non-become # non-root
# #
- action_passthrough: - name: "Find regular temp path"
action_passthrough:
method: _make_tmp_path method: _make_tmp_path
register: out register: tmp_path
- name: "Write some junk in regular temp path"
shell: hostname > {{tmp_path.result}}/hostname
- name: "Verify junk did not persist across tasks"
stat: path={{tmp_path.result}}/hostname
register: junk_stat
- name: "Verify junk did not persist across tasks"
assert:
that:
- not junk_stat.stat.exists
- name: "Verify temp path hasn't changed since start"
action_passthrough:
method: _make_tmp_path
register: tmp_path2
- name: "Verify temp path hasn't changed since start"
assert:
that:
- tmp_path2.result == tmp_path.result
- name: "Verify temp path changes across connection reset"
mitogen_shutdown_all:
- name: "Verify temp path changes across connection reset"
action_passthrough:
method: _make_tmp_path
register: tmp_path2
- assert: - name: "Verify temp path changes across connection reset"
# This string must match ansible.cfg::remote_tmp assert:
that: out.result.startswith("{{user_facts.ansible_facts.ansible_user_dir}}/.ansible/mitogen-tests/") that:
- tmp_path2.result != tmp_path.result
- stat: - name: "Verify old path disappears across connection reset"
path: "{{out.result}}" stat: path={{tmp_path.result}}
register: st register: junk_stat
- assert: - name: "Verify old path disappears across connection reset"
that: st.stat.exists and st.stat.isdir and st.stat.mode == "0700" assert:
that:
- not junk_stat.stat.exists
- file: #
path: "{{out.result}}" # root
state: absent #
- name: "Find root temp path"
become: true
action_passthrough:
method: _make_tmp_path
register: tmp_path_root
- name: "Verify root temp path differs from regular path"
assert:
that:
- tmp_path2.result != tmp_path_root.result
# #
# become. make_tmp_path() must evaluate HOME in the context of the SSH # readonly homedir
# user, not the become user.
# #
- action_passthrough: - name: "Try writing to temp directory for the readonly_homedir user"
become: true
become_user: mitogen__readonly_homedir
action_passthrough:
method: _make_tmp_path method: _make_tmp_path
register: out register: tmp_path
- name: "Try writing to temp directory for the readonly_homedir user"
become: true become: true
become_user: mitogen__readonly_homedir
shell: hostname > {{tmp_path.result}}/hostname
- assert: #
# This string must match ansible.cfg::remote_tmp # modules get the same temp dir
that: out.result.startswith("{{user_facts.ansible_facts.ansible_user_dir}}/.ansible/mitogen-tests/") #
- stat: - name: "Verify modules get the same tmpdir as the action plugin"
path: "{{out.result}}" action_passthrough:
register: st method: _make_tmp_path
register: tmp_path
- name: "Verify modules get the same tmpdir as the action plugin"
custom_python_detect_environment:
register: out
- assert: # v2.6 related: https://github.com/ansible/ansible/pull/39833
that: st.stat.exists and st.stat.isdir and st.stat.mode == "0700" - name: "Verify modules get the same tmpdir as the action plugin (<2.5)"
when: ansible_version.full < '2.5'
assert:
that:
- out.module_path == tmp_path.result
- out.module_tmpdir == None
- file: - name: "Verify modules get the same tmpdir as the action plugin (>2.5)"
path: "{{out.result}}" when: ansible_version.full > '2.5'
state: absent assert:
that:
- out.module_path == tmp_path.result
- out.module_tmpdir == tmp_path.result

@ -8,11 +8,11 @@
- import_playbook: become/all.yml - import_playbook: become/all.yml
- import_playbook: connection_loader/all.yml - import_playbook: connection_loader/all.yml
- import_playbook: context_service/all.yml - import_playbook: context_service/all.yml
- import_playbook: delegation/all.yml
- import_playbook: glibc_caches/all.yml
- import_playbook: local/all.yml - import_playbook: local/all.yml
- import_playbook: module_utils/all.yml - import_playbook: module_utils/all.yml
- import_playbook: playbook_semantics/all.yml - import_playbook: playbook_semantics/all.yml
- import_playbook: remote_tmp/all.yml
- import_playbook: runner/all.yml - import_playbook: runner/all.yml
- import_playbook: ssh/all.yml - import_playbook: ssh/all.yml
- import_playbook: strategy/all.yml - import_playbook: strategy/all.yml
- import_playbook: glibc_caches/all.yml

@ -0,0 +1,2 @@
- import_playbook: delegate_to_template.yml
- import_playbook: stack_construction.yml

@ -0,0 +1,41 @@
# Ensure templated delegate_to field works.
- name: integration/delegation/delegate_to_template.yml
vars:
physical_host: "cd-normal-alias"
physical_hosts: ["cd-normal-alias", "cd-normal-normal"]
hosts: test-targets
gather_facts: no
any_errors_fatal: true
tasks:
- mitogen_get_stack:
delegate_to: "{{ physical_host }}"
register: out
- assert:
that: |
out.result == [
{
'kwargs': {
'check_host_keys': 'ignore',
'connect_timeout': 10,
'hostname': 'cd-normal-alias',
'identity_file': None,
'password': None,
'port': None,
'python_path': None,
'ssh_args': [
'-o',
'ForwardAgent=yes',
'-o',
'ControlMaster=auto',
'-o',
'ControlPersist=60s',
],
'ssh_debug_level': None,
'ssh_path': 'ssh',
'username': None,
},
'method': 'ssh',
},
]

@ -0,0 +1,336 @@
# https://github.com/dw/mitogen/issues/251
# ansible_mitogen.connection internally reinterprets Ansible state into a
# 'connection stack' -- this is just a list of dictionaries specifying a
# sequence of proxied Router connection methods and their kwargs used to
# establish the connection. That list is passed to ContextService, which loops
# over the stack specifying via=(None or previous entry) for each connection
# method.
# mitogen_get_stack is a magic action that returns the stack, so we can test
# all kinds of scenarios without actually needing a real environment.
# Updating this file? Install 'pprintpp' and hack lib/callbacks/nice_stdout.py
# to use it instead of the built-in function, then simply s/'/'/ to get the
# cutpasteable formatted dicts below. WARNING: remove the trailing comma from
# the result list element; it seems to cause assert to silently succeed!
- name: integration/delegation/stack_construction.yml
hosts: cd-normal
tasks:
# used later for local_action test.
- local_action: custom_python_detect_environment
register: local_env
- hosts: cd-normal
any_errors_fatal: true
tasks:
- mitogen_get_stack:
register: out
- assert:
that: |
out.result == [
{
"kwargs": {
"connect_timeout": 10,
"doas_path": None,
"password": None,
"python_path": ["/usr/bin/python"],
"username": "normal-user",
},
"method": "doas",
}
]
- hosts: cd-normal
tasks:
- mitogen_get_stack:
delegate_to: cd-alias
register: out
- assert:
that: |
out.result == [
{
'kwargs': {
'check_host_keys': 'ignore',
'connect_timeout': 10,
'hostname': 'alias-host',
'identity_file': None,
'password': None,
'port': None,
'python_path': None,
'ssh_args': [
'-o',
'ForwardAgent=yes',
'-o',
'ControlMaster=auto',
'-o',
'ControlPersist=60s',
],
'ssh_debug_level': None,
'ssh_path': 'ssh',
'username': 'alias-user',
},
'method': 'ssh',
},
]
- hosts: cd-alias
tasks:
- mitogen_get_stack:
register: out
- assert:
that: |
out.result == [
{
'kwargs': {
'check_host_keys': 'ignore',
'connect_timeout': 10,
'hostname': 'alias-host',
'identity_file': None,
'password': None,
'port': None,
'python_path': ['/usr/bin/python'],
'ssh_args': [
'-o',
'ForwardAgent=yes',
'-o',
'ControlMaster=auto',
'-o',
'ControlPersist=60s',
],
'ssh_debug_level': None,
'ssh_path': 'ssh',
'username': 'alias-user',
},
'method': 'ssh',
},
]
- hosts: cd-normal-normal
tasks:
- mitogen_get_stack:
register: out
- assert:
that: |
out.result == [
{
'kwargs': {
'connect_timeout': 10,
'doas_path': None,
'password': None,
'python_path': None,
'username': 'normal-user',
},
'method': 'doas',
},
{
'kwargs': {
'check_host_keys': 'ignore',
'connect_timeout': 10,
'hostname': 'cd-normal-normal',
'identity_file': None,
'password': None,
'port': None,
'python_path': ['/usr/bin/python'],
'ssh_args': [
'-o',
'ForwardAgent=yes',
'-o',
'ControlMaster=auto',
'-o',
'ControlPersist=60s',
],
'ssh_debug_level': None,
'ssh_path': 'ssh',
'username': None,
},
'method': 'ssh',
},
]
- hosts: cd-normal-alias
tasks:
- mitogen_get_stack:
register: out
- assert:
that: |
out.result == [
{
'kwargs': {
'check_host_keys': 'ignore',
'connect_timeout': 10,
'hostname': 'alias-host',
'identity_file': None,
'password': None,
'port': None,
'python_path': None,
'ssh_args': [
'-o',
'ForwardAgent=yes',
'-o',
'ControlMaster=auto',
'-o',
'ControlPersist=60s',
],
'ssh_debug_level': None,
'ssh_path': 'ssh',
'username': 'alias-user',
},
'method': 'ssh',
},
{
'kwargs': {
'check_host_keys': 'ignore',
'connect_timeout': 10,
'hostname': 'cd-normal-alias',
'identity_file': None,
'password': None,
'port': None,
'python_path': ['/usr/bin/python'],
'ssh_args': [
'-o',
'ForwardAgent=yes',
'-o',
'ControlMaster=auto',
'-o',
'ControlPersist=60s',
],
'ssh_debug_level': None,
'ssh_path': 'ssh',
'username': None,
},
'method': 'ssh',
},
]
- hosts: cd-newuser-normal-normal
tasks:
- mitogen_get_stack:
register: out
- assert:
that: |
out.result == [
{
'kwargs': {
'connect_timeout': 10,
'doas_path': None,
'password': None,
'python_path': None,
'username': 'normal-user',
},
'method': 'doas',
},
{
'kwargs': {
'check_host_keys': 'ignore',
'connect_timeout': 10,
'hostname': 'cd-newuser-normal-normal',
'identity_file': None,
'password': None,
'port': None,
'python_path': ['/usr/bin/python'],
'ssh_args': [
'-o',
'ForwardAgent=yes',
'-o',
'ControlMaster=auto',
'-o',
'ControlPersist=60s',
],
'ssh_debug_level': None,
'ssh_path': 'ssh',
'username': 'newuser-normal-normal-user',
},
'method': 'ssh',
},
]
- hosts: cd-newuser-normal-normal
tasks:
- mitogen_get_stack:
delegate_to: cd-alias
register: out
- assert:
that: |
out.result == [
{
'kwargs': {
'check_host_keys': 'ignore',
'connect_timeout': 10,
'hostname': 'alias-host',
'identity_file': None,
'password': None,
'port': None,
'python_path': None,
'ssh_args': [
'-o',
'ForwardAgent=yes',
'-o',
'ControlMaster=auto',
'-o',
'ControlPersist=60s',
],
'ssh_debug_level': None,
'ssh_path': 'ssh',
'username': 'alias-user',
},
'method': 'ssh',
},
]
- hosts: cd-newuser-normal-normal
tasks:
- local_action: mitogen_get_stack
register: out
- assert:
that: |
out.result == [
{
'kwargs': {
'python_path': [
hostvars['cd-normal'].local_env.sys_executable
],
},
'method': 'local',
},
]
- hosts: cd-newuser-doas-normal
tasks:
- mitogen_get_stack:
register: out
- assert:
that: |
out.result == [
{
'kwargs': {
'connect_timeout': 10,
'doas_path': None,
'password': None,
'python_path': None,
'username': 'normal-user',
},
'method': 'doas',
},
{
'kwargs': {
'connect_timeout': 10,
'doas_path': None,
'password': None,
'python_path': ['/usr/bin/python'],
'username': 'newuser-doas-normal-user',
},
'method': 'doas',
},
]

@ -1,2 +0,0 @@
- import_playbook: readonly_homedir.yml

@ -1,20 +0,0 @@
# https://github.com/dw/mitogen/issues/239
# While remote_tmp is used in the context of the SSH user by action code
# running on the controller, Ansiballz ignores it and uses the system default
# instead.
- name: integration/remote_tmp/readonly_homedir.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- custom_python_detect_environment:
become: true
become_user: mitogen__readonly_homedir
register: out
vars:
ansible_become_pass: readonly_homedir_password
- name: Verify system temp directory was used.
assert:
that:
- out.__file__.startswith("/tmp/ansible_")

@ -1,7 +1,7 @@
- import_playbook: builtin_command_module.yml - import_playbook: builtin_command_module.yml
- import_playbook: custom_bash_hashbang_argument.yml
- import_playbook: custom_bash_old_style_module.yml - import_playbook: custom_bash_old_style_module.yml
- import_playbook: custom_bash_want_json_module.yml - import_playbook: custom_bash_want_json_module.yml
- import_playbook: custom_bash_hashbang_argument.yml
- import_playbook: custom_binary_producing_json.yml - import_playbook: custom_binary_producing_json.yml
- import_playbook: custom_binary_producing_junk.yml - import_playbook: custom_binary_producing_junk.yml
- import_playbook: custom_binary_single_null.yml - import_playbook: custom_binary_single_null.yml
@ -13,4 +13,6 @@
- import_playbook: custom_python_want_json_module.yml - import_playbook: custom_python_want_json_module.yml
- import_playbook: custom_script_interpreter.yml - import_playbook: custom_script_interpreter.yml
- import_playbook: environment_isolation.yml - import_playbook: environment_isolation.yml
- import_playbook: etc_environment.yml
- import_playbook: forking_behaviour.yml - import_playbook: forking_behaviour.yml
- import_playbook: missing_module.yml

@ -0,0 +1,80 @@
# issue #338: ensure /etc/environment is reloaded if it changes.
# Actually this test uses ~/.pam_environment, which uses the same logic but
# is less likely to brick a development workstation.
- name: integration/runner/etc_environment.yml
hosts: test-targets
any_errors_fatal: true
gather_facts: true
tasks:
# ~/.pam_environment
- file:
path: ~/.pam_environment
state: absent
- shell: echo $MAGIC_PAM_ENV
register: echo
- assert:
that: echo.stdout == ""
- copy:
dest: ~/.pam_environment
content: |
MAGIC_PAM_ENV=321
- shell: echo $MAGIC_PAM_ENV
register: echo
- assert:
that: echo.stdout == "321"
- file:
path: ~/.pam_environment
state: absent
- shell: echo $MAGIC_PAM_ENV
register: echo
- assert:
that: echo.stdout == ""
# /etc/environment
- meta: end_play
when: ansible_virtualization_type != "docker"
- file:
path: /etc/environment
state: absent
become: true
- shell: echo $MAGIC_ETC_ENV
register: echo
- assert:
that: echo.stdout == ""
- copy:
dest: /etc/environment
content: |
MAGIC_ETC_ENV=555
become: true
- shell: echo $MAGIC_ETC_ENV
register: echo
- assert:
that: echo.stdout == "555"
- file:
path: /etc/environment
state: absent
become: true
- shell: echo $MAGIC_ETC_ENV
register: echo
- assert:
that: echo.stdout == ""
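The behaviour exercised above relies on the /etc/environment emulation mentioned in this merge; a purely illustrative sketch of the kind of KEY=VALUE parsing involved (the real implementation lives in ansible_mitogen's runner and is not shown in this diff):

```
# Illustrative only: naive parser for /etc/environment-style files.
def parse_environment_file(path):
    env = {}
    try:
        fp = open(path)
    except IOError:
        return env
    with fp:
        for line in fp:
            line = line.strip()
            if not line or line.startswith('#') or '=' not in line:
                continue
            key, _, value = line.partition('=')
            env[key.strip()] = value.strip().strip('"')
    return env

print(parse_environment_file('/etc/environment'))
```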

@ -0,0 +1,19 @@
- name: integration/runner/missing_module.yml
hosts: test-targets
connection: local
tasks:
- connection: local
command: |
ansible -vvv
-i "{{inventory_file}}"
test-targets
-m missing_module
args:
chdir: ../..
register: out
ignore_errors: true
- assert:
that: |
'The module missing_module was not found in configured module paths.' in out.stdout

@ -0,0 +1,22 @@
"""
Fetch the connection configuration stack that would be used to connect to a
target, without actually connecting to it.
"""
import ansible_mitogen.connection
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=None):
if not isinstance(self._connection,
ansible_mitogen.connection.Connection):
return {
'skipped': True,
}
return {
'changed': True,
'result': self._connection._build_stack(),
}

@ -3,9 +3,6 @@ Arrange for all ContextService connections to be torn down unconditionally,
required for reliable LRU tests. required for reliable LRU tests.
""" """
import traceback
import sys
import ansible_mitogen.connection import ansible_mitogen.connection
import ansible_mitogen.services import ansible_mitogen.services
import mitogen.service import mitogen.service

@ -3,6 +3,7 @@
# interpreter I run within. # interpreter I run within.
from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import get_module_path
from ansible.module_utils import six from ansible.module_utils import six
import os import os
@ -29,6 +30,8 @@ def main():
mitogen_loaded='mitogen.core' in sys.modules, mitogen_loaded='mitogen.core' in sys.modules,
hostname=socket.gethostname(), hostname=socket.gethostname(),
username=pwd.getpwuid(os.getuid()).pw_name, username=pwd.getpwuid(os.getuid()).pw_name,
module_tmpdir=getattr(module, 'tmpdir', None),
module_path=get_module_path(),
) )
if __name__ == '__main__': if __name__ == '__main__':

@ -1,155 +0,0 @@
#
# Add users expected by tests to an OS X machine. Assumes passwordless sudo to
# root.
#
# WARNING: this creates non-privileged accounts with pre-set passwords!
#
- hosts: test-targets
gather_facts: true
become: true
tasks:
- name: Disable non-localhost SSH for Mitogen users
blockinfile:
path: /etc/ssh/sshd_config
block: |
Match User mitogen__* Address !127.0.0.1
DenyUsers *
#
# Hashed passwords.
#
- name: Create Mitogen test group
group:
name: "mitogen__group"
- name: Create Mitogen test users
user:
name: "mitogen__{{item}}"
shell: /bin/bash
groups: mitogen__group
password: "{{ (item + '_password') | password_hash('sha256') }}"
with_items:
- has_sudo
- has_sudo_pubkey
- require_tty
- pw_required
- readonly_homedir
- require_tty_pw_required
- slow_user
when: ansible_system != 'Darwin'
- name: Create Mitogen test users
user:
name: "mitogen__user{{item}}"
shell: /bin/bash
password: "{{ ('user' + item + '_password') | password_hash('sha256') }}"
with_sequence: start=1 end=21
when: ansible_system != 'Darwin'
#
# Plaintext passwords
#
- name: Create Mitogen test users
user:
name: "mitogen__{{item}}"
shell: /bin/bash
groups: mitogen__group
password: "{{item}}_password"
with_items:
- has_sudo
- has_sudo_pubkey
- require_tty
- pw_required
- require_tty_pw_required
- readonly_homedir
- slow_user
when: ansible_system == 'Darwin'
- name: Create Mitogen test users
user:
name: "mitogen__user{{item}}"
shell: /bin/bash
password: "user{{item}}_password"
with_sequence: start=1 end=21
when: ansible_system == 'Darwin'
- name: Hide test users from login window.
shell: >
defaults
write
/Library/Preferences/com.apple.loginwindow
HiddenUsersList
-array-add '{{item}}'
with_items:
- mitogen__require_tty
- mitogen__pw_required
- mitogen__require_tty_pw_required
when: ansible_system == 'Darwin'
- name: Hide test users from login window.
shell: >
defaults
write
/Library/Preferences/com.apple.loginwindow
HiddenUsersList
-array-add 'mitogen__user{{item}}'
with_sequence: start=1 end=21
when: ansible_distribution == 'MacOSX'
- name: Readonly homedir for one account
shell: "chown -R root: ~mitogen__readonly_homedir"
- name: Slow bash profile for one account
copy:
dest: ~mitogen__slow_user/.{{item}}
src: ../data/docker/mitogen__slow_user.profile
with_items:
- bashrc
- profile
- name: Install pubkey for one account
file:
path: ~mitogen__has_sudo_pubkey/.ssh
state: directory
mode: go=
owner: mitogen__has_sudo_pubkey
- name: Install pubkey for one account
copy:
dest: ~mitogen__has_sudo_pubkey/.ssh/authorized_keys
src: ../data/docker/mitogen__has_sudo_pubkey.key.pub
mode: go=
owner: mitogen__has_sudo_pubkey
- name: Require a TTY for two accounts
lineinfile:
path: /etc/sudoers
line: "{{item}}"
with_items:
- Defaults>mitogen__pw_required targetpw
- Defaults>mitogen__require_tty requiretty
- Defaults>mitogen__require_tty_pw_required requiretty,targetpw
- name: Require password for two accounts
lineinfile:
path: /etc/sudoers
line: "{{lookup('pipe', 'whoami')}} ALL = ({{item}}) ALL"
with_items:
- mitogen__pw_required
- mitogen__require_tty_pw_required
- name: Allow passwordless for two accounts
lineinfile:
path: /etc/sudoers
line: "{{lookup('pipe', 'whoami')}} ALL = ({{item}}) NOPASSWD:ALL"
with_items:
- mitogen__require_tty
- mitogen__readonly_homedir
- name: Allow passwordless for many accounts
lineinfile:
path: /etc/sudoers
line: "{{lookup('pipe', 'whoami')}} ALL = (mitogen__user{{item}}) NOPASSWD:ALL"
with_sequence: start=1 end=21

@ -7,3 +7,4 @@
- import_playbook: issue_152__virtualenv_python_fails.yml - import_playbook: issue_152__virtualenv_python_fails.yml
- import_playbook: issue_154__module_state_leaks.yml - import_playbook: issue_154__module_state_leaks.yml
- import_playbook: issue_177__copy_module_failing.yml - import_playbook: issue_177__copy_module_failing.yml
- import_playbook: issue_332_ansiblemoduleerror_first_occurrence.yml

@ -0,0 +1,14 @@
# issue #332: Ansible 2.6 file.py started defining an excepthook and private
# AnsibleModuleError. Ensure file fails correctly.
- name: regression/issue_332_ansiblemoduleerror_first_occurrence.yml
hosts: test-targets
tasks:
- file: path=/usr/bin/does-not-exist mode='a-s' state=file follow=yes
ignore_errors: true
register: out
- assert:
that:
- out.state == 'absent'
- out.msg == 'file (/usr/bin/does-not-exist) is absent, cannot continue'

@ -1,120 +0,0 @@
#!/usr/bin/env python
"""
Build the Docker images used for testing.
"""
import commands
import os
import shlex
import subprocess
import tempfile
DEBIAN_DOCKERFILE = r"""
FROM debian:stretch
RUN apt-get update
RUN \
apt-get install -y python2.7 openssh-server sudo rsync git strace \
libjson-perl python-virtualenv && \
apt-get clean && \
rm -rf /var/cache/apt
"""
CENTOS6_DOCKERFILE = r"""
FROM centos:6
RUN yum clean all && \
yum -y install -y python2.6 openssh-server sudo rsync git strace sudo \
perl-JSON python-virtualenv && \
yum clean all && \
groupadd sudo && \
ssh-keygen -t rsa -f /etc/ssh/ssh_host_rsa_key
"""
CENTOS7_DOCKERFILE = r"""
FROM centos:7
RUN yum clean all && \
yum -y install -y python2.7 openssh-server sudo rsync git strace sudo \
perl-JSON python-virtualenv && \
yum clean all && \
groupadd sudo && \
ssh-keygen -t rsa -f /etc/ssh/ssh_host_rsa_key
"""
DOCKERFILE = r"""
COPY data/001-mitogen.sudo /etc/sudoers.d/001-mitogen
COPY data/docker/ssh_login_banner.txt /etc/ssh/banner.txt
RUN \
chsh -s /bin/bash && \
mkdir -p /var/run/sshd && \
echo i-am-mitogen-test-docker-image > /etc/sentinel && \
echo "Banner /etc/ssh/banner.txt" >> /etc/ssh/sshd_config && \
groupadd mitogen__sudo_nopw && \
useradd -s /bin/bash -m mitogen__has_sudo -G SUDO_GROUP && \
useradd -s /bin/bash -m mitogen__has_sudo_pubkey -G SUDO_GROUP && \
useradd -s /bin/bash -m mitogen__has_sudo_nopw -G mitogen__sudo_nopw && \
useradd -s /bin/bash -m mitogen__webapp && \
useradd -s /bin/bash -m mitogen__pw_required && \
useradd -s /bin/bash -m mitogen__require_tty && \
useradd -s /bin/bash -m mitogen__require_tty_pw_required && \
useradd -s /bin/bash -m mitogen__readonly_homedir && \
useradd -s /bin/bash -m mitogen__slow_user && \
chown -R root: ~mitogen__readonly_homedir && \
( for i in `seq 1 21`; do useradd -s /bin/bash -m mitogen__user${i}; done; ) && \
( for i in `seq 1 21`; do echo mitogen__user${i}:user${i}_password | chpasswd; done; ) && \
( echo 'root:rootpassword' | chpasswd; ) && \
( echo 'mitogen__has_sudo:has_sudo_password' | chpasswd; ) && \
( echo 'mitogen__has_sudo_pubkey:has_sudo_pubkey_password' | chpasswd; ) && \
( echo 'mitogen__has_sudo_nopw:has_sudo_nopw_password' | chpasswd; ) && \
( echo 'mitogen__webapp:webapp_password' | chpasswd; ) && \
( echo 'mitogen__pw_required:pw_required_password' | chpasswd; ) && \
( echo 'mitogen__require_tty:require_tty_password' | chpasswd; ) && \
( echo 'mitogen__require_tty_pw_required:require_tty_pw_required_password' | chpasswd; ) && \
( echo 'mitogen__readonly_homedir:readonly_homedir_password' | chpasswd; ) && \
( echo 'mitogen__slow_user:slow_user_password' | chpasswd; ) && \
mkdir ~mitogen__has_sudo_pubkey/.ssh && \
( echo '#!/bin/bash\nexec strace -ff -o /tmp/pywrap$$.trace python2.7 "$@"' > /usr/local/bin/pywrap; chmod +x /usr/local/bin/pywrap; )
COPY data/docker/mitogen__has_sudo_pubkey.key.pub /home/mitogen__has_sudo_pubkey/.ssh/authorized_keys
COPY data/docker/mitogen__slow_user.profile /home/mitogen__slow_user/.profile
COPY data/docker/mitogen__slow_user.profile /home/mitogen__slow_user/.bashrc
RUN \
chown -R mitogen__has_sudo_pubkey ~mitogen__has_sudo_pubkey && \
chmod -R go= ~mitogen__has_sudo_pubkey
RUN sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config
RUN sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd
ENV NOTVISIBLE "in users profile"
RUN echo "export VISIBLE=now" >> /etc/profile
EXPOSE 22
CMD ["/usr/sbin/sshd", "-D"]
"""
def sh(s, *args):
if args:
s %= tuple(map(commands.mkarg, args))
return shlex.split(s)
for (distro, wheel, prefix) in (
('debian', 'sudo', DEBIAN_DOCKERFILE),
('centos6', 'wheel', CENTOS6_DOCKERFILE),
('centos7', 'wheel', CENTOS7_DOCKERFILE),
):
mydir = os.path.abspath(os.path.dirname(__file__))
with tempfile.NamedTemporaryFile(dir=mydir) as dockerfile_fp:
dockerfile_fp.write(prefix)
dockerfile_fp.write(DOCKERFILE.replace('SUDO_GROUP', wheel))
dockerfile_fp.flush()
subprocess.check_call(sh('docker build %s -t %s -f %s',
mydir,
'mitogen/%s-test' % (distro,),
dockerfile_fp.name
))

@ -0,0 +1,7 @@
#!/usr/bin/env python
import sys
import os
os.environ['ORIGINAL_ARGV'] = repr(sys.argv)
os.execv(sys.executable, sys.argv[sys.argv.index('--') + 1:])

@ -0,0 +1,7 @@
#!/usr/bin/env python
import sys
import os
os.environ['ORIGINAL_ARGV'] = repr(sys.argv)
os.execv(sys.executable, sys.argv[sys.argv.index('--') + 1:])

@ -0,0 +1,25 @@
# `image_prep`
This directory contains Ansible playbooks for building the Docker containers
used for testing, or for setting up an OS X laptop so the tests can (mostly)
run locally.
The Docker config is more heavily jinxed to trigger adverse conditions in the
code; the OS X config just has the user accounts.
See ../README.md for a (mostly complete) description of the accounts created.
## Building the containers
``./build_docker_images.sh``
## Preparing an OS X box
WARNING: this creates a ton of accounts with preconfigured passwords. It is
generally impossible to restrict remote access to these, so your only option is
to disable remote login and sharing.
``ansible-playbook -b -c local -i localhost, -l localhost setup.yml``

@ -0,0 +1,117 @@
- hosts: all
strategy: linear
gather_facts: false
tasks:
- raw: >
if ! python -c ''; then
if type -p yum; then
yum -y install python;
else
apt-get -y update && apt-get -y install python;
fi;
fi
- hosts: all
strategy: mitogen_free
# Can't gather facts before here.
gather_facts: true
vars:
distro: "{{ansible_distribution}}"
ver: "{{ansible_distribution_major_version}}"
packages:
common:
- git
- openssh-server
- rsync
- strace
- sudo
Debian:
"9":
- libjson-perl
- python-virtualenv
CentOS:
"6":
- perl-JSON
"7":
- perl-JSON
- python-virtualenv
tasks:
- when: ansible_virtualization_type != "docker"
meta: end_play
- apt:
name: "{{packages.common + packages[distro][ver]}}"
state: installed
update_cache: true
when: distro == "Debian"
- yum:
name: "{{packages.common + packages[distro][ver]}}"
state: installed
update_cache: true
when: distro == "CentOS"
- command: apt-get clean
when: distro == "Debian"
- command: yum clean all
when: distro == "CentOS"
- shell: rm -rf {{item}}/*
with_items:
- /var/cache/apt
- /var/lib/apt/lists
when: distro == "Debian"
- user:
name: root
password: "{{ 'rootpassword' | password_hash('sha256') }}"
shell: /bin/bash
- file:
path: /var/run/sshd
state: directory
- command: ssh-keygen -t rsa -f /etc/ssh/ssh_host_rsa_key
args:
creates: /etc/ssh/ssh_host_rsa_key
- group:
name: "{{sudo_group[distro]}}"
- copy:
dest: /etc/sentinel
content: |
i-am-mitogen-test-docker-image
- copy:
dest: /etc/ssh/banner.txt
src: ../data/docker/ssh_login_banner.txt
- copy:
dest: /etc/sudoers.d/001-mitogen
src: ../data/docker/001-mitogen.sudo
- lineinfile:
path: /etc/ssh/sshd_config
line: Banner /etc/ssh/banner.txt
- lineinfile:
path: /etc/ssh/sshd_config
line: PermitRootLogin yes
regexp: '.*PermitRootLogin.*'
- lineinfile:
path: /etc/pam.d/sshd
regexp: '.*session.*required.*pam_loginuid.so'
line: session optional pam_loginuid.so
- copy:
mode: 'u+rwx,go=rx'
dest: /usr/local/bin/pywrap
content: |
#!/bin/bash
exec strace -ff -o /tmp/pywrap$$.trace python2.7 "$@"

@ -0,0 +1,152 @@
#
# Add users expected by tests. Assumes passwordless sudo to root.
#
# WARNING: this creates non-privileged accounts with pre-set passwords!
#
- hosts: all
gather_facts: true
strategy: mitogen_free
become: true
vars:
distro: "{{ansible_distribution}}"
ver: "{{ansible_distribution_major_version}}"
special_users:
- has_sudo
- has_sudo_nopw
- has_sudo_pubkey
- pw_required
- readonly_homedir
- require_tty
- require_tty_pw_required
- slow_user
- webapp
- sudo1
- sudo2
- sudo3
- sudo4
user_groups:
has_sudo: ['mitogen__group', '{{sudo_group[distro]}}']
has_sudo_pubkey: ['mitogen__group', '{{sudo_group[distro]}}']
has_sudo_nopw: ['mitogen__group', 'mitogen__sudo_nopw']
sudo1: ['mitogen__group', 'mitogen__sudo_nopw']
sudo2: ['mitogen__group', '{{sudo_group[distro]}}']
sudo3: ['mitogen__group', '{{sudo_group[distro]}}']
sudo4: ['mitogen__group', '{{sudo_group[distro]}}']
normal_users: "{{
lookup('sequence', 'start=1 end=5 format=user%d', wantlist=True)
}}"
all_users: "{{
special_users +
normal_users
}}"
tasks:
- name: Disable non-localhost SSH for Mitogen users
when: false
blockinfile:
path: /etc/ssh/sshd_config
block: |
Match User mitogen__* Address !127.0.0.1
DenyUsers *
- name: Create Mitogen test groups
group:
name: "mitogen__{{item}}"
with_items:
- group
- sudo_nopw
- name: Create user accounts
block:
- user:
name: "mitogen__{{item}}"
shell: /bin/bash
groups: "{{user_groups[item]|default(['mitogen__group'])}}"
password: "{{ (item + '_password') | password_hash('sha256') }}"
loop: "{{all_users}}"
when: ansible_system != 'Darwin'
- user:
name: "mitogen__{{item}}"
shell: /bin/bash
groups: "{{user_groups[item]|default(['mitogen__group'])}}"
password: "{{item}}_password"
loop: "{{all_users}}"
when: ansible_system == 'Darwin'
- name: Hide users from login window.
loop: "{{all_users}}"
when: ansible_system == 'Darwin'
osx_defaults:
array_add: true
domain: /Library/Preferences/com.apple.loginwindow
type: array
key: HiddenUsersList
value: ['mitogen_{{item}}']
- name: Readonly homedir for one account
shell: "chown -R root: ~mitogen__readonly_homedir"
- name: Slow bash profile for one account
copy:
dest: ~mitogen__slow_user/.{{item}}
src: ../data/docker/mitogen__slow_user.profile
with_items:
- bashrc
- profile
- name: Install pubkey for mitogen__has_sudo_pubkey
block:
- file:
path: ~mitogen__has_sudo_pubkey/.ssh
state: directory
mode: go=
owner: mitogen__has_sudo_pubkey
- copy:
dest: ~mitogen__has_sudo_pubkey/.ssh/authorized_keys
src: ../data/docker/mitogen__has_sudo_pubkey.key.pub
mode: go=
owner: mitogen__has_sudo_pubkey
- name: Install slow profile for one account
block:
- copy:
dest: ~mitogen__slow_user/.profile
src: ../data/docker/mitogen__slow_user.profile
- copy:
dest: ~mitogen__slow_user/.bashrc
src: ../data/docker/mitogen__slow_user.profile
- name: Require a TTY for two accounts
lineinfile:
path: /etc/sudoers
line: "{{item}}"
with_items:
- Defaults>mitogen__pw_required targetpw
- Defaults>mitogen__require_tty requiretty
- Defaults>mitogen__require_tty_pw_required requiretty,targetpw
- name: Require password for two accounts
lineinfile:
path: /etc/sudoers
line: "{{lookup('pipe', 'whoami')}} ALL = ({{item}}) ALL"
with_items:
- mitogen__pw_required
- mitogen__require_tty_pw_required
- name: Allow passwordless sudo for require_tty/readonly_homedir
lineinfile:
path: /etc/sudoers
line: "{{lookup('pipe', 'whoami')}} ALL = ({{item}}) NOPASSWD:ALL"
with_items:
- mitogen__require_tty
- mitogen__readonly_homedir
- name: Allow passwordless for many accounts
lineinfile:
path: /etc/sudoers
line: "{{lookup('pipe', 'whoami')}} ALL = (mitogen__{{item}}) NOPASSWD:ALL"
loop: "{{normal_users}}"

@ -0,0 +1,4 @@
[defaults]
strategy_plugins = ../../ansible_mitogen/plugins/strategy
retry_files_enabled = false

@ -0,0 +1,57 @@
#!/usr/bin/env python
"""
Build the Docker images used for testing.
"""
import commands
import os
import tempfile
import shlex
import subprocess
BASEDIR = os.path.dirname(os.path.abspath(__file__))
def sh(s, *args):
if args:
s %= args
return shlex.split(s)
label_by_id = {}
for base_image, label in [
('debian:stretch', 'debian'),
('centos:6', 'centos6'),
('centos:7', 'centos7')
]:
args = sh('docker run --rm -it -d -h mitogen-%s %s /bin/bash',
label, base_image)
container_id = subprocess.check_output(args).strip()
label_by_id[container_id] = label
with tempfile.NamedTemporaryFile() as fp:
fp.write('[all]\n')
for id_, label in label_by_id.items():
fp.write('%s ansible_host=%s\n' % (label, id_))
fp.flush()
try:
subprocess.check_call(
cwd=BASEDIR,
args=sh('ansible-playbook -i %s -c docker setup.yml', fp.name),
)
for container_id, label in label_by_id.items():
subprocess.check_call(sh('''
docker commit
--change 'EXPOSE 22'
--change 'CMD ["/usr/sbin/sshd", "-D"]'
%s
mitogen/%s-test
''', container_id, label))
finally:
subprocess.check_call(sh('docker rm -f %s', ' '.join(label_by_id)))

@ -0,0 +1,13 @@
- hosts: all
gather_facts: false
tasks:
- set_fact:
# Hacktacular.. but easiest place for it with current structure.
sudo_group:
MacOSX: admin
Debian: sudo
CentOS: wheel
- import_playbook: _container_setup.yml
- import_playbook: _user_accounts.yml

@ -0,0 +1,29 @@
import os
import mitogen
import unittest2
import testlib
def has_subseq(seq, subseq):
return any(seq[x:x+len(subseq)] == subseq for x in range(0, len(seq)))
class FakeLxcAttachTest(testlib.RouterMixin, unittest2.TestCase):
def test_okay(self):
lxc_attach_path = testlib.data_path('fake_lxc_attach.py')
context = self.router.lxc(
container='container_name',
lxc_attach_path=lxc_attach_path,
)
argv = eval(context.call(os.getenv, 'ORIGINAL_ARGV'))
self.assertEquals(argv[0], lxc_attach_path)
self.assertTrue('--clear-env' in argv)
self.assertTrue(has_subseq(argv, ['--name', 'container_name']))
if __name__ == '__main__':
unittest2.main()

@ -0,0 +1,26 @@
import os
import mitogen
import unittest2
import testlib
class FakeLxcTest(testlib.RouterMixin, unittest2.TestCase):
def test_okay(self):
lxc_path = testlib.data_path('fake_lxc.py')
context = self.router.lxd(
container='container_name',
lxc_path=lxc_path,
)
argv = eval(context.call(os.getenv, 'ORIGINAL_ARGV'))
self.assertEquals(argv[0], lxc_path)
self.assertEquals(argv[1], 'exec')
self.assertEquals(argv[2], '--force-noninteractive')
self.assertEquals(argv[3], 'container_name')
if __name__ == '__main__':
unittest2.main()

@ -5,6 +5,7 @@ import sys
import tempfile import tempfile
import time import time
import mock
import unittest2 import unittest2
import testlib import testlib
@ -28,6 +29,21 @@ def wait_for_child(pid, timeout=1.0):
assert False, "wait_for_child() timed out" assert False, "wait_for_child() timed out"
class GetDefaultRemoteNameTest(testlib.TestCase):
func = staticmethod(mitogen.parent.get_default_remote_name)
@mock.patch('os.getpid')
@mock.patch('getpass.getuser')
@mock.patch('socket.gethostname')
def test_slashes(self, mock_gethostname, mock_getuser, mock_getpid):
# Ensure slashes appearing in the remote name are replaced with
# underscores.
mock_gethostname.return_value = 'box'
mock_getuser.return_value = 'ECORP\\Administrator'
mock_getpid.return_value = 123
self.assertEquals("ECORP_Administrator@box:123", self.func())
class ReapChildTest(testlib.RouterMixin, testlib.TestCase): class ReapChildTest(testlib.RouterMixin, testlib.TestCase):
def test_connect_timeout(self): def test_connect_timeout(self):
# Ensure the child process is reaped if the connection times out. # Ensure the child process is reaped if the connection times out.
