Merge pull request #862 from moreati/release-0.3.0

Release 0.3.0
Alex Willmer (committed via GitHub)
commit 74e7bc2bf7

@@ -1,8 +1,8 @@
 # `.ci`

-This directory contains scripts for Travis CI and (more or less) Azure
-Pipelines, but they will also happily run on any Debian-like machine.
+This directory contains scripts for Continuous Integration platforms. Currently
+Azure Pipelines, but they will also happily run on any Debian-like machine.

 The scripts are usually split into `_install` and `_test` steps. The `_install`
 step will damage your machine, the `_test` step will just run the tests the way

@@ -6,16 +6,22 @@ batches = [
     [
         # Must be installed separately, as PyNACL indirect requirement causes
        # newer version to be installed if done in a single pip run.
+        # Separately install ansible based on version passed in from azure-pipelines.yml or .travis.yml
        'pip install "pycparser<2.19" "idna<2.7"',
        'pip install '
            '-r tests/requirements.txt '
            '-r tests/ansible/requirements.txt',
+        # encoding is required for installing ansible 2.10 with pip2, otherwise we get a UnicodeDecode error
+        'LC_CTYPE=en_US.UTF-8 LANG=en_US.UTF-8 pip install "ansible-base<2.10.14" "ansible=={}"'.format(ci_lib.ANSIBLE_VERSION)
+    ],
+    [
+        'aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws',
     ]
 ]

-batches.extend(
-    ['docker pull %s' % (ci_lib.image_for_distro(distro),)]
+batches[-1].extend([
+    'docker pull %s' % (ci_lib.image_for_distro(distro),)
     for distro in ci_lib.DISTROS
-)
+])

 ci_lib.run_batches(batches)

@@ -37,9 +37,6 @@ with ci_lib.Fold('docker_setup'):

 with ci_lib.Fold('job_setup'):
-    # Don't set -U as that will upgrade Paramiko to a non-2.6 compatible version.
-    run("pip install -q ansible==%s", ci_lib.ANSIBLE_VERSION)
     os.chdir(TESTS_DIR)
     os.chmod('../data/docker/mitogen__has_sudo_pubkey.key', int('0600', 7))

@@ -69,13 +66,11 @@ with ci_lib.Fold('job_setup'):
     run("sudo apt-get update")
     run("sudo apt-get install -y sshpass")

-    run("bash -c 'sudo ln -vfs /usr/lib/python2.7/plat-x86_64-linux-gnu/_sysconfigdata_nd.py /usr/lib/python2.7 || true'")
-    run("bash -c 'sudo ln -vfs /usr/lib/python2.7/plat-x86_64-linux-gnu/_sysconfigdata_nd.py $VIRTUAL_ENV/lib/python2.7 || true'")

 with ci_lib.Fold('ansible'):
     playbook = os.environ.get('PLAYBOOK', 'all.yml')
     try:
-        run('./run_ansible_playbook.py %s -i "%s" %s',
+        run('./run_ansible_playbook.py %s -i "%s" -vvv %s',
             playbook, HOSTS_DIR, ' '.join(sys.argv[1:]))
     except:
         pause_if_interactive()

@@ -8,24 +8,17 @@ steps:
 - script: "PYTHONVERSION=$(python.version) .ci/prep_azure.py"
   displayName: "Run prep_azure.py"

-# The VSTS-shipped Pythons available via UsePythonVErsion are pure garbage,
-# broken symlinks, incorrect permissions and missing codecs. So we use the
-# deadsnakes PPA to get sane Pythons, and setup a virtualenv to install our
-# stuff into. The virtualenv can probably be removed again, but this was a
-# hard-fought battle and for now I am tired of this crap.
 - script: |
-    sudo ln -fs /usr/bin/python$(python.version) /usr/bin/python
-    /usr/bin/python -m pip install -U virtualenv setuptools wheel
-    /usr/bin/python -m virtualenv /tmp/venv -p /usr/bin/python$(python.version)
     echo "##vso[task.prependpath]/tmp/venv/bin"
   displayName: activate venv

-- script: .ci/spawn_reverse_shell.py
-  displayName: "Spawn reverse shell"
-
 - script: .ci/$(MODE)_install.py
   displayName: "Run $(MODE)_install.py"
+  env:
+    AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
+    AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
+    AWS_DEFAULT_REGION: $(AWS_DEFAULT_REGION)

 - script: .ci/$(MODE)_tests.py
   displayName: "Run $(MODE)_tests.py"

@@ -6,23 +6,35 @@
 jobs:
 - job: Mac
+  # vanilla Ansible is really slow
+  timeoutInMinutes: 120
   steps:
   - template: azure-pipelines-steps.yml
   pool:
-    vmImage: macOS-10.13
+    vmImage: macOS-10.15
   strategy:
     matrix:
       Mito27_27:
         python.version: '2.7'
         MODE: mitogen
-      Ans280_27:
+      # TODO: test python3, python3 tests are broken
+      Ans210_27:
         python.version: '2.7'
         MODE: localhost_ansible
+        VER: 2.10.0
+      # NOTE: this hangs when ran in Ubuntu 18.04
+      Vanilla_210_27:
+        python.version: '2.7'
+        MODE: localhost_ansible
+        VER: 2.10.0
+        STRATEGY: linear
+        ANSIBLE_SKIP_TAGS: resource_intensive

 - job: Linux
   pool:
-    vmImage: "Ubuntu 16.04"
+    vmImage: "Ubuntu 18.04"
   steps:
   - template: azure-pipelines-steps.yml
   strategy:
@@ -33,7 +45,7 @@ jobs:
       Mito27Debian_27:
         python.version: '2.7'
         MODE: mitogen
-        DISTRO: debian
+        DISTRO: debian9

       #MitoPy27CentOS6_26:
       #  python.version: '2.7'
@@ -45,9 +57,16 @@ jobs:
         MODE: mitogen
         DISTRO: centos6

+      # Mito37Debian_27:
+      #   python.version: '3.7'
+      #   MODE: mitogen
+      #   DISTRO: debian9
+      Mito39Debian_27:
+        python.version: '3.9'
+        MODE: mitogen
+        DISTRO: debian9
+        VER: 2.10.0

       #Py26CentOS7:
       #  python.version: '2.7'
@@ -91,12 +110,17 @@ jobs:
       #DISTROS: debian
       #STRATEGY: linear

-      Ansible_280_27:
+      Ansible_210_27:
         python.version: '2.7'
         MODE: ansible
-        VER: 2.8.0
+        VER: 2.10.0

-      Ansible_280_35:
+      Ansible_210_35:
         python.version: '3.5'
         MODE: ansible
-        VER: 2.8.0
+        VER: 2.10.0
+
+      Ansible_210_39:
+        python.version: '3.9'
+        MODE: ansible
+        VER: 2.10.0

@@ -49,6 +49,10 @@ def have_apt():
     proc = subprocess.Popen('apt --help >/dev/null 2>/dev/null', shell=True)
     return proc.wait() == 0

+def have_brew():
+    proc = subprocess.Popen('brew help >/dev/null 2>/dev/null', shell=True)
+    return proc.wait() == 0

 def have_docker():
     proc = subprocess.Popen('docker info >/dev/null 2>/dev/null', shell=True)

@@ -60,32 +64,30 @@ def have_docker():
 # Force line buffering on stdout.
 sys.stdout = os.fdopen(1, 'w', 1)

-# Force stdout FD 1 to be a pipe, so tools like pip don't spam progress bars.
-if 'TRAVIS_HOME' in os.environ:
-    proc = subprocess.Popen(
-        args=['stdbuf', '-oL', 'cat'],
-        stdin=subprocess.PIPE
-    )
-
-    os.dup2(proc.stdin.fileno(), 1)
-    os.dup2(proc.stdin.fileno(), 2)
-
-    def cleanup_travis_junk(stdout=sys.stdout, stderr=sys.stderr, proc=proc):
-        stdout.close()
-        stderr.close()
-        proc.terminate()
-
-    atexit.register(cleanup_travis_junk)
-
-# -----------------

 def _argv(s, *args):
+    """Interpolate a command line using *args, return an argv style list.
+
+    >>> _argv('git commit -m "Use frobnicate 2.0 (fixes #%d)"', 1234)
+    ['git', commit', '-m', 'Use frobnicate 2.0 (fixes #1234)']
+    """
     if args:
         s %= args
     return shlex.split(s)

 def run(s, *args, **kwargs):
+    """ Run a command, with arguments, and print timing information
+
+    >>> rc = run('echo "%s %s"', 'foo', 'bar')
+    Running: ['/usr/bin/time', '--', 'echo', 'foo bar']
+    foo bar
+    0.00user 0.00system 0:00.00elapsed ?%CPU (0avgtext+0avgdata 1964maxresident)k
+    0inputs+0outputs (0major+71minor)pagefaults 0swaps
+    Finished running: ['/usr/bin/time', '--', 'echo', 'foo bar']
+    >>> rc
+    0
+    """
     argv = ['/usr/bin/time', '--'] + _argv(s, *args)
     print('Running: %s' % (argv,))
     try:

@@ -98,12 +100,50 @@ def run(s, *args, **kwargs):
     return ret

-def run_batches(batches):
-    combine = lambda batch: 'set -x; ' + (' && '.join(
+def combine(batch):
+    """
+    >>> combine(['ls -l', 'echo foo'])
+    'set -x; ( ls -l; ) && ( echo foo; )'
+    """
+    return 'set -x; ' + (' && '.join(
         '( %s; )' % (cmd,)
         for cmd in batch
     ))

+def throttle(batch, pause=1):
+    """
+    Add pauses between commands in a batch
+
+    >>> throttle(['echo foo', 'echo bar', 'echo baz'])
+    ['echo foo', 'sleep 1', 'echo bar', 'sleep 1', 'echo baz']
+    """
+    def _with_pause(batch, pause):
+        for cmd in batch:
+            yield cmd
+            yield 'sleep %i' % (pause,)
+    return list(_with_pause(batch, pause))[:-1]
+
+def run_batches(batches):
+    """ Run shell commands grouped into batches, showing an execution trace.
+
+    Raise AssertionError if any command has exits with a non-zero status.
+
+    >>> run_batches([['echo foo', 'true']])
+    + echo foo
+    foo
+    + true
+    >>> run_batches([['true', 'echo foo'], ['false']])
+    + true
+    + echo foo
+    foo
+    + false
+    Traceback (most recent call last):
+      File "...", line ..., in <module>
+      File "...", line ..., in run_batches
+    AssertionError
+    """
     procs = [
         subprocess.Popen(combine(batch), shell=True)
         for batch in batches

@@ -112,12 +152,28 @@ def run_batches(batches):

 def get_output(s, *args, **kwargs):
+    """
+    Print and run command line s, %-interopolated using *args. Return stdout.
+
+    >>> s = get_output('echo "%s %s"', 'foo', 'bar')
+    Running: ['echo', 'foo bar']
+    >>> s
+    'foo bar\n'
+    """
     argv = _argv(s, *args)
     print('Running: %s' % (argv,))
     return subprocess.check_output(argv, **kwargs)

 def exists_in_path(progname):
+    """
+    Return True if proganme exists in $PATH.
+
+    >>> exists_in_path('echo')
+    True
+    >>> exists_in_path('kwyjibo')  # Only found in North American cartoons
+    False
+    """
     return any(os.path.exists(os.path.join(dirname, progname))
                for dirname in os.environ['PATH'].split(os.pathsep))

@@ -132,22 +188,19 @@ class TempDir(object):

 class Fold(object):
-    def __init__(self, name):
-        self.name = name
-
-    def __enter__(self):
-        print('travis_fold:start:%s' % (self.name))
-
-    def __exit__(self, _1, _2, _3):
-        print('')
-        print('travis_fold:end:%s' % (self.name))
+    def __init__(self, name): pass
+    def __enter__(self): pass
+    def __exit__(self, _1, _2, _3): pass

 os.environ.setdefault('ANSIBLE_STRATEGY',
     os.environ.get('STRATEGY', 'mitogen_linear'))
+# Ignoreed when MODE=mitogen
 ANSIBLE_VERSION = os.environ.get('VER', '2.6.2')
 GIT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
+# Used only when MODE=mitogen
 DISTRO = os.environ.get('DISTRO', 'debian')
+# Used only when MODE=ansible
 DISTROS = os.environ.get('DISTROS', 'debian centos6 centos7').split()
 TARGET_COUNT = int(os.environ.get('TARGET_COUNT', '2'))
 BASE_PORT = 2200

@@ -171,6 +224,8 @@ os.environ['PYTHONPATH'] = '%s:%s' % (
 )

 def get_docker_hostname():
+    """Return the hostname where the docker daemon is running.
+    """
     url = os.environ.get('DOCKER_HOST')
     if url in (None, 'http+docker://localunixsocket'):
         return 'localhost'

@@ -180,10 +235,34 @@ def get_docker_hostname():

 def image_for_distro(distro):
-    return 'mitogen/%s-test' % (distro.partition('-')[0],)
+    """Return the container image name or path for a test distro name.
+
+    The returned value is suitable for use with `docker pull`.
+
+    >>> image_for_distro('centos5')
+    'public.ecr.aws/n5z0e8q9/centos5-test'
+    >>> image_for_distro('centos5-something_custom')
+    'public.ecr.aws/n5z0e8q9/centos5-test'
+    """
+    return 'public.ecr.aws/n5z0e8q9/%s-test' % (distro.partition('-')[0],)

 def make_containers(name_prefix='', port_offset=0):
+    """
+    >>> import pprint
+    >>> BASE_PORT=2200; DISTROS=['debian', 'centos6']
+    >>> pprint.pprint(make_containers())
+    [{'distro': 'debian',
+      'hostname': 'localhost',
+      'name': 'target-debian-1',
+      'port': 2201,
+      'python_path': '/usr/bin/python'},
+     {'distro': 'centos6',
+      'hostname': 'localhost',
+      'name': 'target-centos6-2',
+      'port': 2202,
+      'python_path': '/usr/bin/python'}]
+    """
     docker_hostname = get_docker_hostname()
     firstbit = lambda s: (s+'-').split('-')[0]
     secondbit = lambda s: (s+'-').split('-')[1]

@@ -256,6 +335,14 @@ def get_interesting_procs(container_name=None):

 def start_containers(containers):
+    """Run docker containers in the background, with sshd on specified ports.
+
+    >>> containers = start_containers([
+    ...     {'distro': 'debian', 'hostname': 'localhost',
+    ...      'name': 'target-debian-1', 'port': 2201,
+    ...      'python_path': '/usr/bin/python'},
+    ... ])
+    """
     if os.environ.get('KEEP'):
         return

@@ -10,9 +10,12 @@ ci_lib.run_batches([
         # Must be installed separately, as PyNACL indirect requirement causes
         # newer version to be installed if done in a single pip run.
         'pip install "pycparser<2.19"',
-        'pip install -qqqU debops==0.7.2 ansible==%s' % ci_lib.ANSIBLE_VERSION,
+        'pip install -qqq "debops[ansible]==2.1.2" "ansible-base<2.10.14" "ansible=={}"'.format(ci_lib.ANSIBLE_VERSION),
     ],
     [
+        'aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws',
         'docker pull %s' % (ci_lib.image_for_distro('debian'),),
     ],
 ])

+ci_lib.run('ansible-galaxy collection install debops.debops:==2.1.2')

@@ -26,12 +26,14 @@ with ci_lib.Fold('job_setup'):
     ci_lib.run('debops-init %s', project_dir)
     os.chdir(project_dir)

+    ansible_strategy_plugin = "{}/ansible_mitogen/plugins/strategy".format(ci_lib.GIT_ROOT)
+
     with open('.debops.cfg', 'w') as fp:
         fp.write(
             "[ansible defaults]\n"
-            "strategy_plugins = %s/ansible_mitogen/plugins/strategy\n"
+            "strategy_plugins = {}\n"
             "strategy = mitogen_linear\n"
-            % (ci_lib.GIT_ROOT,)
+            .format(ansible_strategy_plugin)
         )

     with open(vars_path, 'w') as fp:

@@ -6,10 +6,13 @@ batches = [
     [
         # Must be installed separately, as PyNACL indirect requirement causes
         # newer version to be installed if done in a single pip run.
-        'pip install "pycparser<2.19" "idna<2.7"',
+        # Separately install ansible based on version passed in from azure-pipelines.yml or .travis.yml
+        # Don't set -U as that will upgrade Paramiko to a non-2.6 compatible version.
+        'pip install "pycparser<2.19" "idna<2.7" virtualenv',
        'pip install '
            '-r tests/requirements.txt '
            '-r tests/ansible/requirements.txt',
+        'pip install -q "ansible-base<2.10.14" "ansible=={}"'.format(ci_lib.ANSIBLE_VERSION)
     ]
 ]

@@ -1,9 +1,7 @@
 #!/usr/bin/env python
 # Run tests/ansible/all.yml under Ansible and Ansible-Mitogen

-import glob
 import os
-import shutil
 import sys

 import ci_lib

@@ -22,33 +20,37 @@ with ci_lib.Fold('unit_tests'):

 with ci_lib.Fold('job_setup'):
-    # Don't set -U as that will upgrade Paramiko to a non-2.6 compatible version.
-    run("pip install -q virtualenv ansible==%s", ci_lib.ANSIBLE_VERSION)
     os.chmod(KEY_PATH, int('0600', 8))

+    # NOTE: sshpass v1.06 causes errors so pegging to 1.05 -> "msg": "Error when changing password","out": "passwd: DS error: eDSAuthFailed\n",
+    # there's a checksum error with "brew install http://git.io/sshpass.rb" though, so installing manually
     if not ci_lib.exists_in_path('sshpass'):
-        run("brew install http://git.io/sshpass.rb")
+        os.system("curl -O -L https://sourceforge.net/projects/sshpass/files/sshpass/1.05/sshpass-1.05.tar.gz && \
+            tar xvf sshpass-1.05.tar.gz && \
+            cd sshpass-1.05 && \
+            ./configure && \
+            sudo make install")

 with ci_lib.Fold('machine_prep'):
-    ssh_dir = os.path.expanduser('~/.ssh')
-    if not os.path.exists(ssh_dir):
-        os.makedirs(ssh_dir, int('0700', 8))
-
-    key_path = os.path.expanduser('~/.ssh/id_rsa')
-    shutil.copy(KEY_PATH, key_path)
-
-    auth_path = os.path.expanduser('~/.ssh/authorized_keys')
-    os.system('ssh-keygen -y -f %s >> %s' % (key_path, auth_path))
-    os.chmod(auth_path, int('0600', 8))
+    # generate a new ssh key for localhost ssh
+    os.system("ssh-keygen -P '' -m pem -f ~/.ssh/id_rsa")
+    os.system("cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys")
+    # also generate it for the sudo user
+    os.system("sudo ssh-keygen -P '' -m pem -f /var/root/.ssh/id_rsa")
+    os.system("sudo cat /var/root/.ssh/id_rsa.pub | sudo tee -a /var/root/.ssh/authorized_keys")
+    os.chmod(os.path.expanduser('~/.ssh'), int('0700', 8))
+    os.chmod(os.path.expanduser('~/.ssh/authorized_keys'), int('0600', 8))
+    # run chmod through sudo since it's owned by root
+    os.system('sudo chmod 600 /var/root/.ssh')
+    os.system('sudo chmod 600 /var/root/.ssh/authorized_keys')

     if os.path.expanduser('~mitogen__user1') == '~mitogen__user1':
         os.chdir(IMAGE_PREP_DIR)
-        run("ansible-playbook -c local -i localhost, _user_accounts.yml")
+        run("ansible-playbook -c local -i localhost, _user_accounts.yml -vvv")

 with ci_lib.Fold('ansible'):
     os.chdir(TESTS_DIR)
     playbook = os.environ.get('PLAYBOOK', 'all.yml')
-    run('./run_ansible_playbook.py %s -l target %s',
+    run('./run_ansible_playbook.py %s -l target %s -vvv',
         playbook, ' '.join(sys.argv[1:]))

@@ -11,6 +11,7 @@ batches = [
 if ci_lib.have_docker():
     batches.append([
+        'aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws',
         'docker pull %s' % (ci_lib.image_for_distro(ci_lib.DISTRO),),
     ])

@@ -4,6 +4,7 @@ import ci_lib
 batches = [
     [
+        'aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws',
         'docker pull %s' % (ci_lib.image_for_distro(ci_lib.DISTRO),),
     ],
     [

@@ -30,8 +30,20 @@ if 0 and os.uname()[0] == 'Linux':
     ]
 ]

+# setup venv, need all python commands in 1 list to be subprocessed at the same time
+venv_steps = []
+
+need_to_fix_psycopg2 = False
+is_python3 = os.environ['PYTHONVERSION'].startswith('3')
+
+# @dw: The VSTS-shipped Pythons available via UsePythonVErsion are pure garbage,
+# broken symlinks, incorrect permissions and missing codecs. So we use the
+# deadsnakes PPA to get sane Pythons, and setup a virtualenv to install our
+# stuff into. The virtualenv can probably be removed again, but this was a
+# hard-fought battle and for now I am tired of this crap.
 if ci_lib.have_apt():
-    batches.append([
+    venv_steps.extend([
         'echo force-unsafe-io | sudo tee /etc/dpkg/dpkg.cfg.d/nosync',
         'sudo add-apt-repository ppa:deadsnakes/ppa',
         'sudo apt-get update',
@@ -40,15 +52,44 @@ if ci_lib.have_apt():
             'python{pv}-dev '
             'libsasl2-dev '
             'libldap2-dev '
-        .format(pv=os.environ['PYTHONVERSION'])
+        .format(pv=os.environ['PYTHONVERSION']),
+        'sudo ln -fs /usr/bin/python{pv} /usr/local/bin/python{pv}'
+        .format(pv=os.environ['PYTHONVERSION'])
     ])
+    if is_python3:
+        venv_steps.append('sudo apt-get -y install python{pv}-venv'.format(pv=os.environ['PYTHONVERSION']))
+# TODO: somehow `Mito36CentOS6_26` has both brew and apt installed https://dev.azure.com/dw-mitogen/Mitogen/_build/results?buildId=1031&view=logs&j=7bdbcdc6-3d3e-568d-ccf8-9ddca1a9623a&t=73d379b6-4eea-540f-c97e-046a2f620483
+elif is_python3 and ci_lib.have_brew():
+    # Mac's System Integrity Protection prevents symlinking /usr/bin
+    # and Azure isn't allowing disabling it apparently: https://developercommunityapi.westus.cloudapp.azure.com/idea/558702/allow-disabling-sip-on-microsoft-hosted-macos-agen.html
+    # so we'll use /usr/local/bin/python for everything
+    # /usr/local/bin/python2.7 already exists!
+    need_to_fix_psycopg2 = True
+    venv_steps.append(
+        'brew install python@{pv} postgresql'
+        .format(pv=os.environ['PYTHONVERSION'])
+    )
+
+# need wheel before building virtualenv because of bdist_wheel and setuptools deps
+venv_steps.append('/usr/local/bin/python{pv} -m pip install -U pip wheel setuptools'.format(pv=os.environ['PYTHONVERSION']))

-if ci_lib.have_docker():
-    batches.extend(
-        ['docker pull %s' % (ci_lib.image_for_distro(distro),)]
-        for distro in ci_lib.DISTROS
-    )
+if os.environ['PYTHONVERSION'].startswith('2'):
+    venv_steps.extend([
+        '/usr/local/bin/python{pv} -m pip install -U virtualenv'.format(pv=os.environ['PYTHONVERSION']),
+        '/usr/local/bin/python{pv} -m virtualenv /tmp/venv -p /usr/local/bin/python{pv}'.format(pv=os.environ['PYTHONVERSION'])
+    ])
+else:
+    venv_steps.append('/usr/local/bin/python{pv} -m venv /tmp/venv'.format(pv=os.environ['PYTHONVERSION']))
+# fixes https://stackoverflow.com/questions/59595649/can-not-install-psycopg2-on-macos-catalina https://github.com/Azure/azure-cli/issues/12854#issuecomment-619213863
+if need_to_fix_psycopg2:
+    venv_steps.append('/tmp/venv/bin/pip3 install psycopg2==2.8.5 psycopg2-binary')
+
+venv_steps.extend([
+    # pbr is a transitive setup_requires of hdrhistogram. If it's not already
+    # installed then setuptools attempts to use easy_install, which fails.
+    '/tmp/venv/bin/pip install pbr==5.6.0',
+])
+
+batches.append(venv_steps)

 ci_lib.run_batches(batches)

@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Allow poking around Azure while the job is running.
-"""
-
-import os
-import pty
-import socket
-import subprocess
-import sys
-import time
-
-if os.fork():
-    sys.exit(0)
-
-def try_once():
-    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    s.connect(("k3.botanicus.net", 9494))
-    open('/tmp/interactive', 'w').close()
-
-    os.dup2(s.fileno(), 0)
-    os.dup2(s.fileno(), 1)
-    os.dup2(s.fileno(), 2)
-    p = pty.spawn("/bin/sh")
-
-while True:
-    try:
-        try_once()
-    except:
-        time.sleep(5)
-        continue

@@ -0,0 +1,33 @@
+---
+name: Mitogen 0.2.x bug report
+about: Report a bug in Mitogen 0.2.x (for Ansible 2.5, 2.6, 2.7, 2.8, or 2.9)
+title: ''
+labels: affects-0.2, bug
+assignees: ''
+
+---
+
+Please drag-drop large logs as text file attachments.
+
+Feel free to write an issue in your preferred format, however if in doubt, use
+the following checklist as a guide for what to include.
+
+* Which version of Ansible are you running?
+* Is your version of Ansible patched in any way?
+* Are you running with any custom modules, or `module_utils` loaded?
+* Have you tried the latest master version from Git?
+* Do you have some idea of what the underlying problem may be?
+  https://mitogen.networkgenomics.com/ansible_detailed.html#common-problems has
+  instructions to help figure out the likely cause and how to gather relevant
+  logs.
+* Mention your host and target OS and versions
+* Mention your host and target Python versions
+* If reporting a performance issue, mention the number of targets and a rough
+  description of your workload (lots of copies, lots of tiny file edits, etc.)
+* If reporting a crash or hang in Ansible, please rerun with -vvv and include
+  200 lines of output around the point of the error, along with a full copy of
+  any traceback or error text in the log. Beware "-vvv" may include secret
+  data! Edit as necessary before posting.
+* If reporting any kind of problem with Ansible, please include the Ansible
+  version along with output of "ansible-config dump --only-changed".

@@ -1,3 +1,11 @@
+---
+name: Mitogen 0.3.x bug report
+about: Report a bug in Mitogen 0.3.x (for Ansible 2.10.x)
+title: ''
+labels: affects-0.3, bug
+assignees: ''
+
+---
+
 Please drag-drop large logs as text file attachments.

@@ -1,84 +0,0 @@
-sudo: required
-dist: trusty
-notifications:
-  email: false
-  irc: "chat.freenode.net#mitogen-builds"
-language: python
-branches:
-  except:
-  - docs-master
-cache:
-- pip
-- directories:
-  - /home/travis/virtualenv
-install:
-- grep -Erl git-lfs\|couchdb /etc/apt | sudo xargs rm -v
-- .ci/${MODE}_install.py
-script:
-- .ci/spawn_reverse_shell.py
-- .ci/${MODE}_tests.py
-# To avoid matrix explosion, just test against oldest->newest and
-# newest->oldest in various configuartions.
-matrix:
-  allow_failures:
-    # Python 2.4 tests are still unreliable
-    - language: c
-      env: MODE=mitogen_py24 DISTRO=centos5
-  include:
-    # Debops tests.
-    # 2.8.3; 3.6 -> 2.7
-    - python: "3.6"
-      env: MODE=debops_common VER=2.8.3
-    # 2.4.6.0; 2.7 -> 2.7
-    - python: "2.7"
-      env: MODE=debops_common VER=2.4.6.0
-    # Sanity check against vanilla Ansible. One job suffices.
-    - python: "2.7"
-      env: MODE=ansible VER=2.8.3 DISTROS=debian STRATEGY=linear
-    # ansible_mitogen tests.
-    # 2.8.3 -> {debian, centos6, centos7}
-    - python: "3.6"
-      env: MODE=ansible VER=2.8.3
-    # 2.8.3 -> {debian, centos6, centos7}
-    - python: "2.7"
-      env: MODE=ansible VER=2.8.3
-    # 2.4.6.0 -> {debian, centos6, centos7}
-    - python: "3.6"
-      env: MODE=ansible VER=2.4.6.0
-    # 2.4.6.0 -> {debian, centos6, centos7}
-    - python: "2.6"
-      env: MODE=ansible VER=2.4.6.0
-    # 2.3 -> {centos5}
-    - python: "2.6"
-      env: MODE=ansible VER=2.3.3.0 DISTROS=centos5
-    # Mitogen tests.
-    # 2.4 -> 2.4
-    - language: c
-      env: MODE=mitogen_py24 DISTRO=centos5
-    # 2.7 -> 2.7 -- moved to Azure
-    # 2.7 -> 2.6
-    #- python: "2.7"
-    #  env: MODE=mitogen DISTRO=centos6
-    # 2.6 -> 2.7
-    - python: "2.6"
-      env: MODE=mitogen DISTRO=centos7
-    # 2.6 -> 3.5
-    - python: "2.6"
-      env: MODE=mitogen DISTRO=debian-py3
-    # 3.6 -> 2.6 -- moved to Azure

@@ -1,4 +1,4 @@
-Copyright 2019, David Wilson
+Copyright 2021, the Mitogen authors

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are met:

@@ -1,13 +1,9 @@
 # Mitogen

-<!-- [![Build Status](https://travis-ci.org/dw/mitogen.png?branch=master)](https://travis-ci.org/dw/mitogen}) -->
-
 <a href="https://mitogen.networkgenomics.com/">Please see the documentation</a>.

 ![](https://i.imgur.com/eBM6LhJ.gif)

-[![Total alerts](https://img.shields.io/lgtm/alerts/g/dw/mitogen.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/dw/mitogen/alerts/)
-[![Build Status](https://travis-ci.org/dw/mitogen.svg?branch=master)](https://travis-ci.org/dw/mitogen)
-[![Pipelines Status](https://dev.azure.com/dw-mitogen/Mitogen/_apis/build/status/dw.mitogen?branchName=master)](https://dev.azure.com/dw-mitogen/Mitogen/_build/latest?definitionId=1?branchName=master)
+[![Total alerts](https://img.shields.io/lgtm/alerts/g/mitogen-hq/mitogen.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mitogen-hq/mitogen/alerts/)
+[![Build Status](https://dev.azure.com/mitogen-hq/mitogen/_apis/build/status/mitogen-hq.mitogen?branchName=master)](https://dev.azure.com/mitogen-hq/mitogen/_build/latest?definitionId=1&branchName=master)

@@ -183,7 +183,7 @@ def _connect_docker(spec):
         'kwargs': {
             'username': spec.remote_user(),
             'container': spec.remote_addr(),
-            'python_path': spec.python_path(),
+            'python_path': spec.python_path(rediscover_python=True),
             'connect_timeout': spec.ansible_ssh_timeout() or spec.timeout(),
             'remote_name': get_remote_name(spec),
         }

@@ -503,6 +503,9 @@ class Connection(ansible.plugins.connection.ConnectionBase):
     #: matching vanilla Ansible behaviour.
     loader_basedir = None

+    # set by `_get_task_vars()` for interpreter discovery
+    _action = None
+
     def __del__(self):
         """
         Ansible cannot be trusted to always call close() e.g. the synchronize

@@ -551,6 +554,23 @@ class Connection(ansible.plugins.connection.ConnectionBase):
         connection passed into any running action.
         """
         if self._task_vars is not None:
+            # check for if self._action has already been set or not
+            # there are some cases where the ansible executor passes in task_vars
+            # so we don't walk the stack to find them
+            # TODO: is there a better way to get the ActionModuleMixin object?
+            # ansible python discovery needs it to run discover_interpreter()
+            if not isinstance(self._action, ansible_mitogen.mixins.ActionModuleMixin):
+                f = sys._getframe()
+                while f:
+                    if f.f_code.co_name == 'run':
+                        f_self = f.f_locals.get('self')
+                        if isinstance(f_self, ansible_mitogen.mixins.ActionModuleMixin):
+                            self._action = f_self
+                            break
+                    elif f.f_code.co_name == '_execute_meta':
+                        break
+                    f = f.f_back
+
             return self._task_vars

         f = sys._getframe()

@@ -559,6 +579,9 @@ class Connection(ansible.plugins.connection.ConnectionBase):
             f_locals = f.f_locals
             f_self = f_locals.get('self')
             if isinstance(f_self, ansible_mitogen.mixins.ActionModuleMixin):
+                # backref for python interpreter discovery, should be safe because _get_task_vars
+                # is always called before running interpreter discovery
+                self._action = f_self
                 task_vars = f_locals.get('task_vars')
                 if task_vars:
                     LOG.debug('recovered task_vars from Action')
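For reference: the stack walk in Connection._get_task_vars() above relies on sys._getframe() and each frame's co_name to locate the running action. A minimal, self-contained sketch of the same lookup pattern, using illustrative names that are not Ansible's API:

    import sys

    def find_self_of_caller_named(name):
        # Walk outward from the current frame until a function called `name`
        # is found, then return its local `self` (None if never found).
        f = sys._getframe()
        while f:
            if f.f_code.co_name == name:
                return f.f_locals.get('self')
            f = f.f_back
        return None

    class Action(object):
        def run(self):
            return find_self_of_caller_named('run')

    assert isinstance(Action().run(), Action)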
@@ -600,16 +623,33 @@ class Connection(ansible.plugins.connection.ConnectionBase):
         does not make sense to extract connection-related configuration for the
         delegated-to machine from them.
         """
+        def _fetch_task_var(task_vars, key):
+            """
+            Special helper func in case vars can be templated
+            """
+            SPECIAL_TASK_VARS = [
+                'ansible_python_interpreter'
+            ]
+            if key in task_vars:
+                val = task_vars[key]
+                if '{' in str(val) and key in SPECIAL_TASK_VARS:
+                    # template every time rather than storing in a cache
+                    # in case a different template value is used in a different task
+                    val = self.templar.template(
+                        val,
+                        preserve_trailing_newlines=True,
+                        escape_backslashes=False
+                    )
+                return val
+
         task_vars = self._get_task_vars()
         if self.delegate_to_hostname is None:
-            if key in task_vars:
-                return task_vars[key]
+            return _fetch_task_var(task_vars, key)
         else:
             delegated_vars = task_vars['ansible_delegated_vars']
             if self.delegate_to_hostname in delegated_vars:
                 task_vars = delegated_vars[self.delegate_to_hostname]
-                if key in task_vars:
-                    return task_vars[key]
+                return _fetch_task_var(task_vars, key)

         return default

@@ -654,6 +694,8 @@ class Connection(ansible.plugins.connection.ConnectionBase):
             inventory_name=inventory_name,
             play_context=self._play_context,
             host_vars=dict(via_vars),  # TODO: make it lazy
+            task_vars=self._get_task_vars(),  # needed for interpreter discovery in parse_python_path
+            action=self._action,
             become_method=become_method or None,
             become_user=become_user or None,
         )

@@ -847,6 +889,18 @@ class Connection(ansible.plugins.connection.ConnectionBase):
             self.reset_compat_msg
         )

+        # Strategy's _execute_meta doesn't have an action obj but we'll need one for
+        # running interpreter_discovery
+        # will create a new temporary action obj for this purpose
+        self._action = ansible_mitogen.mixins.ActionModuleMixin(
+            task=0,
+            connection=self,
+            play_context=self._play_context,
+            loader=0,
+            templar=0,
+            shared_loader_obj=0
+        )
+
         # Clear out state in case we were ever connected.
         self.close()

@@ -31,6 +31,7 @@ Stable names for PluginLoader instances across Ansible versions.
 """

 from __future__ import absolute_import
+import distutils.version

 __all__ = [
     'action_loader',
@@ -41,22 +42,60 @@ __all__ = [
     'strategy_loader',
 ]

-try:
-    from ansible.plugins.loader import action_loader
-    from ansible.plugins.loader import connection_loader
-    from ansible.plugins.loader import module_loader
-    from ansible.plugins.loader import module_utils_loader
-    from ansible.plugins.loader import shell_loader
-    from ansible.plugins.loader import strategy_loader
-except ImportError:  # Ansible <2.4
-    from ansible.plugins import action_loader
-    from ansible.plugins import connection_loader
-    from ansible.plugins import module_loader
-    from ansible.plugins import module_utils_loader
-    from ansible.plugins import shell_loader
-    from ansible.plugins import strategy_loader
+import ansible
+
+ANSIBLE_VERSION_MIN = (2, 10)
+ANSIBLE_VERSION_MAX = (2, 10)
+
+NEW_VERSION_MSG = (
+    "Your Ansible version (%s) is too recent. The most recent version\n"
+    "supported by Mitogen for Ansible is %s.x. Please check the Mitogen\n"
+    "release notes to see if a new version is available, otherwise\n"
+    "subscribe to the corresponding GitHub issue to be notified when\n"
+    "support becomes available.\n"
+    "\n"
+    "    https://mitogen.rtfd.io/en/latest/changelog.html\n"
+    "    https://github.com/mitogen-hq/mitogen/issues/\n"
+)
+OLD_VERSION_MSG = (
+    "Your version of Ansible (%s) is too old. The oldest version supported by "
+    "Mitogen for Ansible is %s."
+)
+
+
+def assert_supported_release():
+    """
+    Throw AnsibleError with a descriptive message in case of being loaded into
+    an unsupported Ansible release.
+    """
+    v = ansible.__version__
+    if not isinstance(v, tuple):
+        v = tuple(distutils.version.LooseVersion(v).version)
+
+    if v[:2] < ANSIBLE_VERSION_MIN:
+        raise ansible.errors.AnsibleError(
+            OLD_VERSION_MSG % (v, ANSIBLE_VERSION_MIN)
+        )
+
+    if v[:2] > ANSIBLE_VERSION_MAX:
+        raise ansible.errors.AnsibleError(
+            NEW_VERSION_MSG % (ansible.__version__, ANSIBLE_VERSION_MAX)
+        )
+
+
+# this is the first file our strategy plugins import, so we need to check this here
+# in prior Ansible versions, connection_loader.get_with_context didn't exist, so if a user
+# is trying to load an old Ansible version, we'll fail and error gracefully
+assert_supported_release()
+
+from ansible.plugins.loader import action_loader
+from ansible.plugins.loader import connection_loader
+from ansible.plugins.loader import module_loader
+from ansible.plugins.loader import module_utils_loader
+from ansible.plugins.loader import shell_loader
+from ansible.plugins.loader import strategy_loader

 # These are original, unwrapped implementations
 action_loader__get = action_loader.get
-connection_loader__get = connection_loader.get
+connection_loader__get = connection_loader.get_with_context
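For reference: the gate introduced above compares only the first two components of ansible.__version__. A minimal sketch of that comparison, assuming distutils.version is importable; is_supported() is an illustrative stand-in for assert_supported_release():

    import distutils.version

    ANSIBLE_VERSION_MIN = (2, 10)
    ANSIBLE_VERSION_MAX = (2, 10)

    def is_supported(version_str):
        # LooseVersion('2.10.5').version == [2, 10, 5]; only the first two
        # components decide support, so any 2.10.x release passes.
        v = tuple(distutils.version.LooseVersion(version_str).version)
        return ANSIBLE_VERSION_MIN <= v[:2] <= ANSIBLE_VERSION_MAX

    assert is_supported('2.10.5')
    assert not is_supported('2.9.27')   # too old -> OLD_VERSION_MSG
    assert not is_supported('2.11.0')   # too new -> NEW_VERSION_MSG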

@@ -60,6 +60,17 @@ try:
 except ImportError:
     from ansible.vars.unsafe_proxy import wrap_var

+try:
+    # ansible 2.8 moved remove_internal_keys to the clean module
+    from ansible.vars.clean import remove_internal_keys
+except ImportError:
+    try:
+        from ansible.vars.manager import remove_internal_keys
+    except ImportError:
+        # ansible 2.3.3 has remove_internal_keys as a protected func on the action class
+        # we'll fallback to calling self._remove_internal_keys in this case
+        remove_internal_keys = lambda a: "Not found"

 LOG = logging.getLogger(__name__)

@@ -108,6 +119,16 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
         if not isinstance(connection, ansible_mitogen.connection.Connection):
             _, self.__class__ = type(self).__bases__

+        # required for python interpreter discovery
+        connection.templar = self._templar
+        self._finding_python_interpreter = False
+        self._rediscovered_python = False
+        # redeclaring interpreter discovery vars here in case running ansible < 2.8.0
+        self._discovered_interpreter_key = None
+        self._discovered_interpreter = False
+        self._discovery_deprecation_warnings = []
+        self._discovery_warnings = []
+
     def run(self, tmp=None, task_vars=None):
         """
         Override run() to notify Connection of task-specific data, so it has a

@@ -350,6 +371,13 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
         self._compute_environment_string(env)
         self._set_temp_file_args(module_args, wrap_async)

+        # there's a case where if a task shuts down the node and then immediately calls
+        # wait_for_connection, the `ping` test from Ansible won't pass because we lost connection
+        # clearing out context forces a reconnect
+        # see https://github.com/dw/mitogen/issues/655 and Ansible's `wait_for_connection` module for more info
+        if module_name == 'ansible.legacy.ping' and type(self).__name__ == 'wait_for_connection':
+            self._connection.context = None
+
         self._connection._connect()
         result = ansible_mitogen.planner.invoke(
             ansible_mitogen.planner.Invocation(

@@ -370,6 +398,34 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
         # on _execute_module().
         self._remove_tmp_path(tmp)

+        # prevents things like discovered_interpreter_* or ansible_discovered_interpreter_* from being set
+        # handle ansible 2.3.3 that has remove_internal_keys in a different place
+        check = remove_internal_keys(result)
+        if check == 'Not found':
+            self._remove_internal_keys(result)
+
+        # taken from _execute_module of ansible 2.8.6
+        # propagate interpreter discovery results back to the controller
+        if self._discovered_interpreter_key:
+            if result.get('ansible_facts') is None:
+                result['ansible_facts'] = {}
+            # only cache discovered_interpreter if we're not running a rediscovery
+            # rediscovery happens in places like docker connections that could have different
+            # python interpreters than the main host
+            if not self._rediscovered_python:
+                result['ansible_facts'][self._discovered_interpreter_key] = self._discovered_interpreter
+
+        if self._discovery_warnings:
+            if result.get('warnings') is None:
+                result['warnings'] = []
+            result['warnings'].extend(self._discovery_warnings)
+
+        if self._discovery_deprecation_warnings:
+            if result.get('deprecations') is None:
+                result['deprecations'] = []
+            result['deprecations'].extend(self._discovery_deprecation_warnings)
+
         return wrap_var(result)

     def _postprocess_response(self, result):

@@ -407,17 +463,54 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
         """
         LOG.debug('_low_level_execute_command(%r, in_data=%r, exe=%r, dir=%r)',
                   cmd, type(in_data), executable, chdir)
         if executable is None:  # executable defaults to False
             executable = self._play_context.executable
         if executable:
             cmd = executable + ' -c ' + shlex_quote(cmd)

-        rc, stdout, stderr = self._connection.exec_command(
-            cmd=cmd,
-            in_data=in_data,
-            sudoable=sudoable,
-            mitogen_chdir=chdir,
-        )
+        # TODO: HACK: if finding python interpreter then we need to keep
+        # calling exec_command until we run into the right python we'll use
+        # chicken-and-egg issue, mitogen needs a python to run low_level_execute_command
+        # which is required by Ansible's discover_interpreter function
+        if self._finding_python_interpreter:
+            possible_pythons = [
+                '/usr/bin/python',
+                'python3',
+                'python3.7',
+                'python3.6',
+                'python3.5',
+                'python2.7',
+                'python2.6',
+                '/usr/libexec/platform-python',
+                '/usr/bin/python3',
+                'python'
+            ]
+        else:
+            # not used, just adding a filler value
+            possible_pythons = ['python']
+
+        def _run_cmd():
+            return self._connection.exec_command(
+                cmd=cmd,
+                in_data=in_data,
+                sudoable=sudoable,
+                mitogen_chdir=chdir,
+            )
+
+        for possible_python in possible_pythons:
+            try:
+                self._possible_python_interpreter = possible_python
+                rc, stdout, stderr = _run_cmd()
+            # TODO: what exception is thrown?
+            except:
+                # we've reached the last python attempted and failed
+                # TODO: could use enumerate(), need to check which version of python first had it though
+                if possible_python == 'python':
+                    raise
+                else:
+                    continue
+
         stdout_text = to_text(stdout, errors=encoding_errors)

         return {

@@ -41,8 +41,10 @@ import json
 import logging
 import os
 import random
+import re

 from ansible.executor import module_common
+from ansible.collections.list import list_collection_dirs
 import ansible.errors
 import ansible.module_utils
 import ansible.release

@@ -57,7 +59,8 @@ import ansible_mitogen.target
 LOG = logging.getLogger(__name__)

 NO_METHOD_MSG = 'Mitogen: no invocation method found for: '
 NO_INTERPRETER_MSG = 'module (%s) is missing interpreter line'
-NO_MODULE_MSG = 'The module %s was not found in configured module paths.'
+# NOTE: Ansible 2.10 no longer has a `.` at the end of NO_MODULE_MSG error
+NO_MODULE_MSG = 'The module %s was not found in configured module paths'

 _planner_by_path = {}

@@ -96,6 +99,13 @@ class Invocation(object):
         #: Initially ``None``, but set by :func:`invoke`. The raw source or
         #: binary contents of the module.
         self._module_source = None
+        #: Initially ``{}``, but set by :func:`invoke`. Optional source to send
+        #: to :func:`propagate_paths_and_modules` to fix Python3.5 relative import errors
+        self._overridden_sources = {}
+        #: Initially ``set()``, but set by :func:`invoke`. Optional source paths to send
+        #: to :func:`propagate_paths_and_modules` to handle loading source dependencies from
+        #: places outside of the main source path, such as collections
+        self._extra_sys_paths = set()

     def get_module_source(self):
         if self._module_source is None:

@@ -288,11 +298,11 @@ class NewStylePlanner(ScriptPlanner):
     preprocessing the module.
     """
     runner_name = 'NewStyleRunner'
-    marker = b'from ansible.module_utils.'
+    MARKER = re.compile(b'from ansible(?:_collections|\.module_utils)\.')

     @classmethod
     def detect(cls, path, source):
-        return cls.marker in source
+        return cls.MARKER.search(source) != None

     def _get_interpreter(self):
         return None, None
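For reference: the MARKER regex above widens new-style module detection from core `ansible.module_utils` imports to collection imports as well. A standalone illustration of what it matches, using a str pattern for brevity (the planner compiles the same pattern against bytes):

    import re

    MARKER = re.compile(r'from ansible(?:_collections|\.module_utils)\.')

    assert MARKER.search('from ansible.module_utils.basic import AnsibleModule')
    assert MARKER.search('from ansible_collections.foo.bar.plugins.module_utils.x import y')
    assert not MARKER.search('import json')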
@@ -312,6 +322,7 @@ class NewStylePlanner(ScriptPlanner):
     ALWAYS_FORK_MODULES = frozenset([
         'dnf',  # issue #280; py-dnf/hawkey need therapy
         'firewalld',  # issue #570: ansible module_utils caches dbus conn
+        'ansible.legacy.dnf',  # issue #776
     ])

     def should_fork(self):

@@ -427,26 +438,16 @@ def py_modname_from_path(name, path):
     Fetch the logical name of a new-style module as it might appear in
     :data:`sys.modules` of the target's Python interpreter.

-    * For Ansible <2.7, this is an unpackaged module named like
-      "ansible_module_%s".
-
-    * For Ansible <2.9, this is an unpackaged module named like
-      "ansible.modules.%s"
-
     * Since Ansible 2.9, modules appearing within a package have the original
       package hierarchy approximated on the target, enabling relative imports
       to function correctly. For example, "ansible.modules.system.setup".
     """
-    # 2.9+
     if _get_ansible_module_fqn:
         try:
             return _get_ansible_module_fqn(path)
         except ValueError:
             pass

-    if ansible.__version__ < '2.7':
-        return 'ansible_module_' + name
-
     return 'ansible.modules.' + name

@@ -475,7 +476,10 @@ def _propagate_deps(invocation, planner, context):
         context=context,
         paths=planner.get_push_files(),
-        modules=planner.get_module_deps(),
+        # modules=planner.get_module_deps(), TODO
+        overridden_sources=invocation._overridden_sources,
+        # needs to be a list because can't unpickle() a set()
+        extra_sys_paths=list(invocation._extra_sys_paths),
     )

@@ -533,9 +537,40 @@ def _get_planner(name, path, source):
     raise ansible.errors.AnsibleError(NO_METHOD_MSG + repr(invocation))


+def _fix_py35(invocation, module_source):
+    """
+    super edge case with a relative import error in Python 3.5.1-3.5.3
+    in Ansible's setup module when using Mitogen
+    https://github.com/dw/mitogen/issues/672#issuecomment-636408833
+    We replace a relative import in the setup module with the actual full file path
+    This works in vanilla Ansible but not in Mitogen otherwise
+    """
+    if invocation.module_name in {'ansible.builtin.setup', 'ansible.legacy.setup', 'setup'} and \
+            invocation.module_path not in invocation._overridden_sources:
+        # in-memory replacement of setup module's relative import
+        # would check for just python3.5 and run this then but we don't know the
+        # target python at this time yet
+        # NOTE: another ansible 2.10-specific fix: `from ..module_utils` used to be `from ...module_utils`
+        module_source = module_source.replace(
+            b"from ..module_utils.basic import AnsibleModule",
+            b"from ansible.module_utils.basic import AnsibleModule"
+        )
+        invocation._overridden_sources[invocation.module_path] = module_source
+
+
+def _load_collections(invocation):
+    """
+    Special loader that ensures that `ansible_collections` exist as a module path for import
+    Goes through all collection path possibilities and stores paths to installed collections
+    Stores them on the current invocation to later be passed to the master service
+    """
+    for collection_path in list_collection_dirs():
+        invocation._extra_sys_paths.add(collection_path.decode('utf-8'))
+
+
 def invoke(invocation):
     """
-    Find a Planner subclass corresnding to `invocation` and use it to invoke
+    Find a Planner subclass corresponding to `invocation` and use it to invoke
     the module.

     :param Invocation invocation:

@@ -555,10 +590,15 @@ def invoke(invocation):
     invocation.module_path = mitogen.core.to_text(path)

     if invocation.module_path not in _planner_by_path:
+        if 'ansible_collections' in invocation.module_path:
+            _load_collections(invocation)
+
+        module_source = invocation.get_module_source()
+        _fix_py35(invocation, module_source)
         _planner_by_path[invocation.module_path] = _get_planner(
             invocation.module_name,
             invocation.module_path,
-            invocation.get_module_source()
+            module_source
         )

     planner = _planner_by_path[invocation.module_path](invocation)

@@ -157,6 +157,10 @@ class ActionModule(ActionBase):
                 result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))

         finally:
-            self._remove_tmp_path(self._connection._shell.tmpdir)
+            try:
+                self._remove_tmp_path(self._connection._shell.tmpdir)
+            except AttributeError:
+                # .tmpdir was added to ShellModule in v2.6.0, so old versions don't have it
+                pass

         return result

@@ -52,4 +52,6 @@ class ActionModule(ActionBase):
             'changed': True,
             'result': stack,
             '_ansible_verbose_always': True,
+            # for ansible < 2.8, we'll default to /usr/bin/python like before
+            'discovered_interpreter': self._connection._action._discovered_interpreter
         }

@@ -170,6 +170,12 @@ class ContextService(mitogen.service.Service):
         """
         LOG.debug('%r.reset(%r)', self, stack)

+        # this could happen if we have a `shutdown -r` shell command
+        # and then a `wait_for_connection` right afterwards
+        # in this case, we have no stack to disconnect from
+        if not stack:
+            return False
+
         l = mitogen.core.Latch()
         context = None
         with self._lock:

@@ -27,7 +27,6 @@
 # POSSIBILITY OF SUCH DAMAGE.

 from __future__ import absolute_import
-import distutils.version
 import os
 import signal
 import threading

@@ -43,52 +42,8 @@ import ansible_mitogen.loaders
 import ansible_mitogen.mixins
 import ansible_mitogen.process

-import ansible
 import ansible.executor.process.worker
+from ansible.utils.sentinel import Sentinel

-try:
-    # 2.8+ has a standardized "unset" object.
-    from ansible.utils.sentinel import Sentinel
-except ImportError:
-    Sentinel = None
-
-
-ANSIBLE_VERSION_MIN = (2, 3)
-ANSIBLE_VERSION_MAX = (2, 9)
-
-NEW_VERSION_MSG = (
-    "Your Ansible version (%s) is too recent. The most recent version\n"
-    "supported by Mitogen for Ansible is %s.x. Please check the Mitogen\n"
-    "release notes to see if a new version is available, otherwise\n"
-    "subscribe to the corresponding GitHub issue to be notified when\n"
-    "support becomes available.\n"
-    "\n"
-    "    https://mitogen.rtfd.io/en/latest/changelog.html\n"
-    "    https://github.com/dw/mitogen/issues/\n"
-)
-OLD_VERSION_MSG = (
-    "Your version of Ansible (%s) is too old. The oldest version supported by "
-    "Mitogen for Ansible is %s."
-)
-
-
-def _assert_supported_release():
-    """
-    Throw AnsibleError with a descriptive message in case of being loaded into
-    an unsupported Ansible release.
-    """
-    v = ansible.__version__
-    if not isinstance(v, tuple):
-        v = tuple(distutils.version.LooseVersion(v).version)
-
-    if v[:2] < ANSIBLE_VERSION_MIN:
-        raise ansible.errors.AnsibleError(
-            OLD_VERSION_MSG % (v, ANSIBLE_VERSION_MIN)
-        )
-
-    if v[:2] > ANSIBLE_VERSION_MAX:
-        raise ansible.errors.AnsibleError(
-            NEW_VERSION_MSG % (ansible.__version__, ANSIBLE_VERSION_MAX)
-        )


 def _patch_awx_callback():

@@ -132,8 +87,7 @@ def wrap_action_loader__get(name, *args, **kwargs):
     get_kwargs = {'class_only': True}
     if name in ('fetch',):
         name = 'mitogen_' + name
-    if ansible.__version__ >= '2.8':
-        get_kwargs['collection_list'] = kwargs.pop('collection_list', None)
+    get_kwargs['collection_list'] = kwargs.pop('collection_list', None)

     klass = ansible_mitogen.loaders.action_loader__get(name, **get_kwargs)
     if klass:

@@ -217,7 +171,7 @@ class AnsibleWrappers(object):
         with references to the real functions.
         """
         ansible_mitogen.loaders.action_loader.get = wrap_action_loader__get
-        ansible_mitogen.loaders.connection_loader.get = wrap_connection_loader__get
+        ansible_mitogen.loaders.connection_loader.get_with_context = wrap_connection_loader__get

         global worker__run
         worker__run = ansible.executor.process.worker.WorkerProcess.run

@@ -230,7 +184,7 @@ class AnsibleWrappers(object):
         ansible_mitogen.loaders.action_loader.get = (
             ansible_mitogen.loaders.action_loader__get
         )
-        ansible_mitogen.loaders.connection_loader.get = (
+        ansible_mitogen.loaders.connection_loader.get_with_context = (
             ansible_mitogen.loaders.connection_loader__get
         )
         ansible.executor.process.worker.WorkerProcess.run = worker__run

@@ -352,7 +306,6 @@ class StrategyMixin(object):
         Wrap :meth:`run` to ensure requisite infrastructure and modifications
         are configured for the duration of the call.
         """
-        _assert_supported_release()
         wrappers = AnsibleWrappers()
         self._worker_model = self._get_worker_model()
         ansible_mitogen.process.set_worker_model(self._worker_model)
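For reference: the install()/restore() pair patched above follows a save-original, swap-in-wrapper, put-back pattern on the plugin loaders. A minimal generic sketch of that pattern, with illustrative names rather than Ansible's API:

    class Patch(object):
        # Temporarily replace obj.name with wrapper, keeping the original.
        def __init__(self, obj, name, wrapper):
            self.obj, self.name, self.wrapper = obj, name, wrapper
            self.original = getattr(obj, name)

        def install(self):
            setattr(self.obj, self.name, self.wrapper)

        def restore(self):
            setattr(self.obj, self.name, self.original)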

@ -67,17 +67,89 @@ import ansible.constants as C
from ansible.module_utils.six import with_metaclass from ansible.module_utils.six import with_metaclass
# this was added in Ansible >= 2.8.0; fallback to the default interpreter if necessary
try:
from ansible.executor.interpreter_discovery import discover_interpreter
except ImportError:
discover_interpreter = lambda action,interpreter_name,discovery_mode,task_vars: '/usr/bin/python'
try:
from ansible.utils.unsafe_proxy import AnsibleUnsafeText
except ImportError:
from ansible.vars.unsafe_proxy import AnsibleUnsafeText
import mitogen.core import mitogen.core
def parse_python_path(s): def run_interpreter_discovery_if_necessary(s, task_vars, action, rediscover_python):
"""
Triggers ansible python interpreter discovery if requested.
Caches this value the same way Ansible does it.
For connections like `docker`, we want to rediscover the python interpreter because
it could be different than what's ran on the host
"""
# keep trying different interpreters until we don't error
if action._finding_python_interpreter:
return action._possible_python_interpreter
if s in ['auto', 'auto_legacy', 'auto_silent', 'auto_legacy_silent']:
# python is the only supported interpreter_name as of Ansible 2.8.8
interpreter_name = 'python'
discovered_interpreter_config = u'discovered_interpreter_%s' % interpreter_name
if task_vars.get('ansible_facts') is None:
task_vars['ansible_facts'] = {}
if rediscover_python and task_vars.get('ansible_facts', {}).get(discovered_interpreter_config):
# if we're rediscovering python then chances are we're running something like a docker connection.
# This handles scenarios such as a playbook that does some work, dynamically creates a docker container,
# runs the rest of the playbook inside that container, and is then rerun from the start.
action._rediscovered_python = True
# blow away the discovered_interpreter_config cache and rediscover
del task_vars['ansible_facts'][discovered_interpreter_config]
if discovered_interpreter_config not in task_vars['ansible_facts']:
action._finding_python_interpreter = True
# fake pipelining so discover_interpreter can be happy
action._connection.has_pipelining = True
s = AnsibleUnsafeText(discover_interpreter(
action=action,
interpreter_name=interpreter_name,
discovery_mode=s,
task_vars=task_vars))
# cache discovered interpreter
task_vars['ansible_facts'][discovered_interpreter_config] = s
action._connection.has_pipelining = False
else:
s = task_vars['ansible_facts'][discovered_interpreter_config]
# propagate discovered interpreter as fact
action._discovered_interpreter_key = discovered_interpreter_config
action._discovered_interpreter = s
action._finding_python_interpreter = False
return s
def parse_python_path(s, task_vars, action, rediscover_python):
""" """
Given the string set for ansible_python_interpreter, parse it using shell Given the string set for ansible_python_interpreter, parse it using shell
syntax and return an appropriate argument vector. syntax and return an appropriate argument vector. If the value detected is
one of the interpreter discovery modes, run discovery first. Caches the python
interpreter discovery value in `facts_from_task_vars`, the same way Ansible handles this.
""" """
if s: if not s:
return ansible.utils.shlex.shlex_split(s) # if python_path doesn't exist, default to `auto` and attempt to discover it
s = 'auto'
s = run_interpreter_discovery_if_necessary(s, task_vars, action, rediscover_python)
# if unable to determine python_path, fallback to '/usr/bin/python'
if not s:
s = '/usr/bin/python'
return ansible.utils.shlex.shlex_split(s)
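As a rough illustration of the fallback chain implemented above, here is a self-contained sketch that stubs out the discovery step (the real code consults task_vars and the action object via Ansible's discover_interpreter):

    import shlex

    def parse_python_path_sketch(value, discover=lambda: None):
        if not value:
            value = 'auto'                        # unset interpreter defaults to 'auto'
        if value in ('auto', 'auto_legacy', 'auto_silent', 'auto_legacy_silent'):
            value = discover()                    # stand-in for interpreter discovery
        if not value:
            value = '/usr/bin/python'             # final fallback when discovery yields nothing
        return shlex.split(value)

    print(parse_python_path_sketch(None))                                          # ['/usr/bin/python']
    print(parse_python_path_sketch('auto', discover=lambda: '/usr/bin/python3'))   # ['/usr/bin/python3']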
def optional_secret(value): def optional_secret(value):
@ -330,6 +402,9 @@ class PlayContextSpec(Spec):
self._play_context = play_context self._play_context = play_context
self._transport = transport self._transport = transport
self._inventory_name = inventory_name self._inventory_name = inventory_name
self._task_vars = self._connection._get_task_vars()
# used to run interpreter discovery
self._action = connection._action
def transport(self): def transport(self):
return self._transport return self._transport
@ -361,12 +436,16 @@ class PlayContextSpec(Spec):
def port(self): def port(self):
return self._play_context.port return self._play_context.port
def python_path(self): def python_path(self, rediscover_python=False):
s = self._connection.get_task_var('ansible_python_interpreter') s = self._connection.get_task_var('ansible_python_interpreter')
# #511, #536: executor/module_common.py::_get_shebang() hard-wires # #511, #536: executor/module_common.py::_get_shebang() hard-wires
# "/usr/bin/python" as the default interpreter path if no other # "/usr/bin/python" as the default interpreter path if no other
# interpreter is specified. # interpreter is specified.
return parse_python_path(s or '/usr/bin/python') return parse_python_path(
s,
task_vars=self._task_vars,
action=self._action,
rediscover_python=rediscover_python)
def private_key_file(self): def private_key_file(self):
return self._play_context.private_key_file return self._play_context.private_key_file
@ -490,14 +569,16 @@ class MitogenViaSpec(Spec):
having a configuration problem with connection delegation, the answer to having a configuration problem with connection delegation, the answer to
your problem lies in the method implementations below! your problem lies in the method implementations below!
""" """
def __init__(self, inventory_name, host_vars, become_method, become_user, def __init__(self, inventory_name, host_vars, task_vars, become_method, become_user,
play_context): play_context, action):
""" """
:param str inventory_name: :param str inventory_name:
The inventory name of the intermediary machine, i.e. not the target The inventory name of the intermediary machine, i.e. not the target
machine. machine.
:param dict host_vars: :param dict host_vars:
The HostVars magic dictionary provided by Ansible in task_vars. The HostVars magic dictionary provided by Ansible in task_vars.
:param dict task_vars:
Task vars provided by Ansible.
:param str become_method: :param str become_method:
If the mitogen_via= spec included a become method, the method it If the mitogen_via= spec included a become method, the method it
specifies. specifies.
@ -509,14 +590,18 @@ class MitogenViaSpec(Spec):
the real target machine. Values from this object are **strictly the real target machine. Values from this object are **strictly
restricted** to values that are Ansible-global, e.g. the passwords restricted** to values that are Ansible-global, e.g. the passwords
specified interactively. specified interactively.
:param ActionModuleMixin action:
Backref to the ActionModuleMixin, required for Ansible interpreter discovery.
""" """
self._inventory_name = inventory_name self._inventory_name = inventory_name
self._host_vars = host_vars self._host_vars = host_vars
self._task_vars = task_vars
self._become_method = become_method self._become_method = become_method
self._become_user = become_user self._become_user = become_user
# Dangerous! You may find a variable you want in this object, but it's # Dangerous! You may find a variable you want in this object, but it's
# almost certainly for the wrong machine! # almost certainly for the wrong machine!
self._dangerous_play_context = play_context self._dangerous_play_context = play_context
self._action = action
def transport(self): def transport(self):
return ( return (
@ -574,12 +659,16 @@ class MitogenViaSpec(Spec):
C.DEFAULT_REMOTE_PORT C.DEFAULT_REMOTE_PORT
) )
def python_path(self): def python_path(self, rediscover_python=False):
s = self._host_vars.get('ansible_python_interpreter') s = self._host_vars.get('ansible_python_interpreter')
# #511, #536: executor/module_common.py::_get_shebang() hard-wires # #511, #536: executor/module_common.py::_get_shebang() hard-wires
# "/usr/bin/python" as the default interpreter path if no other # "/usr/bin/python" as the default interpreter path if no other
# interpreter is specified. # interpreter is specified.
return parse_python_path(s or '/usr/bin/python') return parse_python_path(
s,
task_vars=self._task_vars,
action=self._action,
rediscover_python=rediscover_python)
def private_key_file(self): def private_key_file(self):
# TODO: must come from PlayContext too. # TODO: must come from PlayContext too.

@ -9,7 +9,7 @@ Mitogen for Ansible
**Mitogen for Ansible** is a completely redesigned UNIX connection layer and **Mitogen for Ansible** is a completely redesigned UNIX connection layer and
module runtime for `Ansible`_. Requiring minimal configuration changes, it module runtime for `Ansible`_. Requiring minimal configuration changes, it
updates Ansible's slow and wasteful shell-centic implementation with updates Ansible's slow and wasteful shell-centric implementation with
pure-Python equivalents, invoked via highly efficient remote procedure calls to pure-Python equivalents, invoked via highly efficient remote procedure calls to
persistent interpreters tunnelled over SSH. No changes are required to target persistent interpreters tunnelled over SSH. No changes are required to target
hosts. hosts.
@ -145,7 +145,7 @@ Testimonials
Noteworthy Differences Noteworthy Differences
---------------------- ----------------------
* Ansible 2.3-2.8 are supported along with Python 2.6, 2.7, 3.6 and 3.7. Verify * Ansible 2.3-2.9 are supported along with Python 2.6, 2.7, 3.6 and 3.7. Verify
your installation is running one of these versions by checking ``ansible your installation is running one of these versions by checking ``ansible
--version`` output. --version`` output.
@ -169,9 +169,7 @@ Noteworthy Differences
- initech_app - initech_app
- y2k_fix - y2k_fix
* Ansible 2.8 `interpreter discovery * Ansible `become plugins
<https://docs.ansible.com/ansible/latest/reference_appendices/interpreter_discovery.html>`_
and `become plugins
<https://docs.ansible.com/ansible/latest/plugins/become.html>`_ are not yet <https://docs.ansible.com/ansible/latest/plugins/become.html>`_ are not yet
supported. supported.
@ -245,7 +243,9 @@ Noteworthy Differences
.. ..
* The ``ansible_python_interpreter`` variable is parsed using a restrictive * The ``ansible_python_interpreter`` variable is parsed using a restrictive
:mod:`shell-like <shlex>` syntax, permitting values such as ``/usr/bin/env :mod:`shell-like <shlex>` syntax, permitting values such as ``/usr/bin/env
FOO=bar python``, which occur in practice. Ansible `documents this FOO=bar python`` or ``source /opt/rh/rh-python36/enable && python``, which
occur in practice. Jinja2 templating is also supported for complex task-level
interpreter settings. Ansible `documents this
<https://docs.ansible.com/ansible/latest/user_guide/intro_inventory.html#ansible-python-interpreter>`_ <https://docs.ansible.com/ansible/latest/user_guide/intro_inventory.html#ansible-python-interpreter>`_
as an absolute path, however the implementation passes it unquoted through as an absolute path, however the implementation passes it unquoted through
the shell, permitting arbitrary code to be injected. the shell, permitting arbitrary code to be injected.
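As a quick illustration of that restrictive shell-like parsing (using the standard library splitter here; the real code goes through ansible.utils.shlex.shlex_split), both documented forms split into plain argument vectors rather than being handed to a shell:

    import shlex

    print(shlex.split('/usr/bin/env FOO=bar python'))
    # ['/usr/bin/env', 'FOO=bar', 'python']

    print(shlex.split('source /opt/rh/rh-python36/enable && python'))
    # ['source', '/opt/rh/rh-python36/enable', '&&', 'python']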
@ -1009,7 +1009,7 @@ Like the :ans:conn:`ssh` except connection delegation is supported.
* ``mitogen_ssh_keepalive_count``: integer count of server keepalive messages to * ``mitogen_ssh_keepalive_count``: integer count of server keepalive messages to
which no reply is received before considering the SSH server dead. Defaults which no reply is received before considering the SSH server dead. Defaults
to 10. to 10.
* ``mitogen_ssh_keepalive_count``: integer seconds delay between keepalive * ``mitogen_ssh_keepalive_interval``: integer seconds delay between keepalive
messages. Defaults to 30. messages. Defaults to 30.

@ -14,14 +14,38 @@ Release Notes
} }
</style> </style>
v0.2.10 (unreleased)
--------------------
To avail of fixes in an unreleased version, please download a ZIP file To avail of fixes in an unreleased version, please download a ZIP file
`directly from GitHub <https://github.com/dw/mitogen/>`_. `directly from GitHub <https://github.com/dw/mitogen/>`_.
*(no changes)* v0.3.0 (2021-10-28)
-------------------
This release separates itself from the v0.2.X releases. Ansible's API changed too much to support backwards compatibility, so from now on v0.2.X releases will be for Ansible < 2.10 and v0.3.X releases will be for Ansible 2.10+.
`See here for details <https://github.com/dw/mitogen/pull/715#issuecomment-750697248>`_.
* :gh:issue:`827` NewStylePlanner: detect `ansible_collections` imports
* :gh:issue:`770` better check for supported Ansible version
* :gh:issue:`731` ansible 2.10 support
* :gh:issue:`652` support for ansible collections import hook
* :gh:issue:`847` Removed historic Continuous Integration reverse shell
v0.2.10 (2021-10-28)
--------------------
* :gh:issue:`597` mitogen does not support Ansible 2.8 Python interpreter detection
* :gh:issue:`655` wait_for_connection gives errors
* :gh:issue:`672` cannot perform relative import error
* :gh:issue:`673` mitogen fails on RHEL8 server with bash /usr/bin/python: No such file or directory
* :gh:issue:`676` mitogen fails to run playbook without “/usr/bin/python” on target host
* :gh:issue:`716` fetch fails with "AttributeError: 'ShellModule' object has no attribute 'tmpdir'"
* :gh:issue:`756` ssh connections with `check_host_keys='accept'` would
timeout, when using recent OpenSSH client versions.
* :gh:issue:`758` fix initialisation of callback plugins in test suite, to address a `KeyError` in
:meth:`ansible.plugins.callback.CallbackBase.v2_runner_on_start`
* :gh:issue:`775` Test with Python 3.9
* :gh:issue:`775` Add msvcrt to the default module deny list
* :gh:issue:`847` Removed historic Continuous Integration reverse shell
v0.2.9 (2019-11-02) v0.2.9 (2019-11-02)

@ -7,7 +7,7 @@ import mitogen
VERSION = '%s.%s.%s' % mitogen.__version__ VERSION = '%s.%s.%s' % mitogen.__version__
author = u'Network Genomics' author = u'Network Genomics'
copyright = u'2019, Network Genomics' copyright = u'2021, the Mitogen authors'
exclude_patterns = ['_build', '.venv'] exclude_patterns = ['_build', '.venv']
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinxcontrib.programoutput', 'domainrefs'] extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinxcontrib.programoutput', 'domainrefs']

@ -35,7 +35,7 @@ be expected. On the slave, it is built dynamically during startup.
#: Library version as a tuple. #: Library version as a tuple.
__version__ = (0, 2, 9) __version__ = (0, 3, 0)
#: This is :data:`False` in slave contexts. Previously it was used to prevent #: This is :data:`False` in slave contexts. Previously it was used to prevent

@ -1269,6 +1269,13 @@ class Importer(object):
# a negative round-trip. # a negative round-trip.
'builtins', 'builtins',
'__builtin__', '__builtin__',
# On some Python releases (e.g. 3.8, 3.9) the subprocess module tries
# to import this Windows-only builtin module.
'msvcrt',
# Python 2.x module that was renamed to _thread in 3.x.
# This entry avoids a roundtrip on 2.x -> 3.x.
'thread', 'thread',
# org.python.core imported by copy, pickle, xml.sax; breaks Jython, but # org.python.core imported by copy, pickle, xml.sax; breaks Jython, but
@ -2801,7 +2808,7 @@ class Waker(Protocol):
self.stream.transmit_side.write(b(' ')) self.stream.transmit_side.write(b(' '))
except OSError: except OSError:
e = sys.exc_info()[1] e = sys.exc_info()[1]
if e.args[0] in (errno.EBADF, errno.EWOULDBLOCK): if e.args[0] not in (errno.EBADF, errno.EWOULDBLOCK):
raise raise
broker_shutdown_msg = ( broker_shutdown_msg = (
@ -3860,7 +3867,7 @@ class ExternalContext(object):
else: else:
core_src_fd = self.config.get('core_src_fd', 101) core_src_fd = self.config.get('core_src_fd', 101)
if core_src_fd: if core_src_fd:
fp = os.fdopen(core_src_fd, 'rb', 1) fp = os.fdopen(core_src_fd, 'rb', 0)
try: try:
core_src = fp.read() core_src = fp.read()
# Strip "ExternalContext.main()" call from last line. # Strip "ExternalContext.main()" call from last line.

@ -89,6 +89,14 @@ except NameError:
RLOG = logging.getLogger('mitogen.ctx') RLOG = logging.getLogger('mitogen.ctx')
# there are some cases where modules are loaded in memory only, such as
# ansible collections, and the module "filename" doesn't actually exist
SPECIAL_FILE_PATHS = {
"__synthetic__",
"<ansible_synthetic_collection_package>"
}
def _stdlib_paths(): def _stdlib_paths():
""" """
Return a set of paths from which Python imports the standard library. Return a set of paths from which Python imports the standard library.
@ -138,7 +146,7 @@ def is_stdlib_path(path):
) )
def get_child_modules(path): def get_child_modules(path, fullname):
""" """
Return the suffixes of submodules directly nested beneath the package Return the suffixes of submodules directly nested beneath the package
directory at `path`. directory at `path`.
@ -147,12 +155,19 @@ def get_child_modules(path):
Path to the module's source code on disk, or some PEP-302-recognized Path to the module's source code on disk, or some PEP-302-recognized
equivalent. Usually this is the module's ``__file__`` attribute, but equivalent. Usually this is the module's ``__file__`` attribute, but
is specified explicitly to avoid loading the module. is specified explicitly to avoid loading the module.
:param str fullname:
Name of the package we're trying to get child modules for
:return: :return:
List of submodule name suffixes. List of submodule name suffixes.
""" """
it = pkgutil.iter_modules([os.path.dirname(path)]) mod_path = os.path.dirname(path)
return [to_text(name) for _, name, _ in it] if mod_path != '':
return [to_text(name) for _, name, _ in pkgutil.iter_modules([mod_path])]
else:
# we loaded some weird package in memory, so we'll see if it has a custom loader we can use
loader = pkgutil.find_loader(fullname)
return [to_text(name) for name, _ in loader.iter_modules(None)] if loader else []
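A self-contained sketch of the new lookup order: scan the package directory when one exists, otherwise fall back to the package's own loader (as happens for in-memory packages such as synthetic collection packages). Names here are illustrative, not Mitogen's API:

    import os
    import pkgutil

    def child_module_names(path, fullname):
        mod_dir = os.path.dirname(path)
        if mod_dir:
            # normal case: the package lives on disk, so scan its directory
            return [name for _, name, _ in pkgutil.iter_modules([mod_dir])]
        # in-memory package: ask whatever loader claims the package
        loader = pkgutil.find_loader(fullname)
        return [name for name, _ in loader.iter_modules(None)] if loader else []

    import email
    print(sorted(child_module_names(email.__file__, 'email'))[:3])   # first few email submodules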
def _looks_like_script(path): def _looks_like_script(path):
@ -177,17 +192,31 @@ def _looks_like_script(path):
def _py_filename(path): def _py_filename(path):
"""
Returns a tuple of (path, is_special): the Python path (if the file looks Pythonic) and
whether the path is special. Special file paths/modules might exist only in memory.
"""
if not path: if not path:
return None return None, False
if path[-4:] in ('.pyc', '.pyo'): if path[-4:] in ('.pyc', '.pyo'):
path = path.rstrip('co') path = path.rstrip('co')
if path.endswith('.py'): if path.endswith('.py'):
return path return path, False
if os.path.exists(path) and _looks_like_script(path): if os.path.exists(path) and _looks_like_script(path):
return path return path, False
basepath = os.path.basename(path)
if basepath in SPECIAL_FILE_PATHS:
return path, True
# Returning (None, False) means the filename passed to _py_filename does not appear
# to be Python; callers handle the None path case.
# See https://github.com/dw/mitogen/pull/715#discussion_r532380528 for how the
# decision was made to handle non-Python files in this manner.
return None, False
def _get_core_source(): def _get_core_source():
@ -498,9 +527,13 @@ class PkgutilMethod(FinderMethod):
return return
try: try:
path = _py_filename(loader.get_filename(fullname)) path, is_special = _py_filename(loader.get_filename(fullname))
source = loader.get_source(fullname) source = loader.get_source(fullname)
is_pkg = loader.is_package(fullname) is_pkg = loader.is_package(fullname)
# workaround for special python modules that might only exist in memory
if is_special and is_pkg and not source:
source = '\n'
except (AttributeError, ImportError): except (AttributeError, ImportError):
# - Per PEP-302, get_source() and is_package() are optional, # - Per PEP-302, get_source() and is_package() are optional,
# calling them may throw AttributeError. # calling them may throw AttributeError.
@ -549,7 +582,7 @@ class SysModulesMethod(FinderMethod):
fullname, alleged_name, module) fullname, alleged_name, module)
return return
path = _py_filename(getattr(module, '__file__', '')) path, _ = _py_filename(getattr(module, '__file__', ''))
if not path: if not path:
return return
@ -639,7 +672,7 @@ class ParentEnumerationMethod(FinderMethod):
def _found_module(self, fullname, path, fp, is_pkg=False): def _found_module(self, fullname, path, fp, is_pkg=False):
try: try:
path = _py_filename(path) path, _ = _py_filename(path)
if not path: if not path:
return return
@ -971,7 +1004,7 @@ class ModuleResponder(object):
self.minify_secs += mitogen.core.now() - t0 self.minify_secs += mitogen.core.now() - t0
if is_pkg: if is_pkg:
pkg_present = get_child_modules(path) pkg_present = get_child_modules(path, fullname)
self._log.debug('%s is a package at %s with submodules %r', self._log.debug('%s is a package at %s with submodules %r',
fullname, path, pkg_present) fullname, path, pkg_present)
else: else:
@ -1279,7 +1312,8 @@ class Router(mitogen.parent.Router):
self.broker.defer(stream.on_disconnect, self.broker) self.broker.defer(stream.on_disconnect, self.broker)
def disconnect_all(self): def disconnect_all(self):
for stream in self._stream_by_id.values(): # Python 3: values() is a live view, so copy it to a list before iterating
for stream in list(self._stream_by_id.values()):
self.disconnect_stream(stream) self.disconnect_stream(stream)
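The list() copy matters on Python 3, where dict views are live and mutating the dict while iterating raises RuntimeError. A tiny stand-alone demonstration (disconnect() is a hypothetical stand-in that removes registry entries as a side effect, much as Router.disconnect_stream() ultimately does):

    streams = {1: 'stream-a', 2: 'stream-b'}

    def disconnect(stream):
        # side effect: the stream disappears from the registry
        for key, value in list(streams.items()):
            if value == stream:
                del streams[key]

    for stream in list(streams.values()):   # snapshot, as in the fixed disconnect_all()
        disconnect(stream)

    print(streams)                           # {}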

@ -42,6 +42,7 @@ import heapq
import inspect import inspect
import logging import logging
import os import os
import platform
import re import re
import signal import signal
import socket import socket
@ -1434,7 +1435,10 @@ class Connection(object):
os.close(r) os.close(r)
os.close(W) os.close(W)
os.close(w) os.close(w)
if sys.platform == 'darwin' and sys.executable == '/usr/bin/python': # this no longer applies to Mac OS X 10.15+ (Darwin 19+); the new interpreter path looks like this:
# /System/Library/Frameworks/Python.framework/Versions/2.7/Resources/Python.app/Contents/MacOS/Python
if sys.platform == 'darwin' and sys.executable == '/usr/bin/python' and \
int(platform.release()[:2]) < 19:
sys.executable += sys.version[:3] sys.executable += sys.version[:3]
os.environ['ARGV0']=sys.executable os.environ['ARGV0']=sys.executable
os.execl(sys.executable,sys.executable+'(mitogen:CONTEXT_NAME)') os.execl(sys.executable,sys.executable+'(mitogen:CONTEXT_NAME)')
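A minimal sketch of the guard added above, assuming Darwin release strings such as '19.6.0' for macOS 10.15 (the real code slices the release string; the intent is the same):

    import platform
    import sys

    def needs_macos_python_suffix():
        # Only the pre-Catalina system Python (/usr/bin/python on Darwin < 19)
        # needs the version suffix appended to sys.executable.
        if sys.platform != 'darwin' or sys.executable != '/usr/bin/python':
            return False
        return int(platform.release().split('.')[0]) < 19

    print(needs_macos_python_suffix())       # False on Linux or on macOS 10.15+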

@ -74,7 +74,7 @@ else:
@mitogen.core.takes_router @mitogen.core.takes_router
def get_or_create_pool(size=None, router=None): def get_or_create_pool(size=None, router=None, context=None):
global _pool global _pool
global _pool_pid global _pool_pid
@ -84,6 +84,12 @@ def get_or_create_pool(size=None, router=None):
_pool_lock.acquire() _pool_lock.acquire()
try: try:
if _pool_pid != my_pid: if _pool_pid != my_pid:
if router is None:
# fallback to trying to get router from context if that exists
if context is not None:
router = context.router
else:
raise ValueError("Unable to create Pool! Missing router.")
_pool = Pool( _pool = Pool(
router, router,
services=[], services=[],
@ -119,7 +125,7 @@ def call(service_name, method_name, call_context=None, **kwargs):
if call_context: if call_context:
return call_context.call_service(service_name, method_name, **kwargs) return call_context.call_service(service_name, method_name, **kwargs)
else: else:
pool = get_or_create_pool() pool = get_or_create_pool(context=kwargs.get('context'))
invoker = pool.get_invoker(service_name, msg=None) invoker = pool.get_invoker(service_name, msg=None)
return getattr(invoker.service, method_name)(**kwargs) return getattr(invoker.service, method_name)(**kwargs)
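A sketch of the new fallback in get_or_create_pool(): prefer an explicit router, otherwise borrow the one attached to the calling context. The _Context class below is a hypothetical stand-in for mitogen.core.Context:

    def resolve_router(router=None, context=None):
        if router is not None:
            return router
        if context is not None:
            return context.router            # fall back to the context's router
        raise ValueError("Unable to create Pool! Missing router.")

    class _Context(object):                  # hypothetical stand-in for mitogen.core.Context
        router = object()

    print(resolve_router(context=_Context()) is _Context.router)   # True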
@ -685,6 +691,7 @@ class PushFileService(Service):
super(PushFileService, self).__init__(**kwargs) super(PushFileService, self).__init__(**kwargs)
self._lock = threading.Lock() self._lock = threading.Lock()
self._cache = {} self._cache = {}
self._extra_sys_paths = set()
self._waiters = {} self._waiters = {}
self._sent_by_stream = {} self._sent_by_stream = {}
@ -738,30 +745,57 @@ class PushFileService(Service):
@arg_spec({ @arg_spec({
'context': mitogen.core.Context, 'context': mitogen.core.Context,
'paths': list, 'paths': list,
'modules': list, # 'modules': list, TODO: modules was passed into this function but is not used yet
}) })
def propagate_paths_and_modules(self, context, paths, modules): def propagate_paths_and_modules(self, context, paths, overridden_sources=None, extra_sys_paths=None):
""" """
One size fits all method to ensure a target context has been preloaded One size fits all method to ensure a target context has been preloaded
with a set of small files and Python modules. with a set of small files and Python modules.
overridden_sources: optional dict mapping a path to source code that should be used in place of that file's contents
extra_sys_paths: additional sys.path entries to load for use in finding modules; beneficial
in situations like loading Ansible Collections, because source code
dependencies can come from different file paths than where the source lives
""" """
for path in paths: for path in paths:
self.propagate_to(context, mitogen.core.to_text(path)) overridden_source = None
#self.router.responder.forward_modules(context, modules) TODO if overridden_sources is not None and path in overridden_sources:
overridden_source = overridden_sources[path]
self.propagate_to(context, mitogen.core.to_text(path), overridden_source)
# self.router.responder.forward_modules(context, modules) TODO
# NOTE: this could possibly be handled by the TODO above, but it's unclear how forward_modules
# works, so for now we pass the sys paths themselves and have `propagate_to`
# add them to sys.path for later import
# ensure we don't add to sys.path the same path we've already seen
for extra_path in extra_sys_paths:
# store extra paths in cached set for O(1) lookup
if extra_path not in self._extra_sys_paths:
# not sure if it matters but we could prepend to sys.path instead if we need to
sys.path.append(extra_path)
self._extra_sys_paths.add(extra_path)
@expose(policy=AllowParents()) @expose(policy=AllowParents())
@arg_spec({ @arg_spec({
'context': mitogen.core.Context, 'context': mitogen.core.Context,
'path': mitogen.core.FsPathTypes, 'path': mitogen.core.FsPathTypes,
}) })
def propagate_to(self, context, path): def propagate_to(self, context, path, overridden_source=None):
"""
If the optional parameter 'overridden_source' is passed, use
that instead of the file at `path` as the source code. This works around bugs
in source modules, such as relative imports on unsupported Python versions.
"""
if path not in self._cache: if path not in self._cache:
LOG.debug('caching small file %s', path) LOG.debug('caching small file %s', path)
fp = open(path, 'rb') if overridden_source is None:
try: fp = open(path, 'rb')
self._cache[path] = mitogen.core.Blob(fp.read()) try:
finally: self._cache[path] = mitogen.core.Blob(fp.read())
fp.close() finally:
fp.close()
else:
self._cache[path] = mitogen.core.Blob(overridden_source)
self._forward(context, path) self._forward(context, path)
@expose(policy=AllowParents()) @expose(policy=AllowParents())
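A stand-alone sketch of the caching behaviour added to propagate_to(): when an overridden source is supplied it is cached in place of the file's contents, otherwise the file is read from disk once. Names here are illustrative, not Mitogen's API:

    cache = {}

    def cache_source(path, overridden_source=None):
        if path not in cache:
            if overridden_source is None:
                with open(path, 'rb') as fp:         # normal case: read the file from disk
                    cache[path] = fp.read()
            else:
                cache[path] = overridden_source      # workaround case: use the supplied source
        return cache[path]

    print(cache_source('demo.py', overridden_source=b'print("patched")'))   # b'print("patched")'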

@ -72,7 +72,10 @@ PASSWORD_PROMPT_PATTERN = re.compile(
) )
HOSTKEY_REQ_PATTERN = re.compile( HOSTKEY_REQ_PATTERN = re.compile(
b(r'are you sure you want to continue connecting \(yes/no\)\?'), b(
r'are you sure you want to continue connecting '
r'\(yes/no(?:/\[fingerprint\])?\)\?'
),
re.I re.I
) )
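The widened pattern accepts both the classic prompt and the OpenSSH 8.0+ variant that appends "/[fingerprint]". A quick check using a literal equivalent of the pattern with the standard re module:

    import re

    pattern = re.compile(
        br'are you sure you want to continue connecting '
        br'\(yes/no(?:/\[fingerprint\])?\)\?',
        re.I
    )

    old_prompt = b'Are you sure you want to continue connecting (yes/no)?'
    new_prompt = b'Are you sure you want to continue connecting (yes/no/[fingerprint])?'
    print(bool(pattern.search(old_prompt)))   # True
    print(bool(pattern.search(new_prompt)))   # True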
@ -221,6 +224,14 @@ class Connection(mitogen.parent.Connection):
child_is_immediate_subprocess = False child_is_immediate_subprocess = False
# strings that, if escaped, cause problems creating connections
# example: `source /opt/rh/rh-python36/enable && python`
# is an acceptable ansible_python_interpreter value, but shlex would quote the &&
# and prevent python from executing
SHLEX_IGNORE = [
"&&"
]
def _get_name(self): def _get_name(self):
s = u'ssh.' + mitogen.core.to_text(self.options.hostname) s = u'ssh.' + mitogen.core.to_text(self.options.hostname)
if self.options.port and self.options.port != 22: if self.options.port and self.options.port != 22:
@ -291,4 +302,9 @@ class Connection(mitogen.parent.Connection):
bits += self.options.ssh_args bits += self.options.ssh_args
bits.append(self.options.hostname) bits.append(self.options.hostname)
base = super(Connection, self).get_boot_command() base = super(Connection, self).get_boot_command()
return bits + [shlex_quote(s).strip() for s in base]
base_parts = []
for s in base:
val = s if s in self.SHLEX_IGNORE else shlex_quote(s).strip()
base_parts.append(val)
return bits + base_parts
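A short sketch of the quoting loop above: every word of the boot command is shell-quoted except the entries on the ignore list, so '&&' survives as a shell operator on the remote side:

    try:
        from shlex import quote as shlex_quote   # Python 3
    except ImportError:
        from pipes import quote as shlex_quote   # Python 2

    SHLEX_IGNORE = ['&&']
    base = ['source', '/opt/rh/rh-python36/enable', '&&', 'python', '-c', 'code here']

    quoted = [s if s in SHLEX_IGNORE else shlex_quote(s) for s in base]
    print(' '.join(quoted))
    # source /opt/rh/rh-python36/enable && python -c 'code here'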

@ -256,6 +256,8 @@ class Connection(mitogen.parent.Connection):
# Note: sudo did not introduce long-format option processing until July # Note: sudo did not introduce long-format option processing until July
# 2013, so even though we parse long-format options, supply short-form # 2013, so even though we parse long-format options, supply short-form
# to the sudo command. # to the sudo command.
boot_cmd = super(Connection, self).get_boot_command()
bits = [self.options.sudo_path, '-u', self.options.username] bits = [self.options.sudo_path, '-u', self.options.username]
if self.options.preserve_env: if self.options.preserve_env:
bits += ['-E'] bits += ['-E']
@ -268,4 +270,25 @@ class Connection(mitogen.parent.Connection):
if self.options.selinux_type: if self.options.selinux_type:
bits += ['-t', self.options.selinux_type] bits += ['-t', self.options.selinux_type]
return bits + ['--'] + super(Connection, self).get_boot_command() # special handling for bash builtins
# TODO: more efficient way of doing this, at least
# it's only 1 iteration of boot_cmd to go through
source_found = False
for cmd in boot_cmd[:]:
# strip `source` from boot_cmd if it exists; sudo.py can't run it
# even with the -i or -s options.
# Since the ssh command is already working, we shouldn't
# need to source anything again here.
# Couldn't figure out how to get this to work using sudo flags alone.
if 'source' == cmd:
boot_cmd.remove(cmd)
source_found = True
continue
if source_found:
# remove words until we hit the python interpreter call
if not cmd.endswith('python'):
boot_cmd.remove(cmd)
else:
break
return bits + ['--'] + boot_cmd
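A self-contained sketch of the stripping logic above: drop a leading `source <env-file> &&` sequence from the boot command so that sudo only ever execs the interpreter itself. The helper name is illustrative:

    def strip_source_prefix(boot_cmd):
        cmd = list(boot_cmd)
        if 'source' in cmd:
            start = cmd.index('source')
            end = start
            # remove words from 'source' up to (but not including) the interpreter
            while end < len(cmd) and not cmd[end].endswith('python'):
                end += 1
            del cmd[start:end]
        return cmd

    print(strip_source_prefix(
        ['source', '/opt/rh/rh-python36/enable', '&&', 'python', '-c', 'code here']))
    # ['python', '-c', 'code here']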

@ -1,3 +1,6 @@
[bdist_wheel]
universal=1
[coverage:run] [coverage:run]
branch = true branch = true
source = source =

@ -37,29 +37,46 @@ def grep_version():
for line in fp: for line in fp:
if line.startswith('__version__'): if line.startswith('__version__'):
_, _, s = line.partition('=') _, _, s = line.partition('=')
return '.'.join(map(str, eval(s))) return '%i.%i.%i' % eval(s)
def long_description():
here = os.path.dirname(__file__)
readme_path = os.path.join(here, 'README.md')
with open(readme_path) as fp:
readme = fp.read()
return readme
setup( setup(
name = 'mitogen', name = 'mitogen',
version = grep_version(), version = grep_version(),
description = 'Library for writing distributed self-replicating programs.', description = 'Library for writing distributed self-replicating programs.',
long_description = long_description(),
long_description_content_type='text/markdown',
author = 'David Wilson', author = 'David Wilson',
license = 'New BSD', license = 'New BSD',
url = 'https://github.com/dw/mitogen/', url = 'https://github.com/mitogen-hq/mitogen/',
packages = find_packages(exclude=['tests', 'examples']), packages = find_packages(exclude=['tests', 'examples']),
python_requires='>=2.4, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4',
zip_safe = False, zip_safe = False,
classifiers = [ classifiers = [
'Environment :: Console', 'Environment :: Console',
'Intended Audience :: System Administrators', 'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License', 'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX', 'Operating System :: POSIX',
'Programming Language :: Python', 'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.4', 'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: CPython',
'Topic :: System :: Distributed Computing', 'Topic :: System :: Distributed Computing',
'Topic :: System :: Systems Administration', 'Topic :: System :: Systems Administration',

@ -7,7 +7,7 @@ started in September 2017. Pull requests in this area are very welcome!
## Running The Tests ## Running The Tests
[![Build Status](https://api.travis-ci.org/dw/mitogen.svg?branch=master)](https://travis-ci.org/dw/mitogen) [![Build Status](https://dev.azure.com/mitogen-hq/mitogen/_apis/build/status/mitogen-hq.mitogen?branchName=master)](https://dev.azure.com/mitogen-hq/mitogen/_build/latest?definitionId=1&branchName=master)
Your computer should have an Internet connection, and the ``docker`` command Your computer should have an Internet connection, and the ``docker`` command
line tool should be able to connect to a working Docker daemon (localhost or line tool should be able to connect to a working Docker daemon (localhost or

@ -1,3 +1,4 @@
- include: setup/all.yml
- include: regression/all.yml - include: regression/all.yml
- include: integration/all.yml - include: integration/all.yml

@ -66,3 +66,6 @@
copy: copy:
src: /tmp/bigbigfile.in src: /tmp/bigbigfile.in
dest: /tmp/bigbigfile.out dest: /tmp/bigbigfile.out
tags:
- resource_intensive

@ -2,3 +2,5 @@
tasks: tasks:
- include_tasks: _includes.yml - include_tasks: _includes.yml
with_sequence: start=1 end=1000 with_sequence: start=1 end=1000
tags:
- resource_intensive

@ -21,5 +21,11 @@
copy: copy:
src: "{{item.src}}" src: "{{item.src}}"
dest: "/tmp/filetree.out/{{item.path}}" dest: "/tmp/filetree.out/{{item.path}}"
mode: 0644
with_filetree: /tmp/filetree.in with_filetree: /tmp/filetree.in
when: item.state == 'file' when: item.state == 'file'
loop_control:
label: "/tmp/filetree.out/{{ item.path }}"
tags:
- resource_intensive

@ -8,3 +8,5 @@
tasks: tasks:
- command: hostname - command: hostname
with_sequence: start=1 end="{{end|default(100)}}" with_sequence: start=1 end="{{end|default(100)}}"
tags:
- resource_intensive

@ -110,3 +110,5 @@
- command: hostname - command: hostname
- command: hostname - command: hostname
- command: hostname - command: hostname
tags:
- resource_intensive

@ -63,6 +63,7 @@
- stat.results[1].stat.checksum == "62951f943c41cdd326e5ce2b53a779e7916a820d" - stat.results[1].stat.checksum == "62951f943c41cdd326e5ce2b53a779e7916a820d"
- stat.results[2].stat.checksum == "b26dd6444595e2bdb342aa0a91721b57478b5029" - stat.results[2].stat.checksum == "b26dd6444595e2bdb342aa0a91721b57478b5029"
- stat.results[3].stat.checksum == "d675f47e467eae19e49032a2cc39118e12a6ee72" - stat.results[3].stat.checksum == "d675f47e467eae19e49032a2cc39118e12a6ee72"
fail_msg: stat={{stat}}
- file: - file:
state: absent state: absent

@ -1,18 +1,12 @@
# Verify action plugins still set file modes correctly even though # Verify action plugins still set file modes correctly even though
# fixup_perms2() avoids setting execute bit despite being asked to. # fixup_perms2() avoids setting execute bit despite being asked to.
# As of Ansible 2.10.0, default perms vary based on OS: on Debian systems it's 0644 and on CentOS it's 0664, based on test output.
# Regardless, we're testing that no execute bit is set here, so either check is OK.
- name: integration/action/fixup_perms2__copy.yml - name: integration/action/fixup_perms2__copy.yml
hosts: test-targets hosts: test-targets
any_errors_fatal: true any_errors_fatal: true
tasks: tasks:
- name: Get default remote file mode
shell: python -c 'import os; print("%04o" % (int("0666", 8) & ~os.umask(0)))'
register: py_umask
- name: Set default file mode
set_fact:
mode: "{{py_umask.stdout}}"
# #
# copy module (no mode). # copy module (no mode).
# #
@ -26,7 +20,8 @@
register: out register: out
- assert: - assert:
that: that:
- out.stat.mode == mode - out.stat.mode in ("0644", "0664")
fail_msg: out={{out}}
# #
# copy module (explicit mode). # copy module (explicit mode).
@ -43,6 +38,7 @@
- assert: - assert:
that: that:
- out.stat.mode == "0400" - out.stat.mode == "0400"
fail_msg: out={{out}}
# #
# copy module (existing disk files, no mode). # copy module (existing disk files, no mode).
@ -68,7 +64,8 @@
register: out register: out
- assert: - assert:
that: that:
- out.stat.mode == mode - out.stat.mode in ("0644", "0664")
fail_msg: out={{out}}
# #
# copy module (existing disk files, preserve mode). # copy module (existing disk files, preserve mode).
@ -85,6 +82,7 @@
- assert: - assert:
that: that:
- out.stat.mode == "1462" - out.stat.mode == "1462"
fail_msg: out={{out}}
# #
# copy module (existing disk files, explicit mode). # copy module (existing disk files, explicit mode).
@ -102,6 +100,7 @@
- assert: - assert:
that: that:
- out.stat.mode == "1461" - out.stat.mode == "1461"
fail_msg: out={{out}}
- file: - file:
state: absent state: absent

@ -16,6 +16,7 @@
- 'raw.rc == 0' - 'raw.rc == 0'
- 'raw.stdout_lines[-1]|to_text == "2"' - 'raw.stdout_lines[-1]|to_text == "2"'
- 'raw.stdout[-1]|to_text == "2"' - 'raw.stdout[-1]|to_text == "2"'
fail_msg: raw={{raw}}
- name: Run raw module with sudo - name: Run raw module with sudo
become: true become: true
@ -39,3 +40,4 @@
["root\r\n"], ["root\r\n"],
["root"], ["root"],
) )
fail_msg: raw={{raw}}

@ -44,11 +44,13 @@
assert: assert:
that: that:
- good_temp_path == good_temp_path2 - good_temp_path == good_temp_path2
fail_msg: good_temp_path={{good_temp_path}} good_temp_path2={{good_temp_path2}}
- name: "Verify different subdir for both tasks" - name: "Verify different subdir for both tasks"
assert: assert:
that: that:
- tmp_path.path != tmp_path2.path - tmp_path.path != tmp_path2.path
fail_msg: tmp_path={{tmp_path}} tmp_path2={{tmp_path2}}
# #
# Verify subdirectory removal. # Verify subdirectory removal.
@ -69,6 +71,7 @@
that: that:
- not stat1.stat.exists - not stat1.stat.exists
- not stat2.stat.exists - not stat2.stat.exists
fail_msg: stat1={{stat1}} stat2={{stat2}}
# #
# Verify good directory persistence. # Verify good directory persistence.
@ -83,6 +86,7 @@
assert: assert:
that: that:
- stat.stat.exists - stat.stat.exists
fail_msg: stat={{stat}}
# #
# Write some junk into the temp path. # Write some junk into the temp path.
@ -105,6 +109,7 @@
- assert: - assert:
that: that:
- not out.stat.exists - not out.stat.exists
fail_msg: out={{out}}
# #
# root # root
@ -123,21 +128,23 @@
that: that:
- tmp_path2.path != tmp_path_root.path - tmp_path2.path != tmp_path_root.path
- tmp_path2.path|dirname != tmp_path_root.path|dirname - tmp_path2.path|dirname != tmp_path_root.path|dirname
fail_msg: tmp_path_root={{tmp_path_root}} tmp_path2={{tmp_path2}}
# #
# readonly homedir # readonly homedir
# #
- name: "Try writing to temp directory for the readonly_homedir user" # TODO: https://github.com/dw/mitogen/issues/692
become: true # - name: "Try writing to temp directory for the readonly_homedir user"
become_user: mitogen__readonly_homedir # become: true
custom_python_run_script: # become_user: mitogen__readonly_homedir
script: | # custom_python_run_script:
from ansible.module_utils.basic import get_module_path # script: |
path = get_module_path() + '/foo.txt' # from ansible.module_utils.basic import get_module_path
result['path'] = path # path = get_module_path() + '/foo.txt'
open(path, 'w').write("bar") # result['path'] = path
register: tmp_path # open(path, 'w').write("bar")
# register: tmp_path
# #
# modules get the same base dir # modules get the same base dir
@ -147,17 +154,9 @@
custom_python_detect_environment: custom_python_detect_environment:
register: out register: out
# v2.6 related: https://github.com/ansible/ansible/pull/39833 - name: "Verify modules get the same tmpdir as the action plugin"
- name: "Verify modules get the same tmpdir as the action plugin (<2.5)"
when: ansible_version.full < '2.5'
assert:
that:
- out.module_path.startswith(good_temp_path2)
- out.module_tmpdir == None
- name: "Verify modules get the same tmpdir as the action plugin (>2.5)"
when: ansible_version.full > '2.5'
assert: assert:
that: that:
- out.module_path.startswith(good_temp_path2) - out.module_path.startswith(good_temp_path2)
- out.module_tmpdir.startswith(good_temp_path2) - out.module_tmpdir.startswith(good_temp_path2)
fail_msg: out={{out}}

@ -27,6 +27,7 @@
register: out register: out
- assert: - assert:
that: out.result == '{{user_facts.ansible_facts.ansible_user_dir}}/foo' that: out.result == '{{user_facts.ansible_facts.ansible_user_dir}}/foo'
fail_msg: out={{out}}
- name: "Expand ~/foo with become active. ~ is become_user's home." - name: "Expand ~/foo with become active. ~ is become_user's home."
action_passthrough: action_passthrough:
@ -49,6 +50,7 @@
register: out register: out
- assert: - assert:
that: out.result == '{{user_facts.ansible_facts.ansible_user_dir}}/foo' that: out.result == '{{user_facts.ansible_facts.ansible_user_dir}}/foo'
fail_msg: out={{out}}
- name: "Expanding $HOME/foo has no effect." - name: "Expanding $HOME/foo has no effect."
action_passthrough: action_passthrough:
@ -59,6 +61,7 @@
register: out register: out
- assert: - assert:
that: out.result == '$HOME/foo' that: out.result == '$HOME/foo'
fail_msg: out={{out}}
# ------------------------ # ------------------------
@ -71,6 +74,7 @@
register: out register: out
- assert: - assert:
that: out.result == '{{user_facts.ansible_facts.ansible_user_dir}}/foo' that: out.result == '{{user_facts.ansible_facts.ansible_user_dir}}/foo'
fail_msg: out={{out}}
- name: "sudoable; Expand ~/foo with become active. ~ is become_user's home." - name: "sudoable; Expand ~/foo with become active. ~ is become_user's home."
action_passthrough: action_passthrough:
@ -94,6 +98,7 @@
register: out register: out
- assert: - assert:
that: out.result == '{{user_facts.ansible_facts.ansible_user_dir}}/foo' that: out.result == '{{user_facts.ansible_facts.ansible_user_dir}}/foo'
fail_msg: out={{out}}
- name: "sudoable; Expanding $HOME/foo has no effect." - name: "sudoable; Expanding $HOME/foo has no effect."
action_passthrough: action_passthrough:
@ -104,3 +109,4 @@
register: out register: out
- assert: - assert:
that: out.result == '$HOME/foo' that: out.result == '$HOME/foo'
fail_msg: out={{out}}

@ -15,6 +15,7 @@
- assert: - assert:
that: out.result == False that: out.result == False
fail_msg: out={{out}}
# --- # ---
@ -29,6 +30,7 @@
- assert: - assert:
that: out.result == True that: out.result == True
fail_msg: out={{out}}
- file: - file:
path: /tmp/does-exist path: /tmp/does-exist

@ -23,6 +23,7 @@
- assert: - assert:
that: that:
- not out2.stat.exists - not out2.stat.exists
fail_msg: out={{out}}
- stat: - stat:
path: "{{out.src|dirname}}" path: "{{out.src|dirname}}"
@ -31,6 +32,7 @@
- assert: - assert:
that: that:
- not out2.stat.exists - not out2.stat.exists
fail_msg: out={{out}}
- file: - file:
path: /tmp/remove_tmp_path_test path: /tmp/remove_tmp_path_test

@ -34,30 +34,41 @@
content: "item!" content: "item!"
delegate_to: localhost delegate_to: localhost
- file: # TODO: https://github.com/dw/mitogen/issues/692
path: /tmp/sync-test.out # - file:
state: absent # path: /tmp/sync-test.out
become: true # state: absent
# become: true
- synchronize: # exception: File "/tmp/venv/lib/python2.7/site-packages/ansible/plugins/action/__init__.py", line 129, in cleanup
private_key: /tmp/synchronize-action-key # exception: self._remove_tmp_path(self._connection._shell.tmpdir)
dest: /tmp/sync-test.out # exception: AttributeError: 'get_with_context_result' object has no attribute '_shell'
src: /tmp/sync-test/ # TODO: looks like a bug on Ansible's end with 2.10? Maybe 2.10.1 will fix it
# https://github.com/dw/mitogen/issues/746
- name: do synchronize test
block:
- synchronize:
private_key: /tmp/synchronize-action-key
dest: /tmp/sync-test.out
src: /tmp/sync-test/
- slurp: - slurp:
src: /tmp/sync-test.out/item src: /tmp/sync-test.out/item
register: out register: out
- set_fact: outout="{{out.content|b64decode}}" - set_fact: outout="{{out.content|b64decode}}"
- assert: - assert:
that: outout == "item!" that: outout == "item!"
fail_msg: outout={{outout}}
when: False
- file: # TODO: https://github.com/dw/mitogen/issues/692
path: "{{item}}" # - file:
state: absent # path: "{{item}}"
become: true # state: absent
with_items: # become: true
- /tmp/synchronize-action-key # with_items:
- /tmp/sync-test # - /tmp/synchronize-action-key
- /tmp/sync-test.out # - /tmp/sync-test
# - /tmp/sync-test.out

@ -24,6 +24,7 @@
- assert: - assert:
that: | that: |
out.content|b64decode == '{"I am JSON": true}' out.content|b64decode == '{"I am JSON": true}'
fail_msg: out={{out}}
# Ensure it handles strings. # Ensure it handles strings.
@ -40,6 +41,7 @@
- assert: - assert:
that: that:
out.content|b64decode == 'I am text.' out.content|b64decode == 'I am text.'
fail_msg: out={{out}}
- file: - file:
path: /tmp/transfer-data path: /tmp/transfer-data

@ -11,6 +11,7 @@
- include: connection_loader/all.yml - include: connection_loader/all.yml
- include: context_service/all.yml - include: context_service/all.yml
- include: glibc_caches/all.yml - include: glibc_caches/all.yml
- include: interpreter_discovery/all.yml
- include: local/all.yml - include: local/all.yml
- include: module_utils/all.yml - include: module_utils/all.yml
- include: playbook_semantics/all.yml - include: playbook_semantics/all.yml

@ -34,3 +34,4 @@
- out.results[1].stdout == 'hi-from-job-2' - out.results[1].stdout == 'hi-from-job-2'
- out.results[1].rc == 0 - out.results[1].rc == 0
- out.results[1].delta > '0:00:05' - out.results[1].delta > '0:00:05'
fail_msg: out={{out}}

@ -28,6 +28,7 @@
(job.started == 1) and (job.started == 1) and
(job.changed == True) and (job.changed == True) and
(job.finished == 0) (job.finished == 0)
fail_msg: job={{job}}
- name: busy-poll up to 100000 times - name: busy-poll up to 100000 times
async_status: async_status:
@ -51,5 +52,6 @@
- async_out.failed == False - async_out.failed == False
- async_out.msg == "Hello, world." - async_out.msg == "Hello, world."
- 'async_out.stderr == "binary_producing_json: oh noes\n"' - 'async_out.stderr == "binary_producing_json: oh noes\n"'
fail_msg: async_out={{async_out}}
vars: vars:
async_out: "{{result.content|b64decode|from_json}}" async_out: "{{result.content|b64decode|from_json}}"

@ -39,5 +39,6 @@
- async_out.msg.startswith("Traceback") - async_out.msg.startswith("Traceback")
- '"ValueError: No start of json char found\n" in async_out.msg' - '"ValueError: No start of json char found\n" in async_out.msg'
- 'async_out.stderr == "binary_producing_junk: oh noes\n"' - 'async_out.stderr == "binary_producing_junk: oh noes\n"'
fail_msg: async_out={{async_out}}
vars: vars:
async_out: "{{result.content|b64decode|from_json}}" async_out: "{{result.content|b64decode|from_json}}"

@ -35,12 +35,14 @@
- async_out.start.startswith("20") - async_out.start.startswith("20")
- async_out.stderr == "there" - async_out.stderr == "there"
- async_out.stdout == "hi" - async_out.stdout == "hi"
fail_msg: async_out={{async_out}}
vars: vars:
async_out: "{{result.content|b64decode|from_json}}" async_out: "{{result.content|b64decode|from_json}}"
- assert: - assert:
that: that:
- async_out.invocation.module_args.stdin == None - async_out.invocation.module_args.stdin == None
fail_msg: async_out={{async_out}}
when: ansible_version.full > '2.4' when: ansible_version.full > '2.4'
vars: vars:
async_out: "{{result.content|b64decode|from_json}}" async_out: "{{result.content|b64decode|from_json}}"

@ -16,6 +16,7 @@
- assert: - assert:
that: that:
- sync_proc1.pid == sync_proc2.pid - sync_proc1.pid == sync_proc2.pid
fail_msg: sync_proc1={{sync_proc1}} sync_proc2={{sync_proc2}}
when: is_mitogen when: is_mitogen
- name: get async process ID. - name: get async process ID.
@ -48,7 +49,9 @@
- assert: - assert:
that: that:
# FIXME should this be async_proc1, and async_proc2?
- sync_proc1.pid == sync_proc2.pid - sync_proc1.pid == sync_proc2.pid
- async_result1.pid != sync_proc1.pid - async_result1.pid != sync_proc1.pid
- async_result1.pid != async_result2.pid - async_result1.pid != async_result2.pid
fail_msg: async_result1={{async_result1}} async_result2={{async_result2}}
when: is_mitogen when: is_mitogen

@ -24,6 +24,7 @@
(job1.started == 1) and (job1.started == 1) and
(job1.changed == True) and (job1.changed == True) and
(job1.finished == 0) (job1.finished == 0)
fail_msg: job1={{job1}}
- name: busy-poll up to 100000 times - name: busy-poll up to 100000 times
async_status: async_status:
@ -40,15 +41,15 @@
- result1.changed == True - result1.changed == True
# ansible/b72e989e1837ccad8dcdc926c43ccbc4d8cdfe44 # ansible/b72e989e1837ccad8dcdc926c43ccbc4d8cdfe44
- | - |
(ansible_version.full >= '2.8' and (ansible_version.full is version('2.8', ">=") and
result1.cmd == "echo alldone;\nsleep 1;\n") or result1.cmd == "echo alldone;\nsleep 1;\n") or
(ansible_version.full < '2.8' and (ansible_version.full is version('2.8', '<') and
result1.cmd == "echo alldone;\n sleep 1;") result1.cmd == "echo alldone;\n sleep 1;")
- result1.delta|length == 14 - result1.delta|length == 14
- result1.start|length == 26 - result1.start|length == 26
- result1.finished == 1 - result1.finished == 1
- result1.rc == 0 - result1.rc == 0
- result1.start|length == 26 fail_msg: result1={{result1}}
- assert: - assert:
that: that:
@ -56,10 +57,11 @@
- result1.stderr_lines == [] - result1.stderr_lines == []
- result1.stdout == "alldone" - result1.stdout == "alldone"
- result1.stdout_lines == ["alldone"] - result1.stdout_lines == ["alldone"]
when: ansible_version.full > '2.8' # ansible#51393 fail_msg: result1={{result1}}
when: ansible_version.full is version('2.8', '>') # ansible#51393
- assert: - assert:
that: that:
- result1.failed == False - result1.failed == False
when: ansible_version.full > '2.4' fail_msg: result1={{result1}}
when: ansible_version.full is version('2.4', '>')

@ -31,4 +31,5 @@
- result.failed == 1 - result.failed == 1
- result.finished == 1 - result.finished == 1
- result.msg == "Job reached maximum time limit of 1 seconds." - result.msg == "Job reached maximum time limit of 1 seconds."
fail_msg: result={{result}}
when: is_mitogen when: is_mitogen

@ -56,8 +56,10 @@
that: that:
- result1.rc == 0 - result1.rc == 0
- result2.rc == 0 - result2.rc == 0
fail_msg: result1={{result1}} result2={{result2}}
- assert: - assert:
that: that:
- result2.stdout == 'im_alive' - result2.stdout == 'im_alive'
fail_msg: result2={{result2}}
when: ansible_version.full > '2.8' # ansible#51393 when: ansible_version.full > '2.8' # ansible#51393

@ -22,4 +22,5 @@
job1.msg == "async task did not complete within the requested time" or job1.msg == "async task did not complete within the requested time" or
job1.msg == "async task did not complete within the requested time - 1s" or job1.msg == "async task did not complete within the requested time - 1s" or
job1.msg == "Job reached maximum time limit of 1 seconds." job1.msg == "Job reached maximum time limit of 1 seconds."
fail_msg: job1={{job1}}

@ -22,6 +22,7 @@
('password is required' in out.msg) or ('password is required' in out.msg) or
('password is required' in out.module_stderr) ('password is required' in out.module_stderr)
) )
fail_msg: out={{out}}
when: is_mitogen when: is_mitogen
@ -41,6 +42,7 @@
('Incorrect su password' in out.msg) or ('Incorrect su password' in out.msg) or
('su password is incorrect' in out.msg) ('su password is incorrect' in out.msg)
) )
fail_msg: out={{out}}
when: is_mitogen when: is_mitogen
- name: Ensure password su succeeds. - name: Ensure password su succeeds.
@ -55,4 +57,5 @@
- assert: - assert:
that: that:
- out.stdout == 'mitogen__user1' - out.stdout == 'mitogen__user1'
fail_msg: out={{out}}
when: is_mitogen when: is_mitogen

@ -19,4 +19,6 @@
('sudo: no such option: --derps' in out.msg) or ('sudo: no such option: --derps' in out.msg) or
("sudo: invalid option -- '-'" in out.module_stderr) or ("sudo: invalid option -- '-'" in out.module_stderr) or
("sudo: unrecognized option `--derps'" in out.module_stderr) or ("sudo: unrecognized option `--derps'" in out.module_stderr) or
("sudo: unrecognized option `--derps'" in out.module_stdout) or
("sudo: unrecognized option '--derps'" in out.module_stderr) ("sudo: unrecognized option '--derps'" in out.module_stderr)
fail_msg: out={{out}}

@ -9,11 +9,25 @@
become_user: slartibartfast become_user: slartibartfast
ignore_errors: true ignore_errors: true
register: out register: out
when:
# https://github.com/ansible/ansible/pull/70785
- ansible_facts.distribution not in ["MacOSX"]
or ansible_version.full is version("2.11", ">=", strict=True)
or is_mitogen
- name: Verify raw module output. - name: Verify raw module output.
assert: assert:
that: | that:
out.failed and ( - out.failed
('sudo: unknown user: slartibartfast' in out.msg) or # sudo-1.8.6p3-29.el6_10.3 on RHEL & CentOS 6.10 (final release)
('sudo: unknown user: slartibartfast' in out.module_stderr) # removed user/group error messages, as defence against CVE-2019-14287.
) - >-
('sudo: unknown user: slartibartfast' in out.module_stderr | default(out.msg))
or ('chown: slartibartfast: illegal user name' in out.module_stderr | default(out.msg))
or (ansible_facts.os_family == 'RedHat' and ansible_facts.distribution_version == '6.10')
fail_msg: out={{out}}
when:
# https://github.com/ansible/ansible/pull/70785
- ansible_facts.distribution not in ["MacOSX"]
or ansible_version.full is version("2.11", ">=", strict=True)
or is_mitogen

@ -12,6 +12,7 @@
- assert: - assert:
that: that:
- out.stdout != 'root' - out.stdout != 'root'
fail_msg: out={{out}}
- name: Ensure passwordless sudo to root succeeds. - name: Ensure passwordless sudo to root succeeds.
shell: whoami shell: whoami
@ -22,3 +23,4 @@
- assert: - assert:
that: that:
- out.stdout == 'root' - out.stdout == 'root'
fail_msg: out={{out}}

@ -11,6 +11,11 @@
become_user: mitogen__pw_required become_user: mitogen__pw_required
register: out register: out
ignore_errors: true ignore_errors: true
when:
# https://github.com/ansible/ansible/pull/70785
- ansible_facts.distribution not in ["MacOSX"]
or ansible_version.full is version("2.11", ">=", strict=True)
or is_mitogen
- assert: - assert:
that: | that: |
@ -19,6 +24,12 @@
('Missing sudo password' in out.msg) or ('Missing sudo password' in out.msg) or
('password is required' in out.module_stderr) ('password is required' in out.module_stderr)
) )
fail_msg: out={{out}}
when:
# https://github.com/ansible/ansible/pull/70785
- ansible_facts.distribution not in ["MacOSX"]
or ansible_version.full is version("2.11", ">=", strict=True)
or is_mitogen
- name: Ensure password sudo incorrect. - name: Ensure password sudo incorrect.
shell: whoami shell: whoami
@ -28,6 +39,11 @@
vars: vars:
ansible_become_pass: nopes ansible_become_pass: nopes
ignore_errors: true ignore_errors: true
when:
# https://github.com/ansible/ansible/pull/70785
- ansible_facts.distribution not in ["MacOSX"]
or ansible_version.full is version("2.11", ">=", strict=True)
or is_mitogen
- assert: - assert:
that: | that: |
@ -35,15 +51,22 @@
('Incorrect sudo password' in out.msg) or ('Incorrect sudo password' in out.msg) or
('sudo password is incorrect' in out.msg) ('sudo password is incorrect' in out.msg)
) )
fail_msg: out={{out}}
when:
# https://github.com/ansible/ansible/pull/70785
- ansible_facts.distribution not in ["MacOSX"]
or ansible_version.full is version("2.11", ">=", strict=True)
or is_mitogen
- name: Ensure password sudo succeeds. # TODO: https://github.com/dw/mitogen/issues/692
shell: whoami # - name: Ensure password sudo succeeds.
become: true # shell: whoami
become_user: mitogen__pw_required # become: true
register: out # become_user: mitogen__pw_required
vars: # register: out
ansible_become_pass: pw_required_password # vars:
# ansible_become_pass: pw_required_password
- assert: # - assert:
that: # that:
- out.stdout == 'mitogen__pw_required' # - out.stdout == 'mitogen__pw_required'

@ -5,31 +5,33 @@
any_errors_fatal: true any_errors_fatal: true
tasks: tasks:
- name: Verify we can login to a non-passworded requiretty account # TODO: https://github.com/dw/mitogen/issues/692
shell: whoami # - name: Verify we can login to a non-passworded requiretty account
become: true # shell: whoami
become_user: mitogen__require_tty # become: true
register: out # become_user: mitogen__require_tty
when: is_mitogen # register: out
# when: is_mitogen
- assert: # - assert:
that: # that:
- out.stdout == 'mitogen__require_tty' # - out.stdout == 'mitogen__require_tty'
when: is_mitogen # when: is_mitogen
# --------------- # ---------------
- name: Verify we can login to a passworded requiretty account # TODO: https://github.com/dw/mitogen/issues/692
shell: whoami # - name: Verify we can login to a passworded requiretty account
become: true # shell: whoami
become_user: mitogen__require_tty_pw_required # become: true
vars: # become_user: mitogen__require_tty_pw_required
ansible_become_pass: require_tty_pw_required_password # vars:
register: out # ansible_become_pass: require_tty_pw_required_password
when: is_mitogen # register: out
# when: is_mitogen
- assert: # - assert:
that: # that:
- out.stdout == 'mitogen__require_tty_pw_required' # - out.stdout == 'mitogen__require_tty_pw_required'
when: is_mitogen # when: is_mitogen

@ -21,3 +21,4 @@
- original.stat.checksum == copied.stat.checksum - original.stat.checksum == copied.stat.checksum
# Upstream does not preserve timestamps at all. # Upstream does not preserve timestamps at all.
#- (not is_mitogen) or (original.stat.mtime|int == copied.stat.mtime|int) #- (not is_mitogen) or (original.stat.mtime|int == copied.stat.mtime|int)
fail_msg: original={{original}} copied={{copied}}

@ -19,6 +19,7 @@
- out.result[0].method == "ssh" - out.result[0].method == "ssh"
- out.result[0].kwargs.username == "joe" - out.result[0].kwargs.username == "joe"
- out.result|length == 1 # no sudo - out.result|length == 1 # no sudo
fail_msg: out={{out}}
when: is_mitogen when: is_mitogen
@ -36,4 +37,5 @@
- out.result[1].method == "sudo" - out.result[1].method == "sudo"
- out.result[1].kwargs.username == "james" - out.result[1].kwargs.username == "james"
- out.result|length == 2 # no sudo - out.result|length == 2 # no sudo
fail_msg: out={{out}}
when: is_mitogen when: is_mitogen

@ -25,3 +25,4 @@
that: that:
- out.rc == 4 - out.rc == 4
- "'Mitogen was disconnected from the remote environment while a call was in-progress.' in out.stdout" - "'Mitogen was disconnected from the remote environment while a call was in-progress.' in out.stdout"
fail_msg: out={{out}}

@ -17,3 +17,4 @@
- out.result[0] == 0 - out.result[0] == 0
- out.result[1].decode() == "hello, world\r\n" - out.result[1].decode() == "hello, world\r\n"
- out.result[2].decode().startswith("Shared connection to ") - out.result[2].decode().startswith("Shared connection to ")
fail_msg: out={{out}}

@ -43,3 +43,4 @@
# sudo PID has changed. # sudo PID has changed.
- out_become.ppid != out_become2.ppid - out_become.ppid != out_become2.ppid
fail_msg: out={{out}} out2={{out2}} out_become={{out_become}} out_become2={{out_become2}}

@ -24,6 +24,7 @@
assert: assert:
that: that:
- become_acct.pid != login_acct.pid - become_acct.pid != login_acct.pid
fail_msg: become_acct={{become_acct}} login_acct={{login_acct}}
- name: reset the connection - name: reset the connection
meta: reset_connection meta: reset_connection
@ -36,6 +37,7 @@
assert: assert:
that: that:
- become_acct.pid != new_become_acct.pid - become_acct.pid != new_become_acct.pid
fail_msg: become_acct={{become_acct}} new_become_acct={{new_become_acct}}
- name: save new pid of login acct - name: save new pid of login acct
become: false become: false
@ -46,3 +48,4 @@
assert: assert:
that: that:
- login_acct.pid != new_login_acct.pid - login_acct.pid != new_login_acct.pid
fail_msg: login_acct={{login_acct}} new_login_acct={{new_login_acct}}

@ -12,3 +12,4 @@
- assert: - assert:
that: (not not out.mitogen_loaded) == (not not is_mitogen) that: (not not out.mitogen_loaded) == (not not is_mitogen)
fail_msg: out={{out}}

@ -1,12 +1,19 @@
# Ensure paramiko connections aren't grabbed. # Ensure paramiko connections aren't grabbed.
---
- name: integration/connection_loader/paramiko_unblemished.yml - name: integration/connection_loader/paramiko_unblemished.yml
hosts: test-targets hosts: test-targets
any_errors_fatal: true any_errors_fatal: true
tasks: tasks:
- custom_python_detect_environment: - debug:
connection: paramiko msg: "skipped for now"
register: out - name: this is flaky -> https://github.com/dw/mitogen/issues/747
block:
- custom_python_detect_environment:
connection: paramiko
register: out
- assert: - assert:
that: not out.mitogen_loaded that: not out.mitogen_loaded
fail_msg: out={{out}}
when: False

@ -12,3 +12,4 @@
- assert: - assert:
that: (not not out.mitogen_loaded) == (not not is_mitogen) that: (not not out.mitogen_loaded) == (not not is_mitogen)
fail_msg: out={{out}}

@ -14,36 +14,37 @@
# Start with a clean slate. # Start with a clean slate.
- mitogen_shutdown_all: - mitogen_shutdown_all:
# Connect a few users. # TODO: https://github.com/dw/mitogen/issues/695
- shell: "true" # # Connect a few users.
become: true # - shell: "true"
become_user: "mitogen__user{{item}}" # become: true
with_items: [1, 2, 3] # become_user: "mitogen__user{{item}}"
# with_items: [1, 2, 3]
# Verify current state.
- mitogen_action_script: # # Verify current state.
script: | # - mitogen_action_script:
self._connection._connect() # script: |
result['dump'] = self._connection.get_binding().get_service_context().call_service( # self._connection._connect()
service_name='ansible_mitogen.services.ContextService', # result['dump'] = self._connection.get_binding().get_service_context().call_service(
method_name='dump' # service_name='ansible_mitogen.services.ContextService',
) # method_name='dump'
register: out # )
# register: out
- assert:
that: out.dump|length == (play_hosts|length) * 4 # ssh account + 3 sudo accounts # - assert:
# that: out.dump|length == (play_hosts|length) * 4 # ssh account + 3 sudo accounts
- meta: reset_connection
# - meta: reset_connection
# Verify current state.
- mitogen_action_script: # # Verify current state.
script: | # - mitogen_action_script:
self._connection._connect() # script: |
result['dump'] = self._connection.get_binding().get_service_context().call_service( # self._connection._connect()
service_name='ansible_mitogen.services.ContextService', # result['dump'] = self._connection.get_binding().get_service_context().call_service(
method_name='dump' # service_name='ansible_mitogen.services.ContextService',
) # method_name='dump'
register: out # )
# register: out
- assert:
that: out.dump|length == play_hosts|length # just the ssh account # - assert:
# that: out.dump|length == play_hosts|length # just the ssh account

@ -13,29 +13,30 @@
mitogen_shutdown_all: mitogen_shutdown_all:
when: is_mitogen when: is_mitogen
- name: Spin up a bunch of interpreters # TODO: https://github.com/dw/mitogen/issues/696
custom_python_detect_environment: # - name: Spin up a bunch of interpreters
become: true # custom_python_detect_environment:
vars: # become: true
ansible_become_user: "mitogen__user{{item}}" # vars:
with_sequence: start=1 end={{ubound}} # ansible_become_user: "mitogen__user{{item}}"
register: first_run # with_sequence: start=1 end={{ubound}}
# register: first_run
- name: Reuse them # - name: Reuse them
custom_python_detect_environment: # custom_python_detect_environment:
become: true # become: true
vars: # vars:
ansible_become_user: "mitogen__user{{item}}" # ansible_become_user: "mitogen__user{{item}}"
with_sequence: start=1 end={{ubound}} # with_sequence: start=1 end={{ubound}}
register: second_run # register: second_run
- assert: # - assert:
that: # that:
- first_run.results[item|int].pid == second_run.results[item|int].pid # - first_run.results[item|int].pid == second_run.results[item|int].pid
with_items: start=0 end={{max_interps}} # with_items: start=0 end={{max_interps}}
when: is_mitogen # when: is_mitogen
- assert: # - assert:
that: # that:
- first_run.results[-1].pid != second_run.results[-1].pid # - first_run.results[-1].pid != second_run.results[-1].pid
when: is_mitogen # when: is_mitogen

@ -31,3 +31,4 @@
- assert: - assert:
that: that:
- old_become_env.pid != new_become_env.pid - old_become_env.pid != new_become_env.pid
fail_msg: old_become_env={{old_become_env}} new_become_env={{new_become_env}}

@ -18,6 +18,7 @@
- assert: - assert:
that: that:
- out.stdout is match('.*python([0-9.]+)?\(mitogen:[a-z]+@[^:]+:[0-9]+\)') - out.stdout is match('.*python([0-9.]+)?\(mitogen:[a-z]+@[^:]+:[0-9]+\)')
fail_msg: out={{out}}
- shell: 'cat /proc/$PPID/cmdline | tr \\0 \\n' - shell: 'cat /proc/$PPID/cmdline | tr \\0 \\n'
register: out register: out
@ -28,4 +29,5 @@
- assert: - assert:
that: that:
- out.stdout is match('.*python([0-9.]+)?\(mitogen:ansible\)') - out.stdout is match('.*python([0-9.]+)?\(mitogen:ansible\)')
fail_msg: out={{out}}

@ -44,6 +44,7 @@
- out.failed - out.failed
- '"Name or service not known" in out.msg or - '"Name or service not known" in out.msg or
"Temporary failure in name resolution" in out.msg' "Temporary failure in name resolution" in out.msg'
fail_msg: out={{out}}
when: | when: |
ansible_virtualization_type == "docker" and ansible_virtualization_type == "docker" and
ansible_python_version > "2.5" ansible_python_version > "2.5"

@ -0,0 +1,2 @@
- include: complex_args.yml
- include: ansible_2_8_tests.yml

@ -0,0 +1,166 @@
# ripped and ported from https://github.com/ansible/ansible/pull/50163/files, when interpreter discovery was added to ansible
---
- name: integration/interpreter_discovery/ansible_2_8_tests.yml
hosts: test-targets
any_errors_fatal: true
gather_facts: true
tasks:
- name: can only run these tests on ansible >= 2.8.0
block:
- name: ensure we can override ansible_python_interpreter
vars:
ansible_python_interpreter: overriddenpython
assert:
that:
- ansible_python_interpreter == 'overriddenpython'
fail_msg: "'ansible_python_interpreter' appears to be set at a high precedence to {{ ansible_python_interpreter }},
which breaks this test."
- name: snag some facts to validate for later
set_fact:
distro: '{{ ansible_distribution | default("unknown") | lower }}'
distro_version: '{{ ansible_distribution_version | default("unknown") }}'
os_family: '{{ ansible_os_family | default("unknown") }}'
- name: test that python discovery is working and that fact persistence makes it only run once
block:
- name: clear facts to force interpreter discovery to run
meta: clear_facts
- name: trigger discovery with auto
vars:
ansible_python_interpreter: auto
ping:
register: auto_out
- name: get the interpreter being used on the target to execute modules
vars:
ansible_python_interpreter: auto
test_echo_module:
register: echoout
# can't test this assertion:
# - echoout.ansible_facts is not defined or echoout.ansible_facts.discovered_interpreter_python is not defined
# because Mitogen's ansible_python_interpreter is a connection-layer configurable that
# "must be extracted during each task execution to form the complete connection-layer configuration".
            # Discovery won't be re-run though; ansible_python_interpreter is read from the cache once it has been discovered
- assert:
that:
- auto_out.ansible_facts.discovered_interpreter_python is defined
- echoout.running_python_interpreter == auto_out.ansible_facts.discovered_interpreter_python
fail_msg: auto_out={{auto_out}} echoout={{echoout}}
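            # A minimal follow-up sketch (assumption: after the tasks above, the discovery
            # result is cached in host facts as ansible_facts.discovered_interpreter_python,
            # so later tasks can read it without triggering discovery again):
            - name: read the cached discovery result (illustrative only)
              debug:
                msg: "cached interpreter: {{ ansible_facts.discovered_interpreter_python | default('not cached') }}"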
- name: test that auto_legacy gives a dep warning when /usr/bin/python present but != auto result
block:
- name: clear facts to force interpreter discovery to run
meta: clear_facts
- name: trigger discovery with auto_legacy
vars:
ansible_python_interpreter: auto_legacy
ping:
register: legacy
- name: check for dep warning (only on platforms where auto result is not /usr/bin/python and legacy is)
assert:
that:
- legacy.deprecations | default([]) | length > 0
fail_msg: legacy={{legacy}}
# only check for a dep warning if legacy returned /usr/bin/python and auto didn't
when: legacy.ansible_facts.discovered_interpreter_python == '/usr/bin/python' and
auto_out.ansible_facts.discovered_interpreter_python != '/usr/bin/python'
- name: test that auto_silent never warns and got the same answer as auto
block:
- name: clear facts to force interpreter discovery to run
meta: clear_facts
- name: initial task to trigger discovery
vars:
ansible_python_interpreter: auto_silent
ping:
register: auto_silent_out
- assert:
that:
- auto_silent_out.warnings is not defined
- auto_silent_out.ansible_facts.discovered_interpreter_python == auto_out.ansible_facts.discovered_interpreter_python
fail_msg: auto_silent_out={{auto_silent_out}}
- name: test that auto_legacy_silent never warns and got the same answer as auto_legacy
block:
- name: clear facts to force interpreter discovery to run
meta: clear_facts
- name: trigger discovery with auto_legacy_silent
vars:
ansible_python_interpreter: auto_legacy_silent
ping:
register: legacy_silent
- assert:
that:
- legacy_silent.warnings is not defined
- legacy_silent.ansible_facts.discovered_interpreter_python == legacy.ansible_facts.discovered_interpreter_python
fail_msg: legacy_silent={{legacy_silent}}
- name: ensure modules can't set discovered_interpreter_X or ansible_X_interpreter
block:
- test_echo_module:
facts:
ansible_discovered_interpreter_bogus: from module
discovered_interpreter_bogus: from_module
ansible_bogus_interpreter: from_module
test_fact: from_module
register: echoout
- assert:
that:
- test_fact == 'from_module'
- discovered_interpreter_bogus | default('nope') == 'nope'
- ansible_bogus_interpreter | default('nope') == 'nope'
# this one will exist in facts, but with its prefix removed
- ansible_facts['ansible_bogus_interpreter'] | default('nope') == 'nope'
- ansible_facts['discovered_interpreter_bogus'] | default('nope') == 'nope'
- name: fedora assertions
assert:
that:
- auto_out.ansible_facts.discovered_interpreter_python == '/usr/bin/python3'
fail_msg: auto_out={{auto_out}}
when: distro == 'fedora' and distro_version is version('23', '>=')
- name: rhel assertions
assert:
that:
# rhel 6/7
- (auto_out.ansible_facts.discovered_interpreter_python == '/usr/bin/python' and distro_version is version('8','<')) or distro_version is version('8','>=')
# rhel 8+
- (auto_out.ansible_facts.discovered_interpreter_python == '/usr/libexec/platform-python' and distro_version is version('8','>=')) or distro_version is version('8','<')
fail_msg: auto_out={{auto_out}}
when: distro in ('redhat', 'centos')
- name: ubuntu assertions
assert:
that:
# ubuntu < 16
- (auto_out.ansible_facts.discovered_interpreter_python == '/usr/bin/python' and distro_version is version('16.04','<')) or distro_version is version('16.04','>=')
# ubuntu >= 16
- (auto_out.ansible_facts.discovered_interpreter_python == '/usr/bin/python3' and distro_version is version('16.04','>=')) or distro_version is version('16.04','<')
fail_msg: auto_out={{auto_out}}
when: distro == 'ubuntu'
- name: mac assertions
assert:
that:
- auto_out.ansible_facts.discovered_interpreter_python == '/usr/bin/python'
fail_msg: auto_out={{auto_out}}
when: os_family == 'Darwin'
always:
- meta: clear_facts
when: ansible_version.full is version_compare('2.8.0', '>=')
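    # The play above hangs every task off one outer block so that a single `when`
    # version-gates the whole file and `always` guarantees fact cleanup. A minimal
    # skeleton of that pattern (task content is illustrative only):
    #
    #   tasks:
    #     - name: run only on new-enough Ansible
    #       block:
    #         - ping:
    #       always:
    #         - meta: clear_facts
    #       when: ansible_version.full is version_compare('2.8.0', '>=')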

@ -0,0 +1,56 @@
# checks complex ansible_python_interpreter values, including Jinja templating in the interpreter value
---
- name: integration/interpreter_discovery/complex_args.yml
hosts: test-targets
any_errors_fatal: true
gather_facts: true
tasks:
- name: create temp file to source
file:
path: /tmp/fake
state: touch
    # TODO: the commented-out variant below works on macOS 10.15 because sh there behaves like bash,
    # but due to macOS SIP we can't write to /bin, so we can't repoint /bin/sh at /bin/bash.
    # macOS 10.15 also fails the Python interpreter discovery tests from Ansible 2.8.8,
    # because macOS no longer ships /usr/bin/python as the default Python.
    # So for now we can't rely on `source` alone, since it's a bash builtin.
# - name: set python using sourced file
# set_fact:
# special_python: source /tmp/fake && python
- name: set python using sourced file
set_fact:
special_python: source /tmp/fake || true && python
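    # The "|| true" guard above is what lets the value survive shells where `source`
    # is not a builtin: `source /tmp/fake` fails, `|| true` swallows the failure, and
    # `&& python` still runs. A sketch of an alternative using the POSIX `.` builtin
    # instead of `source` (an assumption, not exercised by this test):
    - name: set python using the POSIX dot builtin (illustrative only)
      set_fact:
        dot_sourced_python: ". /tmp/fake && python"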
- name: run get_url with specially-sourced python
get_url:
url: https://google.com
dest: "/tmp/"
mode: 0644
      # some Python builds lack working SSL support, so disable certificate validation
validate_certs: no
vars:
ansible_python_interpreter: "{{ special_python }}"
environment:
https_proxy: "{{ lookup('env', 'https_proxy')|default('') }}"
no_proxy: "{{ lookup('env', 'no_proxy')|default('') }}"
- name: run get_url with specially-sourced python including jinja
get_url:
url: https://google.com
dest: "/tmp/"
mode: 0644
      # some Python builds lack working SSL support, so disable certificate validation
validate_certs: no
vars:
ansible_python_interpreter: >
{% if "1" == "1" %}
{{ special_python }}
{% else %}
python
{% endif %}
environment:
https_proxy: "{{ lookup('env', 'https_proxy')|default('') }}"
no_proxy: "{{ lookup('env', 'no_proxy')|default('') }}"

@ -19,4 +19,5 @@
- assert: - assert:
that: stat.stat.exists that: stat.stat.exists
fail_msg: stat={{stat}}

@ -13,4 +13,5 @@
that: that:
- out.external1_path == "ansible/integration/module_utils/module_utils/external1.py" - out.external1_path == "ansible/integration/module_utils/module_utils/external1.py"
- out.external2_path == "ansible/lib/module_utils/external2.py" - out.external2_path == "ansible/lib/module_utils/external2.py"
fail_msg: out={{out}}

@ -12,4 +12,5 @@
that: that:
- out.external1_path == "ansible/lib/module_utils/external1.py" - out.external1_path == "ansible/lib/module_utils/external1.py"
- out.external2_path == "ansible/lib/module_utils/external2.py" - out.external2_path == "ansible/lib/module_utils/external2.py"
fail_msg: out={{out}}

@ -11,4 +11,5 @@
- assert: - assert:
that: that:
- out.extmod_path == "ansible/lib/module_utils/externalpkg/extmod.py" - out.extmod_path == "ansible/lib/module_utils/externalpkg/extmod.py"
fail_msg: out={{out}}

@ -7,3 +7,4 @@
that: that:
- out.external3_path == "integration/module_utils/roles/modrole/module_utils/external3.py" - out.external3_path == "integration/module_utils/roles/modrole/module_utils/external3.py"
- out.external2_path == "integration/module_utils/roles/modrole/module_utils/external2.py" - out.external2_path == "integration/module_utils/roles/modrole/module_utils/external2.py"
fail_msg: out={{out}}

@ -6,3 +6,4 @@
- assert: - assert:
that: that:
- out.path == "ansible/integration/module_utils/roles/override_modrole/module_utils/known_hosts.py" - out.path == "ansible/integration/module_utils/roles/override_modrole/module_utils/known_hosts.py"
fail_msg: out={{out}}

@ -14,6 +14,7 @@
- assert: - assert:
that: "out.stdout == ''" that: "out.stdout == ''"
fail_msg: out={{out}}
- hosts: test-targets - hosts: test-targets
any_errors_fatal: true any_errors_fatal: true
@ -28,3 +29,4 @@
- assert: - assert:
that: "out2.stdout == '2'" that: "out2.stdout == '2'"
fail_msg: out={{out}}

@ -51,10 +51,14 @@
shell: whoami > /tmp/delegate_to.yml.txt shell: whoami > /tmp/delegate_to.yml.txt
delegate_to: localhost delegate_to: localhost
become: true become: true
tags:
- requires_local_sudo
- name: "delegate_to, sudo" - name: "delegate_to, sudo"
assert: assert:
that: "lookup('file', '/tmp/delegate_to.yml.txt') == 'root'" that: "lookup('file', '/tmp/delegate_to.yml.txt') == 'root'"
tags:
- requires_local_sudo
- name: "delegate_to, sudo" - name: "delegate_to, sudo"
file: file:
@ -62,6 +66,8 @@
state: absent state: absent
delegate_to: localhost delegate_to: localhost
become: true become: true
tags:
- requires_local_sudo
# #
@ -71,10 +77,14 @@
shell: whoami > /tmp/delegate_to.yml.txt shell: whoami > /tmp/delegate_to.yml.txt
connection: local connection: local
become: true become: true
tags:
- requires_local_sudo
- name: "connection:local, sudo" - name: "connection:local, sudo"
assert: assert:
that: "lookup('file', '/tmp/delegate_to.yml.txt') == 'root'" that: "lookup('file', '/tmp/delegate_to.yml.txt') == 'root'"
tags:
- requires_local_sudo
- name: "connection:local, sudo" - name: "connection:local, sudo"
file: file:
@ -82,3 +92,5 @@
state: absent state: absent
connection: local connection: local
become: true become: true
tags:
- requires_local_sudo

@ -11,3 +11,4 @@
- assert: - assert:
that: "result.stdout == '123'" that: "result.stdout == '123'"
fail_msg: result={{result}}

@ -6,25 +6,26 @@
any_errors_fatal: true any_errors_fatal: true
tasks: tasks:
- name: Spin up a few interpreters # TODO: https://github.com/dw/mitogen/issues/692
shell: whoami # - name: Spin up a few interpreters
become: true # shell: whoami
vars: # become: true
ansible_become_user: "mitogen__user{{item}}" # vars:
with_sequence: start=1 end=3 # ansible_become_user: "mitogen__user{{item}}"
register: first_run # with_sequence: start=1 end=3
# register: first_run
- name: Reuse them # - name: Reuse them
shell: whoami # shell: whoami
become: true # become: true
vars: # vars:
ansible_become_user: "mitogen__user{{item}}" # ansible_become_user: "mitogen__user{{item}}"
with_sequence: start=1 end=3 # with_sequence: start=1 end=3
register: second_run # register: second_run
- name: Verify first and second run matches expected username. # - name: Verify first and second run matches expected username.
assert: # assert:
that: # that:
- first_run.results[item|int].stdout == ("mitogen__user%d" % (item|int + 1)) # - first_run.results[item|int].stdout == ("mitogen__user%d" % (item|int + 1))
- first_run.results[item|int].stdout == second_run.results[item|int].stdout # - first_run.results[item|int].stdout == second_run.results[item|int].stdout
with_sequence: start=0 end=2 # with_sequence: start=0 end=2

@ -10,6 +10,7 @@
- assert: - assert:
that: echo.stdout == "" that: echo.stdout == ""
fail_msg: echo={{echo}}
- copy: - copy:
dest: /etc/environment dest: /etc/environment
@ -27,6 +28,7 @@
- assert: - assert:
that: echo.stdout == "555" that: echo.stdout == "555"
fail_msg: echo={{echo}}
- file: - file:
path: /etc/environment path: /etc/environment
@ -43,3 +45,4 @@
- assert: - assert:
that: echo.stdout == "" that: echo.stdout == ""
fail_msg: echo={{echo}}
