Merge branch 'master' into fix-fakessh

commit 164d91729c (pull/683/head)
Author: Alex Willmer, committed via GitHub

@ -1,8 +1,8 @@
# `.ci`
This directory contains scripts for Travis CI and (more or less) Azure
Pipelines, but they will also happily run on any Debian-like machine.
This directory contains scripts for Continuous Integration platforms (currently
Azure Pipelines), but they will also happily run on any Debian-like machine.
The scripts are usually split into `_install` and `_test` steps. The `_install`
step will damage your machine; the `_test` step will just run the tests the way
@ -28,8 +28,6 @@ for doing `setup.py install` while pulling a Docker container, for example.
### Environment Variables
* `VER`: Ansible version the `_install` script should install. Default changes
over time.
* `TARGET_COUNT`: number of targets for `debops_` run. Defaults to 2.
* `DISTRO`: the `mitogen_` tests need a target Docker container distro. This
name comes from the Docker Hub `mitogen` user, i.e. `mitogen/$DISTRO-test`
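For orientation, a rough sketch of how `.ci/ci_lib.py` (whose diff appears further below) consumes these variables; the defaults shown mirror that file and may change over time:

import os

ANSIBLE_VERSION = os.environ.get('VER', '2.6.2')
DISTRO = os.environ.get('DISTRO', 'debian9')    # used when MODE=mitogen
DISTROS = os.environ.get('DISTROS', 'centos6 centos8 debian9 debian11 ubuntu1604 ubuntu2004').split()
TARGET_COUNT = int(os.environ.get('TARGET_COUNT', '2'))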

@ -4,21 +4,8 @@ import ci_lib
batches = [
[
# Must be installed separately, as PyNACL indirect requirement causes
# newer version to be installed if done in a single pip run.
# Separately install ansible based on version passed in from azure-pipelines.yml or .travis.yml
'pip install "pycparser<2.19" "idna<2.7"',
'pip install '
'-r tests/requirements.txt '
'-r tests/ansible/requirements.txt',
# encoding is required for installing ansible 2.10 with pip2, otherwise we get a UnicodeDecodeError
'LC_CTYPE=en_US.UTF-8 LANG=en_US.UTF-8 pip install -q ansible=={0}'.format(ci_lib.ANSIBLE_VERSION)
'aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws',
]
]
batches.extend(
['docker pull %s' % (ci_lib.image_for_distro(distro),)]
for distro in ci_lib.DISTROS
)
ci_lib.run_batches(batches)

@ -7,7 +7,6 @@ import signal
import sys
import ci_lib
from ci_lib import run
TESTS_DIR = os.path.join(ci_lib.GIT_ROOT, 'tests/ansible')
@ -40,10 +39,10 @@ with ci_lib.Fold('job_setup'):
os.chdir(TESTS_DIR)
os.chmod('../data/docker/mitogen__has_sudo_pubkey.key', int('0600', 8))
run("mkdir %s", HOSTS_DIR)
ci_lib.run("mkdir %s", HOSTS_DIR)
for path in glob.glob(TESTS_DIR + '/hosts/*'):
if not path.endswith('default.hosts'):
run("ln -s %s %s", path, HOSTS_DIR)
ci_lib.run("ln -s %s %s", path, HOSTS_DIR)
inventory_path = os.path.join(HOSTS_DIR, 'target')
with open(inventory_path, 'w') as fp:
@ -63,16 +62,14 @@ with ci_lib.Fold('job_setup'):
ci_lib.dump_file(inventory_path)
if not ci_lib.exists_in_path('sshpass'):
run("sudo apt-get update")
run("sudo apt-get install -y sshpass")
ci_lib.run("sudo apt-get update")
ci_lib.run("sudo apt-get install -y sshpass")
run("bash -c 'sudo ln -vfs /usr/lib/python2.7/plat-x86_64-linux-gnu/_sysconfigdata_nd.py /usr/lib/python2.7 || true'")
run("bash -c 'sudo ln -vfs /usr/lib/python2.7/plat-x86_64-linux-gnu/_sysconfigdata_nd.py $VIRTUAL_ENV/lib/python2.7 || true'")
with ci_lib.Fold('ansible'):
playbook = os.environ.get('PLAYBOOK', 'all.yml')
try:
run('./run_ansible_playbook.py %s -i "%s" -vvv %s',
ci_lib.run('./run_ansible_playbook.py %s -i "%s" %s',
playbook, HOSTS_DIR, ' '.join(sys.argv[1:]))
except:
pause_if_interactive()

@ -5,19 +5,18 @@ parameters:
sign: false
steps:
- script: "PYTHONVERSION=$(python.version) .ci/prep_azure.py"
displayName: "Run prep_azure.py"
- task: UsePythonVersion@0
displayName: Install python
inputs:
versionSpec: '$(python.version)'
condition: ne(variables['python.version'], '')
- script: |
echo "##vso[task.prependpath]/tmp/venv/bin"
- script: python -mpip install tox
displayName: Install tooling
displayName: activate venv
- script: .ci/spawn_reverse_shell.py
displayName: "Spawn reverse shell"
- script: .ci/$(MODE)_install.py
displayName: "Run $(MODE)_install.py"
- script: .ci/$(MODE)_tests.py
displayName: "Run $(MODE)_tests.py"
- script: python -mtox -e "$(tox.env)"
displayName: "Run tests"
env:
AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
AWS_DEFAULT_REGION: $(AWS_DEFAULT_REGION)
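For orientation, these steps replace the old per-MODE `$(MODE)_install.py` / `$(MODE)_tests.py` scripts with a single tox invocation. A rough local equivalent, assuming a repository checkout, a matching interpreter on PATH, and a `tox.env` value taken from the matrices below:

import os
import subprocess

# PYTHONVERSION mirrors the python.version variable passed to prep_azure.py.
env = dict(os.environ, PYTHONVERSION='3.10')
subprocess.check_call(['python', '-m', 'pip', 'install', 'tox'], env=env)
subprocess.check_call(['python', '-m', 'tox', '-e', 'py310-mode_mitogen'], env=env)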

@ -3,72 +3,192 @@
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python
jobs:
# User defined variables are also injected as environment variables
# https://docs.microsoft.com/en-us/azure/devops/pipelines/process/variables#environment-variables
#variables:
#ANSIBLE_VERBOSITY: 3
- job: Mac
jobs:
- job: Mac1015
# vanilla Ansible is really slow
timeoutInMinutes: 120
steps:
- template: azure-pipelines-steps.yml
pool:
# https://github.com/actions/virtual-environments/blob/main/images/macos/macos-10.15-Readme.md
vmImage: macOS-10.15
strategy:
matrix:
Mito27_27:
Mito_27:
python.version: '2.7'
MODE: mitogen
VER: 2.10.0
tox.env: py27-mode_mitogen
Mito_36:
python.version: '3.6'
tox.env: py36-mode_mitogen
Mito_310:
python.version: '3.10'
tox.env: py310-mode_mitogen
# TODO: test python3, python3 tests are broken
Ans210_27:
Loc_27_210:
python.version: '2.7'
MODE: localhost_ansible
VER: 2.10.0
tox.env: py27-mode_localhost-ansible2.10
Loc_27_3:
python.version: '2.7'
tox.env: py27-mode_localhost-ansible3
Loc_27_4:
python.version: '2.7'
tox.env: py27-mode_localhost-ansible4
# NOTE: this hangs when run in Ubuntu 18.04
Vanilla_210_27:
Van_27_210:
python.version: '2.7'
MODE: localhost_ansible
VER: 2.10.0
tox.env: py27-mode_localhost-ansible2.10
STRATEGY: linear
ANSIBLE_SKIP_TAGS: resource_intensive
Van_27_3:
python.version: '2.7'
tox.env: py27-mode_localhost-ansible3
STRATEGY: linear
ANSIBLE_SKIP_TAGS: resource_intensive
Van_27_4:
python.version: '2.7'
tox.env: py27-mode_localhost-ansible4
STRATEGY: linear
ANSIBLE_SKIP_TAGS: resource_intensive
- job: Mac11
# vanilla Ansible is really slow
timeoutInMinutes: 120
steps:
- template: azure-pipelines-steps.yml
pool:
# https://github.com/actions/virtual-environments/blob/main/images/macos/
vmImage: macOS-11
strategy:
matrix:
Mito_27:
tox.env: py27-mode_mitogen
Mito_37:
python.version: '3.7'
tox.env: py37-mode_mitogen
Mito_310:
python.version: '3.10'
tox.env: py310-mode_mitogen
# TODO: test python3, python3 tests are broken
Loc_27_210:
tox.env: py27-mode_localhost-ansible2.10
Loc_27_3:
tox.env: py27-mode_localhost-ansible3
Loc_27_4:
tox.env: py27-mode_localhost-ansible4
# NOTE: this hangs when run in Ubuntu 18.04
Van_27_210:
tox.env: py27-mode_localhost-ansible2.10
STRATEGY: linear
ANSIBLE_SKIP_TAGS: resource_intensive
Van_27_3:
tox.env: py27-mode_localhost-ansible3
STRATEGY: linear
ANSIBLE_SKIP_TAGS: resource_intensive
Van_27_4:
tox.env: py27-mode_localhost-ansible4
STRATEGY: linear
ANSIBLE_SKIP_TAGS: resource_intensive
- job: Linux
pool:
# https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu1804-README.md
vmImage: "Ubuntu 18.04"
steps:
- template: azure-pipelines-steps.yml
strategy:
matrix:
#
# Confirmed working
#
Mito27Debian_27:
Mito_27_centos6:
python.version: '2.7'
MODE: mitogen
DISTRO: debian
VER: 2.10.0
#MitoPy27CentOS6_26:
#python.version: '2.7'
#MODE: mitogen
#DISTRO: centos6
tox.env: py27-mode_mitogen-distro_centos6
Mito_27_centos7:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_centos7
Mito_27_centos8:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_centos8
Mito_27_debian9:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_debian9
Mito_27_debian10:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_debian10
Mito_27_debian11:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_debian11
Mito_27_ubuntu1604:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_ubuntu1604
Mito_27_ubuntu1804:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_ubuntu1804
Mito_27_ubuntu2004:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_ubuntu2004
Mito36CentOS6_26:
Mito_36_centos6:
python.version: '3.6'
MODE: mitogen
DISTRO: centos6
VER: 2.10.0
Mito37Debian_27:
python.version: '3.7'
MODE: mitogen
DISTRO: debian
VER: 2.10.0
tox.env: py36-mode_mitogen-distro_centos6
Mito_36_centos7:
python.version: '3.6'
tox.env: py36-mode_mitogen-distro_centos7
Mito_36_centos8:
python.version: '3.6'
tox.env: py36-mode_mitogen-distro_centos8
Mito_36_debian9:
python.version: '3.6'
tox.env: py36-mode_mitogen-distro_debian9
Mito_36_debian10:
python.version: '3.6'
tox.env: py36-mode_mitogen-distro_debian10
Mito_36_debian11:
python.version: '3.6'
tox.env: py36-mode_mitogen-distro_debian11
Mito_36_ubuntu1604:
python.version: '3.6'
tox.env: py36-mode_mitogen-distro_ubuntu1604
Mito_36_ubuntu1804:
python.version: '3.6'
tox.env: py36-mode_mitogen-distro_ubuntu1804
Mito_36_ubuntu2004:
python.version: '3.6'
tox.env: py36-mode_mitogen-distro_ubuntu2004
#Py26CentOS7:
#python.version: '2.7'
#MODE: mitogen
#DISTRO: centos6
Mito_310_centos6:
python.version: '3.10'
tox.env: py310-mode_mitogen-distro_centos6
Mito_310_centos7:
python.version: '3.10'
tox.env: py310-mode_mitogen-distro_centos7
Mito_310_centos8:
python.version: '3.10'
tox.env: py310-mode_mitogen-distro_centos8
Mito_310_debian9:
python.version: '3.10'
tox.env: py310-mode_mitogen-distro_debian9
Mito_310_debian10:
python.version: '3.10'
tox.env: py310-mode_mitogen-distro_debian10
Mito_310_debian11:
python.version: '3.10'
tox.env: py310-mode_mitogen-distro_debian11
Mito_310_ubuntu1604:
python.version: '3.10'
tox.env: py310-mode_mitogen-distro_ubuntu1604
Mito_310_ubuntu1804:
python.version: '3.10'
tox.env: py310-mode_mitogen-distro_ubuntu1804
Mito_310_ubuntu2004:
python.version: '3.10'
tox.env: py310-mode_mitogen-distro_ubuntu2004
#DebOps_2460_27_27:
#python.version: '2.7'
@ -107,12 +227,35 @@ jobs:
#DISTROS: debian
#STRATEGY: linear
Ansible_210_27:
Ans_27_210:
python.version: '2.7'
MODE: ansible
VER: 2.10.0
tox.env: py27-mode_ansible-ansible2.10
Ans_27_3:
python.version: '2.7'
tox.env: py27-mode_ansible-ansible3
Ans_27_4:
python.version: '2.7'
tox.env: py27-mode_ansible-ansible4
Ans_36_210:
python.version: '3.6'
tox.env: py36-mode_ansible-ansible2.10
Ans_36_3:
python.version: '3.6'
tox.env: py36-mode_ansible-ansible3
Ans_36_4:
python.version: '3.6'
tox.env: py36-mode_ansible-ansible4
Ansible_210_35:
python.version: '3.5'
MODE: ansible
VER: 2.10.0
Ans_310_210:
python.version: '3.10'
tox.env: py310-mode_ansible-ansible2.10
Ans_310_3:
python.version: '3.10'
tox.env: py310-mode_ansible-ansible3
Ans_310_4:
python.version: '3.10'
tox.env: py310-mode_ansible-ansible4
Ans_310_5:
python.version: '3.10'
tox.env: py310-mode_ansible-ansible5
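The `tox.env` names above follow a `py<VER>-mode_<MODE>[-ansible<N>|-distro_<NAME>]` convention. A hypothetical helper, purely to illustrate the naming scheme (not part of the diff):

def parse_tox_env(name):
    """Decode a matrix tox.env name.

    >>> parse_tox_env('py310-mode_mitogen-distro_debian11')
    {'python': '3.10', 'mode': 'mitogen', 'distro': 'debian11'}
    >>> parse_tox_env('py27-mode_localhost-ansible2.10')
    {'python': '2.7', 'mode': 'localhost', 'ansible': '2.10'}
    """
    parts = name.split('-')
    py = parts[0][2:]
    out = {'python': py[0] + '.' + py[1:], 'mode': parts[1].split('_', 1)[1]}
    for part in parts[2:]:
        if part.startswith('distro_'):
            out['distro'] = part.split('_', 1)[1]
        elif part.startswith('ansible'):
            out['ansible'] = part[len('ansible'):]
    return out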

@ -1,4 +1,3 @@
from __future__ import absolute_import
from __future__ import print_function
@ -22,6 +21,14 @@ os.chdir(
)
)
_print = print
def print(*args, **kwargs):
file = kwargs.get('file', sys.stdout)
flush = kwargs.pop('flush', False)
_print(*args, **kwargs)
if flush:
file.flush()
#
# check_output() monkeypatch cutpasted from testlib.py
@ -59,55 +66,83 @@ def have_docker():
return proc.wait() == 0
# -----------------
# Force line buffering on stdout.
sys.stdout = os.fdopen(1, 'w', 1)
# Force stdout FD 1 to be a pipe, so tools like pip don't spam progress bars.
if 'TRAVIS_HOME' in os.environ:
proc = subprocess.Popen(
args=['stdbuf', '-oL', 'cat'],
stdin=subprocess.PIPE
)
os.dup2(proc.stdin.fileno(), 1)
os.dup2(proc.stdin.fileno(), 2)
def cleanup_travis_junk(stdout=sys.stdout, stderr=sys.stderr, proc=proc):
stdout.close()
stderr.close()
proc.terminate()
atexit.register(cleanup_travis_junk)
# -----------------
def _argv(s, *args):
"""Interpolate a command line using *args, return an argv style list.
>>> _argv('git commit -m "Use frobnicate 2.0 (fixes #%d)"', 1234)
['git', 'commit', '-m', 'Use frobnicate 2.0 (fixes #1234)']
"""
if args:
s %= args
return shlex.split(s)
def run(s, *args, **kwargs):
argv = ['/usr/bin/time', '--'] + _argv(s, *args)
print('Running: %s' % (argv,))
""" Run a command, with arguments
>>> rc = run('echo "%s %s"', 'foo', 'bar')
Running: ['echo', 'foo bar']
foo bar
Finished running: ['echo', 'foo bar']
>>> rc
0
"""
argv = _argv(s, *args)
print('Running: %s' % (argv,), flush=True)
try:
ret = subprocess.check_call(argv, **kwargs)
print('Finished running: %s' % (argv,))
print('Finished running: %s' % (argv,), flush=True)
except Exception:
print('Exception occurred while running: %s' % (argv,))
print('Exception occurred while running: %s' % (argv,), file=sys.stderr, flush=True)
raise
return ret
def run_batches(batches):
combine = lambda batch: 'set -x; ' + (' && '.join(
def combine(batch):
"""
>>> combine(['ls -l', 'echo foo'])
'set -x; ( ls -l; ) && ( echo foo; )'
"""
return 'set -x; ' + (' && '.join(
'( %s; )' % (cmd,)
for cmd in batch
))
def throttle(batch, pause=1):
"""
Add pauses between commands in a batch
>>> throttle(['echo foo', 'echo bar', 'echo baz'])
['echo foo', 'sleep 1', 'echo bar', 'sleep 1', 'echo baz']
"""
def _with_pause(batch, pause):
for cmd in batch:
yield cmd
yield 'sleep %i' % (pause,)
return list(_with_pause(batch, pause))[:-1]
def run_batches(batches):
""" Run shell commands grouped into batches, showing an execution trace.
Raise AssertionError if any command exits with a non-zero status.
>>> run_batches([['echo foo', 'true']])
+ echo foo
foo
+ true
>>> run_batches([['true', 'echo foo'], ['false']])
+ true
+ echo foo
foo
+ false
Traceback (most recent call last):
File "...", line ..., in <module>
File "...", line ..., in run_batches
AssertionError
"""
procs = [
subprocess.Popen(combine(batch), shell=True)
for batch in batches
@ -116,12 +151,28 @@ def run_batches(batches):
def get_output(s, *args, **kwargs):
"""
Print and run command line s, %-interpolated using *args. Return stdout.
>>> s = get_output('echo "%s %s"', 'foo', 'bar')
Running: ['echo', 'foo bar']
>>> s
'foo bar\n'
"""
argv = _argv(s, *args)
print('Running: %s' % (argv,))
print('Running: %s' % (argv,), flush=True)
return subprocess.check_output(argv, **kwargs)
def exists_in_path(progname):
"""
Return True if progname exists in $PATH.
>>> exists_in_path('echo')
True
>>> exists_in_path('kwyjibo') # Only found in North American cartoons
False
"""
return any(os.path.exists(os.path.join(dirname, progname))
for dirname in os.environ['PATH'].split(os.pathsep))
@ -136,23 +187,16 @@ class TempDir(object):
class Fold(object):
def __init__(self, name):
self.name = name
def __init__(self, name): pass
def __enter__(self): pass
def __exit__(self, _1, _2, _3): pass
def __enter__(self):
print('travis_fold:start:%s' % (self.name))
def __exit__(self, _1, _2, _3):
print('')
print('travis_fold:end:%s' % (self.name))
os.environ.setdefault('ANSIBLE_STRATEGY',
os.environ.get('STRATEGY', 'mitogen_linear'))
ANSIBLE_VERSION = os.environ.get('VER', '2.6.2')
GIT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
DISTRO = os.environ.get('DISTRO', 'debian')
DISTROS = os.environ.get('DISTROS', 'debian centos6 centos7').split()
# Used only when MODE=mitogen
DISTRO = os.environ.get('DISTRO', 'debian9')
# Used only when MODE=ansible
DISTROS = os.environ.get('DISTROS', 'centos6 centos8 debian9 debian11 ubuntu1604 ubuntu2004').split()
TARGET_COUNT = int(os.environ.get('TARGET_COUNT', '2'))
BASE_PORT = 2200
TMP = TempDir().path
@ -175,6 +219,8 @@ os.environ['PYTHONPATH'] = '%s:%s' % (
)
def get_docker_hostname():
"""Return the hostname where the docker daemon is running.
"""
url = os.environ.get('DOCKER_HOST')
if url in (None, 'http+docker://localunixsocket'):
return 'localhost'
@ -184,10 +230,36 @@ def get_docker_hostname():
def image_for_distro(distro):
return 'mitogen/%s-test' % (distro.partition('-')[0],)
"""Return the container image name or path for a test distro name.
The returned value is suitable for use with `docker pull`.
>>> image_for_distro('centos5')
'public.ecr.aws/n5z0e8q9/centos5-test'
>>> image_for_distro('centos5-something_custom')
'public.ecr.aws/n5z0e8q9/centos5-test'
"""
return 'public.ecr.aws/n5z0e8q9/%s-test' % (distro.partition('-')[0],)
def make_containers(name_prefix='', port_offset=0):
"""
>>> import pprint
>>> BASE_PORT=2200; DISTROS=['debian', 'centos6']
>>> pprint.pprint(make_containers())
[{'distro': 'debian',
'hostname': 'localhost',
'image': 'public.ecr.aws/n5z0e8q9/debian-test',
'name': 'target-debian-1',
'port': 2201,
'python_path': '/usr/bin/python'},
{'distro': 'centos6',
'hostname': 'localhost',
'image': 'public.ecr.aws/n5z0e8q9/centos6-test',
'name': 'target-centos6-2',
'port': 2202,
'python_path': '/usr/bin/python'}]
"""
docker_hostname = get_docker_hostname()
firstbit = lambda s: (s+'-').split('-')[0]
secondbit = lambda s: (s+'-').split('-')[1]
@ -205,6 +277,7 @@ def make_containers(name_prefix='', port_offset=0):
for x in range(count):
lst.append({
"distro": firstbit(distro),
"image": image_for_distro(distro),
"name": name_prefix + ("target-%s-%s" % (distro, i)),
"hostname": docker_hostname,
"port": BASE_PORT + i + port_offset,
@ -260,6 +333,14 @@ def get_interesting_procs(container_name=None):
def start_containers(containers):
"""Run docker containers in the background, with sshd on specified ports.
>>> containers = start_containers([
... {'distro': 'debian', 'hostname': 'localhost',
... 'name': 'target-debian-1', 'port': 2201,
... 'python_path': '/usr/bin/python'},
... ])
"""
if os.environ.get('KEEP'):
return
@ -275,7 +356,7 @@ def start_containers(containers):
"--publish 0.0.0.0:%(port)s:22/tcp "
"--hostname=%(name)s "
"--name=%(name)s "
"mitogen/%(distro)s-test "
"%(image)s"
% container
]
for container in containers
@ -290,12 +371,10 @@ def start_containers(containers):
def verify_procs(hostname, old, new):
oldpids = set(pid for pid, _ in old)
if any(pid not in oldpids for pid, _ in new):
print('%r had stray processes running:' % (hostname,))
print('%r had stray processes running:' % (hostname,), file=sys.stderr, flush=True)
for pid, line in new:
if pid not in oldpids:
print('New process:', line)
print()
print('New process:', line, flush=True)
return False
return True
@ -319,13 +398,10 @@ def check_stray_processes(old, containers=None):
def dump_file(path):
print()
print('--- %s ---' % (path,))
print()
print('--- %s ---' % (path,), flush=True)
with open(path, 'r') as fp:
print(fp.read().rstrip())
print('---')
print()
print(fp.read().rstrip(), flush=True)
print('---', flush=True)
# SSH passes these through to the container when run interactively, causing
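As an aside, the install scripts below pair the new `public.ecr.aws` image names from `image_for_distro()` with an ECR Public login. A minimal stand-alone equivalent, assuming it runs with the `.ci` directory on the import path and that the AWS CLI, Docker, and `AWS_*` credentials are available:

import ci_lib

ci_lib.run_batches([
    [
        'aws ecr-public get-login-password | '
        'docker login --username AWS --password-stdin public.ecr.aws',
        'docker pull %s' % (ci_lib.image_for_distro('debian9'),),
    ],
])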

@ -7,13 +7,10 @@ ci_lib.DISTROS = ['debian']
ci_lib.run_batches([
[
# Must be installed separately, as PyNACL indirect requirement causes
# newer version to be installed if done in a single pip run.
'pip install "pycparser<2.19"',
'pip install -qqq debops[ansible]==2.1.2 ansible==%s' % ci_lib.ANSIBLE_VERSION,
'python -m pip --no-python-version-warning --disable-pip-version-check install "debops[ansible]==2.1.2"',
],
[
'docker pull %s' % (ci_lib.image_for_distro('debian'),),
'aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws',
],
])

@ -1,8 +1,6 @@
#!/usr/bin/env python
from __future__ import print_function
import os
import shutil
import sys
import ci_lib
@ -60,11 +58,7 @@ with ci_lib.Fold('job_setup'):
for container in containers
)
print()
print(' echo --- ansible/inventory/hosts: ---')
ci_lib.run('cat ansible/inventory/hosts')
print('---')
print()
ci_lib.dump_file('ansible/inventory/hosts')
# Now we have real host key checking, we need to turn it off
os.environ['ANSIBLE_HOST_KEY_CHECKING'] = 'False'

@ -3,17 +3,6 @@
import ci_lib
batches = [
[
# Must be installed separately, as PyNACL indirect requirement causes
# newer version to be installed if done in a single pip run.
# Separately install ansible based on version passed in from azure-pipelines.yml or .travis.yml
# Don't set -U as that will upgrade Paramiko to a non-2.6 compatible version.
'pip install "pycparser<2.19" "idna<2.7" virtualenv',
'pip install '
'-r tests/requirements.txt '
'-r tests/ansible/requirements.txt',
'pip install -q ansible=={}'.format(ci_lib.ANSIBLE_VERSION)
]
]
ci_lib.run_batches(batches)

@ -2,10 +2,10 @@
# Run tests/ansible/all.yml under Ansible and Ansible-Mitogen
import os
import subprocess
import sys
import ci_lib
from ci_lib import run
TESTS_DIR = os.path.join(ci_lib.GIT_ROOT, 'tests/ansible')
@ -24,33 +24,38 @@ with ci_lib.Fold('job_setup'):
# NOTE: sshpass v1.06 causes errors so pegging to 1.05 -> "msg": "Error when changing password","out": "passwd: DS error: eDSAuthFailed\n",
# there's a checksum error with "brew install http://git.io/sshpass.rb" though, so installing manually
if not ci_lib.exists_in_path('sshpass'):
os.system("curl -O -L https://sourceforge.net/projects/sshpass/files/sshpass/1.05/sshpass-1.05.tar.gz && \
subprocess.check_call(
"curl -O -L https://sourceforge.net/projects/sshpass/files/sshpass/1.05/sshpass-1.05.tar.gz && \
tar xvf sshpass-1.05.tar.gz && \
cd sshpass-1.05 && \
./configure && \
sudo make install")
sudo make install",
shell=True,
)
with ci_lib.Fold('machine_prep'):
# generate a new ssh key for localhost ssh
os.system("ssh-keygen -P '' -m pem -f ~/.ssh/id_rsa")
os.system("cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys")
if not os.path.exists(os.path.expanduser("~/.ssh/id_rsa")):
subprocess.check_call("ssh-keygen -P '' -m pem -f ~/.ssh/id_rsa", shell=True)
subprocess.check_call("cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys", shell=True)
os.chmod(os.path.expanduser('~/.ssh'), int('0700', 8))
os.chmod(os.path.expanduser('~/.ssh/authorized_keys'), int('0600', 8))
# also generate it for the sudo user
os.system("sudo ssh-keygen -P '' -m pem -f /var/root/.ssh/id_rsa")
os.system("sudo cat /var/root/.ssh/id_rsa.pub | sudo tee -a /var/root/.ssh/authorized_keys")
os.chmod(os.path.expanduser('~/.ssh'), int('0700', 8))
os.chmod(os.path.expanduser('~/.ssh/authorized_keys'), int('0600', 8))
# run chmod through sudo since it's owned by root
os.system('sudo chmod 600 /var/root/.ssh')
os.system('sudo chmod 600 /var/root/.ssh/authorized_keys')
if os.system("sudo [ -f ~root/.ssh/id_rsa ]") != 0:
subprocess.check_call("sudo ssh-keygen -P '' -m pem -f ~root/.ssh/id_rsa", shell=True)
subprocess.check_call("sudo cat ~root/.ssh/id_rsa.pub | sudo tee -a ~root/.ssh/authorized_keys", shell=True)
subprocess.check_call('sudo chmod 700 ~root/.ssh', shell=True)
subprocess.check_call('sudo chmod 600 ~root/.ssh/authorized_keys', shell=True)
if os.path.expanduser('~mitogen__user1') == '~mitogen__user1':
os.chdir(IMAGE_PREP_DIR)
run("ansible-playbook -c local -i localhost, _user_accounts.yml -vvv")
ci_lib.run("ansible-playbook -c local -i localhost, _user_accounts.yml")
with ci_lib.Fold('ansible'):
os.chdir(TESTS_DIR)
playbook = os.environ.get('PLAYBOOK', 'all.yml')
run('./run_ansible_playbook.py %s -l target %s -vvv',
ci_lib.run('./run_ansible_playbook.py %s -l target %s',
playbook, ' '.join(sys.argv[1:]))
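A side note on the `os.system` to `subprocess.check_call` conversions above: `os.system` only returns a wait status, so a failing setup command previously let the job continue, whereas `check_call` raises and aborts the step. A tiny illustration, not taken from the diff:

import subprocess

try:
    subprocess.check_call('false', shell=True)
except subprocess.CalledProcessError as exc:
    print('setup command failed, status %d' % exc.returncode)

# os.system('false') would simply return a non-zero wait status (e.g. 256)
# and execution would carry on unless the caller checked it explicitly.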

@ -3,15 +3,11 @@
import ci_lib
batches = [
[
'pip install "pycparser<2.19" "idna<2.7"',
'pip install -r tests/requirements.txt',
]
]
if ci_lib.have_docker():
batches.append([
'docker pull %s' % (ci_lib.image_for_distro(ci_lib.DISTRO),),
'aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws',
])

@ -4,7 +4,7 @@ import ci_lib
batches = [
[
'docker pull %s' % (ci_lib.image_for_distro(ci_lib.DISTRO),),
'aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws',
],
[
'curl https://dw.github.io/mitogen/binaries/ubuntu-python-2.4.6.tar.bz2 | sudo tar -C / -jxv',

@ -1,97 +0,0 @@
#!/usr/bin/env python
import os
import sys
import ci_lib
batches = []
if 0 and os.uname()[0] == 'Linux':
batches += [
[
"sudo chown `whoami`: ~",
"chmod u=rwx,g=rx,o= ~",
"sudo mkdir /var/run/sshd",
"sudo /etc/init.d/ssh start",
"mkdir -p ~/.ssh",
"chmod u=rwx,go= ~/.ssh",
"ssh-keyscan -H localhost >> ~/.ssh/known_hosts",
"chmod u=rw,go= ~/.ssh/known_hosts",
"cat tests/data/docker/mitogen__has_sudo_pubkey.key > ~/.ssh/id_rsa",
"chmod u=rw,go= ~/.ssh/id_rsa",
"cat tests/data/docker/mitogen__has_sudo_pubkey.key.pub > ~/.ssh/authorized_keys",
"chmod u=rw,go=r ~/.ssh/authorized_keys",
]
]
# setup venv, need all python commands in 1 list to be subprocessed at the same time
venv_steps = []
need_to_fix_psycopg2 = False
is_python3 = os.environ['PYTHONVERSION'].startswith('3')
# @dw: The VSTS-shipped Pythons available via UsePythonVErsion are pure garbage,
# broken symlinks, incorrect permissions and missing codecs. So we use the
# deadsnakes PPA to get sane Pythons, and setup a virtualenv to install our
# stuff into. The virtualenv can probably be removed again, but this was a
# hard-fought battle and for now I am tired of this crap.
if ci_lib.have_apt():
venv_steps.extend([
'echo force-unsafe-io | sudo tee /etc/dpkg/dpkg.cfg.d/nosync',
'sudo add-apt-repository ppa:deadsnakes/ppa',
'sudo apt-get update',
'sudo apt-get -y install '
'python{pv} '
'python{pv}-dev '
'libsasl2-dev '
'libldap2-dev '
.format(pv=os.environ['PYTHONVERSION']),
'sudo ln -fs /usr/bin/python{pv} /usr/local/bin/python{pv}'
.format(pv=os.environ['PYTHONVERSION'])
])
if is_python3:
venv_steps.append('sudo apt-get -y install python{pv}-venv'.format(pv=os.environ['PYTHONVERSION']))
# TODO: somehow `Mito36CentOS6_26` has both brew and apt installed https://dev.azure.com/dw-mitogen/Mitogen/_build/results?buildId=1031&view=logs&j=7bdbcdc6-3d3e-568d-ccf8-9ddca1a9623a&t=73d379b6-4eea-540f-c97e-046a2f620483
elif is_python3 and ci_lib.have_brew():
# Mac's System Integrity Protection prevents symlinking /usr/bin
# and Azure isn't allowing disabling it apparently: https://developercommunityapi.westus.cloudapp.azure.com/idea/558702/allow-disabling-sip-on-microsoft-hosted-macos-agen.html
# so we'll use /usr/local/bin/python for everything
# /usr/local/bin/python2.7 already exists!
need_to_fix_psycopg2 = True
venv_steps.append(
'brew install python@{pv} postgresql'
.format(pv=os.environ['PYTHONVERSION'])
)
# need wheel before building virtualenv because of bdist_wheel and setuptools deps
venv_steps.append('/usr/local/bin/python{pv} -m pip install -U pip wheel setuptools'.format(pv=os.environ['PYTHONVERSION']))
if os.environ['PYTHONVERSION'].startswith('2'):
venv_steps.extend([
'/usr/local/bin/python{pv} -m pip install -U virtualenv'.format(pv=os.environ['PYTHONVERSION']),
'/usr/local/bin/python{pv} -m virtualenv /tmp/venv -p /usr/local/bin/python{pv}'.format(pv=os.environ['PYTHONVERSION'])
])
else:
venv_steps.append('/usr/local/bin/python{pv} -m venv /tmp/venv'.format(pv=os.environ['PYTHONVERSION']))
# fixes https://stackoverflow.com/questions/59595649/can-not-install-psycopg2-on-macos-catalina https://github.com/Azure/azure-cli/issues/12854#issuecomment-619213863
if need_to_fix_psycopg2:
venv_steps.append('/tmp/venv/bin/pip3 install psycopg2==2.8.5 psycopg2-binary')
batches.append(venv_steps)
if ci_lib.have_docker():
batches.extend(
['docker pull %s' % (ci_lib.image_for_distro(distro),)]
for distro in ci_lib.DISTROS
)
ci_lib.run_batches(batches)

@ -1,36 +0,0 @@
#!/usr/bin/env python
"""
Allow poking around Azure while the job is running.
"""
import os
import pty
import socket
import subprocess
import sys
import time
if os.fork():
sys.exit(0)
def try_once():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("k3.botanicus.net", 9494))
open('/tmp/interactive', 'w').close()
os.dup2(s.fileno(), 0)
os.dup2(s.fileno(), 1)
os.dup2(s.fileno(), 2)
p = pty.spawn("/bin/sh")
while True:
try:
try_once()
except:
time.sleep(5)
continue

@ -1,35 +0,0 @@
#!/bin/bash
# workaround from https://stackoverflow.com/a/26082445 to handle Travis 4MB log limit
set -e
export PING_SLEEP=30s
export WORKDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
export BUILD_OUTPUT=$WORKDIR/build.out
touch $BUILD_OUTPUT
dump_output() {
echo Tailing the last 1000 lines of output:
tail -1000 $BUILD_OUTPUT
}
error_handler() {
echo ERROR: An error was encountered with the build.
dump_output
kill $PING_LOOP_PID
exit 1
}
# If an error occurs, run our error handler to output a tail of the build
trap 'error_handler' ERR
# Set up a repeating loop to send some output to Travis.
bash -c "while true; do echo \$(date) - building ...; sleep $PING_SLEEP; done" &
PING_LOOP_PID=$!
.ci/${MODE}_tests.py >> $BUILD_OUTPUT 2>&1
# The build finished without returning an error so dump a tail of the output
dump_output
# nicely terminate the ping output loop
kill $PING_LOOP_PID

@ -0,0 +1,33 @@
---
name: Mitogen 0.2.x bug report
about: Report a bug in Mitogen 0.2.x (for Ansible 2.5, 2.6, 2.7, 2.8, or 2.9)
title: ''
labels: affects-0.2, bug
assignees: ''
---
Please drag-drop large logs as text file attachments.
Feel free to write an issue in your preferred format; however, if in doubt, use
the following checklist as a guide for what to include.
* Which version of Ansible are you running?
* Is your version of Ansible patched in any way?
* Are you running with any custom modules, or `module_utils` loaded?
* Have you tried the latest master version from Git?
* Do you have some idea of what the underlying problem may be?
https://mitogen.networkgenomics.com/ansible_detailed.html#common-problems has
instructions to help figure out the likely cause and how to gather relevant
logs.
* Mention your host and target OS and versions
* Mention your host and target Python versions
* If reporting a performance issue, mention the number of targets and a rough
description of your workload (lots of copies, lots of tiny file edits, etc.)
* If reporting a crash or hang in Ansible, please rerun with -vvv and include
200 lines of output around the point of the error, along with a full copy of
any traceback or error text in the log. Beware "-vvv" may include secret
data! Edit as necessary before posting.
* If reporting any kind of problem with Ansible, please include the Ansible
version along with output of "ansible-config dump --only-changed".

@ -1,3 +1,11 @@
---
name: Mitogen 0.3.x bug report
about: Report a bug in Mitogen 0.3.x (for Ansible 2.10.x)
title: ''
labels: affects-0.3, bug
assignees: ''
---
Please drag-drop large logs as text file attachments.

@ -1,82 +0,0 @@
sudo: required
dist: trusty
notifications:
email: false
irc: "chat.freenode.net#mitogen-builds"
language: python
branches:
except:
- docs-master
cache:
- pip
- directories:
- /home/travis/virtualenv
install:
- grep -Erl git-lfs\|couchdb /etc/apt | sudo xargs rm -v
- pip install -U pip==20.2.1
- .ci/${MODE}_install.py
# Travis has a 4MB log limit (https://github.com/travis-ci/travis-ci/issues/1382), but verbose Mitogen logs run larger than that
# in order to keep verbosity to debug a build failure, will run with this workaround: https://stackoverflow.com/a/26082445
script:
- .ci/spawn_reverse_shell.py
- MODE=${MODE} .ci/travis.sh
# To avoid matrix explosion, just test against oldest->newest and
# newest->oldest in various configurations.
matrix:
include:
# Debops tests.
# NOTE: debops tests turned off for Ansible 2.10: https://github.com/debops/debops/issues/1521
# 2.10; 3.6 -> 2.7
# - python: "3.6"
# env: MODE=debops_common VER=2.10.0
# 2.10; 2.7 -> 2.7
# - python: "2.7"
# env: MODE=debops_common VER=2.10.0
# Sanity check against vanilla Ansible. One job suffices.
# https://github.com/dw/mitogen/pull/715#issuecomment-719266420 migrating to Azure for now due to Travis 50 min time limit cap
# azure lets us adjust the cap, and the current STRATEGY=linear tests take up to 1.5 hours to finish
# - python: "2.7"
# env: MODE=ansible VER=2.10.0 DISTROS=debian STRATEGY=linear
# ansible_mitogen tests.
# 2.10 -> {debian, centos6, centos7}
- python: "3.6"
env: MODE=ansible VER=2.10.0
# 2.10 -> {debian, centos6, centos7}
- python: "2.7"
env: MODE=ansible VER=2.10.0
# 2.10 -> {debian, centos6, centos7}
# - python: "2.6"
# env: MODE=ansible VER=2.10.0
# 2.10 -> {centos5}
# - python: "2.6"
# env: MODE=ansible DISTROS=centos5 VER=2.10.0
# Mitogen tests.
# 2.4 -> 2.4
# - language: c
# env: MODE=mitogen_py24 DISTROS=centos5 VER=2.10.0
# 2.7 -> 2.7 -- moved to Azure
# 2.7 -> 2.6
#- python: "2.7"
#env: MODE=mitogen DISTRO=centos6
- python: "3.6"
env: MODE=mitogen DISTROS=centos7 VER=2.10.0
# 2.6 -> 2.7
# - python: "2.6"
# env: MODE=mitogen DISTROS=centos7 VER=2.10.0
# 2.6 -> 3.5
# - python: "2.6"
# env: MODE=mitogen DISTROS=debian-py3 VER=2.10.0
# 3.6 -> 2.6 -- moved to Azure

@ -1,12 +1,9 @@
# Mitogen
<!-- [![Build Status](https://travis-ci.org/dw/mitogen.png?branch=master)](https://travis-ci.org/dw/mitogen}) -->
<a href="https://mitogen.networkgenomics.com/">Please see the documentation</a>.
![](https://i.imgur.com/eBM6LhJ.gif)
[![Total alerts](https://img.shields.io/lgtm/alerts/g/mitogen-hq/mitogen.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mitogen-hq/mitogen/alerts/)
[![Build Status](https://api.travis-ci.com/mitogen-hq/mitogen.svg?branch=master)](https://api.travis-ci.com/mitogen-hq/mitogen)
[![Pipelines Status](https://dev.azure.com/mitogen-hq/mitogen/_apis/build/status/mitogen-hq.mitogen?branchName=master)](https://dev.azure.com/mitogen-hq/mitogen/_build/latest?definitionId=1&branchName=master)
[![Build Status](https://dev.azure.com/mitogen-hq/mitogen/_apis/build/status/mitogen-hq.mitogen?branchName=master)](https://dev.azure.com/mitogen-hq/mitogen/_build/latest?definitionId=1&branchName=master)

@ -73,7 +73,9 @@ necessarily involves preventing the scheduler from making load balancing
decisions.
"""
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import ctypes
import logging
import mmap

@ -26,8 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__metaclass__ = type
import errno
import logging
@ -40,10 +41,8 @@ import time
import ansible.constants as C
import ansible.errors
import ansible.plugins.connection
import ansible.utils.shlex
import mitogen.core
import mitogen.fork
import mitogen.utils
import ansible_mitogen.mixins
@ -262,6 +261,21 @@ def _connect_machinectl(spec):
return _connect_setns(spec, kind='machinectl')
def _connect_podman(spec):
"""
Return ContextService arguments for a podman connection.
"""
return {
'method': 'podman',
'kwargs': {
'username': spec.remote_user(),
'container': spec.remote_addr(),
'python_path': spec.python_path(rediscover_python=True),
'connect_timeout': spec.ansible_ssh_timeout() or spec.timeout(),
'remote_name': get_remote_name(spec),
}
}
def _connect_setns(spec, kind=None):
"""
Return ContextService arguments for a mitogen_setns connection.
@ -400,6 +414,7 @@ CONNECTION_METHOD = {
'lxc': _connect_lxc,
'lxd': _connect_lxd,
'machinectl': _connect_machinectl,
'podman': _connect_podman,
'setns': _connect_setns,
'ssh': _connect_ssh,
'smart': _connect_ssh, # issue #548.
@ -1081,7 +1096,7 @@ class Connection(ansible.plugins.connection.ConnectionBase):
s = fp.read(self.SMALL_FILE_LIMIT + 1)
finally:
fp.close()
except OSError:
except OSError as e:
self._throw_io_error(e, in_path)
raise
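To show the shape of what the new `_connect_podman()` above produces, here is a toy spec object; the real spec comes from ansible_mitogen's transport configuration, and `get_remote_name(spec)` is replaced by a placeholder string:

class ToySpec(object):
    # Placeholder values; the real spec is built from play/task variables.
    def remote_user(self): return 'root'
    def remote_addr(self): return 'my-container'
    def python_path(self, rediscover_python=False): return ['/usr/bin/python']
    def ansible_ssh_timeout(self): return None
    def timeout(self): return 30

def _connect_podman(spec):
    return {
        'method': 'podman',
        'kwargs': {
            'username': spec.remote_user(),
            'container': spec.remote_addr(),
            'python_path': spec.python_path(rediscover_python=True),
            'connect_timeout': spec.ansible_ssh_timeout() or spec.timeout(),
            'remote_name': 'placeholder',  # get_remote_name(spec) in the real code
        }
    }

print(_connect_podman(ToySpec()))
# {'method': 'podman', 'kwargs': {'username': 'root', 'container': 'my-container', ...}}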

@ -30,7 +30,12 @@
Stable names for PluginLoader instances across Ansible versions.
"""
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import ansible.errors
import ansible_mitogen.utils
__all__ = [
'action_loader',
@ -41,21 +46,55 @@ __all__ = [
'strategy_loader',
]
try:
from ansible.plugins.loader import action_loader
from ansible.plugins.loader import connection_loader
from ansible.plugins.loader import module_loader
from ansible.plugins.loader import module_utils_loader
from ansible.plugins.loader import shell_loader
from ansible.plugins.loader import strategy_loader
except ImportError: # Ansible <2.4
from ansible.plugins import action_loader
from ansible.plugins import connection_loader
from ansible.plugins import module_loader
from ansible.plugins import module_utils_loader
from ansible.plugins import shell_loader
from ansible.plugins import strategy_loader
ANSIBLE_VERSION_MIN = (2, 10)
ANSIBLE_VERSION_MAX = (2, 12)
NEW_VERSION_MSG = (
"Your Ansible version (%s) is too recent. The most recent version\n"
"supported by Mitogen for Ansible is %s.x. Please check the Mitogen\n"
"release notes to see if a new version is available, otherwise\n"
"subscribe to the corresponding GitHub issue to be notified when\n"
"support becomes available.\n"
"\n"
" https://mitogen.rtfd.io/en/latest/changelog.html\n"
" https://github.com/mitogen-hq/mitogen/issues/\n"
)
OLD_VERSION_MSG = (
"Your version of Ansible (%s) is too old. The oldest version supported by "
"Mitogen for Ansible is %s."
)
def assert_supported_release():
"""
Throw AnsibleError with a descriptive message in case of being loaded into
an unsupported Ansible release.
"""
v = ansible_mitogen.utils.ansible_version
if v[:2] < ANSIBLE_VERSION_MIN:
raise ansible.errors.AnsibleError(
OLD_VERSION_MSG % (v, ANSIBLE_VERSION_MIN)
)
if v[:2] > ANSIBLE_VERSION_MAX:
raise ansible.errors.AnsibleError(
NEW_VERSION_MSG % (v, ANSIBLE_VERSION_MAX)
)
# this is the first file our strategy plugins import, so we need to check this here
# in prior Ansible versions, connection_loader.get_with_context didn't exist, so if a user
# is trying to load an old Ansible version, we'll fail and error gracefully
assert_supported_release()
from ansible.plugins.loader import action_loader
from ansible.plugins.loader import connection_loader
from ansible.plugins.loader import module_loader
from ansible.plugins.loader import module_utils_loader
from ansible.plugins.loader import shell_loader
from ansible.plugins.loader import strategy_loader
# These are original, unwrapped implementations
action_loader__get = action_loader.get
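A quick illustration of the release gate above; the version tuples are examples only:

ANSIBLE_VERSION_MIN = (2, 10)
ANSIBLE_VERSION_MAX = (2, 12)

for v in [(2, 9, 27), (2, 11, 6), (2, 13, 0)]:
    if v[:2] < ANSIBLE_VERSION_MIN:
        print(v, '-> too old')
    elif v[:2] > ANSIBLE_VERSION_MAX:
        print(v, '-> too new')
    else:
        print(v, '-> supported')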

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import logging
import os
@ -36,8 +38,8 @@ import mitogen.utils
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
import ansible.utils.display
display = ansible.utils.display.Display()
#: The process name set via :func:`set_process_name`.

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import logging
import os
import pwd
@ -53,6 +55,8 @@ import mitogen.utils
import ansible_mitogen.connection
import ansible_mitogen.planner
import ansible_mitogen.target
import ansible_mitogen.utils
from ansible.module_utils._text import to_text
try:
@ -226,7 +230,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
with a pipelined call to :func:`ansible_mitogen.target.prune_tree`.
"""
LOG.debug('_remove_tmp_path(%r)', tmp_path)
if tmp_path is None and ansible.__version__ > '2.6':
if tmp_path is None and ansible_mitogen.utils.ansible_version[:2] >= (2, 6):
tmp_path = self._connection._shell.tmpdir # 06f73ad578d
if tmp_path is not None:
self._connection.get_chain().call_no_reply(
@ -335,7 +339,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
def _set_temp_file_args(self, module_args, wrap_async):
# Ansible>2.5 module_utils reuses the action's temporary directory if
# one exists. Older versions error if this key is present.
if ansible.__version__ > '2.5':
if ansible_mitogen.utils.ansible_version[:2] >= (2, 5):
if wrap_async:
# Sharing is not possible with async tasks, as in that case,
# the directory must outlive the action plug-in.
@ -346,7 +350,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
# If _ansible_tmpdir is unset, Ansible>2.6 module_utils will use
# _ansible_remote_tmp as the location to create the module's temporary
# directory. Older versions error if this key is present.
if ansible.__version__ > '2.6':
if ansible_mitogen.utils.ansible_version[:2] >= (2, 6):
module_args['_ansible_remote_tmp'] = (
self._connection.get_good_temp_dir()
)
@ -393,7 +397,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
)
)
if tmp and ansible.__version__ < '2.5' and delete_remote_tmp:
if tmp and delete_remote_tmp and ansible_mitogen.utils.ansible_version[:2] < (2, 5):
# Built-in actions expected tmpdir to be cleaned up automatically
# on _execute_module().
self._remove_tmp_path(tmp)
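Why the comparisons above moved from `ansible.__version__` strings to `ansible_mitogen.utils.ansible_version` tuples: string comparison is lexicographic, so two-digit minor releases sort incorrectly. A minimal demonstration:

print('2.10' > '2.6')    # False -- wrong for version ordering
print((2, 10) > (2, 6))  # True  -- correct with tuples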

@ -26,8 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__metaclass__ = type
import collections
import imp

@ -26,8 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__metaclass__ = type
import mitogen.core

@ -34,19 +34,20 @@ files/modules known missing.
[0] "Ansible Module Architecture", developing_program_flow_modules.html
"""
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__metaclass__ = type
import json
import logging
import os
import random
import re
from ansible.executor import module_common
from ansible.collections.list import list_collection_dirs
import ansible.collections.list
import ansible.errors
import ansible.module_utils
import ansible.release
import ansible.executor.module_common
import mitogen.core
import mitogen.select
@ -191,7 +192,7 @@ class BinaryPlanner(Planner):
@classmethod
def detect(cls, path, source):
return module_common._is_binary(source)
return ansible.executor.module_common._is_binary(source)
def get_push_files(self):
return [mitogen.core.to_text(self._inv.module_path)]
@ -268,7 +269,7 @@ class JsonArgsPlanner(ScriptPlanner):
@classmethod
def detect(cls, path, source):
return module_common.REPLACER_JSONARGS in source
return ansible.executor.module_common.REPLACER_JSONARGS in source
class WantJsonPlanner(ScriptPlanner):
@ -297,11 +298,11 @@ class NewStylePlanner(ScriptPlanner):
preprocessing the module.
"""
runner_name = 'NewStyleRunner'
marker = b'from ansible.module_utils.'
MARKER = re.compile(br'from ansible(?:_collections|\.module_utils)\.')
@classmethod
def detect(cls, path, source):
return cls.marker in source
return cls.MARKER.search(source) is not None
def _get_interpreter(self):
return None, None
@ -321,6 +322,7 @@ class NewStylePlanner(ScriptPlanner):
ALWAYS_FORK_MODULES = frozenset([
'dnf', # issue #280; py-dnf/hawkey need therapy
'firewalld', # issue #570: ansible module_utils caches dbus conn
'ansible.legacy.dnf', # issue #776
])
def should_fork(self):
@ -360,7 +362,7 @@ class NewStylePlanner(ScriptPlanner):
module_name='ansible_module_%s' % (self._inv.module_name,),
module_path=self._inv.module_path,
search_path=self.get_search_path(),
builtin_path=module_common._MODULE_UTILS_PATH,
builtin_path=ansible.executor.module_common._MODULE_UTILS_PATH,
context=self._inv.connection.context,
)
return self._module_map
@ -403,7 +405,7 @@ class ReplacerPlanner(NewStylePlanner):
@classmethod
def detect(cls, path, source):
return module_common.REPLACER in source
return ansible.executor.module_common.REPLACER in source
class OldStylePlanner(ScriptPlanner):
@ -425,36 +427,21 @@ _planners = [
]
try:
_get_ansible_module_fqn = module_common._get_ansible_module_fqn
except AttributeError:
_get_ansible_module_fqn = None
def py_modname_from_path(name, path):
"""
Fetch the logical name of a new-style module as it might appear in
:data:`sys.modules` of the target's Python interpreter.
* For Ansible <2.7, this is an unpackaged module named like
"ansible_module_%s".
* For Ansible <2.9, this is an unpackaged module named like
"ansible.modules.%s"
* Since Ansible 2.9, modules appearing within a package have the original
package hierarchy approximated on the target, enabling relative imports
to function correctly. For example, "ansible.modules.system.setup".
"""
# 2.9+
if _get_ansible_module_fqn:
try:
return _get_ansible_module_fqn(path)
except ValueError:
pass
if ansible.__version__ < '2.7':
return 'ansible_module_' + name
try:
return ansible.executor.module_common._get_ansible_module_fqn(path)
except AttributeError:
pass
except ValueError:
pass
return 'ansible.modules.' + name
@ -536,12 +523,15 @@ def _invoke_isolated_task(invocation, planner):
context.shutdown()
def _get_planner(name, path, source):
def _get_planner(invocation, source):
for klass in _planners:
if klass.detect(path, source):
LOG.debug('%r accepted %r (filename %r)', klass, name, path)
if klass.detect(invocation.module_path, source):
LOG.debug(
'%r accepted %r (filename %r)',
klass, invocation.module_name, invocation.module_path,
)
return klass
LOG.debug('%r rejected %r', klass, name)
LOG.debug('%r rejected %r', klass, invocation.module_name)
raise ansible.errors.AnsibleError(NO_METHOD_MSG + repr(invocation))
@ -572,7 +562,7 @@ def _load_collections(invocation):
Goes through all collection path possibilities and stores paths to installed collections
Stores them on the current invocation to later be passed to the master service
"""
for collection_path in list_collection_dirs():
for collection_path in ansible.collections.list.list_collection_dirs():
invocation._extra_sys_paths.add(collection_path.decode('utf-8'))
@ -604,8 +594,7 @@ def invoke(invocation):
module_source = invocation.get_module_source()
_fix_py35(invocation, module_source)
_planner_by_path[invocation.module_path] = _get_planner(
invocation.module_name,
invocation.module_path,
invocation,
module_source
)
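The widened `NewStylePlanner` marker above matches both classic `module_utils` imports and collection-style imports. A quick standalone check:

import re

MARKER = re.compile(br'from ansible(?:_collections|\.module_utils)\.')

print(MARKER.search(b'from ansible.module_utils.basic import AnsibleModule') is not None)  # True
print(MARKER.search(b'from ansible_collections.community.general.plugins.module_utils.foo import bar') is not None)  # True
print(MARKER.search(b'from ansible_2 import x') is not None)  # False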

@ -18,23 +18,17 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.module_utils._text import to_bytes
import base64
from ansible.errors import AnsibleError, AnsibleActionFail, AnsibleActionSkip
from ansible.module_utils.common.text.converters import to_bytes, to_text
from ansible.module_utils.six import string_types
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.action import ActionBase
from ansible.utils.hashing import checksum, md5, secure_hash
from ansible.utils.path import makedirs_safe
from ansible.utils.display import Display
from ansible.utils.hashing import checksum, checksum_s, md5, secure_hash
from ansible.utils.path import makedirs_safe, is_subpath
REMOTE_CHECKSUM_ERRORS = {
'0': "unable to calculate the checksum of the remote file",
'1': "the remote file does not exist",
'2': "no read permission on remote file",
'3': "remote file is a directory, fetch cannot work on directories",
'4': "python isn't present on the system. Unable to compute checksum",
'5': "stdlib json was not found on the remote machine. Only the raw module can work without those installed",
}
display = Display()
class ActionModule(ActionBase):
@ -45,36 +39,94 @@ class ActionModule(ActionBase):
task_vars = dict()
result = super(ActionModule, self).run(tmp, task_vars)
del tmp # tmp no longer has any effect
try:
if self._play_context.check_mode:
result['skipped'] = True
result['msg'] = 'check mode not (yet) supported for this module'
return result
raise AnsibleActionSkip('check mode not (yet) supported for this module')
source = self._task.args.get('src', None)
original_dest = dest = self._task.args.get('dest', None)
flat = boolean(self._task.args.get('flat'), strict=False)
fail_on_missing = boolean(self._task.args.get('fail_on_missing', True), strict=False)
validate_checksum = boolean(self._task.args.get('validate_checksum', True), strict=False)
msg = ''
# validate source and dest are strings FIXME: use basic.py and module specs
source = self._task.args.get('src')
if not isinstance(source, string_types):
result['msg'] = "Invalid type supplied for source option, it must be a string"
msg = "Invalid type supplied for source option, it must be a string"
dest = self._task.args.get('dest')
if not isinstance(dest, string_types):
result['msg'] = "Invalid type supplied for dest option, it must be a string"
msg = "Invalid type supplied for dest option, it must be a string"
if source is None or dest is None:
msg = "src and dest are required"
if result.get('msg'):
result['failed'] = True
return result
if msg:
raise AnsibleActionFail(msg)
source = self._connection._shell.join_path(source)
source = self._remote_expand_user(source)
# calculate checksum for the remote file, don't bother if using
# become as slurp will be used Force remote_checksum to follow
# symlinks because fetch always follows symlinks
remote_checksum = self._remote_checksum(source, all_vars=task_vars, follow=True)
remote_stat = {}
remote_checksum = None
if True:
# Get checksum for the remote file even using become. Mitogen doesn't need slurp.
# Follow symlinks because fetch always follows symlinks
try:
remote_stat = self._execute_remote_stat(source, all_vars=task_vars, follow=True)
except AnsibleError as ae:
result['changed'] = False
result['file'] = source
if fail_on_missing:
result['failed'] = True
result['msg'] = to_text(ae)
else:
result['msg'] = "%s, ignored" % to_text(ae, errors='surrogate_or_replace')
return result
remote_checksum = remote_stat.get('checksum')
if remote_stat.get('exists'):
if remote_stat.get('isdir'):
result['failed'] = True
result['changed'] = False
result['msg'] = "remote file is a directory, fetch cannot work on directories"
# Historically, these don't fail because you may want to transfer
# a log file that possibly MAY exist but keep going to fetch other
# log files. Today, this is better achieved by adding
# ignore_errors or failed_when to the task. Control the behaviour
via fail_on_missing
if not fail_on_missing:
result['msg'] += ", not transferring, ignored"
del result['changed']
del result['failed']
return result
# use slurp if permissions are lacking or privilege escalation is needed
remote_data = None
if remote_checksum in (None, '1', ''):
slurpres = self._execute_module(module_name='ansible.legacy.slurp', module_args=dict(src=source), task_vars=task_vars)
if slurpres.get('failed'):
if not fail_on_missing:
result['file'] = source
result['changed'] = False
else:
result.update(slurpres)
if 'not found' in slurpres.get('msg', ''):
result['msg'] = "the remote file does not exist, not transferring, ignored"
elif slurpres.get('msg', '').startswith('source is a directory'):
result['msg'] = "remote file is a directory, fetch cannot work on directories"
return result
else:
if slurpres['encoding'] == 'base64':
remote_data = base64.b64decode(slurpres['content'])
if remote_data is not None:
remote_checksum = checksum_s(remote_data)
# calculate the destination name
if os.path.sep not in self._connection._shell.join_path('a', ''):
@ -83,13 +135,14 @@ class ActionModule(ActionBase):
else:
source_local = source
dest = os.path.expanduser(dest)
# ensure we only use file name, avoid relative paths
if not is_subpath(dest, original_dest):
# TODO: ? dest = os.path.expanduser(dest.replace(('../','')))
raise AnsibleActionFail("Detected directory traversal, expected to be contained in '%s' but got '%s'" % (original_dest, dest))
if flat:
if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep):
result['msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
result['file'] = dest
result['failed'] = True
return result
raise AnsibleActionFail("dest is an existing directory, use a trailing slash if you want to fetch src into that directory")
if dest.endswith(os.sep):
# if the path ends with "/", we'll use the source filename as the
# destination filename
@ -106,23 +159,7 @@ class ActionModule(ActionBase):
target_name = self._play_context.remote_addr
dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)
dest = dest.replace("//", "/")
if remote_checksum in REMOTE_CHECKSUM_ERRORS:
result['changed'] = False
result['file'] = source
result['msg'] = REMOTE_CHECKSUM_ERRORS[remote_checksum]
# Historically, these don't fail because you may want to transfer
# a log file that possibly MAY exist but keep going to fetch other
# log files. Today, this is better achieved by adding
# ignore_errors or failed_when to the task. Control the behaviour
# via fail_when_missing
if fail_on_missing:
result['failed'] = True
del result['changed']
else:
result['msg'] += ", not transferring, ignored"
return result
dest = os.path.normpath(dest)
# calculate checksum for the local file
local_checksum = checksum(dest)
@ -132,7 +169,15 @@ class ActionModule(ActionBase):
makedirs_safe(os.path.dirname(dest))
# fetch the file and check for changes
self._connection.fetch_file(source, dest)
if remote_data is None:
self._connection.fetch_file(source, dest)
else:
try:
f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb')
f.write(remote_data)
f.close()
except (IOError, OSError) as e:
raise AnsibleActionFail("Failed to fetch the file: %s" % e)
new_checksum = secure_hash(dest)
# For backwards compatibility. We'll return None on FIPS enabled systems
try:
@ -157,10 +202,6 @@ class ActionModule(ActionBase):
result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))
finally:
try:
self._remove_tmp_path(self._connection._shell.tmpdir)
except AttributeError:
# .tmpdir was added to ShellModule in v2.6.0, so old versions don't have it
pass
self._remove_tmp_path(self._connection._shell.tmpdir)
return result
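Sketch of the slurp fallback added above: when the remote checksum cannot be read directly, the file content comes back base64-encoded from the `ansible.legacy.slurp` module and is decoded (and checksummed) locally. The values here are made up for illustration:

import base64

slurpres = {'encoding': 'base64', 'content': base64.b64encode(b'hello\n').decode()}
if slurpres['encoding'] == 'base64':
    remote_data = base64.b64decode(slurpres['content'])
print(remote_data)  # b'hello\n'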

@ -26,14 +26,15 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import unicode_literals
"""
Fetch the connection configuration stack that would be used to connect to a
target, without actually connecting to it.
"""
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__metaclass__ = type
import ansible_mitogen.connection
from ansible.plugins.action import ActionBase

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -27,12 +27,13 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils.six import iteritems
import ansible.errors
try:
import ansible_mitogen
@ -45,17 +46,11 @@ import ansible_mitogen.connection
import ansible_mitogen.loaders
_class = ansible_mitogen.loaders.connection_loader__get(
_get_result = ansible_mitogen.loaders.connection_loader__get(
'kubectl',
class_only=True,
)
if _class:
kubectl = sys.modules[_class.__module__]
del _class
else:
kubectl = None
class Connection(ansible_mitogen.connection.Connection):
transport = 'kubectl'
@ -66,14 +61,22 @@ class Connection(ansible_mitogen.connection.Connection):
)
def __init__(self, *args, **kwargs):
if kubectl is None:
raise AnsibleConnectionFailure(self.not_supported_msg)
if not _get_result:
raise ansible.errors.AnsibleConnectionFailure(self.not_supported_msg)
super(Connection, self).__init__(*args, **kwargs)
def get_extra_args(self):
try:
# Ansible < 2.10, _get_result is the connection class
connection_options = _get_result.connection_options
except AttributeError:
# Ansible >= 2.10, _get_result is a get_with_context_result
connection_options = _get_result.object.connection_options
parameters = []
for key, option in iteritems(kubectl.CONNECTION_OPTIONS):
if self.get_task_var('ansible_' + key) is not None:
parameters += [ option, self.get_task_var('ansible_' + key) ]
for key in connection_options:
task_var_name = 'ansible_%s' % key
task_var = self.get_task_var(task_var_name)
if task_var is not None:
parameters += [connection_options[key], task_var]
return parameters
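A minimal sketch of the mapping performed by get_extra_args() above, using hypothetical option names; the real names come from the kubectl connection plugin's connection_options.
# Hypothetical option names, for illustration only.
connection_options = {
    'kubectl_context': '--context',
    'kubectl_namespace': '--namespace',
}
task_vars = {'ansible_kubectl_namespace': 'staging'}

parameters = []
for key in connection_options:
    task_var = task_vars.get('ansible_%s' % key)
    if task_var is not None:
        parameters += [connection_options[key], task_var]

# parameters == ['--namespace', 'staging']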

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -0,0 +1,46 @@
# Copyright 2022, Mitogen contributors
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys
try:
import ansible_mitogen
except ImportError:
base_dir = os.path.dirname(__file__)
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
del base_dir
import ansible_mitogen.connection
class Connection(ansible_mitogen.connection.Connection):
transport = 'podman'

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys

@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import atexit
import logging
import multiprocessing

@ -36,6 +36,9 @@ Each class in here has a corresponding Planner class in planners.py that knows
how to build arguments for it, preseed related data, etc.
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import atexit
import imp
import os

@ -39,18 +39,18 @@ connections, grant access to files by children, and register for notification
when a child has completed a job.
"""
from __future__ import absolute_import
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__metaclass__ = type
import logging
import os
import os.path
import sys
import threading
import ansible.constants
import mitogen
import mitogen.core
import mitogen.service
import mitogen.utils
import ansible_mitogen.loaders

@ -26,8 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import distutils.version
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os
import signal
import threading
@ -43,52 +44,8 @@ import ansible_mitogen.loaders
import ansible_mitogen.mixins
import ansible_mitogen.process
import ansible
import ansible.executor.process.worker
try:
# 2.8+ has a standardized "unset" object.
from ansible.utils.sentinel import Sentinel
except ImportError:
Sentinel = None
ANSIBLE_VERSION_MIN = (2, 10)
ANSIBLE_VERSION_MAX = (2, 10)
NEW_VERSION_MSG = (
"Your Ansible version (%s) is too recent. The most recent version\n"
"supported by Mitogen for Ansible is %s.x. Please check the Mitogen\n"
"release notes to see if a new version is available, otherwise\n"
"subscribe to the corresponding GitHub issue to be notified when\n"
"support becomes available.\n"
"\n"
" https://mitogen.rtfd.io/en/latest/changelog.html\n"
" https://github.com/dw/mitogen/issues/\n"
)
OLD_VERSION_MSG = (
"Your version of Ansible (%s) is too old. The oldest version supported by "
"Mitogen for Ansible is %s."
)
def _assert_supported_release():
"""
Throw AnsibleError with a descriptive message in case of being loaded into
an unsupported Ansible release.
"""
v = ansible.__version__
if not isinstance(v, tuple):
v = tuple(distutils.version.LooseVersion(v).version)
if v[:2] < ANSIBLE_VERSION_MIN:
raise ansible.errors.AnsibleError(
OLD_VERSION_MSG % (v, ANSIBLE_VERSION_MIN)
)
if v[:2] > ANSIBLE_VERSION_MAX:
raise ansible.errors.AnsibleError(
NEW_VERSION_MSG % (ansible.__version__, ANSIBLE_VERSION_MAX)
)
import ansible.utils.sentinel
def _patch_awx_callback():
@ -99,12 +56,11 @@ def _patch_awx_callback():
# AWX uses sitecustomize.py to force-load this package. If it exists, we're
# running under AWX.
try:
from awx_display_callback.events import EventContext
from awx_display_callback.events import event_context
import awx_display_callback.events
except ImportError:
return
if hasattr(EventContext(), '_local'):
if hasattr(awx_display_callback.events.EventContext(), '_local'):
# Patched version.
return
@ -113,8 +69,8 @@ def _patch_awx_callback():
ctx = tls.setdefault('_ctx', {})
ctx.update(kwargs)
EventContext._local = threading.local()
EventContext.add_local = patch_add_local
awx_display_callback.events.EventContext._local = threading.local()
awx_display_callback.events.EventContext.add_local = patch_add_local
_patch_awx_callback()
@ -152,6 +108,7 @@ REDIRECTED_CONNECTION_PLUGINS = (
'lxc',
'lxd',
'machinectl',
'podman',
'setns',
'ssh',
)
@ -323,7 +280,7 @@ class StrategyMixin(object):
name=task.action,
class_only=True,
)
if play_context.connection is not Sentinel:
if play_context.connection is not ansible.utils.sentinel.Sentinel:
# 2.8 appears to defer computing this until inside the worker.
# TODO: figure out where it has moved.
ansible_mitogen.loaders.connection_loader.get(
@ -351,7 +308,6 @@ class StrategyMixin(object):
Wrap :meth:`run` to ensure requisite infrastructure and modifications
are configured for the duration of the call.
"""
_assert_supported_release()
wrappers = AnsibleWrappers()
self._worker_model = self._get_worker_model()
ansible_mitogen.process.set_worker_model(self._worker_model)

@ -33,6 +33,9 @@ Helper functions intended to be executed on the target. These are entrypoints
for file transfer, module execution and sundry bits like changing file modes.
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import errno
import grp
import operator
@ -51,7 +54,6 @@ import types
logging = __import__('logging')
import mitogen.core
import mitogen.fork
import mitogen.parent
import mitogen.service
from mitogen.core import b
@ -144,7 +146,7 @@ def subprocess__Popen__close_fds(self, but):
if (
sys.platform.startswith(u'linux') and
sys.version < u'3.0' and
sys.version_info < (3,) and
hasattr(subprocess.Popen, u'_close_fds') and
not mitogen.is_master
):
@ -652,7 +654,8 @@ def read_path(path):
"""
Fetch the contents of a filesystem `path` as bytes.
"""
return open(path, 'rb').read()
with open(path, 'rb') as f:
return f.read()
def set_file_owner(path, owner, group=None, fd=None):

@ -26,9 +26,6 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import unicode_literals
"""
Mitogen extends Ansible's target configuration mechanism in several ways that
require some care:
@ -60,6 +57,10 @@ information from PlayContext, and another that takes (almost) all information
from HostVars.
"""
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__metaclass__ = type
import abc
import os
import ansible.utils.shlex
@ -354,6 +355,12 @@ class Spec(with_metaclass(abc.ABCMeta, object)):
The path to the "machinectl" program for the 'setns' transport.
"""
@abc.abstractmethod
def mitogen_podman_path(self):
"""
The path to the "podman" program for the 'podman' transport.
"""
@abc.abstractmethod
def mitogen_ssh_keepalive_interval(self):
"""
@ -451,7 +458,7 @@ class PlayContextSpec(Spec):
return self._play_context.private_key_file
def ssh_executable(self):
return self._play_context.ssh_executable
return C.config.get_config_value("ssh_executable", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {}))
def timeout(self):
return self._play_context.timeout
@ -467,9 +474,9 @@ class PlayContextSpec(Spec):
return [
mitogen.core.to_text(term)
for s in (
getattr(self._play_context, 'ssh_args', ''),
getattr(self._play_context, 'ssh_common_args', ''),
getattr(self._play_context, 'ssh_extra_args', '')
C.config.get_config_value("ssh_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {})),
C.config.get_config_value("ssh_common_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {})),
C.config.get_config_value("ssh_extra_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {}))
)
for term in ansible.utils.shlex.shlex_split(s or '')
]
@ -527,6 +534,9 @@ class PlayContextSpec(Spec):
def mitogen_lxc_info_path(self):
return self._connection.get_task_var('mitogen_lxc_info_path')
def mitogen_podman_path(self):
return self._connection.get_task_var('mitogen_podman_path')
def mitogen_ssh_keepalive_interval(self):
return self._connection.get_task_var('mitogen_ssh_keepalive_interval')
@ -679,10 +689,7 @@ class MitogenViaSpec(Spec):
)
def ssh_executable(self):
return (
self._host_vars.get('ansible_ssh_executable') or
C.ANSIBLE_SSH_EXECUTABLE
)
return C.config.get_config_value("ssh_executable", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {}))
def timeout(self):
# TODO: must come from PlayContext too.
@ -699,22 +706,9 @@ class MitogenViaSpec(Spec):
return [
mitogen.core.to_text(term)
for s in (
(
self._host_vars.get('ansible_ssh_args') or
getattr(C, 'ANSIBLE_SSH_ARGS', None) or
os.environ.get('ANSIBLE_SSH_ARGS')
# TODO: ini entry. older versions.
),
(
self._host_vars.get('ansible_ssh_common_args') or
os.environ.get('ANSIBLE_SSH_COMMON_ARGS')
# TODO: ini entry.
),
(
self._host_vars.get('ansible_ssh_extra_args') or
os.environ.get('ANSIBLE_SSH_EXTRA_ARGS')
# TODO: ini entry.
),
C.config.get_config_value("ssh_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {})),
C.config.get_config_value("ssh_common_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {})),
C.config.get_config_value("ssh_extra_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {}))
)
for term in ansible.utils.shlex.shlex_split(s)
if s
@ -763,6 +757,9 @@ class MitogenViaSpec(Spec):
def mitogen_lxc_info_path(self):
return self._host_vars.get('mitogen_lxc_info_path')
def mitogen_podman_path(self):
return self._host_vars.get('mitogen_podman_path')
def mitogen_ssh_keepalive_interval(self):
return self._host_vars.get('mitogen_ssh_keepalive_interval')

@ -0,0 +1,14 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import distutils.version
import ansible
__all__ = [
'ansible_version',
]
ansible_version = tuple(distutils.version.LooseVersion(ansible.__version__).version)
del distutils
del ansible
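A hedged usage sketch, assuming the module above is importable as ansible_mitogen.utils; the (2, 10) threshold is illustrative only, not a value taken from this change.
# Assumes the new module is ansible_mitogen/utils/__init__.py.
import ansible_mitogen.utils

if ansible_mitogen.utils.ansible_version[:2] >= (2, 10):
    print('Running under Ansible 2.10 or newer')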

@ -145,9 +145,12 @@ Testimonials
Noteworthy Differences
----------------------
* Ansible 2.3-2.9 are supported along with Python 2.6, 2.7, 3.6 and 3.7. Verify
your installation is running one of these versions by checking ``ansible
--version`` output.
* Mitogen 0.2.x supports Ansible 2.3-2.9; with Python 2.6, 2.7, or 3.6.
Mitogen 0.3.1+ supports
- Ansible 2.10, 3, and 4; with Python 2.7, or 3.6-3.10
- Ansible 5; with Python 3.8-3.10
Verify your installation is running one of these versions by checking
``ansible --version`` output.
* The ``raw`` action executes as a regular Mitogen connection, which requires
Python on the target, precluding its use for installing Python. This will be
@ -185,9 +188,9 @@ Noteworthy Differences
your_ssh_username = (ALL) NOPASSWD:/usr/bin/python -c*
* The :ans:conn:`~buildah`, :ans:conn:`~docker`, :ans:conn:`~jail`,
:ans:conn:`~kubectl`, :ans:conn:`~local`, :ans:conn:`~lxd`, and
:ans:conn:`~ssh` built-in connection types are supported, along with
Mitogen-specific :ref:`machinectl <machinectl>`, :ref:`mitogen_doas <doas>`,
:ans:conn:`~kubectl`, :ans:conn:`~local`, :ans:conn:`~lxd`,
:ans:conn:`~podman`, & :ans:conn:`~ssh` connection types are supported; also
Mitogen-specific :ref:`mitogen_doas <doas>`, :ref:`machinectl <machinectl>`,
:ref:`mitogen_su <su>`, :ref:`mitogen_sudo <sudo>`, and :ref:`setns <setns>`
types. File bugs to register interest in others.
@ -816,6 +819,20 @@ Like the :ans:conn:`local` except connection delegation is supported.
* ``ansible_python_interpreter``
Podman
~~~~~~
Like :ans:conn:`podman` except connection delegation is supported.
* ``ansible_host``: Name of container (default: inventory hostname).
* ``ansible_user``: Name of user within the container to execute as.
* ``mitogen_mask_remote_name``: if :data:`True`, mask the identity of the
Ansible controller process on remote machines. To simplify diagnostics,
Mitogen produces remote processes named like
`"mitogen:user@controller.name:1234"`, however this may be a privacy issue in
some circumstances.
Process Model
^^^^^^^^^^^^^

@ -95,7 +95,7 @@ Connection Methods
:param str container:
The name of the Buildah container to connect to.
:param str doas_path:
:param str buildah_path:
Filename or complete path to the ``buildah`` binary. ``PATH`` will be
searched if given as a filename. Defaults to ``buildah``.
:param str username:
@ -367,6 +367,20 @@ Connection Methods
Filename or complete path to the ``lxc`` binary. ``PATH`` will be
searched if given as a filename. Defaults to ``lxc``.
.. currentmodule:: mitogen.parent
.. method:: Router.podman (container=None, podman_path=None, username=None, \**kwargs)
Construct a context on the local machine over a ``podman`` invocation.
Accepts all parameters accepted by :meth:`local`, in addition to:
:param str container:
The name of the Podman container to connect to.
:param str podman_path:
Filename or complete path to the ``podman`` binary. ``PATH`` will be
searched if given as a filename. Defaults to ``podman``.
:param str username:
Username to use, defaults to unset.
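A minimal usage sketch for Router.podman(); the container name "web1", the username "app", and the surrounding run_with_router() scaffolding are illustrative assumptions, not part of the documented signature above.
# Illustration only: "web1" and "app" are made-up values.
import os

import mitogen.utils

def main(router):
    # Router.podman() is the method documented above; it returns a Context
    # whose calls execute inside the named container.
    context = router.podman(container='web1', username='app')
    print(context.call(os.getpid))

mitogen.utils.run_with_router(main)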
.. method:: Router.setns (container, kind, username=None, docker_path=None, lxc_info_path=None, machinectl_path=None, \**kwargs)
Construct a context in the style of :meth:`local`, but change the

@ -17,19 +17,53 @@ Release Notes
To avail of fixes in an unreleased version, please download a ZIP file
`directly from GitHub <https://github.com/dw/mitogen/>`_.
v0.3.0 (unreleased)
--------------------
v0.3.3.dev0
-------------------
* :gh:issue:`906` Support packages dynamically inserted into sys.modules, e.g. `distro` >= 1.7.0 as `ansible.module_utils.distro`.
* :gh:issue:`918` Support Python 3.10
* :gh:issue:`920` Support Ansible :ans:conn:`~podman` connection plugin
* :gh:issue:`836` :func:`mitogen.utils.with_router` decorator preserves the docstring in addition to the name.
* :gh:issue:`936` :ans:mod:`fetch` no longer emits `[DEPRECATION WARNING]: The '_remote_checksum()' method is deprecated.`
* :gh:pull:`683`: Previously broken :mod:`mitogen.fakessh` functionality is restored
v0.3.2 (2022-01-12)
-------------------
* :gh:issue:`891` Correct `Framework :: Ansible` Trove classifier
v0.3.1 (unreleased)
-------------------
* :gh:issue:`874` Support for Ansible 5 (ansible-core 2.12)
* :gh:issue:`774` Fix bootstrap failures on macOS 11.x and 12.x, involving Python 2.7 wrapper
* :gh:issue:`834` Support for Ansible 3 and 4 (ansible-core 2.11)
* :gh:issue:`869` Continuous Integration tests are now run with Tox
* :gh:issue:`869` Continuous Integration tests now cover CentOS 6 & 8, Debian 9 & 11, Ubuntu 16.04 & 20.04
* :gh:issue:`860` Add initial support for podman connection (w/o Ansible support yet)
* :gh:issue:`873` `python -c ...` first stage no longer uses :py:mod:`platform` to detect the macOS release
* :gh:issue:`876` `python -c ...` first stage no longer contains tab characters, to reduce size
* :gh:issue:`878` Continuous Integration tests now correctly perform comparisons of 2 digit versions
* :gh:issue:`878` Kubectl connector fixed with Ansible 2.10 and above
v0.3.0 (2021-11-24)
-------------------
This release separates itself from the v0.2.X releases. Ansible's API changed too much to support backwards compatibility, so from now on v0.2.X releases will be for Ansible < 2.10 and v0.3.X will be for Ansible 2.10+.
`See here for details <https://github.com/dw/mitogen/pull/715#issuecomment-750697248>`_.
* :gh:pull:`683`: Previously broken :mod:`mitogen.fakessh` functionality
is restored
* :gh:issue:`827` NewStylePlanner: detect `ansible_collections` imports
* :gh:issue:`770` better check for supported Ansible version
* :gh:issue:`731` ansible 2.10 support
* :gh:issue:`652` support for ansible collections import hook
* :gh:issue:`847` Removed historic Continuous Integration reverse shell
v0.2.10 (unreleased)
v0.2.10 (2021-11-24)
--------------------
* :gh:issue:`597` mitogen does not support Ansible 2.8 Python interpreter detection
@ -42,6 +76,8 @@ v0.2.10 (unreleased)
timeout, when using recent OpenSSH client versions.
* :gh:issue:`758` fix initialisation of callback plugins in test suite, to address a `KeyError` in
:meth:`ansible.plugins.callback.CallbackBase.v2_runner_on_start`
* :gh:issue:`775` Test with Python 3.9
* :gh:issue:`775` Add msvcrt to the default module deny list
v0.2.9 (2019-11-02)

@ -201,7 +201,7 @@ nested.py:
print('Connect local%d via %s' % (x, context))
context = router.local(via=context, name='local%d' % x)
context.call(os.system, 'pstree -s python -s mitogen')
context.call(subprocess.check_call, ['pstree', '-s', 'python', '-s', 'mitogen'])
Output:

@ -813,7 +813,7 @@ executes under the runtime importer lock, ensuring :py:keyword:`import`
statements executing in local threads are serialized.
.. note::
In Python 2, :py:exc:`ImportError` is raised when :py:keyword:`import` is
attempted while the runtime import lock is held by another thread,
therefore imports must be serialized by only attempting them from the main

@ -101,7 +101,7 @@ to your network topology**.
container='billing0',
)
internal_box.call(os.system, './run-nightly-billing.py')
internal_box.call(subprocess.check_call, ['./run-nightly-billing.py'])
The multiplexer also ensures the remote process is terminated if your Python
program crashes, communication is lost, or the application code running in the
@ -250,7 +250,7 @@ After:
"""
Install our application.
"""
os.system('tar zxvf app.tar.gz')
subprocess.check_call(['tar', 'zxvf', 'app.tar.gz'])
context.call(install_app)
@ -258,7 +258,7 @@ Or even:
.. code-block:: python
context.call(os.system, 'tar zxvf app.tar.gz')
context.call(subprocess.check_call, ['tar', 'zxvf', 'app.tar.gz'])
Exceptions raised by function calls are propagated back to the parent program,
and timeouts can be configured to ensure failed calls do not block progress of

@ -8,14 +8,14 @@ Usage:
Where:
<hostname> Hostname to install to.
"""
import os
import subprocess
import sys
import mitogen
def install_app():
os.system('tar zxvf my_app.tar.gz')
subprocess.check_call(['tar', 'zxvf', 'my_app.tar.gz'])
@mitogen.main()

@ -35,7 +35,7 @@ be expected. On the slave, it is built dynamically during startup.
#: Library version as a tuple.
__version__ = (0, 2, 9)
__version__ = (0, 3, 3, 'dev0')
#: This is :data:`False` in slave contexts. Previously it was used to prevent

@ -30,7 +30,6 @@
import logging
import mitogen.core
import mitogen.parent

@ -386,6 +386,20 @@ def _partition(s, sep, find):
return left, sep, s[len(left)+len(sep):]
def threading__current_thread():
try:
return threading.current_thread() # Added in Python 2.6+
except AttributeError:
return threading.currentThread() # Deprecated in Python 3.10+
def threading__thread_name(thread):
try:
return thread.name # Added in Python 2.6+
except AttributeError:
return thread.getName() # Deprecated in Python 3.10+
if hasattr(UnicodeType, 'rpartition'):
str_partition = UnicodeType.partition
str_rpartition = UnicodeType.rpartition
@ -1254,6 +1268,7 @@ class Importer(object):
'minify',
'os_fork',
'parent',
'podman',
'select',
'service',
'setns',
@ -1269,6 +1284,13 @@ class Importer(object):
# a negative round-trip.
'builtins',
'__builtin__',
# On some Python releases (e.g. 3.8, 3.9) the subprocess module tries
# to import of this Windows-only builtin module.
'msvcrt',
# Python 2.x module that was renamed to _thread in 3.x.
# This entry avoids a roundtrip on 2.x -> 3.x.
'thread',
# org.python.core imported by copy, pickle, xml.sax; breaks Jython, but
@ -1349,6 +1371,16 @@ class Importer(object):
fp.close()
def find_module(self, fullname, path=None):
"""
Return a loader (ourself) or None, for the module with fullname.
Implements importlib.abc.MetaPathFinder.find_module().
Deprecated in Python 3.4+, replaced by find_spec().
Raises ImportWarning in Python 3.10+.
fullname A (fully qualified?) module name, e.g. "os.path".
path __path__ of parent package. None for a top level module.
"""
if hasattr(_tls, 'running'):
return None
@ -1470,6 +1502,12 @@ class Importer(object):
callback()
def load_module(self, fullname):
"""
Return the loaded module specified by fullname.
Implements importlib.abc.Loader.load_module().
Deprecated in Python 3.4+, replaced by create_module() & exec_module().
"""
fullname = to_text(fullname)
_v and self._log.debug('requesting %s', fullname)
self._refuse_imports(fullname)
@ -2679,7 +2717,7 @@ class Latch(object):
raise e
assert cookie == got_cookie, (
"Cookie incorrect; got %r, expected %r" \
"Cookie incorrect; got %r, expected %r"
% (binascii.hexlify(got_cookie),
binascii.hexlify(cookie))
)
@ -2734,7 +2772,7 @@ class Latch(object):
return 'Latch(%#x, size=%d, t=%r)' % (
id(self),
len(self._queue),
threading.currentThread().getName(),
threading__thread_name(threading__current_thread()),
)
@ -3634,7 +3672,6 @@ class Dispatcher(object):
self._service_recv.notify = None
self.recv.close()
@classmethod
@takes_econtext
def forget_chain(cls, chain_id, econtext):
@ -3862,7 +3899,7 @@ class ExternalContext(object):
else:
core_src_fd = self.config.get('core_src_fd', 101)
if core_src_fd:
fp = os.fdopen(core_src_fd, 'rb', 1)
fp = os.fdopen(core_src_fd, 'rb', 0)
try:
core_src = fp.read()
# Strip "ExternalContext.main()" call from last line.

@ -115,7 +115,6 @@ import tempfile
import threading
import mitogen.core
import mitogen.master
import mitogen.parent
from mitogen.core import LOG, IOLOG

@ -28,7 +28,6 @@
# !mitogen: minify_safe
import mitogen.core
import mitogen.parent

@ -28,7 +28,6 @@
# !mitogen: minify_safe
import mitogen.core
import mitogen.parent

@ -28,7 +28,6 @@
# !mitogen: minify_safe
import mitogen.core
import mitogen.parent

@ -108,7 +108,7 @@ def _stdlib_paths():
]
prefixes = (getattr(sys, a, None) for a in attr_candidates)
version = 'python%s.%s' % sys.version_info[0:2]
s = set(os.path.abspath(os.path.join(p, 'lib', version))
s = set(os.path.realpath(os.path.join(p, 'lib', version))
for p in prefixes if p is not None)
# When running 'unit2 tests/module_finder_test.py' in a Py2 venv on Ubuntu
@ -122,6 +122,13 @@ def is_stdlib_name(modname):
"""
Return :data:`True` if `modname` appears to come from the standard library.
"""
# `imp.is_builtin()` isn't documented as part of Python's stdlib API.
#
# """
# Main is a little special - imp.is_builtin("__main__") will return False,
# but BuiltinImporter is still the most appropriate initial setting for
# its __loader__ attribute.
# """ -- comment in CPython pylifecycle.c:add_main_module()
if imp.is_builtin(modname) != 0:
return True
@ -512,42 +519,57 @@ class PkgutilMethod(FinderMethod):
Find `fullname` using :func:`pkgutil.find_loader`.
"""
try:
# If fullname refers to a submodule that's not already imported
# then the containing package is imported.
# Pre-'import spec' this returned None, in Python 3.6 it raises
# ImportError.
loader = pkgutil.find_loader(fullname)
except ImportError:
e = sys.exc_info()[1]
LOG.debug('%r._get_module_via_pkgutil(%r): %s',
self, fullname, e)
LOG.debug('%r: find_loader(%r) failed: %s', self, fullname, e)
return None
IOLOG.debug('%r._get_module_via_pkgutil(%r) -> %r',
self, fullname, loader)
if not loader:
LOG.debug('%r: find_loader(%r) returned %r, aborting',
self, fullname, loader)
return
try:
path, is_special = _py_filename(loader.get_filename(fullname))
source = loader.get_source(fullname)
is_pkg = loader.is_package(fullname)
# workaround for special python modules that might only exist in memory
if is_special and is_pkg and not source:
source = '\n'
path = loader.get_filename(fullname)
except (AttributeError, ImportError):
# - Per PEP-302, get_source() and is_package() are optional,
# calling them may throw AttributeError.
# - get_filename() may throw ImportError if pkgutil.find_loader()
# picks a "parent" package's loader for some crap that's been
# stuffed in sys.modules, for example in the case of urllib3:
# "loader for urllib3.contrib.pyopenssl cannot handle
# requests.packages.urllib3.contrib.pyopenssl"
e = sys.exc_info()[1]
LOG.debug('%r: loading %r using %r failed: %s',
self, fullname, loader, e)
LOG.debug('%r: %r.get_filename(%r) failed: %r', self, loader, fullname, e)
return
path, is_special = _py_filename(path)
try:
source = loader.get_source(fullname)
except AttributeError:
# Per PEP-302, get_source() is optional.
e = sys.exc_info()[1]
LOG.debug('%r: %r.get_source() failed: %r', self, loader, fullname, e)
return
try:
is_pkg = loader.is_package(fullname)
except AttributeError:
# Per PEP-302, is_package() is optional.
e = sys.exc_info()[1]
LOG.debug('%r: %r.is_package(%r) failed: %r', self, loader, fullname, e)
return
# workaround for special python modules that might only exist in memory
if is_special and is_pkg and not source:
source = '\n'
if path is None or source is None:
LOG.debug('%r: path=%r, source=%r, aborting', self, path, source)
return
if isinstance(source, mitogen.core.UnicodeType):
@ -567,23 +589,37 @@ class SysModulesMethod(FinderMethod):
"""
Find `fullname` using its :data:`__file__` attribute.
"""
module = sys.modules.get(fullname)
try:
module = sys.modules[fullname]
except KeyError:
LOG.debug('%r: sys.modules[%r] absent, aborting', self, fullname)
return
if not isinstance(module, types.ModuleType):
LOG.debug('%r: sys.modules[%r] absent or not a regular module',
self, fullname)
LOG.debug('%r: sys.modules[%r] is %r, aborting',
self, fullname, module)
return
try:
resolved_name = module.__name__
except AttributeError:
LOG.debug('%r: %r has no __name__, aborting', self, module)
return
if resolved_name != fullname:
LOG.debug('%r: %r.__name__ is %r, aborting',
self, module, resolved_name)
return
LOG.debug('_get_module_via_sys_modules(%r) -> %r', fullname, module)
alleged_name = getattr(module, '__name__', None)
if alleged_name != fullname:
LOG.debug('sys.modules[%r].__name__ is incorrect, assuming '
'this is a hacky module alias and ignoring it. '
'Got %r, module object: %r',
fullname, alleged_name, module)
try:
path = module.__file__
except AttributeError:
LOG.debug('%r: %r has no __file__, aborting', self, module)
return
path, _ = _py_filename(getattr(module, '__file__', ''))
path, _ = _py_filename(path)
if not path:
LOG.debug('%r: %r.__file__ is %r, aborting', self, module, path)
return
LOG.debug('%r: sys.modules[%r]: found %s', self, fullname, path)
@ -628,10 +664,24 @@ class ParentEnumerationMethod(FinderMethod):
module object or any parent package's :data:`__path__`, since they have all
been overwritten. Some men just want to watch the world burn.
"""
@staticmethod
def _iter_parents(fullname):
"""
>>> list(ParentEnumerationMethod._iter_parents('a'))
[('', 'a')]
>>> list(ParentEnumerationMethod._iter_parents('a.b.c'))
[('a.b', 'c'), ('a', 'b'), ('', 'a')]
"""
while fullname:
fullname, _, modname = str_rpartition(fullname, u'.')
yield fullname, modname
def _find_sane_parent(self, fullname):
"""
Iteratively search :data:`sys.modules` for the least indirect parent of
`fullname` that is loaded and contains a :data:`__path__` attribute.
`fullname` that's from the same package and has a :data:`__path__`
attribute.
:return:
`(parent_name, path, modpath)` tuple, where:
@ -644,21 +694,40 @@ class ParentEnumerationMethod(FinderMethod):
* `modpath`: list of module name components leading from `path`
to the target module.
"""
path = None
modpath = []
while True:
pkgname, _, modname = str_rpartition(to_text(fullname), u'.')
for pkgname, modname in self._iter_parents(fullname):
modpath.insert(0, modname)
if not pkgname:
return [], None, modpath
pkg = sys.modules.get(pkgname)
path = getattr(pkg, '__path__', None)
if pkg and path:
return pkgname.split('.'), path, modpath
try:
pkg = sys.modules[pkgname]
except KeyError:
LOG.debug('%r: sys.modules[%r] absent, skipping', self, pkgname)
continue
try:
resolved_pkgname = pkg.__name__
except AttributeError:
LOG.debug('%r: %r has no __name__, skipping', self, pkg)
continue
if resolved_pkgname != pkgname:
LOG.debug('%r: %r.__name__ is %r, skipping',
self, pkg, resolved_pkgname)
continue
try:
path = pkg.__path__
except AttributeError:
LOG.debug('%r: %r has no __path__, skipping', self, pkg)
continue
if not path:
LOG.debug('%r: %r.__path__ is %r, skipping', self, pkg, path)
continue
LOG.debug('%r: %r lacks __path__ attribute', self, pkgname)
fullname = pkgname
return pkgname.split('.'), path, modpath
def _found_package(self, fullname, path):
path = os.path.join(path, '__init__.py')
@ -1167,7 +1236,7 @@ class Broker(mitogen.core.Broker):
def __init__(self, install_watcher=True):
if install_watcher:
self._watcher = ThreadWatcher.watch(
target=threading.currentThread(),
target=mitogen.core.threading__current_thread(),
on_join=self.shutdown,
)
super(Broker, self).__init__()

@ -35,7 +35,6 @@ Support for operating in a mixed threading/forking environment.
import os
import socket
import sys
import threading
import weakref
import mitogen.core
@ -158,7 +157,7 @@ class Corker(object):
held. This will not return until each thread acknowledges it has ceased
execution.
"""
current = threading.currentThread()
current = mitogen.core.threading__current_thread()
s = mitogen.core.b('CORK') * ((128 // 4) * 1024)
self._rsocks = []

@ -42,7 +42,6 @@ import heapq
import inspect
import logging
import os
import platform
import re
import signal
import socket
@ -1410,9 +1409,15 @@ class Connection(object):
# their respective values.
# * CONTEXT_NAME must be prefixed with the name of the Python binary in
# order to allow virtualenvs to detect their install prefix.
# * For Darwin, OS X installs a craptacular argv0-introspecting Python
# version switcher as /usr/bin/python. Override attempts to call it
# with an explicit call to python2.7
# * macOS <= 10.14 (Darwin <= 18) install an unreliable Python version
# switcher as /usr/bin/python, which introspects argv0. To workaround
# it we redirect attempts to call /usr/bin/python with an explicit
# call to /usr/bin/python2.7. macOS 10.15 (Darwin 19) removed it.
# * macOS 11.x (Darwin 20, Big Sur) and macOS 12.x (Darwin 21, Monterey)
# do something slightly different. The Python executable is patched to
# perform an extra execvp(). I don't fully understand the details, but
# setting PYTHON_LAUNCHED_FROM_WRAPPER=1 avoids it.
# * macOS 13.x (Darwin 22?) may remove python 2.x entirely.
#
# Locals:
# R: read side of interpreter stdin.
@ -1435,11 +1440,8 @@ class Connection(object):
os.close(r)
os.close(W)
os.close(w)
# this doesn't apply anymore to Mac OSX 10.15+ (Darwin 19+), new interpreter looks like this:
# /System/Library/Frameworks/Python.framework/Versions/2.7/Resources/Python.app/Contents/MacOS/Python
if sys.platform == 'darwin' and sys.executable == '/usr/bin/python' and \
int(platform.release()[:2]) < 19:
sys.executable += sys.version[:3]
if os.uname()[0]=='Darwin'and os.uname()[2][:2]<'19'and sys.executable=='/usr/bin/python':sys.executable='/usr/bin/python2.7'
if os.uname()[0]=='Darwin'and os.uname()[2][:2]in'2021'and sys.version[:3]=='2.7':os.environ['PYTHON_LAUNCHED_FROM_WRAPPER']='1'
os.environ['ARGV0']=sys.executable
os.execl(sys.executable,sys.executable+'(mitogen:CONTEXT_NAME)')
os.write(1,'MITO000\n'.encode())
@ -1469,7 +1471,7 @@ class Connection(object):
def get_boot_command(self):
source = inspect.getsource(self._first_stage)
source = textwrap.dedent('\n'.join(source.strip().split('\n')[2:]))
source = source.replace(' ', '\t')
source = source.replace(' ', ' ')
source = source.replace('CONTEXT_NAME', self.options.remote_name)
preamble_compressed = self.get_preamble()
source = source.replace('PREAMBLE_COMPRESSED_LEN',
@ -1506,7 +1508,7 @@ class Connection(object):
def get_preamble(self):
suffix = (
'\nExternalContext(%r).main()\n' %\
'\nExternalContext(%r).main()\n' %
(self.get_econtext_config(),)
)
partial = get_core_source_partial()
@ -2505,6 +2507,9 @@ class Router(mitogen.core.Router):
def ssh(self, **kwargs):
return self.connect(u'ssh', **kwargs)
def podman(self, **kwargs):
return self.connect(u'podman', **kwargs)
class Reaper(object):
"""

@ -0,0 +1,73 @@
# Copyright 2019, David Wilson
# Copyright 2021, Mitogen contributors
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# !mitogen: minify_safe
import logging
import mitogen.parent
LOG = logging.getLogger(__name__)
class Options(mitogen.parent.Options):
container = None
username = None
podman_path = 'podman'
def __init__(self, container=None, podman_path=None, username=None,
**kwargs):
super(Options, self).__init__(**kwargs)
assert container is not None
self.container = container
if podman_path:
self.podman_path = podman_path
if username:
self.username = username
class Connection(mitogen.parent.Connection):
options_class = Options
child_is_immediate_subprocess = False
# TODO: better way of capturing errors such as "No such container."
create_child_args = {
'merge_stdio': True
}
def _get_name(self):
return u'podman.' + self.options.container
def get_boot_command(self):
args = [self.options.podman_path, 'exec']
if self.options.username:
args += ['--user=' + self.options.username]
args += ["--interactive", "--", self.options.container]
return args + super(Connection, self).get_boot_command()
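For reference, a sketch of the argv prefix that get_boot_command() above produces for a hypothetical container; the Python bootstrap arguments appended by mitogen.parent.Connection are omitted.
# Hypothetical values: container "web1", username "app".
# The prefix below is followed by the standard Mitogen bootstrap command
# appended by mitogen.parent.Connection.get_boot_command().
expected_prefix = [
    'podman', 'exec',
    '--user=app',
    '--interactive', '--', 'web1',
]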

@ -90,7 +90,7 @@ def merge_stats(outpath, inpaths):
break
time.sleep(0.2)
stats.dump_stats(outpath)
pstats.dump_stats(outpath)
def generate_stats(outpath, tmpdir):

@ -31,7 +31,6 @@
import grp
import logging
import os
import os.path
import pprint
import pwd
import stat
@ -109,7 +108,8 @@ def get_or_create_pool(size=None, router=None, context=None):
def get_thread_name():
return threading.currentThread().getName()
thread = mitogen.core.threading__current_thread()
return mitogen.core.threading__thread_name(thread)
def call(service_name, method_name, call_context=None, **kwargs):

@ -29,14 +29,13 @@
# !mitogen: minify_safe
import datetime
import functools
import logging
import os
import sys
import mitogen
import mitogen.core
import mitogen.master
import mitogen.parent
iteritems = getattr(dict, 'iteritems', dict.items)
@ -173,12 +172,9 @@ def with_router(func):
do_stuff(blah, 123)
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
return run_with_router(func, *args, **kwargs)
if mitogen.core.PY3:
wrapper.func_name = func.__name__
else:
wrapper.func_name = func.func_name
return wrapper

@ -1,3 +1,4 @@
#!/usr/bin/env python
"""
Print the size of a typical SSH command line and the bootstrap code sent to new
contexts.

@ -28,10 +28,6 @@ NOCOVERAGE="${NOCOVERAGE:-}"
NOCOVERAGE_ERASE="${NOCOVERAGE_ERASE:-$NOCOVERAGE}"
NOCOVERAGE_REPORT="${NOCOVERAGE_REPORT:-$NOCOVERAGE}"
if [ ! "$UNIT2" ]; then
UNIT2="$(which unit2)"
fi
if [ ! "$NOCOVERAGE_ERASE" ]; then
coverage erase
fi
@ -39,12 +35,12 @@ fi
# First run overwites coverage output.
[ "$SKIP_MITOGEN" ] || {
if [ ! "$NOCOVERAGE" ]; then
coverage run -a "${UNIT2}" discover \
coverage run -a -m unittest discover \
--start-directory "tests" \
--pattern '*_test.py' \
"$@"
else
"${UNIT2}" discover \
python -m unittest discover \
--start-directory "tests" \
--pattern '*_test.py' \
"$@"
@ -60,12 +56,12 @@ fi
[ "$SKIP_ANSIBLE" ] || {
export PYTHONPATH=`pwd`/tests:$PYTHONPATH
if [ ! "$NOCOVERAGE" ]; then
coverage run -a "${UNIT2}" discover \
coverage run -a -m unittest discover \
--start-directory "tests/ansible" \
--pattern '*_test.py' \
"$@"
else
"${UNIT2}" discover \
python -m unittest discover \
--start-directory "tests/ansible" \
--pattern '*_test.py' \
"$@"

@ -1,3 +1,6 @@
[bdist_wheel]
universal=1
[coverage:run]
branch = true
source =

@ -26,6 +26,7 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import ast
import os
from setuptools import find_packages, setup
@ -37,29 +38,45 @@ def grep_version():
for line in fp:
if line.startswith('__version__'):
_, _, s = line.partition('=')
return '.'.join(map(str, eval(s)))
parts = ast.literal_eval(s.strip())
return '.'.join(str(part) for part in parts)
def long_description():
here = os.path.dirname(__file__)
readme_path = os.path.join(here, 'README.md')
with open(readme_path) as fp:
readme = fp.read()
return readme
setup(
name = 'mitogen',
version = grep_version(),
description = 'Library for writing distributed self-replicating programs.',
long_description = long_description(),
long_description_content_type='text/markdown',
author = 'David Wilson',
license = 'New BSD',
url = 'https://github.com/dw/mitogen/',
url = 'https://github.com/mitogen-hq/mitogen/',
packages = find_packages(exclude=['tests', 'examples']),
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
zip_safe = False,
classifiers = [
'Environment :: Console',
'Framework :: Ansible',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: System :: Distributed Computing',
'Topic :: System :: Systems Administration',

@ -7,7 +7,7 @@ started in September 2017. Pull requests in this area are very welcome!
## Running The Tests
[![Build Status](https://api.travis-ci.org/dw/mitogen.svg?branch=master)](https://travis-ci.org/dw/mitogen)
[![Build Status](https://dev.azure.com/mitogen-hq/mitogen/_apis/build/status/mitogen-hq.mitogen?branchName=master)](https://dev.azure.com/mitogen-hq/mitogen/_build/latest?definitionId=1&branchName=master)
Your computer should have an Internet connection, and the ``docker`` command
line tool should be able to connect to a working Docker daemon (localhost or

@ -1,3 +1,6 @@
- include: regression/all.yml
- include: integration/all.yml
- import_playbook: setup/all.yml
tags: setup
- import_playbook: regression/all.yml
tags: regression
- import_playbook: integration/all.yml
tags: integration

@ -5,7 +5,11 @@ strategy_plugins = ../../ansible_mitogen/plugins/strategy
inventory_plugins = lib/inventory
action_plugins = lib/action
callback_plugins = lib/callback
stdout_callback = nice_stdout
stdout_callback = yaml
stdout_whitelist =
profile_roles,
timer,
yaml
vars_plugins = lib/vars
library = lib/modules
filter_plugins = lib/filters
@ -31,6 +35,9 @@ timeout = 10
# On Travis, paramiko check fails due to host key checking enabled.
host_key_checking = False
[callback_profile_tasks]
task_output_limit = 10
[ssh_connection]
ssh_args = -o UserKnownHostsFile=/dev/null -o ForwardAgent=yes -o ControlMaster=auto -o ControlPersist=60s
pipelining = True

@ -66,3 +66,6 @@
copy:
src: /tmp/bigbigfile.in
dest: /tmp/bigbigfile.out
tags:
- resource_intensive

@ -2,3 +2,5 @@
tasks:
- include_tasks: _includes.yml
with_sequence: start=1 end=1000
tags:
- resource_intensive

@ -24,3 +24,8 @@
mode: 0644
with_filetree: /tmp/filetree.in
when: item.state == 'file'
loop_control:
label: "/tmp/filetree.out/{{ item.path }}"
tags:
- resource_intensive

@ -8,3 +8,5 @@
tasks:
- command: hostname
with_sequence: start=1 end="{{end|default(100)}}"
tags:
- resource_intensive

@ -110,3 +110,5 @@
- command: hostname
- command: hostname
- command: hostname
tags:
- resource_intensive

@ -1,10 +1,10 @@
- include: copy.yml
- include: fixup_perms2__copy.yml
- include: low_level_execute_command.yml
- include: make_tmp_path.yml
- include: make_tmp_path__double.yml
- include: remote_expand_user.yml
- include: remote_file_exists.yml
- include: remove_tmp_path.yml
- include: synchronize.yml
- include: transfer_data.yml
- import_playbook: copy.yml
- import_playbook: fixup_perms2__copy.yml
- import_playbook: low_level_execute_command.yml
- import_playbook: make_tmp_path.yml
- import_playbook: make_tmp_path__double.yml
- import_playbook: remote_expand_user.yml
- import_playbook: remote_file_exists.yml
- import_playbook: remove_tmp_path.yml
- import_playbook: synchronize.yml
- import_playbook: transfer_data.yml

@ -63,6 +63,7 @@
- stat.results[1].stat.checksum == "62951f943c41cdd326e5ce2b53a779e7916a820d"
- stat.results[2].stat.checksum == "b26dd6444595e2bdb342aa0a91721b57478b5029"
- stat.results[3].stat.checksum == "d675f47e467eae19e49032a2cc39118e12a6ee72"
fail_msg: stat={{stat}}
- file:
state: absent
@ -81,3 +82,5 @@
- /tmp/copy-large-inline-file.out
# end of cleaning out files (again)
tags:
- copy

@ -21,6 +21,7 @@
- assert:
that:
- out.stat.mode in ("0644", "0664")
fail_msg: out={{out}}
#
# copy module (explicit mode).
@ -37,6 +38,7 @@
- assert:
that:
- out.stat.mode == "0400"
fail_msg: out={{out}}
#
# copy module (existing disk files, no mode).
@ -63,6 +65,7 @@
- assert:
that:
- out.stat.mode in ("0644", "0664")
fail_msg: out={{out}}
#
# copy module (existing disk files, preserve mode).
@ -79,6 +82,7 @@
- assert:
that:
- out.stat.mode == "1462"
fail_msg: out={{out}}
#
# copy module (existing disk files, explicit mode).
@ -96,6 +100,7 @@
- assert:
that:
- out.stat.mode == "1461"
fail_msg: out={{out}}
- file:
state: absent
@ -109,3 +114,5 @@
- /tmp/copy-with-mode.out
# end of cleaning out files
tags:
- fixup_perms2__copy

@ -16,6 +16,7 @@
- 'raw.rc == 0'
- 'raw.stdout_lines[-1]|to_text == "2"'
- 'raw.stdout[-1]|to_text == "2"'
fail_msg: raw={{raw}}
- name: Run raw module with sudo
become: true
@ -39,3 +40,6 @@
["root\r\n"],
["root"],
)
fail_msg: raw={{raw}}
tags:
- low_level_execute_command

@ -44,11 +44,13 @@
assert:
that:
- good_temp_path == good_temp_path2
fail_msg: good_temp_path={{good_temp_path}} good_temp_path2={{good_temp_path2}}
- name: "Verify different subdir for both tasks"
assert:
that:
- tmp_path.path != tmp_path2.path
fail_msg: tmp_path={{tmp_path}} tmp_path2={{tmp_path2}}
#
# Verify subdirectory removal.
@ -69,6 +71,7 @@
that:
- not stat1.stat.exists
- not stat2.stat.exists
fail_msg: stat1={{stat1}} stat2={{stat2}}
#
# Verify good directory persistence.
@ -83,6 +86,7 @@
assert:
that:
- stat.stat.exists
fail_msg: stat={{stat}}
#
# Write some junk into the temp path.
@ -105,6 +109,7 @@
- assert:
that:
- not out.stat.exists
fail_msg: out={{out}}
#
# root
@ -123,6 +128,7 @@
that:
- tmp_path2.path != tmp_path_root.path
- tmp_path2.path|dirname != tmp_path_root.path|dirname
fail_msg: tmp_path_root={{tmp_path_root}} tmp_path2={{tmp_path2}}
#
# readonly homedir
@ -153,3 +159,6 @@
that:
- out.module_path.startswith(good_temp_path2)
- out.module_tmpdir.startswith(good_temp_path2)
fail_msg: out={{out}}
tags:
- make_tmp_path

@ -18,3 +18,5 @@
script: |
assert not self._remote_file_exists("{{ out.t1 }}")
assert not self._remote_file_exists("{{ out.t2 }}")
tags:
- make_tmp_path_double

@ -27,6 +27,7 @@
register: out
- assert:
that: out.result == '{{user_facts.ansible_facts.ansible_user_dir}}/foo'
fail_msg: out={{out}}
- name: "Expand ~/foo with become active. ~ is become_user's home."
action_passthrough:
@ -49,6 +50,7 @@
register: out
- assert:
that: out.result == '{{user_facts.ansible_facts.ansible_user_dir}}/foo'
fail_msg: out={{out}}
- name: "Expanding $HOME/foo has no effect."
action_passthrough:
@ -59,6 +61,7 @@
register: out
- assert:
that: out.result == '$HOME/foo'
fail_msg: out={{out}}
# ------------------------
@ -71,6 +74,7 @@
register: out
- assert:
that: out.result == '{{user_facts.ansible_facts.ansible_user_dir}}/foo'
fail_msg: out={{out}}
- name: "sudoable; Expand ~/foo with become active. ~ is become_user's home."
action_passthrough:
@ -94,6 +98,7 @@
register: out
- assert:
that: out.result == '{{user_facts.ansible_facts.ansible_user_dir}}/foo'
fail_msg: out={{out}}
- name: "sudoable; Expanding $HOME/foo has no effect."
action_passthrough:
@ -104,3 +109,6 @@
register: out
- assert:
that: out.result == '$HOME/foo'
fail_msg: out={{out}}
tags:
- remote_expand_user

@ -15,6 +15,7 @@
- assert:
that: out.result == False
fail_msg: out={{out}}
# ---
@ -29,8 +30,10 @@
- assert:
that: out.result == True
fail_msg: out={{out}}
- file:
path: /tmp/does-exist
state: absent
tags:
- remote_file_exists

@ -23,6 +23,7 @@
- assert:
that:
- not out2.stat.exists
fail_msg: out={{out}}
- stat:
path: "{{out.src|dirname}}"
@ -31,7 +32,10 @@
- assert:
that:
- not out2.stat.exists
fail_msg: out={{out}}
- file:
path: /tmp/remove_tmp_path_test
state: absent
tags:
- remove_tmp_path

@ -60,6 +60,7 @@
- assert:
that: outout == "item!"
fail_msg: outout={{outout}}
when: False
# TODO: https://github.com/dw/mitogen/issues/692
@ -71,3 +72,5 @@
# - /tmp/synchronize-action-key
# - /tmp/sync-test
# - /tmp/sync-test.out
tags:
- synchronize

@ -24,6 +24,7 @@
- assert:
that: |
out.content|b64decode == '{"I am JSON": true}'
fail_msg: out={{out}}
# Ensure it handles strings.
@ -40,7 +41,10 @@
- assert:
that:
out.content|b64decode == 'I am text.'
fail_msg: out={{out}}
- file:
path: /tmp/transfer-data
state: absent
tags:
- transfer_data

@ -3,21 +3,39 @@
# This playbook imports all tests that are known to work at present.
#
- include: action/all.yml
- include: async/all.yml
- include: become/all.yml
- include: connection/all.yml
- include: connection_delegation/all.yml
- include: connection_loader/all.yml
- include: context_service/all.yml
- include: glibc_caches/all.yml
- include: interpreter_discovery/all.yml
- include: local/all.yml
- include: module_utils/all.yml
- include: playbook_semantics/all.yml
- include: process/all.yml
- include: runner/all.yml
- include: ssh/all.yml
- include: strategy/all.yml
- include: stub_connections/all.yml
- include: transport_config/all.yml
- import_playbook: action/all.yml
tags: action
- import_playbook: async/all.yml
tags: async
- import_playbook: become/all.yml
tags: become
- import_playbook: connection/all.yml
tags: connection
- import_playbook: connection_delegation/all.yml
tags: connection_delegation
- import_playbook: connection_loader/all.yml
tags: connection_loader
- import_playbook: context_service/all.yml
tags: context_service
- import_playbook: glibc_caches/all.yml
tags: glibc_caches
- import_playbook: interpreter_discovery/all.yml
tags: interpreter_discovery
- import_playbook: local/all.yml
tags: local
- import_playbook: module_utils/all.yml
tags: module_utils
- import_playbook: playbook_semantics/all.yml
tags: playbook_semantics
- import_playbook: process/all.yml
tags: process
- import_playbook: runner/all.yml
tags: runner
- import_playbook: ssh/all.yml
tags: ssh
- import_playbook: strategy/all.yml
tags: strategy
- import_playbook: stub_connections/all.yml
tags: stub_connections
- import_playbook: transport_config/all.yml
tags: transport_config

@ -1,9 +1,9 @@
- include: multiple_items_loop.yml
- include: result_binary_producing_json.yml
- include: result_binary_producing_junk.yml
- include: result_shell_echo_hi.yml
- include: runner_new_process.yml
- include: runner_one_job.yml
- include: runner_timeout_then_polling.yml
- include: runner_two_simultaneous_jobs.yml
- include: runner_with_polling_and_timeout.yml
- import_playbook: multiple_items_loop.yml
- import_playbook: result_binary_producing_json.yml
- import_playbook: result_binary_producing_junk.yml
- import_playbook: result_shell_echo_hi.yml
- import_playbook: runner_new_process.yml
- import_playbook: runner_one_job.yml
- import_playbook: runner_timeout_then_polling.yml
- import_playbook: runner_two_simultaneous_jobs.yml
- import_playbook: runner_with_polling_and_timeout.yml
