Merge pull request #1003 from moreati/release-0.3.4

Release 0.3.4
v0.3.4
Alex Willmer, committed via GitHub
commit d26ded2ecb

@ -1,6 +1,7 @@
#!/usr/bin/env python
# Run tests/ansible/all.yml under Ansible and Ansible-Mitogen
import collections
import glob
import os
import signal
@ -44,6 +45,12 @@ with ci_lib.Fold('job_setup'):
if not path.endswith('default.hosts'):
ci_lib.run("ln -s %s %s", path, HOSTS_DIR)
distros = collections.defaultdict(list)
families = collections.defaultdict(list)
for container in containers:
distros[container['distro']].append(container['name'])
families[container['family']].append(container['name'])
inventory_path = os.path.join(HOSTS_DIR, 'target')
with open(inventory_path, 'w') as fp:
fp.write('[test-targets]\n')
@ -59,6 +66,16 @@ with ci_lib.Fold('job_setup'):
for container in containers
)
for distro, hostnames in sorted(distros.items(), key=lambda t: t[0]):
fp.write('\n[%s]\n' % distro)
fp.writelines('%s\n' % name for name in hostnames)
for family, hostnames in sorted(families.items(), key=lambda t: t[0]):
fp.write('\n[%s]\n' % family)
fp.writelines('%s\n' % name for name in hostnames)
fp.write('\n[linux:children]\ntest-targets\n')
ci_lib.dump_file(inventory_path)
if not ci_lib.exists_in_path('sshpass'):
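For reference, the inventory written above resembles the following (a sketch assuming a single hypothetical centos6 target; the per-host variables written under [test-targets] are elided):

    [test-targets]
    target-centos6-1 ...

    [centos6]
    target-centos6-1

    [centos]
    target-centos6-1

    [linux:children]
    test-targets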

@ -1,8 +1,9 @@
# Each step entry runs a task (Azure Pipelines analog of an Ansible module).
# https://learn.microsoft.com/en-us/azure/devops/pipelines/tasks/reference/?view=azure-pipelines&viewFallbackFrom=azure-devops#tool
parameters:
name: ''
pool: ''
sign: false
# `{script: ...}` is shorthand for `{task: CmdLine@<mumble>, inputs: {script: ...}}`.
# https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-script?view=azure-pipelines
# https://learn.microsoft.com/en-us/azure/devops/pipelines/tasks/reference/cmd-line-v2?view=azure-pipelines
steps:
- task: UsePythonVersion@0
@ -11,7 +12,18 @@ steps:
versionSpec: '$(python.version)'
condition: ne(variables['python.version'], '')
- script: python -mpip install tox
- script: |
type python
python --version
displayName: Show python version
- script: |
sudo apt-get update
sudo apt-get install -y python2-dev python3-pip virtualenv
displayName: Install build deps
condition: and(eq(variables['python.version'], ''), eq(variables['Agent.OS'], 'Linux'))
- script: python -mpip install "tox<4.0"
displayName: Install tooling
- script: python -mtox -e "$(tox.env)"
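# For example (hypothetical matrix entry): python.version '3.10' with
# tox.env py310-mode_mitogen makes UsePythonVersion select CPython 3.10,
# so the final step runs `python -mtox -e py310-mode_mitogen`.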

@ -9,69 +9,18 @@
#ANSIBLE_VERBOSITY: 3
jobs:
- job: Mac1015
- job: mac11
# vanilla Ansible is really slow
timeoutInMinutes: 120
steps:
- template: azure-pipelines-steps.yml
pool:
# https://github.com/actions/virtual-environments/blob/main/images/macos/macos-10.15-Readme.md
vmImage: macOS-10.15
strategy:
matrix:
Mito_27:
python.version: '2.7'
tox.env: py27-mode_mitogen
Mito_36:
python.version: '3.6'
tox.env: py36-mode_mitogen
Mito_310:
python.version: '3.10'
tox.env: py310-mode_mitogen
# TODO: test python3, python3 tests are broken
Loc_27_210:
python.version: '2.7'
tox.env: py27-mode_localhost-ansible2.10
Loc_27_3:
python.version: '2.7'
tox.env: py27-mode_localhost-ansible3
Loc_27_4:
python.version: '2.7'
tox.env: py27-mode_localhost-ansible4
# NOTE: this hangs when run on Ubuntu 18.04
Van_27_210:
python.version: '2.7'
tox.env: py27-mode_localhost-ansible2.10
STRATEGY: linear
ANSIBLE_SKIP_TAGS: resource_intensive
Van_27_3:
python.version: '2.7'
tox.env: py27-mode_localhost-ansible3
STRATEGY: linear
ANSIBLE_SKIP_TAGS: resource_intensive
Van_27_4:
python.version: '2.7'
tox.env: py27-mode_localhost-ansible4
STRATEGY: linear
ANSIBLE_SKIP_TAGS: resource_intensive
- job: Mac11
# vanilla Ansible is really slow
timeoutInMinutes: 120
steps:
- template: azure-pipelines-steps.yml
pool:
# https://github.com/actions/virtual-environments/blob/main/images/macos/
# https://github.com/actions/runner-images/blob/main/images/macos/macos-11-Readme.md
vmImage: macOS-11
strategy:
matrix:
Mito_27:
tox.env: py27-mode_mitogen
Mito_37:
python.version: '3.7'
tox.env: py37-mode_mitogen
Mito_310:
python.version: '3.10'
tox.env: py310-mode_mitogen
@ -79,8 +28,6 @@ jobs:
# TODO: test python3, python3 tests are broken
Loc_27_210:
tox.env: py27-mode_localhost-ansible2.10
Loc_27_3:
tox.env: py27-mode_localhost-ansible3
Loc_27_4:
tox.env: py27-mode_localhost-ansible4
@ -89,10 +36,6 @@ jobs:
tox.env: py27-mode_localhost-ansible2.10
STRATEGY: linear
ANSIBLE_SKIP_TAGS: resource_intensive
Van_27_3:
tox.env: py27-mode_localhost-ansible3
STRATEGY: linear
ANSIBLE_SKIP_TAGS: resource_intensive
Van_27_4:
tox.env: py27-mode_localhost-ansible4
STRATEGY: linear
@ -100,38 +43,29 @@ jobs:
- job: Linux
pool:
# https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu1804-README.md
vmImage: "Ubuntu 18.04"
# https://github.com/actions/runner-images/blob/main/images/linux/Ubuntu2004-Readme.md
vmImage: ubuntu-20.04
steps:
- template: azure-pipelines-steps.yml
strategy:
matrix:
Mito_27_centos6:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_centos6
Mito_27_centos7:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_centos7
Mito_27_centos8:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_centos8
Mito_27_debian9:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_debian9
Mito_27_debian10:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_debian10
Mito_27_debian11:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_debian11
Mito_27_ubuntu1604:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_ubuntu1604
Mito_27_ubuntu1804:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_ubuntu1804
Mito_27_ubuntu2004:
python.version: '2.7'
tox.env: py27-mode_mitogen-distro_ubuntu2004
Mito_36_centos6:
@ -190,59 +124,14 @@ jobs:
python.version: '3.10'
tox.env: py310-mode_mitogen-distro_ubuntu2004
#DebOps_2460_27_27:
#python.version: '2.7'
#MODE: debops_common
#VER: 2.4.6.0
#DebOps_262_36_27:
#python.version: '3.6'
#MODE: debops_common
#VER: 2.6.2
#Ansible_2460_26:
#python.version: '2.7'
#MODE: ansible
#VER: 2.4.6.0
#Ansible_262_26:
#python.version: '2.7'
#MODE: ansible
#VER: 2.6.2
#Ansible_2460_36:
#python.version: '3.6'
#MODE: ansible
#VER: 2.4.6.0
#Ansible_262_36:
#python.version: '3.6'
#MODE: ansible
#VER: 2.6.2
#Vanilla_262_27:
#python.version: '2.7'
#MODE: ansible
#VER: 2.6.2
#DISTROS: debian
#STRATEGY: linear
Ans_27_210:
python.version: '2.7'
tox.env: py27-mode_ansible-ansible2.10
Ans_27_3:
python.version: '2.7'
tox.env: py27-mode_ansible-ansible3
Ans_27_4:
python.version: '2.7'
tox.env: py27-mode_ansible-ansible4
Ans_36_210:
python.version: '3.6'
tox.env: py36-mode_ansible-ansible2.10
Ans_36_3:
python.version: '3.6'
tox.env: py36-mode_ansible-ansible3
Ans_36_4:
python.version: '3.6'
tox.env: py36-mode_ansible-ansible4
@ -259,3 +148,6 @@ jobs:
Ans_310_5:
python.version: '3.10'
tox.env: py310-mode_ansible-ansible5
Ans_310_6:
python.version: '3.10'
tox.env: py310-mode_ansible-ansible6

@ -2,13 +2,16 @@ from __future__ import absolute_import
from __future__ import print_function
import atexit
import errno
import os
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import subprocess32 as subprocess
try:
import urlparse
except ImportError:
@ -30,40 +33,30 @@ def print(*args, **kwargs):
file.flush()
#
# check_output() monkeypatch cutpasted from testlib.py
#
def subprocess__check_output(*popenargs, **kwargs):
# Missing from 2.6.
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, _ = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise subprocess.CalledProcessError(retcode, cmd)
return output
if not hasattr(subprocess, 'check_output'):
subprocess.check_output = subprocess__check_output
def _have_cmd(args):
try:
subprocess.run(
args, check=True,  # nonzero exit raises CalledProcessError, handled below
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
)
except OSError as exc:
if exc.errno == errno.ENOENT:
return False
raise
except subprocess.CalledProcessError:
return False
return True
# ------------------
def have_apt():
proc = subprocess.Popen('apt --help >/dev/null 2>/dev/null', shell=True)
return proc.wait() == 0
return _have_cmd(['apt', '--help'])
def have_brew():
proc = subprocess.Popen('brew help >/dev/null 2>/dev/null', shell=True)
return proc.wait() == 0
return _have_cmd(['brew', 'help'])
def have_docker():
proc = subprocess.Popen('docker info >/dev/null 2>/dev/null', shell=True)
return proc.wait() == 0
return _have_cmd(['docker', 'info'])
def _argv(s, *args):
@ -229,31 +222,20 @@ def get_docker_hostname():
return parsed.netloc.partition(':')[0]
def image_for_distro(distro):
"""Return the container image name or path for a test distro name.
The returned value is suitable for use with `docker pull`.
>>> image_for_distro('centos5')
'public.ecr.aws/n5z0e8q9/centos5-test'
>>> image_for_distro('centos5-something_custom')
'public.ecr.aws/n5z0e8q9/centos5-test'
"""
return 'public.ecr.aws/n5z0e8q9/%s-test' % (distro.partition('-')[0],)
def make_containers(name_prefix='', port_offset=0):
"""
>>> import pprint
>>> BASE_PORT=2200; DISTROS=['debian', 'centos6']
>>> BASE_PORT=2200; DISTROS=['debian11', 'centos6']
>>> pprint.pprint(make_containers())
[{'distro': 'debian',
[{'distro': 'debian11',
'family': 'debian',
'hostname': 'localhost',
'image': 'public.ecr.aws/n5z0e8q9/debian-test',
'name': 'target-debian-1',
'image': 'public.ecr.aws/n5z0e8q9/debian11-test',
'name': 'target-debian11-1',
'port': 2201,
'python_path': '/usr/bin/python'},
{'distro': 'centos6',
'family': 'centos',
'hostname': 'localhost',
'image': 'public.ecr.aws/n5z0e8q9/centos6-test',
'name': 'target-centos6-2',
@ -261,31 +243,39 @@ def make_containers(name_prefix='', port_offset=0):
'python_path': '/usr/bin/python'}]
"""
docker_hostname = get_docker_hostname()
firstbit = lambda s: (s+'-').split('-')[0]
secondbit = lambda s: (s+'-').split('-')[1]
distro_pattern = re.compile(r'''
(?P<distro>(?P<family>[a-z]+)[0-9]+)
(?:-(?P<py>py3))?
(?:\*(?P<count>[0-9]+))?
''',
re.VERBOSE,
)
i = 1
lst = []
for distro in DISTROS:
distro, star, count = distro.partition('*')
if star:
d = distro_pattern.match(distro).groupdict(default=None)
distro = d['distro']
family = d['family']
image = 'public.ecr.aws/n5z0e8q9/%s-test' % (distro,)
if d['py'] == 'py3':
python_path = '/usr/bin/python3'
else:
python_path = '/usr/bin/python'
if d['count']:
count = int(d['count'])
else:
count = 1
for x in range(count):
lst.append({
"distro": firstbit(distro),
"image": image_for_distro(distro),
"distro": distro, "family": family, "image": image,
"name": name_prefix + ("target-%s-%s" % (distro, i)),
"hostname": docker_hostname,
"port": BASE_PORT + i + port_offset,
"python_path": (
'/usr/bin/python3'
if secondbit(distro) == 'py3'
else '/usr/bin/python'
)
"python_path": python_path,
})
i += 1
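A quick illustration of the parsing above (hypothetical DISTROS entry):

    d = distro_pattern.match('debian11-py3*2').groupdict(default=None)
    # d == {'distro': 'debian11', 'family': 'debian', 'py': 'py3', 'count': '2'}
    # => two containers, target-debian11-1 and target-debian11-2, using
    #    /usr/bin/python3 on ports BASE_PORT+1 and BASE_PORT+2.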
@ -310,18 +300,24 @@ def proc_is_docker(pid):
def get_interesting_procs(container_name=None):
"""
Return a list of (pid, line) tuples for processes considered interesting.
"""
args = ['ps', 'ax', '-oppid=', '-opid=', '-ocomm=', '-ocommand=']
if container_name is not None:
args = ['docker', 'exec', container_name] + args
out = []
for line in subprocess__check_output(args).decode().splitlines():
for line in subprocess.check_output(args).decode().splitlines():
ppid, pid, comm, rest = line.split(None, 3)
if (
(
any(comm.startswith(s) for s in INTERESTING_COMMS) or
'mitogen:' in rest
) and
(
'WALinuxAgent' not in rest
) and
(
container_name is not None or
(not proc_is_docker(pid))

@ -1,318 +0,0 @@
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility with Python 2.4 and Python 2.5 and (currently) has
significant performance advantages, even without using the optional C
extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print json.dumps("\"foo\bar")
"\"foo\bar"
>>> print json.dumps(u'\u1234')
"\u1234"
>>> print json.dumps('\\')
"\\"
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
{"a": 0, "b": 0, "c": 0}
>>> from StringIO import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from StringIO import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> import decimal
>>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
... raise TypeError(repr(obj) + " is not JSON serializable")
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
__version__ = '2.0.9'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONEncoder',
]
__author__ = 'Bob Ippolito <bob@redivi.com>'
from decoder import JSONDecoder
from encoder import JSONEncoder
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
encoding='utf-8',
default=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, **kw):
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
``.write()``-supporting file-like object).
If ``skipkeys`` is true then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then some chunks written to ``fp``
may be ``unicode`` instances, subject to normal Python ``str`` to
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
to cause an error.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
in strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a non-negative integer, then JSON array elements and object
members will be pretty-printed with that indent level. An indent level
of 0 will only insert newlines. ``None`` is the most compact representation.
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
then it will be used instead of the default ``(', ', ': ')`` separators.
``(',', ':')`` is the most compact JSON representation.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and not kw):
iterable = _default_encoder.iterencode(obj)
else:
if cls is None:
cls = JSONEncoder
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding,
default=default, **kw).iterencode(obj)
# could accelerate with writelines in some versions of Python, at
# a debuggability cost
for chunk in iterable:
fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, **kw):
"""Serialize ``obj`` to a JSON formatted ``str``.
If ``skipkeys`` is true then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then the return value will be a
``unicode`` instance subject to normal Python ``str`` to ``unicode``
coercion rules instead of being escaped to an ASCII ``str``.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
representation.
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
then it will be used instead of the default ``(', ', ': ')`` separators.
``(',', ':')`` is the most compact JSON representation.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and not kw):
return _default_encoder.encode(obj)
if cls is None:
cls = JSONEncoder
return cls(
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding, default=default,
**kw).encode(obj)
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, **kw):
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
If the contents of ``fp`` are encoded with an ASCII based encoding other
than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
be specified. Encodings that are not ASCII based (such as UCS-2) are
not allowed; such a stream should be wrapped with
``codecs.getreader(encoding)(fp)``, or simply decoded to a ``unicode``
object and passed to ``loads()``.
``object_hook`` is an optional function that will be called with the
result of any object literal decode (a ``dict``). The return value of
``object_hook`` will be used instead of the ``dict``. This feature
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg.
"""
return loads(fp.read(),
encoding=encoding, cls=cls, object_hook=object_hook,
parse_float=parse_float, parse_int=parse_int,
parse_constant=parse_constant, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, **kw):
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
must be specified. Encodings that are not ASCII based (such as UCS-2)
are not allowed and should be decoded to ``unicode`` first.
``object_hook`` is an optional function that will be called with the
result of any object literal decode (a ``dict``). The return value of
``object_hook`` will be used instead of the ``dict``. This feature
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
``parse_float``, if specified, will be called with the string
of every JSON float to be decoded. By default this is equivalent to
float(num_str). This can be used to use another datatype or parser
for JSON floats (e.g. decimal.Decimal).
``parse_int``, if specified, will be called with the string
of every JSON int to be decoded. By default this is equivalent to
int(num_str). This can be used to use another datatype or parser
for JSON integers (e.g. float).
``parse_constant``, if specified, will be called with one of the
following strings: -Infinity, Infinity, NaN.
This can be used to raise an exception if invalid JSON numbers
are encountered.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg.
"""
if (cls is None and encoding is None and object_hook is None and
parse_int is None and parse_float is None and
parse_constant is None and not kw):
return _default_decoder.decode(s)
if cls is None:
cls = JSONDecoder
if object_hook is not None:
kw['object_hook'] = object_hook
if parse_float is not None:
kw['parse_float'] = parse_float
if parse_int is not None:
kw['parse_int'] = parse_int
if parse_constant is not None:
kw['parse_constant'] = parse_constant
return cls(encoding=encoding, **kw).decode(s)

@ -1,354 +0,0 @@
"""Implementation of JSONDecoder
"""
import re
import sys
import struct
from simplejson.scanner import make_scanner
try:
from simplejson._speedups import scanstring as c_scanstring
except ImportError:
c_scanstring = None
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
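# 16 hex-decoded bytes: big-endian IEEE-754 quiet NaN (7FF8...) followed
# by +Infinity (7FF0...); each half is byte-swapped below on little-endian hosts.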
if sys.byteorder != 'big':
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
nan, inf = struct.unpack('dd', _BYTES)
return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
def linecol(doc, pos):
lineno = doc.count('\n', 0, pos) + 1
if lineno == 1:
colno = pos
else:
colno = pos - doc.rindex('\n', 0, pos)
return lineno, colno
def errmsg(msg, doc, pos, end=None):
# Note that this function is called from _speedups
lineno, colno = linecol(doc, pos)
if end is None:
#fmt = '{0}: line {1} column {2} (char {3})'
#return fmt.format(msg, lineno, colno, pos)
fmt = '%s: line %d column %d (char %d)'
return fmt % (msg, lineno, colno, pos)
endlineno, endcolno = linecol(doc, end)
#fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
#return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
_CONSTANTS = {
'-Infinity': NegInf,
'Infinity': PosInf,
'NaN': NaN,
}
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
BACKSLASH = {
'"': u'"', '\\': u'\\', '/': u'/',
'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
"""Scan the string s for a JSON string. End is the index of the
character in s after the quote that started the JSON string.
Unescapes all valid JSON string escape sequences and raises ValueError
on attempt to decode an invalid string. If strict is False then literal
control characters are allowed in the string.
Returns a tuple of the decoded string and the index of the character in s
after the end quote."""
if encoding is None:
encoding = DEFAULT_ENCODING
chunks = []
_append = chunks.append
begin = end - 1
while 1:
chunk = _m(s, end)
if chunk is None:
raise ValueError(
errmsg("Unterminated string starting at", s, begin))
end = chunk.end()
content, terminator = chunk.groups()
# Content contains zero or more unescaped string characters
if content:
if not isinstance(content, unicode):
content = unicode(content, encoding)
_append(content)
# Terminator is the end of string, a literal control character,
# or a backslash denoting that an escape sequence follows
if terminator == '"':
break
elif terminator != '\\':
if strict:
msg = "Invalid control character %r at" % (terminator,)
#msg = "Invalid control character {0!r} at".format(terminator)
raise ValueError(errmsg(msg, s, end))
else:
_append(terminator)
continue
try:
esc = s[end]
except IndexError:
raise ValueError(
errmsg("Unterminated string starting at", s, begin))
# If not a unicode escape sequence, must be in the lookup table
if esc != 'u':
try:
char = _b[esc]
except KeyError:
msg = "Invalid \\escape: " + repr(esc)
raise ValueError(errmsg(msg, s, end))
end += 1
else:
# Unicode escape sequence
esc = s[end + 1:end + 5]
next_end = end + 5
if len(esc) != 4:
msg = "Invalid \\uXXXX escape"
raise ValueError(errmsg(msg, s, end))
uni = int(esc, 16)
# Check for surrogate pair on UCS-4 systems
if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
if not s[end + 5:end + 7] == '\\u':
raise ValueError(errmsg(msg, s, end))
esc2 = s[end + 7:end + 11]
if len(esc2) != 4:
raise ValueError(errmsg(msg, s, end))
uni2 = int(esc2, 16)
uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
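# Worked example: '\ud83d\ude00' gives (0xd83d-0xd800)<<10 | (0xde00-0xdc00)
# = 0xf600; 0x10000 + 0xf600 = 0x1f600, i.e. U+1F600.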
next_end += 6
char = unichr(uni)
end = next_end
# Append the unescaped character
_append(char)
return u''.join(chunks), end
# Use speedup if available
scanstring = c_scanstring or py_scanstring
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
pairs = {}
# Use a slice to prevent IndexError from being raised, the following
# check will raise a more specific ValueError if the string is empty
nextchar = s[end:end + 1]
# Normally we expect nextchar == '"'
if nextchar != '"':
if nextchar in _ws:
end = _w(s, end).end()
nextchar = s[end:end + 1]
# Trivial empty object
if nextchar == '}':
return pairs, end + 1
elif nextchar != '"':
raise ValueError(errmsg("Expecting property name", s, end))
end += 1
while True:
key, end = scanstring(s, end, encoding, strict)
# To skip some function call overhead we optimize the fast paths where
# the JSON key separator is ": " or just ":".
if s[end:end + 1] != ':':
end = _w(s, end).end()
if s[end:end + 1] != ':':
raise ValueError(errmsg("Expecting : delimiter", s, end))
end += 1
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
try:
value, end = scan_once(s, end)
except StopIteration:
raise ValueError(errmsg("Expecting object", s, end))
pairs[key] = value
try:
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar == '}':
break
elif nextchar != ',':
raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
try:
nextchar = s[end]
if nextchar in _ws:
end += 1
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar != '"':
raise ValueError(errmsg("Expecting property name", s, end - 1))
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
values = []
nextchar = s[end:end + 1]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end:end + 1]
# Look-ahead for trivial empty array
if nextchar == ']':
return values, end + 1
_append = values.append
while True:
try:
value, end = scan_once(s, end)
except StopIteration:
raise ValueError(errmsg("Expecting object", s, end))
_append(value)
nextchar = s[end:end + 1]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end:end + 1]
end += 1
if nextchar == ']':
break
elif nextchar != ',':
raise ValueError(errmsg("Expecting , delimiter", s, end))
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
return values, end
class JSONDecoder(object):
"""Simple JSON <http://json.org> decoder
Performs the following translations in decoding by default:
+---------------+-------------------+
| JSON | Python |
+===============+===================+
| object | dict |
+---------------+-------------------+
| array | list |
+---------------+-------------------+
| string | unicode |
+---------------+-------------------+
| number (int) | int, long |
+---------------+-------------------+
| number (real) | float |
+---------------+-------------------+
| true | True |
+---------------+-------------------+
| false | False |
+---------------+-------------------+
| null | None |
+---------------+-------------------+
It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
their corresponding ``float`` values, which is outside the JSON spec.
"""
def __init__(self, encoding=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, strict=True):
"""``encoding`` determines the encoding used to interpret any ``str``
objects decoded by this instance (utf-8 by default). It has no
effect when decoding ``unicode`` objects.
Note that currently only encodings that are a superset of ASCII work;
strings of other encodings should be passed in as ``unicode``.
``object_hook``, if specified, will be called with the result
of every JSON object decoded and its return value will be used in
place of the given ``dict``. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
``parse_float``, if specified, will be called with the string
of every JSON float to be decoded. By default this is equivalent to
float(num_str). This can be used to use another datatype or parser
for JSON floats (e.g. decimal.Decimal).
``parse_int``, if specified, will be called with the string
of every JSON int to be decoded. By default this is equivalent to
int(num_str). This can be used to use another datatype or parser
for JSON integers (e.g. float).
``parse_constant``, if specified, will be called with one of the
following strings: -Infinity, Infinity, NaN.
This can be used to raise an exception if invalid JSON numbers
are encountered.
"""
self.encoding = encoding
self.object_hook = object_hook
self.parse_float = parse_float or float
self.parse_int = parse_int or int
self.parse_constant = parse_constant or _CONSTANTS.__getitem__
self.strict = strict
self.parse_object = JSONObject
self.parse_array = JSONArray
self.parse_string = scanstring
self.scan_once = make_scanner(self)
def decode(self, s, _w=WHITESPACE.match):
"""Return the Python representation of ``s`` (a ``str`` or ``unicode``
instance containing a JSON document)
"""
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
end = _w(s, end).end()
if end != len(s):
raise ValueError(errmsg("Extra data", s, end, len(s)))
return obj
def raw_decode(self, s, idx=0):
"""Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
with a JSON document) and return a 2-tuple of the Python
representation and the index in ``s`` where the document ended.
This can be used to decode a JSON document from a string that may
have extraneous data at the end.
"""
try:
obj, end = self.scan_once(s, idx)
except StopIteration:
raise ValueError("No JSON object could be decoded")
return obj, end

@ -1,440 +0,0 @@
"""Implementation of JSONEncoder
"""
import re
try:
from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
c_encode_basestring_ascii = None
try:
from simplejson._speedups import make_encoder as c_make_encoder
except ImportError:
c_make_encoder = None
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
ESCAPE_DCT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
}
for i in range(0x20):
#ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# Assume this produces an infinity on all machines (probably not guaranteed)
INFINITY = float('1e66666')
FLOAT_REPR = repr
def encode_basestring(s):
"""Return a JSON representation of a Python string
"""
def replace(match):
return ESCAPE_DCT[match.group(0)]
return '"' + ESCAPE.sub(replace, s) + '"'
def py_encode_basestring_ascii(s):
"""Return an ASCII-only JSON representation of a Python string
"""
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
s = match.group(0)
try:
return ESCAPE_DCT[s]
except KeyError:
n = ord(s)
if n < 0x10000:
#return '\\u{0:04x}'.format(n)
return '\\u%04x' % (n,)
else:
# surrogate pair
n -= 0x10000
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
#return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
return '\\u%04x\\u%04x' % (s1, s2)
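# Worked example: U+1F600 gives n = 0xf600, s1 = 0xd83d, s2 = 0xde00,
# i.e. the escape '\ud83d\ude00'.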
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
class JSONEncoder(object):
"""Extensible JSON <http://json.org> encoder for Python data structures.
Supports the following objects and types by default:
+-------------------+---------------+
| Python | JSON |
+===================+===============+
| dict | object |
+-------------------+---------------+
| list, tuple | array |
+-------------------+---------------+
| str, unicode | string |
+-------------------+---------------+
| int, long, float | number |
+-------------------+---------------+
| True | true |
+-------------------+---------------+
| False | false |
+-------------------+---------------+
| None | null |
+-------------------+---------------+
To extend this to recognize other objects, subclass and implement a
``.default()`` method that returns a serializable object for ``o`` if
possible, otherwise it should call the superclass implementation (to
raise ``TypeError``).
"""
item_separator = ', '
key_separator = ': '
def __init__(self, skipkeys=False, ensure_ascii=True,
check_circular=True, allow_nan=True, sort_keys=False,
indent=None, separators=None, encoding='utf-8', default=None):
"""Constructor for JSONEncoder, with sensible defaults.
If skipkeys is false, then it is a TypeError to attempt
encoding of keys that are not str, int, long, float or None. If
skipkeys is True, such items are simply skipped.
If ensure_ascii is true, the output is guaranteed to be str
objects with all incoming unicode characters escaped. If
ensure_ascii is false, the output will be a unicode object.
If check_circular is true, then lists, dicts, and custom encoded
objects will be checked for circular references during encoding to
prevent an infinite recursion (which would cause an OverflowError).
Otherwise, no such check takes place.
If allow_nan is true, then NaN, Infinity, and -Infinity will be
encoded as such. This behavior is not JSON specification compliant,
but is consistent with most JavaScript based encoders and decoders.
Otherwise, it will be a ValueError to encode such floats.
If sort_keys is true, then the output of dictionaries will be
sorted by key; this is useful for regression tests to ensure
that JSON serializations can be compared on a day-to-day basis.
If indent is a non-negative integer, then JSON array
elements and object members will be pretty-printed with that
indent level. An indent level of 0 will only insert newlines.
None is the most compact representation.
If specified, separators should be a (item_separator, key_separator)
tuple. The default is (', ', ': '). To get the most compact JSON
representation you should specify (',', ':') to eliminate whitespace.
If specified, default is a function that gets called for objects
that can't otherwise be serialized. It should return a JSON encodable
version of the object or raise a ``TypeError``.
If encoding is not None, then all input strings will be
transformed into unicode using that encoding prior to JSON-encoding.
The default is UTF-8.
"""
self.skipkeys = skipkeys
self.ensure_ascii = ensure_ascii
self.check_circular = check_circular
self.allow_nan = allow_nan
self.sort_keys = sort_keys
self.indent = indent
if separators is not None:
self.item_separator, self.key_separator = separators
if default is not None:
self.default = default
self.encoding = encoding
def default(self, o):
"""Implement this method in a subclass such that it returns
a serializable object for ``o``, or calls the base implementation
(to raise a ``TypeError``).
For example, to support arbitrary iterators, you could
implement default like this::
def default(self, o):
try:
iterable = iter(o)
except TypeError:
pass
else:
return list(iterable)
return JSONEncoder.default(self, o)
"""
raise TypeError(repr(o) + " is not JSON serializable")
def encode(self, o):
"""Return a JSON string representation of a Python data structure.
>>> JSONEncoder().encode({"foo": ["bar", "baz"]})
'{"foo": ["bar", "baz"]}'
"""
# This is for extremely simple cases and benchmarks.
if isinstance(o, basestring):
if isinstance(o, str):
_encoding = self.encoding
if (_encoding is not None
and not (_encoding == 'utf-8')):
o = o.decode(_encoding)
if self.ensure_ascii:
return encode_basestring_ascii(o)
else:
return encode_basestring(o)
# This doesn't pass the iterator directly to ''.join() because the
# exceptions aren't as detailed. The list call should be roughly
# equivalent to the PySequence_Fast that ''.join() would do.
chunks = self.iterencode(o, _one_shot=True)
if not isinstance(chunks, (list, tuple)):
chunks = list(chunks)
return ''.join(chunks)
def iterencode(self, o, _one_shot=False):
"""Encode the given object and yield each string
representation as available.
For example::
for chunk in JSONEncoder().iterencode(bigobject):
mysocket.write(chunk)
"""
if self.check_circular:
markers = {}
else:
markers = None
if self.ensure_ascii:
_encoder = encode_basestring_ascii
else:
_encoder = encode_basestring
if self.encoding != 'utf-8':
def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
if isinstance(o, str):
o = o.decode(_encoding)
return _orig_encoder(o)
def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
# Check for specials. Note that this type of test is processor- and/or
# platform-specific, so do tests which don't depend on the internals.
if o != o:
text = 'NaN'
elif o == _inf:
text = 'Infinity'
elif o == _neginf:
text = '-Infinity'
else:
return _repr(o)
if not allow_nan:
raise ValueError(
"Out of range float values are not JSON compliant: " +
repr(o))
return text
if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
_iterencode = c_make_encoder(
markers, self.default, _encoder, self.indent,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, self.allow_nan)
else:
_iterencode = _make_iterencode(
markers, self.default, _encoder, self.indent, floatstr,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, _one_shot)
return _iterencode(o, 0)
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
## HACK: hand-optimized bytecode; turn globals into locals
False=False,
True=True,
ValueError=ValueError,
basestring=basestring,
dict=dict,
float=float,
id=id,
int=int,
isinstance=isinstance,
list=list,
long=long,
str=str,
tuple=tuple,
):
def _iterencode_list(lst, _current_indent_level):
if not lst:
yield '[]'
return
if markers is not None:
markerid = id(lst)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = lst
buf = '['
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
separator = _item_separator + newline_indent
buf += newline_indent
else:
newline_indent = None
separator = _item_separator
first = True
for value in lst:
if first:
first = False
else:
buf = separator
if isinstance(value, basestring):
yield buf + _encoder(value)
elif value is None:
yield buf + 'null'
elif value is True:
yield buf + 'true'
elif value is False:
yield buf + 'false'
elif isinstance(value, (int, long)):
yield buf + str(value)
elif isinstance(value, float):
yield buf + _floatstr(value)
else:
yield buf
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (' ' * (_indent * _current_indent_level))
yield ']'
if markers is not None:
del markers[markerid]
def _iterencode_dict(dct, _current_indent_level):
if not dct:
yield '{}'
return
if markers is not None:
markerid = id(dct)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = dct
yield '{'
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
item_separator = _item_separator + newline_indent
yield newline_indent
else:
newline_indent = None
item_separator = _item_separator
first = True
if _sort_keys:
items = dct.items()
items.sort(key=lambda kv: kv[0])
else:
items = dct.iteritems()
for key, value in items:
if isinstance(key, basestring):
pass
# JavaScript is weakly typed for these, so it makes sense to
# also allow them. Many encoders seem to do something like this.
elif isinstance(key, float):
key = _floatstr(key)
elif key is True:
key = 'true'
elif key is False:
key = 'false'
elif key is None:
key = 'null'
elif isinstance(key, (int, long)):
key = str(key)
elif _skipkeys:
continue
else:
raise TypeError("key " + repr(key) + " is not a string")
if first:
first = False
else:
yield item_separator
yield _encoder(key)
yield _key_separator
if isinstance(value, basestring):
yield _encoder(value)
elif value is None:
yield 'null'
elif value is True:
yield 'true'
elif value is False:
yield 'false'
elif isinstance(value, (int, long)):
yield str(value)
elif isinstance(value, float):
yield _floatstr(value)
else:
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (' ' * (_indent * _current_indent_level))
yield '}'
if markers is not None:
del markers[markerid]
def _iterencode(o, _current_indent_level):
if isinstance(o, basestring):
yield _encoder(o)
elif o is None:
yield 'null'
elif o is True:
yield 'true'
elif o is False:
yield 'false'
elif isinstance(o, (int, long)):
yield str(o)
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, (list, tuple)):
for chunk in _iterencode_list(o, _current_indent_level):
yield chunk
elif isinstance(o, dict):
for chunk in _iterencode_dict(o, _current_indent_level):
yield chunk
else:
if markers is not None:
markerid = id(o)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
for chunk in _iterencode(o, _current_indent_level):
yield chunk
if markers is not None:
del markers[markerid]
return _iterencode

@ -1,65 +0,0 @@
"""JSON token scanner
"""
import re
try:
from simplejson._speedups import make_scanner as c_make_scanner
except ImportError:
c_make_scanner = None
__all__ = ['make_scanner']
NUMBER_RE = re.compile(
r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
(re.VERBOSE | re.MULTILINE | re.DOTALL))
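# e.g. '-1.5e3' -> groups ('-1', '.5', 'e3'); '42' -> ('42', None, None)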
def py_make_scanner(context):
parse_object = context.parse_object
parse_array = context.parse_array
parse_string = context.parse_string
match_number = NUMBER_RE.match
encoding = context.encoding
strict = context.strict
parse_float = context.parse_float
parse_int = context.parse_int
parse_constant = context.parse_constant
object_hook = context.object_hook
def _scan_once(string, idx):
try:
nextchar = string[idx]
except IndexError:
raise StopIteration
if nextchar == '"':
return parse_string(string, idx + 1, encoding, strict)
elif nextchar == '{':
return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
elif nextchar == '[':
return parse_array((string, idx + 1), _scan_once)
elif nextchar == 'n' and string[idx:idx + 4] == 'null':
return None, idx + 4
elif nextchar == 't' and string[idx:idx + 4] == 'true':
return True, idx + 4
elif nextchar == 'f' and string[idx:idx + 5] == 'false':
return False, idx + 5
m = match_number(string, idx)
if m is not None:
integer, frac, exp = m.groups()
if frac or exp:
res = parse_float(integer + (frac or '') + (exp or ''))
else:
res = parse_int(integer)
return res, m.end()
elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
return parse_constant('NaN'), idx + 3
elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
return parse_constant('Infinity'), idx + 8
elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
return parse_constant('-Infinity'), idx + 9
else:
raise StopIteration
return _scan_once
make_scanner = c_make_scanner or py_make_scanner

@ -484,6 +484,7 @@ class Connection(ansible.plugins.connection.ConnectionBase):
login_context = None
#: Only sudo, su, and doas are supported for now.
# Ansible ConnectionBase attribute, removed in Ansible >= 2.8
become_methods = ['sudo', 'su', 'doas']
#: Dict containing init_child() return value as recorded at startup by
@ -521,15 +522,6 @@ class Connection(ansible.plugins.connection.ConnectionBase):
# set by `_get_task_vars()` for interpreter discovery
_action = None
def __del__(self):
"""
Ansible cannot be trusted to always call close() e.g. the synchronize
action constructs a local connection like this. So provide a destructor
in the hopes of catching these cases.
"""
# https://github.com/dw/mitogen/issues/140
self.close()
def on_action_run(self, task_vars, delegate_to_hostname, loader_basedir):
"""
Invoked by ActionModuleMixin to indicate a new task is about to start
@ -684,6 +676,9 @@ class Connection(ansible.plugins.connection.ConnectionBase):
@property
def connected(self):
"""
Ansible connection plugin property. Used by ansible-connection command.
"""
return self.context is not None
def _spec_from_via(self, proxied_inventory_name, via_spec):
@ -842,7 +837,11 @@ class Connection(ansible.plugins.connection.ConnectionBase):
the _connect_*() service calls defined above to cause the master
process to establish the real connection on our behalf, or return a
reference to the existing one.
Ansible connection plugin method.
"""
# In some Ansible connection plugins this method returns self.
# However, nothing I've found uses the return value; it's not even assigned.
if self.connected:
return
@ -880,6 +879,8 @@ class Connection(ansible.plugins.connection.ConnectionBase):
Arrange for the mitogen.master.Router running in the worker to
gracefully shut down, and wait for shutdown to complete. Safe to call
multiple times.
Ansible connection plugin method.
"""
self._put_connection()
if self.binding:
@ -896,6 +897,8 @@ class Connection(ansible.plugins.connection.ConnectionBase):
any local state we hold for the connection, returns the Connection to
the 'disconnected' state, and informs ContextService the connection is
bad somehow, and should be shut down and discarded.
Ansible connection plugin method.
"""
if self._play_context.remote_addr is None:
# <2.5.6 incorrectly populate PlayContext for reset_connection
@ -1002,6 +1005,8 @@ class Connection(ansible.plugins.connection.ConnectionBase):
Data to supply on ``stdin`` of the process.
:returns:
(return code, stdout bytes, stderr bytes)
Ansible connection plugin method.
"""
emulate_tty = (not in_data and sudoable)
rc, stdout, stderr = self.get_chain().call(
@ -1027,6 +1032,8 @@ class Connection(ansible.plugins.connection.ConnectionBase):
Remote filesystem path to read.
:param str out_path:
Local filesystem path to write.
Ansible connection plugin method.
"""
self._connect()
ansible_mitogen.target.transfer_file(
@ -1076,6 +1083,8 @@ class Connection(ansible.plugins.connection.ConnectionBase):
Local filesystem path to read.
:param str out_path:
Remote filesystem path to write.
Ansible connection plugin method.
"""
try:
st = os.stat(in_path)

@ -39,6 +39,7 @@ import ansible_mitogen.utils
__all__ = [
'action_loader',
'become_loader',
'connection_loader',
'module_loader',
'module_utils_loader',
@ -48,7 +49,7 @@ __all__ = [
ANSIBLE_VERSION_MIN = (2, 10)
ANSIBLE_VERSION_MAX = (2, 12)
ANSIBLE_VERSION_MAX = (2, 13)
NEW_VERSION_MSG = (
"Your Ansible version (%s) is too recent. The most recent version\n"
@ -90,6 +91,7 @@ assert_supported_release()
from ansible.plugins.loader import action_loader
from ansible.plugins.loader import become_loader
from ansible.plugins.loader import connection_loader
from ansible.plugins.loader import module_loader
from ansible.plugins.loader import module_utils_loader

@ -323,6 +323,7 @@ class NewStylePlanner(ScriptPlanner):
'dnf', # issue #280; py-dnf/hawkey need therapy
'firewalld', # issue #570: ansible module_utils caches dbus conn
'ansible.legacy.dnf', # issue #776
'ansible.builtin.dnf', # issue #832
])
def should_fork(self):

@ -42,6 +42,24 @@ DOCUMENTATION = """
accepts.
version_added: "2.5"
options:
ssh_args:
type: str
vars:
- name: ssh_args
- name: ansible_ssh_args
- name: ansible_mitogen_ssh_args
ssh_common_args:
type: str
vars:
- name: ssh_common_args
- name: ansible_ssh_common_args
- name: ansible_mitogen_ssh_common_args
ssh_extra_args:
type: str
vars:
- name: ssh_extra_args
- name: ansible_ssh_extra_args
- name: ansible_mitogen_ssh_extra_args
"""
try:

@ -180,42 +180,6 @@ def setup_pool(pool):
LOG.debug('Service pool configured: size=%d', pool.size)
def _setup_simplejson(responder):
"""
We support serving simplejson for Python 2.4 targets on Ansible 2.3, at
least so the package's own CI Docker scripts can run without external
help, however newer versions of simplejson no longer support Python
2.4. Therefore override any installed/loaded version with a
2.4-compatible version we ship in the compat/ directory.
"""
responder.whitelist_prefix('simplejson')
# issue #536: must be at end of sys.path, in case existing newer
# version is already loaded.
compat_path = os.path.join(os.path.dirname(__file__), 'compat')
sys.path.append(compat_path)
for fullname, is_pkg, suffix in (
(u'simplejson', True, '__init__.py'),
(u'simplejson.decoder', False, 'decoder.py'),
(u'simplejson.encoder', False, 'encoder.py'),
(u'simplejson.scanner', False, 'scanner.py'),
):
path = os.path.join(compat_path, 'simplejson', suffix)
fp = open(path, 'rb')
try:
source = fp.read()
finally:
fp.close()
responder.add_source_override(
fullname=fullname,
path=path,
source=source,
is_pkg=is_pkg,
)
def _setup_responder(responder):
"""
Configure :class:`mitogen.master.ModuleResponder` to only permit
@ -223,7 +187,6 @@ def _setup_responder(responder):
"""
responder.whitelist_prefix('ansible')
responder.whitelist_prefix('ansible_mitogen')
_setup_simplejson(responder)
# Ansible 2.3 is compatible with Python 2.4 targets, however
# ansible/__init__.py is not. Instead, executor/module_common.py writes

@ -41,6 +41,7 @@ __metaclass__ = type
import atexit
import imp
import json
import os
import re
import shlex
@ -63,12 +64,6 @@ except ImportError:
# Python 2.4
ctypes = None
try:
import json
except ImportError:
# Python 2.4
import simplejson as json
try:
# Cannot use cStringIO as it does not support Unicode.
from StringIO import StringIO

@ -38,6 +38,7 @@ __metaclass__ = type
import errno
import grp
import json
import operator
import os
import pwd
@ -58,11 +59,6 @@ import mitogen.parent
import mitogen.service
from mitogen.core import b
try:
import json
except ImportError:
import simplejson as json
try:
reduce
except NameError:
@ -371,11 +367,6 @@ def init_child(econtext, log_level, candidate_temp_dirs):
LOG.setLevel(log_level)
logging.getLogger('ansible_mitogen').setLevel(log_level)
# issue #536: if the json module is available, remove simplejson from the
# importer whitelist to avoid confusing certain Ansible modules.
if json.__name__ == 'json':
econtext.importer.whitelist.remove('simplejson')
global _fork_parent
if FORK_SUPPORTED:
mitogen.parent.upgrade_router(econtext)

@ -79,6 +79,7 @@ try:
except ImportError:
from ansible.vars.unsafe_proxy import AnsibleUnsafeText
import ansible_mitogen.loaders
import mitogen.core
@ -435,7 +436,10 @@ class PlayContextSpec(Spec):
return self._play_context.become_user
def become_pass(self):
return optional_secret(self._play_context.become_pass)
become_method = self.become_method()
become_plugin = ansible_mitogen.loaders.become_loader.get(become_method)
become_pass = become_plugin.get_option('become_pass', hostvars=self._task_vars)
return optional_secret(become_pass)
def password(self):
return optional_secret(self._play_context.password)
@ -652,8 +656,8 @@ class MitogenViaSpec(Spec):
def become_pass(self):
return optional_secret(
self._host_vars.get('ansible_become_password') or
self._host_vars.get('ansible_become_pass')
self._host_vars.get('ansible_become_pass') or
self._host_vars.get('ansible_become_password')
)
def password(self):
@ -749,7 +753,7 @@ class MitogenViaSpec(Spec):
return self._host_vars.get('mitogen_kubectl_path')
def mitogen_lxc_path(self):
return self.host_vars.get('mitogen_lxc_path')
return self._host_vars.get('mitogen_lxc_path')
def mitogen_lxc_attach_path(self):
return self._host_vars.get('mitogen_lxc_attach_path')

@ -148,7 +148,7 @@ Noteworthy Differences
* Mitogen 0.2.x supports Ansible 2.3-2.9; with Python 2.6, 2.7, or 3.6.
Mitogen 0.3.1+ supports
- Ansible 2.10, 3, and 4; with Python 2.7, or 3.6-3.10
- Ansible 5; with Python 3.8-3.10
- Ansible 5 and 6; with Python 3.8-3.10
Verify your installation is running one of these versions by checking
``ansible --version`` output.
@ -1248,18 +1248,17 @@ with ``-vvv``.
However, certain controller hangs may render ``MITOGEN_DUMP_THREAD_STACKS``
ineffective, or occur too infrequently for interactive reproduction. In these
cases `faulthandler <https://faulthandler.readthedocs.io/>`_ may be used:
cases :py:mod:`faulthandler` may be used with Python >= 3.3:
1. For Python 2, ``pip install faulthandler``. This is unnecessary on Python 3.
2. Once the hang occurs, observe the process tree using ``pstree`` or ``ps
1. Once the hang occurs, observe the process tree using ``pstree`` or ``ps
--forest``.
3. The most likely process to be hung is the connection multiplexer, which can
2. The most likely process to be hung is the connection multiplexer, which can
easily be identified as the parent of all SSH client processes.
4. Send ``kill -SEGV <pid>`` to the multiplexer PID, causing it to print all
3. Send ``kill -SEGV <pid>`` to the multiplexer PID, causing it to print all
thread stacks.
5. `File a bug <https://github.com/dw/mitogen/issues/new/>`_ including a copy
of the stacks, along with a description of the last task executing prior to
the hang.
4. `File a bug <https://github.com/mitogen-hq/mitogen/issues/new/>`_
including a copy of the stacks and a description of the last task executing
before the hang.
It is possible the hang occurred in a process on a target. If ``strace`` is
available, look for the host name not listed in Ansible output as reporting a

@ -17,6 +17,18 @@ Release Notes
To avail of fixes in an unreleased version, please download a ZIP file
`directly from GitHub <https://github.com/dw/mitogen/>`_.
v0.3.4 (2023-07-02)
-------------------
* :gh:issue:`929` Support Ansible 6 and ansible-core 2.13
* :gh:issue:`832` Fix runtime error when using the ansible.builtin.dnf module multiple times
* :gh:issue:`925` :class:`ansible_mitogen.connection.Connection` no longer tries to close the
connection on destruction. This is expected to reduce cases of `mitogen.core.Error: An attempt
was made to enqueue a message with a Broker that has already exitted`. However it may result in
resource leaks.
* :gh:issue:`659` Removed :mod:`mitogen.compat.simplejson`; it is not needed with Python 2.7+ and contained Python 3.x syntax errors
* :gh:issue:`983` CI: Removed PyPI faulthandler requirement from tests
* :gh:issue:`1001` CI: Fixed Debian 9 & 11 tests
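
For :gh:issue:`925`, a schematic sketch of the behaviour change (illustrative
code, not the actual :class:`ansible_mitogen.connection.Connection`
implementation)::

    class Connection:
        def close(self):
            ...  # explicit close still tears the connection down

        def __del__(self):
            # Before 0.3.4 destruction called close(), which could enqueue
            # a shutdown message on a Broker that had already exited and
            # raise mitogen.core.Error. Destruction is now a no-op, trading
            # that crash for possible resource leaks.
            pass
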
v0.3.3 (2022-06-03)
-------------------
@ -53,7 +65,7 @@ v0.3.0 (2021-10-28)
-------------------
This release separates itself from the v0.2.X releases. Ansible's API changed too much to support backwards compatibility so from now on, v0.2.X releases will be for Ansible < 2.10 and v0.3.X will be for Ansible 2.10+.
`See here for details <https://github.com/dw/mitogen pull/715#issuecomment-750697248>`_.
`See here for details <https://github.com/dw/mitogen/pull/715#issuecomment-750697248>`_.
* :gh:issue:`827` NewStylePlanner: detect `ansible_collections` imports
* :gh:issue:`770` better check for supported Ansible version

@ -35,7 +35,7 @@ be expected. On the slave, it is built dynamically during startup.
#: Library version as a tuple.
__version__ = (0, 3, 3)
__version__ = (0, 3, 4)
#: This is :data:`False` in slave contexts. Previously it was used to prevent

@ -536,7 +536,7 @@ class PkgutilMethod(FinderMethod):
try:
path = loader.get_filename(fullname)
except (AttributeError, ImportError):
except (AttributeError, ImportError, ValueError):
# - get_filename() may throw ImportError if pkgutil.find_loader()
# picks a "parent" package's loader for some crap that's been
# stuffed in sys.modules, for example in the case of urllib3:

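
The widened ``except`` clause above is the whole fix; schematically the
finder treats any of the three exceptions as "no filename available"
(``filename_or_none`` is an illustrative helper, not Mitogen API)::

    import pkgutil

    def filename_or_none(fullname):
        loader = pkgutil.find_loader(fullname)
        if loader is None or not hasattr(loader, 'get_filename'):
            return None
        try:
            return loader.get_filename(fullname)
        except (AttributeError, ImportError, ValueError):
            # Some Python 3 loaders raise ValueError here (reportedly when
            # the module's spec is missing); treat it like the other
            # failures rather than crashing the module finder.
            return None
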
@ -1,4 +1,12 @@
[defaults]
any_errors_fatal = true
# callback_whitelist naming will be deprecated in ansible-core >= 2.15.
# callbacks_enabled naming was added in ansible-core 2.11
# profile_tasks: Displays timing for each task and summary table of top N tasks
# timer: Displays "Playbook run took 0 days, 0 hours, ..."
callback_whitelist =
profile_tasks,
timer
inventory = hosts
gathering = explicit
strategy_plugins = ../../ansible_mitogen/plugins/strategy
@ -6,15 +14,12 @@ inventory_plugins = lib/inventory
action_plugins = lib/action
callback_plugins = lib/callback
stdout_callback = yaml
stdout_whitelist =
profile_roles,
timer,
yaml
vars_plugins = lib/vars
library = lib/modules
filter_plugins = lib/filters
module_utils = lib/module_utils
retry_files_enabled = False
show_task_path_on_failure = true # Added in ansible-core 2.11
display_args_to_stdout = True
forks = 100
@ -30,11 +35,15 @@ transport = ssh
no_target_syslog = True
# Required by integration/ssh/timeouts.yml
timeout = 10
timeout = 30
# On Travis, paramiko check fails due to host key checking enabled.
host_key_checking = False
[inventory]
any_unparsed_is_failed = true
host_pattern_mismatch = error
[callback_profile_tasks]
task_output_limit = 10

@ -1,7 +1,6 @@
- name: bench/file_transfer.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- name: Make 32MiB file
@ -52,6 +51,8 @@
with_items:
- /tmp/bigfile.out
- /tmp/bigbigfile.out
tags:
- requires_local_sudo
- name: Copy 32MiB file via localhost sudo
delegate_to: localhost
@ -59,6 +60,8 @@
copy:
src: /tmp/bigfile.in
dest: /tmp/bigfile.out
tags:
- requires_local_sudo
- name: Copy 320MiB file via localhost sudo
delegate_to: localhost
@ -66,6 +69,8 @@
copy:
src: /tmp/bigbigfile.in
dest: /tmp/bigbigfile.out
tags:
- requires_local_sudo
tags:
- resource_intensive

@ -1,4 +1,5 @@
- hosts: test-targets
- name: bench/includes.yml
hosts: test-targets
tasks:
- include_tasks: _includes.yml
with_sequence: start=1 end=1000

@ -1,6 +1,5 @@
- hosts: all
any_errors_fatal: true
- name: bench/loop-100-copies.yml
hosts: all
tasks:
- name: Create file tree

@ -4,7 +4,8 @@
#
# See also: loop-100-tasks.yml
#
- hosts: all
- name: bench/loop-100-items.yml
hosts: all
tasks:
- command: hostname
with_sequence: start=1 end="{{end|default(100)}}"

@ -8,7 +8,8 @@
#
# See also: loop-100-items.yml
#
- hosts: all
- name: bench/loop-100-tasks.yml
hosts: all
tasks:
- command: hostname
- command: hostname

@ -1,12 +1,14 @@
- hosts: all
- name: bench/loop-20-templates.yml
hosts: all
tasks:
- file:
- name: Create loop templates dir
file:
dest: /tmp/templates
state: "{{item}}"
with_items: ["absent", "directory"]
- copy:
- name: Copy loop files
copy:
dest: /tmp/templates/{{item}}
mode: 0755
content:

@ -0,0 +1,2 @@
---
pkg_mgr_python_interpreter: python

@ -0,0 +1,2 @@
---
pkg_mgr_python_interpreter: /usr/libexec/platform-python

@ -1,49 +1,76 @@
# integration/transport_config
# Hosts with twiddled configs that need to be checked somehow.
[transport_config:children]
transport_config_undiscover
tc_python_path
# transport()
[transport_config_undiscover:children]
tc_become
tc_become_method
tc_become_pass
tc_become_user
tc_password
tc_port
tc_remote_addr
tc_remote_user
tc_transport
[transport_config_undiscover:vars]
# If python interpreter path is unset, Ansible tries to connect & discover it.
# That causes an approx 10 second timeout per task - there's no host to connect to.
# This optimisation should not be relied on in any test.
# Note: tc-python-path-* are intentionally not included.
ansible_python_interpreter = python3000 # Not expected to exist
[tc_transport]
tc-transport-unset
tc-transport-local ansible_connection=local
tc-transport-smart ansible_connection=smart
# python_path()
[tc_python_path]
tc-python-path-unset
tc-python-path-hostvar ansible_python_interpreter=/hostvar/path/to/python
tc-python-path-local-unset ansible_connection=local
tc-python-path-local-explicit ansible_connection=local ansible_python_interpreter=/a/b/c
# remote_addr()
[tc_remote_addr]
tc-remote-addr-unset # defaults to inventory_hostname
tc-remote-addr-explicit-ssh ansible_ssh_host=ansi.ssh.host
tc-remote-addr-explicit-host ansible_host=ansi.host
tc-remote-addr-explicit-both ansible_ssh_host=a.b.c ansible_host=b.c.d
# password()
[tc_password]
tc-password-unset
tc-password-explicit-ssh ansible_ssh_pass=ansi-ssh-pass
tc-password-explicit-user ansible_password=ansi-pass
tc-password-explicit-pass ansible_password=ansi-pass
tc-password-explicit-both ansible_password=a.b.c ansible_ssh_pass=c.b.a
# remote_user()
[tc_remote_user]
tc-remote-user-unset # defaults to C.DEFAULT_REMOTE_USER
tc-remote-user-explicit-ssh ansible_ssh_user=ansi-ssh-user
tc-remote-user-explicit-user ansible_user=ansi-user
tc-remote-user-explicit-both ansible_user=a.b.c ansible_ssh_user=c.b.a
# become()
[tc_become]
tc-become-unset
tc-become-set
# become_method()
[tc_become_method]
tc-become-method-unset
tc-become-method-su ansible_become_method=su
# become_user()
[tc_become_user]
tc-become-user-unset
tc-become-user-set ansible_become_user=ansi-become-user
# become_pass()
[tc_become_pass]
tc-become-pass-unset
tc-become-pass-password ansible_become_password=apassword
tc-become-pass-pass ansible_become_pass=apass
tc-become-pass-both ansible_become_password=a.b.c ansible_become_pass=c.b.a
tc-become-pass-both ansible_become_pass=bpass ansible_become_password=bpassword
# port()
[tc_port]
tc-port-unset
tc-port-explicit-port ansible_port=1234
tc-port-explicit-ssh ansible_ssh_port=4321

@ -2,23 +2,23 @@
- name: integration/action/copy.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- copy:
- name: Create tiny file
copy:
dest: /tmp/copy-tiny-file
content:
this is a tiny file.
delegate_to: localhost
- copy:
- name: Create large file
copy:
dest: /tmp/copy-large-file
# Must be larger than Connection.SMALL_SIZE_LIMIT.
content: "{% for x in range(200000) %}x{% endfor %}"
delegate_to: localhost
# end of making files
- file:
- name: Cleanup copied files
file:
state: absent
path: "{{item}}"
with_items:
@ -27,28 +27,31 @@
- /tmp/copy-tiny-inline-file.out
- /tmp/copy-large-inline-file.out
# end of cleaning out files
- copy:
- name: Copy large file
copy:
dest: /tmp/copy-large-file.out
src: /tmp/copy-large-file
- copy:
- name: Copy tiny file
copy:
dest: /tmp/copy-tiny-file.out
src: /tmp/copy-tiny-file
- copy:
- name: Copy tiny inline file
copy:
dest: /tmp/copy-tiny-inline-file.out
content: "tiny inline content"
- copy:
- name: Copy large inline file
copy:
dest: /tmp/copy-large-inline-file.out
content: |
{% for x in range(200000) %}y{% endfor %}
# stat results
- stat:
- name: Stat copied files
stat:
path: "{{item}}"
with_items:
- /tmp/copy-tiny-file.out
@ -65,7 +68,8 @@
- stat.results[3].stat.checksum == "d675f47e467eae19e49032a2cc39118e12a6ee72"
fail_msg: stat={{stat}}
- file:
- name: Cleanup files
file:
state: absent
path: "{{item}}"
with_items:

@ -5,12 +5,7 @@
- name: integration/action/fixup_perms2__copy.yml
hosts: test-targets
any_errors_fatal: true
tasks:
#
# copy module (no mode).
#
- name: "Copy files (no mode)"
copy:
content: ""
@ -23,10 +18,6 @@
- out.stat.mode in ("0644", "0664")
fail_msg: out={{out}}
#
# copy module (explicit mode).
#
- name: "Copy files from content: arg"
copy:
content: ""
@ -40,22 +31,20 @@
- out.stat.mode == "0400"
fail_msg: out={{out}}
#
# copy module (existing disk files, no mode).
#
- file:
- name: Cleanup local weird mode file
file:
path: /tmp/weird-mode.out
state: absent
- name: Create local test file.
- name: Create local weird mode file
delegate_to: localhost
copy:
content: "weird mode"
dest: "/tmp/weird-mode"
mode: "1462"
- copy:
- name: Copy file with weird mode
copy:
src: "/tmp/weird-mode"
dest: "/tmp/weird-mode.out"
@ -67,11 +56,8 @@
- out.stat.mode in ("0644", "0664")
fail_msg: out={{out}}
#
# copy module (existing disk files, preserve mode).
#
- copy:
- name: Copy file with weird mode, preserving mode
copy:
src: "/tmp/weird-mode"
dest: "/tmp/weird-mode"
mode: preserve
@ -84,11 +70,8 @@
- out.stat.mode == "1462"
fail_msg: out={{out}}
#
# copy module (existing disk files, explicit mode).
#
- copy:
- name: Copy file with weird mode, explicit mode
copy:
src: "/tmp/weird-mode"
dest: "/tmp/weird-mode"
mode: "1461"
@ -102,7 +85,8 @@
- out.stat.mode == "1461"
fail_msg: out={{out}}
- file:
- name: Cleanup
file:
state: absent
path: "{{item}}"
with_items:

@ -2,7 +2,6 @@
- name: integration/action/low_level_execute_command.yml
hosts: test-targets
any_errors_fatal: true
tasks:
# "echo -en" to test we actually hit bash shell too.

@ -9,7 +9,6 @@
- name: integration/action/make_tmp_path.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- meta: end_play
when: not is_mitogen

@ -1,7 +1,8 @@
# issue #554: double calls to make_tmp_path() fail with assertion error. Ensure
# they succeed and are cleaned up correctly.
- hosts: target
- name: integration/action/make_tmp_path__double.yml
hosts: test-targets
tasks:
- mitogen_action_script:
script: |

@ -3,7 +3,6 @@
- name: integration/action/remote_expand_user.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- name: "Find out root's homedir."
# Runs first because it blats regular Ansible facts with junk, so

@ -1,38 +1,33 @@
- name: integration/action/remote_file_exists.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- file:
- name: Ensure does-not-exist doesn't
file:
path: /tmp/does-not-exist
state: absent
- action_passthrough:
method: _remote_file_exists
args: ['/tmp/does-not-exist']
register: out
- assert:
that: out.result == False
fail_msg: out={{out}}
# ---
- copy:
- name: Ensure does-exist does
copy:
dest: /tmp/does-exist
content: "I think, therefore I am"
- action_passthrough:
method: _remote_file_exists
args: ['/tmp/does-exist']
register: out
- assert:
that: out.result == True
fail_msg: out={{out}}
- file:
- name: Cleanup
file:
path: /tmp/does-exist
state: absent
tags:

@ -4,14 +4,14 @@
#
- name: integration/action/remove_tmp_path.yml
hosts: test-targets
any_errors_fatal: true
tasks:
#
# Use the copy module to cause a temporary directory to be created, and
# return a result with a 'src' attribute pointing into that directory.
#
- copy:
- name: Ensure remove_tmp_path_test
copy:
dest: /tmp/remove_tmp_path_test
content: "{{ 123123 | random }}"
register: out

@ -2,7 +2,6 @@
- name: integration/action/synchronize.yml
hosts: test-targets
any_errors_fatal: true
vars:
ansible_user: mitogen__has_sudo_pubkey
ansible_become_pass: has_sudo_pubkey_password
@ -13,23 +12,27 @@
ansible_password: ''
tasks:
# must copy git file to set proper file mode.
- copy:
- name: Copy synchronize-action-key
copy:
dest: /tmp/synchronize-action-key
src: ../../../data/docker/mitogen__has_sudo_pubkey.key
mode: u=rw,go=
delegate_to: localhost
- file:
- name: Cleanup sync-test
file:
path: /tmp/sync-test
state: absent
delegate_to: localhost
- file:
- name: Create sync-test
file:
path: /tmp/sync-test
state: directory
delegate_to: localhost
- copy:
- name: Create sync-test item
copy:
dest: /tmp/sync-test/item
content: "item!"
delegate_to: localhost

@ -1,49 +1,46 @@
- name: integration/action/transfer_data.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- file:
- name: Cleanup transfer data
file:
path: /tmp/transfer-data
state: absent
# Ensure it JSON-encodes dicts.
- action_passthrough:
- name: Create JSON transfer data
action_passthrough:
method: _transfer_data
kwargs:
remote_path: /tmp/transfer-data
data: {
"I am JSON": true
}
- slurp:
- name: Slurp JSON transfer data
slurp:
src: /tmp/transfer-data
register: out
- assert:
that: |
out.content|b64decode == '{"I am JSON": true}'
fail_msg: out={{out}}
# Ensure it handles strings.
- action_passthrough:
- name: Create text transfer data
action_passthrough:
method: _transfer_data
kwargs:
remote_path: /tmp/transfer-data
data: "I am text."
- slurp:
- name: Slurp text transfer data
slurp:
src: /tmp/transfer-data
register: out
- assert:
that:
out.content|b64decode == 'I am text.'
fail_msg: out={{out}}
- file:
- name: Cleanup transfer data
file:
path: /tmp/transfer-data
state: absent
tags:

@ -2,7 +2,6 @@
- name: integration/async/multiple_items_loop.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- name: start long running ops

@ -2,7 +2,6 @@
- name: integration/async/result_binary_producing_json.yml
gather_facts: true
hosts: test-targets
any_errors_fatal: true
tasks:
- block:
@ -38,7 +37,8 @@
retries: 100000
delay: 0
- slurp:
- name: Slurp async busy-poll
slurp:
src: "{{ansible_user_dir}}/.ansible_async/{{job.ansible_job_id}}"
register: result

@ -2,7 +2,6 @@
- name: integration/async/result_binary_producing_junk.yml
gather_facts: true
hosts: test-targets
any_errors_fatal: true
tasks:
- block:

@ -2,17 +2,19 @@
- name: integration/async/result_shell_echo_hi.yml
gather_facts: true
hosts: test-targets
any_errors_fatal: true
tasks:
- shell: echo hi; echo there >&2
- name: Async shell
shell: echo hi; echo there >&2
async: 100
poll: 0
register: job
- shell: sleep 1
- name: Sleepy shell
shell: sleep 1
- slurp:
- name: Slurp async shell
slurp:
src: "{{ansible_user_dir}}/.ansible_async/{{job.ansible_job_id}}"
register: result

@ -2,7 +2,6 @@
- name: integration/async/runner_new_process.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- name: get process ID.

@ -3,7 +3,6 @@
- name: integration/async/runner_one_job.yml
hosts: test-targets
any_errors_fatal: true
tasks:
# Verify output of a single async job.

@ -2,7 +2,6 @@
- name: integration/async/runner_timeout_then_polling.yml
hosts: test-targets
any_errors_fatal: true
tasks:
# Verify async-with-timeout-then-poll behaviour.

@ -1,12 +1,12 @@
- name: integration/async/runner_two_simultaneous_jobs.yml
hosts: test-targets
any_errors_fatal: true
tasks:
# Start 2 duplicate jobs, verify they run concurrently.
- file:
- name: Cleanup semaphore file
file:
path: /tmp/flurp
state: absent

@ -2,7 +2,6 @@
- name: integration/async/runner_with_polling_and_timeout.yml
hosts: test-targets
any_errors_fatal: true
tasks:
# Verify async-with-polling-and-timeout behaviour.

@ -4,7 +4,6 @@
- name: integration/become/su_password.yml
hosts: test-targets
become_method: su
any_errors_fatal: true
tasks:
- name: Ensure su password absent but required.

@ -1,6 +1,5 @@
- name: integration/become/sudo_flags_failure.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- name: Verify behaviour for bad sudo flags.

@ -1,6 +1,5 @@
- name: integration/become/sudo_nonexistent.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- name: Verify behaviour for non-existent accounts.

@ -2,7 +2,6 @@
- name: integration/become/sudo_nopassword.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- name: Verify we aren't root

@ -2,7 +2,6 @@
- name: integration/become/sudo_password.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- name: Ensure sudo password absent but required.

@ -2,7 +2,6 @@
- name: integration/become/sudo_requiretty.yml
hosts: test-targets
any_errors_fatal: true
tasks:
# TODO: https://github.com/dw/mitogen/issues/692

@ -1,19 +1,23 @@
---
- shell: dd if=/dev/urandom of=/tmp/{{file_name}} bs=1024 count={{file_size}}
- name: Create {{ file_name }}
shell: dd if=/dev/urandom of=/tmp/{{ file_name }} bs=1024 count={{ file_size }}
args:
creates: /tmp/{{file_name}}
delegate_to: localhost
- copy:
- name: Copy {{ file_name }}
copy:
dest: /tmp/{{file_name}}.out
src: /tmp/{{file_name}}
- stat: path=/tmp/{{file_name}}
- name: Stat created {{ file_name }}
stat: path=/tmp/{{ file_name }}
register: original
delegate_to: localhost
- stat: path=/tmp/{{file_name}}.out
- name: Stat copied {{ file_name }}
stat: path=/tmp/{{ file_name }}.out
register: copied
- assert:

@ -4,7 +4,6 @@
- name: integration/connection/become_same_user.yml
hosts: bsu-joe
gather_facts: no
any_errors_fatal: true
tasks:
# bsu-joe's login user is joe, so become should be ignored.

@ -9,10 +9,13 @@
- meta: end_play
when: not is_mitogen
- delegate_to: localhost
- name: Run _disconnect_during_module.yml
delegate_to: localhost
command: |
ansible-playbook
-i "{{MITOGEN_INVENTORY_FILE}}"
{% for inv in ansible_inventory_sources %}
-i "{{ inv }}"
{% endfor %}
integration/connection/_disconnect_during_module.yml
args:
chdir: ../..

@ -12,7 +12,6 @@
- name: integration/connection/disconnect_resets_connection.yml
hosts: test-targets
gather_facts: no
any_errors_fatal: true
tasks:
- meta: end_play
when: not is_mitogen

@ -4,7 +4,6 @@
- name: integration/connection/exec_command.yml
hosts: test-targets
gather_facts: no
any_errors_fatal: true
tasks:
- connection_passthrough:
method: exec_command

@ -2,7 +2,6 @@
- name: integration/connection/home_dir.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- name: "Find out root's homedir."
# Runs first because it blats regular Ansible facts with junk, so

@ -4,7 +4,6 @@
- name: integration/connection/put_large_file.yml
hosts: test-targets
gather_facts: no
any_errors_fatal: true
vars:
file_name: large-file
file_size: 512

@ -4,7 +4,6 @@
- name: integration/connection/put_small_file.yml
hosts: test-targets
gather_facts: no
any_errors_fatal: true
vars:
file_name: small-file
file_size: 123

@ -1,7 +1,8 @@
# issue #633: Connection.reset() should ignore "become", and apply to the login
# account.
- hosts: test-targets
- name: integration/connection/reset_become.yml
hosts: test-targets
become: true
gather_facts: false
tasks:

@ -1,5 +1,5 @@
- import_playbook: delegate_to_template.yml
- import_playbook: local_action.yml
- import_playbook: osa_container_standalone.yml
- import_playbook: osa_delegate_to_self.yml
#- import_playbook: osa_container_standalone.yml
#- import_playbook: osa_delegate_to_self.yml
- import_playbook: stack_construction.yml

@ -14,7 +14,6 @@
physical_hosts: ["cd-normal-alias", "cd-normal-normal"]
hosts: test-targets
gather_facts: no
any_errors_fatal: true
tasks:
- meta: end_play
when: not is_mitogen
@ -34,7 +33,7 @@
'kwargs': {
'check_host_keys': 'ignore',
'compression': True,
'connect_timeout': 10,
'connect_timeout': 30,
'hostname': 'alias-host',
'identities_only': False,
'identity_file': null,
@ -64,7 +63,7 @@
'kwargs': {
'check_host_keys': 'ignore',
'compression': True,
'connect_timeout': 10,
'connect_timeout': 30,
'hostname': 'cd-normal-alias',
'identities_only': False,
'identity_file': null,

@ -1,7 +1,7 @@
# issue #251: local_action with mitogen_via= builds wrong stack.
- hosts: cd-newuser-normal-normal
- name: integration/connection_delegation/local_action.yml
hosts: cd-newuser-normal-normal
tasks:
- meta: end_play
when: not is_mitogen
@ -22,7 +22,7 @@
{
'enable_lru': true,
'kwargs': {
'connect_timeout': 10,
'connect_timeout': 30,
'python_path': ["{{ansible_playbook_python}}"],
'remote_name': null,
'password': null,

@ -30,7 +30,6 @@
- hosts: cd-normal
any_errors_fatal: true
tasks:
- meta: end_play
when: not is_mitogen
@ -42,7 +41,7 @@
right: [
{
"kwargs": {
"connect_timeout": 10,
"connect_timeout": 30,
"doas_path": null,
"password": null,
"python_path": ["/usr/bin/python"],
@ -71,7 +70,7 @@
'kwargs': {
'check_host_keys': 'ignore',
'compression': True,
'connect_timeout': 10,
'connect_timeout': 30,
'hostname': 'alias-host',
'identities_only': False,
'identity_file': null,
@ -116,7 +115,7 @@
'kwargs': {
'check_host_keys': 'ignore',
'compression': True,
'connect_timeout': 10,
'connect_timeout': 30,
'hostname': 'alias-host',
'identities_only': False,
'identity_file': null,
@ -159,7 +158,7 @@
right: [
{
'kwargs': {
'connect_timeout': 10,
'connect_timeout': 30,
'doas_path': null,
'password': null,
"python_path": ["/usr/bin/python"],
@ -172,7 +171,7 @@
'kwargs': {
'check_host_keys': 'ignore',
'compression': True,
'connect_timeout': 10,
'connect_timeout': 30,
'hostname': 'cd-normal-normal',
'identities_only': False,
'identity_file': null,
@ -217,7 +216,7 @@
'kwargs': {
'check_host_keys': 'ignore',
'compression': True,
'connect_timeout': 10,
'connect_timeout': 30,
'hostname': 'alias-host',
'identities_only': False,
'identity_file': null,
@ -247,7 +246,7 @@
'kwargs': {
'check_host_keys': 'ignore',
'compression': True,
'connect_timeout': 10,
'connect_timeout': 30,
'hostname': 'cd-normal-alias',
'identities_only': False,
'identity_file': null,
@ -290,7 +289,7 @@
right: [
{
'kwargs': {
'connect_timeout': 10,
'connect_timeout': 30,
'doas_path': null,
'password': null,
"python_path": ["/usr/bin/python"],
@ -303,7 +302,7 @@
'kwargs': {
'check_host_keys': 'ignore',
'compression': True,
'connect_timeout': 10,
'connect_timeout': 30,
'hostname': 'cd-newuser-normal-normal',
'identities_only': False,
'identity_file': null,
@ -349,7 +348,7 @@
'kwargs': {
'check_host_keys': 'ignore',
'compression': True,
'connect_timeout': 10,
'connect_timeout': 30,
'hostname': 'alias-host',
'identities_only': False,
'identity_file': null,
@ -413,7 +412,7 @@
right: [
{
'kwargs': {
'connect_timeout': 10,
'connect_timeout': 30,
'doas_path': null,
'password': null,
'python_path': ["/usr/bin/python"],
@ -424,7 +423,7 @@
},
{
'kwargs': {
'connect_timeout': 10,
'connect_timeout': 30,
'doas_path': null,
'password': null,
'python_path': ["/usr/bin/python"],
@ -433,6 +432,6 @@
},
'method': 'doas',
},
]
]
tags:
- stack_construction

@ -2,7 +2,6 @@
- name: integration/connection_loader/local_blemished.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- determine_strategy:

@ -3,7 +3,6 @@
- name: integration/connection_loader/paramiko_unblemished.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- debug:
msg: "skipped for now"

@ -2,7 +2,6 @@
- name: integration/connection_loader__ssh_blemished.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- determine_strategy:

@ -3,7 +3,6 @@
- name: integration/context_service/disconnect_cleanup.yml
hosts: test-targets[0]
any_errors_fatal: true
tasks:
- meta: end_play
when: not is_mitogen

@ -2,7 +2,6 @@
- name: integration/context_service/lru_one_target.yml
hosts: test-targets
any_errors_fatal: true
vars:
max_interps: "{{lookup('env', 'MITOGEN_MAX_INTERPRETERS')}}"
ubound: "{{max_interps|int + 1}}"

@ -3,7 +3,6 @@
- name: integration/context_service/reconnection.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- mitogen_shutdown_all:
@ -14,7 +13,8 @@
custom_python_detect_environment:
register: old_become_env
- become: true
- name: Kill ssh process
become: true
shell: |
bash -c "( sleep 3; kill -9 {{ssh_account_env.pid}}; ) & disown"

@ -2,7 +2,6 @@
- name: integration/context_service/remote_name.yml
hosts: test-targets[0]
any_errors_fatal: true
tasks:
- meta: end_play
when: not is_mitogen
@ -11,7 +10,8 @@
- meta: end_play
when: ansible_system != 'Linux'
- shell: 'cat /proc/$PPID/cmdline | tr \\0 \\n'
- name: Get cmdline
shell: 'cat /proc/$PPID/cmdline | tr \\0 \\n'
register: out
- debug: var=out
@ -20,7 +20,8 @@
- out.stdout is match('.*python([0-9.]+)?\(mitogen:[a-z]+@[^:]+:[0-9]+\)')
fail_msg: out={{out}}
- shell: 'cat /proc/$PPID/cmdline | tr \\0 \\n'
- name: Get cmdline, with mitogen_mask_remote_name
shell: 'cat /proc/$PPID/cmdline | tr \\0 \\n'
register: out
vars:
mitogen_mask_remote_name: true

@ -15,12 +15,14 @@
- ansible_facts.virtualization_type == "docker"
- ansible_facts.python.version_info[:2] >= [2, 5]
- shell: cp /etc/resolv.conf /tmp/resolv.conf
- name: Backup resolv.conf
shell: cp /etc/resolv.conf /tmp/resolv.conf
when:
- ansible_facts.virtualization_type == "docker"
- ansible_facts.python.version_info[:2] >= [2, 5]
- shell: echo > /etc/resolv.conf
- name: Truncate resolv.conf
shell: echo > /etc/resolv.conf
when:
- ansible_facts.virtualization_type == "docker"
- ansible_facts.python.version_info[:2] >= [2, 5]
@ -33,7 +35,8 @@
- ansible_facts.virtualization_type == "docker"
- ansible_facts.python.version_info[:2] >= [2, 5]
- shell: cat /tmp/resolv.conf > /etc/resolv.conf
- name: Restore resolv.conf
shell: cat /tmp/resolv.conf > /etc/resolv.conf
when:
- ansible_facts.virtualization_type == "docker"
- ansible_facts.python.version_info[:2] >= [2, 5]

@ -3,7 +3,6 @@
- name: integration/interpreter_discovery/ansible_2_8_tests.yml
hosts: test-targets
any_errors_fatal: true
gather_facts: true
tasks:
- name: can only run these tests on ansible >= 2.8.0

@ -3,7 +3,6 @@
- name: integration/interpreter_discovery/complex_args.yml
hosts: test-targets
any_errors_fatal: true
gather_facts: true
tasks:
- name: create temp file to source

@ -5,10 +5,10 @@
# https://github.com/ansible/ansible/issues/14489
- name: integration/local/cwd_preserved.yml
any_errors_fatal: true
hosts: test-targets
tasks:
- connection: local
- name: Get local cwd
connection: local
command: pwd
register: pwd

@ -1,8 +1,8 @@
# Execution environment should be that of WorkerProcess --
# https://github.com/dw/mitogen/issues/297
- hosts: localhost
- name: integration/local/env_preserved.yml
hosts: localhost
connection: local
tasks:
- shell: "env | grep EVIL_VARS_PLUGIN"

@ -3,7 +3,6 @@
- name: integration/module_utils/adjacent_to_playbook.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- custom_python_external_module:

@ -3,7 +3,6 @@
- name: integration/module_utils/adjacent_to_playbook.yml
hosts: test-targets
any_errors_fatal: true
roles:
- modrole
tags:

@ -2,7 +2,6 @@
- name: integration/module_utils/from_config_path.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- custom_python_external_module:

@ -2,7 +2,6 @@
- name: integration/module_utils/from_config_path.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- custom_python_external_pkg:

@ -1,7 +1,6 @@
- name: integration/module_utils/overrides_builtin.yml
hosts: test-targets
any_errors_fatal: true
roles:
- overrides_modrole
tags:

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/env python
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils import external3

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/env python
import json
import ansible.module_utils.basic

@ -4,7 +4,6 @@
- name: integration/playbook_semantics/become_flags.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- name: "without -E"
@ -19,7 +18,6 @@
- become_flags
- hosts: test-targets
any_errors_fatal: true
become_flags: -E
tasks:
- name: "with -E"

@ -1,6 +1,5 @@
- name: integration/playbook_semantics/delegate_to.yml
hosts: test-targets
any_errors_fatal: true
tasks:
#
# delegate_to, no sudo

@ -2,9 +2,9 @@
- name: integration/playbook_semantics/environment.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- shell: echo $SOME_ENV
- name: Echo $SOME_ENV
shell: echo $SOME_ENV
environment:
SOME_ENV: 123
register: result

@ -3,7 +3,6 @@
- name: integration/playbook_semantics/with_items.yml
hosts: test-targets
any_errors_fatal: true
tasks:
# TODO: https://github.com/dw/mitogen/issues/692

@ -1,5 +1,5 @@
- hosts: test-targets[0]
- name: integration/process/unix_socket_cleanup.yml
hosts: test-targets[0]
tasks:
- mitogen_action_script:
script: |
@ -10,7 +10,11 @@
- shell: >
ANSIBLE_STRATEGY=mitogen_linear
ANSIBLE_SSH_ARGS=""
ansible -m shell -c local -a whoami -i "{{MITOGEN_INVENTORY_FILE}}" test-targets
ansible -m shell -c local -a whoami
{% for inv in ansible_inventory_sources %}
-i "{{ inv }}"
{% endfor %}
test-targets
args:
chdir: ../..
register: out

@ -1,18 +1,21 @@
# /etc/environment
- file:
- name: Remove /etc/environment
file:
path: /etc/environment
state: absent
become: true
- shell: echo $MAGIC_ETC_ENV
- name: Check MAGIC_ETC_ENV without
shell: echo $MAGIC_ETC_ENV
register: echo
- assert:
that: echo.stdout == ""
fail_msg: echo={{echo}}
- copy:
- name: Create /etc/environment
copy:
dest: /etc/environment
content: |
MAGIC_ETC_ENV=555
@ -23,14 +26,16 @@
#- mitogen_shutdown_all:
#when: not is_mitogen
- shell: echo $MAGIC_ETC_ENV
- name: Check MAGIC_ETC_ENV with
shell: echo $MAGIC_ETC_ENV
register: echo
- assert:
that: echo.stdout == "555"
fail_msg: echo={{echo}}
- file:
- name: Cleanup /etc/environment
file:
path: /etc/environment
state: absent
become: true
@ -40,7 +45,8 @@
- mitogen_shutdown_all:
when: not is_mitogen
- shell: echo $MAGIC_ETC_ENV
- name: Check MAGIC_ETC_ENV without+shutdown
shell: echo $MAGIC_ETC_ENV
register: echo
- assert:

@ -1,6 +1,7 @@
# ~/.pam_environment
- file:
- name: Remove pam_environment
file:
path: ~/.pam_environment
state: absent
@ -11,7 +12,8 @@
that: echo.stdout == ""
fail_msg: echo={{echo}}
- copy:
- name: Copy pam_environment
copy:
dest: ~/.pam_environment
content: |
MAGIC_PAM_ENV=321
@ -23,7 +25,8 @@
that: echo.stdout == "321"
fail_msg: echo={{echo}}
- file:
- name: Cleanup pam_environment
file:
path: ~/.pam_environment
state: absent

@ -9,20 +9,16 @@
vars:
path: /tmp/atexit-should-delete-this
tasks:
#
# Verify a run with a healthy atexit handler. Broken handlers cause an
# exception to be raised.
#
- custom_python_run_script:
- name: Verify a run with a healthy atexit handler
custom_python_run_script:
script: |
import atexit, os, shutil
path = '{{path}}'
os.mkdir(path, int('777', 8))
atexit.register(shutil.rmtree, path)
- stat:
- name: Stat atexit file
stat:
path: "{{path}}"
register: out

@ -1,10 +1,10 @@
- name: integration/runner/builtin_command_module.yml
hosts: test-targets
any_errors_fatal: true
gather_facts: true
tasks:
- command: hostname
- name: Run hostname
command: hostname
with_sequence: start=1 end={{end|default(1)}}
register: out

@ -1,7 +1,6 @@
# https://github.com/dw/mitogen/issues/291
- name: integration/runner/custom_bash_hashbang_argument.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- custom_bash_old_style_module:

@ -1,6 +1,5 @@
- name: integration/runner/custom_bash_old_style_module.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- custom_bash_old_style_module:

@ -1,6 +1,5 @@
- name: integration/runner/custom_bash_want_json_module.yml
hosts: test-targets
any_errors_fatal: true
tasks:
- custom_bash_want_json_module:
foo: true

@ -1,6 +1,5 @@
- name: integration/runner/custom_binary_producing_json.yml
hosts: test-targets
any_errors_fatal: true
gather_facts: true
tasks:
- block:
