[stable-2.17] Unsafe persistence (#82779)

* Ensure that unsafe is more difficult to lose [stable-2.16] (#82293)

* Ensure that unsafe is more difficult to lose

* Add Task.untemplated_args, and switch assert over to use it
* Don't use re in first_found, switch to using native string methods
* If nested templating results in unsafe, just error, don't continue

* ci_complete

(cherry picked from commit 270b39f6ff)

* Fix various issues in unsafe_proxy (#82326)

- Use str/bytes directly instead of text_type/binary_type
- Fix AnsibleUnsafeBytes.__str__ implementation
- Fix AnsibleUnsafeBytes.__format__ return type
- Remove invalid methods from AnsibleUnsafeBytes (casefold, format, format_map)
- Use `chars` instead of `bytes` to match stdlib naming
- Remove commented out code

(cherry picked from commit 59aa0145d2)

* Additional Unsafe fixes (#82376)

* Allow older pickle protocols to pickle unsafe classes. Fixes #82356

* Address issues when iterating or getting single index from AnsibleUnsafeBytes. Fixes #82375

* clog frag

(cherry picked from commit afe3fc184f)

* [stable-2.16] Enable directly using `AnsibleUnsafeText` with Python `pathlib` (#82510)

* Enable directly using `AnsibleUnsafeText` with Python `pathlib`. Fixes #82414

(cherry picked from commit c6a652c081)

* Prevent failures due to unsafe plugin name (#82759)

(cherry picked from commit 56f31126ad)

* Address issues from merge conflicts

---------

Co-authored-by: Matt Clay <matt@mystile.com>
Co-authored-by: Martin Krizek <martin.krizek@gmail.com>

@@ -0,0 +1,2 @@
+bugfixes:
+  - "Fix an issue where setting a plugin name from an unsafe source resulted in ``ValueError: unmarshallable object`` (https://github.com/ansible/ansible/issues/82708)"

@@ -0,0 +1,6 @@
+security_fixes:
+  - templating - Address issues where internal templating can cause unsafe
+    variables to lose their unsafe designation (CVE-2023-5764)
+breaking_changes:
+  - assert - Nested templating may result in an inability for the conditional
+    to be evaluated. See the porting guide for more information.

@@ -0,0 +1,3 @@
+bugfixes:
+  - unsafe data - Address an incompatibility with ``AnsibleUnsafeText`` and ``AnsibleUnsafeBytes`` when pickling with ``protocol=0``
+  - unsafe data - Address an incompatibility when iterating or getting a single index from ``AnsibleUnsafeBytes``
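A quick check of the two behaviours above (a sketch; assumes an ansible-core checkout that includes this change):

    import pickle

    from ansible.utils.unsafe_proxy import AnsibleUnsafeBytes, AnsibleUnsafeText

    secret = AnsibleUnsafeText("s3cr3t")
    assert pickle.loads(pickle.dumps(secret, protocol=0)) == secret  # protocol=0 previously failed

    raw = AnsibleUnsafeBytes(b"abc")
    assert raw[0] == ord("a")          # a single index is an int again, as with plain bytes
    assert list(raw) == [97, 98, 99]   # iteration also yields ints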

@@ -0,0 +1,3 @@
+bugfixes:
+  - unsafe data - Enable directly using ``AnsibleUnsafeText`` with Python ``pathlib``
+    (https://github.com/ansible/ansible/issues/82414)
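Sketch of the pathlib interaction referenced above (illustrative; issue 82414):

    import pathlib

    from ansible.utils.unsafe_proxy import AnsibleUnsafeText

    p = pathlib.Path(AnsibleUnsafeText("/tmp")) / AnsibleUnsafeText("example.txt")
    assert p == pathlib.Path("/tmp/example.txt")  # comparisons like this previously failed on
                                                  # Pythons whose pathlib interns path strings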

@@ -28,7 +28,7 @@ def _preprocess_unsafe_encode(value):
     Used in ``AnsibleJSONEncoder.iterencode``
     """
     if _is_unsafe(value):
-        value = {'__ansible_unsafe': to_text(value, errors='surrogate_or_strict', nonstring='strict')}
+        value = {'__ansible_unsafe': to_text(value._strip_unsafe(), errors='surrogate_or_strict', nonstring='strict')}
     elif is_sequence(value):
         value = [_preprocess_unsafe_encode(v) for v in value]
     elif isinstance(value, Mapping):
@@ -61,7 +61,7 @@ class AnsibleJSONEncoder(json.JSONEncoder):
             value = {'__ansible_vault': to_text(o._ciphertext, errors='surrogate_or_strict', nonstring='strict')}
         elif getattr(o, '__UNSAFE__', False):
             # unsafe object, this will never be triggered, see ``AnsibleJSONEncoder.iterencode``
-            value = {'__ansible_unsafe': to_text(o, errors='surrogate_or_strict', nonstring='strict')}
+            value = {'__ansible_unsafe': to_text(o._strip_unsafe(), errors='surrogate_or_strict', nonstring='strict')}
         elif isinstance(o, Mapping):
             # hostvars and other objects
             value = dict(o)

@@ -22,7 +22,7 @@ import yaml
 from ansible.module_utils.six import text_type, binary_type
 from ansible.module_utils.common.yaml import SafeDumper
 from ansible.parsing.yaml.objects import AnsibleUnicode, AnsibleSequence, AnsibleMapping, AnsibleVaultEncryptedUnicode
-from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes, NativeJinjaUnsafeText, NativeJinjaText
+from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes, NativeJinjaUnsafeText, NativeJinjaText, _is_unsafe
 from ansible.template import AnsibleUndefined
 from ansible.vars.hostvars import HostVars, HostVarsVars
 from ansible.vars.manager import VarsWithSources
@@ -45,10 +45,14 @@ def represent_vault_encrypted_unicode(self, data):
 
 
 def represent_unicode(self, data):
+    if _is_unsafe(data):
+        data = data._strip_unsafe()
     return yaml.representer.SafeRepresenter.represent_str(self, text_type(data))
 
 
 def represent_binary(self, data):
+    if _is_unsafe(data):
+        data = data._strip_unsafe()
     return yaml.representer.SafeRepresenter.represent_binary(self, binary_type(data))

@@ -19,7 +19,7 @@ from __future__ import annotations
 import typing as t
 
-from ansible.errors import AnsibleError, AnsibleUndefinedVariable
+from ansible.errors import AnsibleError, AnsibleUndefinedVariable, AnsibleTemplateError
 from ansible.module_utils.common.text.converters import to_native
 from ansible.playbook.attribute import FieldAttribute
 from ansible.template import Templar
@@ -100,14 +100,14 @@ class Conditional:
                     return False
 
             # If the result of the first-pass template render (to resolve inline templates) is marked unsafe,
-            # explicitly disable lookups on the final pass to prevent evaluation of untrusted content in the
-            # constructed template.
-            disable_lookups = hasattr(conditional, '__UNSAFE__')
+            # explicitly fail since the next templating operation would never evaluate
+            if hasattr(conditional, '__UNSAFE__'):
+                raise AnsibleTemplateError('Conditional is marked as unsafe, and cannot be evaluated.')
 
             # NOTE The spaces around True and False are intentional to short-circuit literal_eval for
             # jinja2_native=False and avoid its expensive calls.
             return templar.template(
                 "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional,
-                disable_lookups=disable_lookups).strip() == "True"
+            ).strip() == "True"
         except AnsibleUndefinedVariable as e:
             raise AnsibleUndefinedVariable("error while evaluating conditional (%s): %s" % (original, e))

@@ -287,6 +287,30 @@ class Task(Base, Conditional, Taggable, CollectionSearch, Notifiable, Delegatabl
 
         super(Task, self).post_validate(templar)
 
+    def _post_validate_args(self, attr, value, templar):
+        # smuggle an untemplated copy of the task args for actions that need more control over the templating of their
+        # input (eg, debug's var/msg, assert's "that" conditional expressions)
+        self.untemplated_args = value
+
+        # now recursively template the args dict
+        args = templar.template(value)
+
+        # FIXME: could we just nuke this entirely and/or wrap it up in ModuleArgsParser or something?
+        if '_variable_params' in args:
+            variable_params = args.pop('_variable_params')
+            if isinstance(variable_params, dict):
+                if C.INJECT_FACTS_AS_VARS:
+                    display.warning("Using a variable for a task's 'args' is unsafe in some situations "
+                                    "(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)")
+                variable_params.update(args)
+                args = variable_params
+            else:
+                # if we didn't get a dict, it means there's garbage remaining after k=v parsing, just give up
+                # see https://github.com/ansible/ansible/issues/79862
+                raise AnsibleError(f"invalid or malformed argument: '{variable_params}'")
+
+        return args
+
     def _post_validate_loop(self, attr, value, templar):
         '''
         Override post validation for the loop field, which is templated

@@ -63,8 +63,29 @@ class ActionModule(ActionBase):
         quiet = boolean(self._task.args.get('quiet', False), strict=False)
 
+        # directly access 'that' via untemplated args from the task so we can intelligently trust embedded
+        # templates and preserve the original inputs/locations for better messaging on assert failures and
+        # errors.
+        # FIXME: even in devel, things like `that: item` don't always work properly (truthy string value
+        # is not really an embedded expression)
+        # we could fix that by doing direct var lookups on the inputs
+        # FIXME: some form of this code should probably be shared between debug, assert, and
+        # Task.post_validate, since they
+        # have a lot of overlapping needs
+        try:
+            thats = self._task.untemplated_args['that']
+        except KeyError:
+            # in the case of "we got our entire args dict from a template", we can just consult the
+            # post-templated dict (the damage has likely already been done for embedded templates anyway)
+            thats = self._task.args['that']
+
+        # FIXME: this is a case where we only want to resolve indirections, NOT recurse containers
+        # (and even then, the leaf-most expression being wrapped is at least suboptimal
+        # (since its expression will be "eaten").
+        if isinstance(thats, str):
+            thats = self._templar.template(thats)
+
         # make sure the 'that' items are a list
-        thats = self._task.args['that']
         if not isinstance(thats, list):
             thats = [thats]

@@ -36,7 +36,7 @@ from ansible.parsing.yaml.objects import AnsibleUnicode
 from ansible.plugins import AnsiblePlugin
 from ansible.utils.color import stringc
 from ansible.utils.display import Display
-from ansible.utils.unsafe_proxy import AnsibleUnsafeText, NativeJinjaUnsafeText
+from ansible.utils.unsafe_proxy import AnsibleUnsafeText, NativeJinjaUnsafeText, _is_unsafe
 from ansible.vars.clean import strip_internal_keys, module_response_deepcopy
 
 import yaml
@@ -111,6 +111,8 @@ def _munge_data_for_lossy_yaml(scalar):
 
 def _pretty_represent_str(self, data):
     """Uses block style for multi-line strings"""
+    if _is_unsafe(data):
+        data = data._strip_unsafe()
     data = text_type(data)
     if _should_use_block(data):
         style = '|'

@@ -35,6 +35,7 @@ from ansible.utils.display import Display
 from ansible.utils.encrypt import do_encrypt, PASSLIB_AVAILABLE
 from ansible.utils.hashing import md5s, checksum_s
 from ansible.utils.unicode import unicode_wrap
+from ansible.utils.unsafe_proxy import _is_unsafe
 from ansible.utils.vars import merge_hash
 
 
 display = Display()
@@ -217,6 +218,8 @@ def from_yaml(data):
         # The ``text_type`` call here strips any custom
         # string wrapper class, so that CSafeLoader can
         # read the data
+        if _is_unsafe(data):
+            data = data._strip_unsafe()
         return yaml_load(text_type(to_text(data, errors='surrogate_or_strict')))
     return data
 
@@ -226,6 +229,8 @@ def from_yaml_all(data):
         # The ``text_type`` call here strips any custom
         # string wrapper class, so that CSafeLoader can
         # read the data
+        if _is_unsafe(data):
+            data = data._strip_unsafe()
         return yaml_load_all(text_type(to_text(data, errors='surrogate_or_strict')))
     return data

@@ -34,6 +34,7 @@ from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleColl
 from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder, _get_collection_metadata
 from ansible.utils.display import Display
 from ansible.utils.plugin_docs import add_fragments
+from ansible.utils.unsafe_proxy import _is_unsafe
 
 
 # TODO: take the packaging dep, or vendor SpecifierSet?
@@ -861,6 +862,17 @@ class PluginLoader:
 
     def get_with_context(self, name, *args, **kwargs):
         ''' instantiates a plugin of the given name using arguments '''
+        if _is_unsafe(name):
+            # Objects constructed using the name wrapped as unsafe remain
+            # (correctly) unsafe. Using such unsafe objects in places
+            # where underlying types (builtin string in this case) are
+            # expected can cause problems.
+            # One such case is importlib.abc.Loader.exec_module failing
+            # with "ValueError: unmarshallable object" because the module
+            # object is created with the __path__ attribute being wrapped
+            # as unsafe which isn't marshallable.
+            # Manually removing the unsafe wrapper prevents such issues.
+            name = name._strip_unsafe()
 
         found_in_cache = True
         class_only = kwargs.pop('class_only', False)

@@ -138,7 +138,6 @@ RETURN = """
     elements: path
 """
 
 import os
-import re
 
 from collections.abc import Mapping, Sequence
@@ -150,10 +149,22 @@ from ansible.plugins.lookup import LookupBase
 from ansible.utils.path import unfrackpath
 
 
+def _splitter(value, chars):
+    chars = set(chars)
+    v = ''
+    for c in value:
+        if c in chars:
+            yield v
+            v = ''
+            continue
+        v += c
+    yield v
+
+
 def _split_on(terms, spliters=','):
     termlist = []
     if isinstance(terms, string_types):
-        termlist = re.split(r'[%s]' % ''.join(map(re.escape, spliters)), terms)
+        termlist = list(_splitter(terms, spliters))
     else:
         # added since options will already listify
         for t in terms:
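For reference, a short sketch of how the new splitter behaves; it mirrors the re.split() call it replaces without importing re:

    from ansible.plugins.lookup.first_found import _splitter

    assert list(_splitter("a.yml,b.yml;c.yml", ",;")) == ["a.yml", "b.yml", "c.yml"]
    assert list(_splitter("single.yml", ",")) == ["single.yml"]
    assert list(_splitter("trailing,", ",")) == ["trailing", ""]  # same result as re.split(r'[,]', ...)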

@@ -30,7 +30,7 @@ from contextlib import contextmanager
 from numbers import Number
 from traceback import format_exc
 
-from jinja2.exceptions import TemplateSyntaxError, UndefinedError
+from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError
 from jinja2.loaders import FileSystemLoader
 from jinja2.nativetypes import NativeEnvironment
 from jinja2.runtime import Context, StrictUndefined
@@ -54,7 +54,7 @@ from ansible.template.vars import AnsibleJ2Vars
 from ansible.utils.display import Display
 from ansible.utils.listify import listify_lookup_plugin_terms
 from ansible.utils.native_jinja import NativeJinjaText
-from ansible.utils.unsafe_proxy import to_unsafe_text, wrap_var
+from ansible.utils.unsafe_proxy import to_unsafe_text, wrap_var, AnsibleUnsafeText, AnsibleUnsafeBytes, NativeJinjaUnsafeText
 
 display = Display()
@@ -348,10 +348,21 @@ class AnsibleContext(Context):
     flag is checked post-templating, and (when set) will result in the
     final templated result being wrapped in AnsibleUnsafe.
     '''
+    _disallowed_callables = frozenset({
+        AnsibleUnsafeText._strip_unsafe.__qualname__,
+        AnsibleUnsafeBytes._strip_unsafe.__qualname__,
+        NativeJinjaUnsafeText._strip_unsafe.__qualname__,
+    })
+
     def __init__(self, *args, **kwargs):
         super(AnsibleContext, self).__init__(*args, **kwargs)
         self.unsafe = False
 
+    def call(self, obj, *args, **kwargs):
+        if getattr(obj, '__qualname__', None) in self._disallowed_callables or obj in self._disallowed_callables:
+            raise SecurityError(f"{obj!r} is not safely callable")
+        return super().call(obj, *args, **kwargs)
+
     def _is_unsafe(self, val):
         '''
         Our helper function, which will also recursively check dict and
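A brief illustration of what the call-site guard above blocks (a sketch): a template expression such as {{ secret._strip_unsafe() }} now raises jinja2's SecurityError instead of handing back a de-tainted string.

    from ansible.utils.unsafe_proxy import AnsibleUnsafeText

    bound = AnsibleUnsafeText("secret")._strip_unsafe
    print(bound.__qualname__)  # 'AnsibleUnsafeText._strip_unsafe' -- the name AnsibleContext.call() matches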

@@ -52,11 +52,14 @@
 from __future__ import annotations
 
+import sys
+import types
+import warnings
+
+from sys import intern as _sys_intern
 from collections.abc import Mapping, Set
 
 from ansible.module_utils.common.text.converters import to_bytes, to_text
 from ansible.module_utils.common.collections import is_sequence
-from ansible.module_utils.six import binary_type, text_type
 from ansible.utils.native_jinja import NativeJinjaText
@@ -67,16 +70,256 @@ class AnsibleUnsafe(object):
     __UNSAFE__ = True
 
 
-class AnsibleUnsafeBytes(binary_type, AnsibleUnsafe):
-    def decode(self, *args, **kwargs):
-        """Wrapper method to ensure type conversions maintain unsafe context"""
-        return AnsibleUnsafeText(super(AnsibleUnsafeBytes, self).decode(*args, **kwargs))
-
-
-class AnsibleUnsafeText(text_type, AnsibleUnsafe):
-    def encode(self, *args, **kwargs):
-        """Wrapper method to ensure type conversions maintain unsafe context"""
-        return AnsibleUnsafeBytes(super(AnsibleUnsafeText, self).encode(*args, **kwargs))
+class AnsibleUnsafeBytes(bytes, AnsibleUnsafe):
+    def _strip_unsafe(self):
+        return super().__bytes__()
+
+    def __reduce__(self, /):
+        return (self.__class__, (self._strip_unsafe(),))
+
+    def __str__(self, /):  # pylint: disable=invalid-str-returned
+        return self.decode()
+
+    def __bytes__(self, /):  # pylint: disable=invalid-bytes-returned
+        return self
+
+    def __repr__(self, /):  # pylint: disable=invalid-repr-returned
+        return AnsibleUnsafeText(super().__repr__())
+
+    def __format__(self, format_spec, /):  # pylint: disable=invalid-format-returned
+        return AnsibleUnsafeText(super().__format__(format_spec))
+
+    def __getitem__(self, key, /):
+        if isinstance(key, int):
+            return super().__getitem__(key)
+        return self.__class__(super().__getitem__(key))
+
+    def __reversed__(self, /):
+        return self[::-1]
+
+    def __add__(self, value, /):
+        return self.__class__(super().__add__(value))
+
+    def __radd__(self, value, /):
+        return self.__class__(value.__add__(self))
+
+    def __mul__(self, value, /):
+        return self.__class__(super().__mul__(value))
+
+    __rmul__ = __mul__
+
+    def __mod__(self, value, /):
+        return self.__class__(super().__mod__(value))
+
+    def __rmod__(self, value, /):
+        return self.__class__(super().__rmod__(value))
+
+    def capitalize(self, /):
+        return self.__class__(super().capitalize())
+
+    def center(self, width, fillchar=b' ', /):
+        return self.__class__(super().center(width, fillchar))
+
+    def decode(self, /, encoding='utf-8', errors='strict'):
+        return AnsibleUnsafeText(super().decode(encoding=encoding, errors=errors))
+
+    def removeprefix(self, prefix, /):
+        return self.__class__(super().removeprefix(prefix))
+
+    def removesuffix(self, suffix, /):
+        return self.__class__(super().removesuffix(suffix))
+
+    def expandtabs(self, /, tabsize=8):
+        return self.__class__(super().expandtabs(tabsize))
+
+    def join(self, iterable_of_bytes, /):
+        return self.__class__(super().join(iterable_of_bytes))
+
+    def ljust(self, width, fillchar=b' ', /):
+        return self.__class__(super().ljust(width, fillchar))
+
+    def lower(self, /):
+        return self.__class__(super().lower())
+
+    def lstrip(self, chars=None, /):
+        return self.__class__(super().lstrip(chars))
+
+    def partition(self, sep, /):
+        cls = self.__class__
+        return tuple(cls(e) for e in super().partition(sep))
+
+    def replace(self, old, new, count=-1, /):
+        return self.__class__(super().replace(old, new, count))
+
+    def rjust(self, width, fillchar=b' ', /):
+        return self.__class__(super().rjust(width, fillchar))
+
+    def rpartition(self, sep, /):
+        cls = self.__class__
+        return tuple(cls(e) for e in super().rpartition(sep))
+
+    def rstrip(self, chars=None, /):
+        return self.__class__(super().rstrip(chars))
+
+    def split(self, /, sep=None, maxsplit=-1):
+        cls = self.__class__
+        return [cls(e) for e in super().split(sep=sep, maxsplit=maxsplit)]
+
+    def rsplit(self, /, sep=None, maxsplit=-1):
+        cls = self.__class__
+        return [cls(e) for e in super().rsplit(sep=sep, maxsplit=maxsplit)]
+
+    def splitlines(self, /, keepends=False):
+        cls = self.__class__
+        return [cls(e) for e in super().splitlines(keepends=keepends)]
+
+    def strip(self, chars=None, /):
+        return self.__class__(super().strip(chars))
+
+    def swapcase(self, /):
+        return self.__class__(super().swapcase())
+
+    def title(self, /):
+        return self.__class__(super().title())
+
+    def translate(self, table, /, delete=b''):
+        return self.__class__(super().translate(table, delete=delete))
+
+    def upper(self, /):
+        return self.__class__(super().upper())
+
+    def zfill(self, width, /):
+        return self.__class__(super().zfill(width))
+
+
+class AnsibleUnsafeText(str, AnsibleUnsafe):
+    def _strip_unsafe(self, /):
+        return super().__str__()
+
+    def __reduce__(self, /):
+        return (self.__class__, (self._strip_unsafe(),))
+
+    def __str__(self, /):  # pylint: disable=invalid-str-returned
+        return self
+
+    def __repr__(self, /):  # pylint: disable=invalid-repr-returned
+        return self.__class__(super().__repr__())
+
+    def __format__(self, format_spec, /):  # pylint: disable=invalid-format-returned
+        return self.__class__(super().__format__(format_spec))
+
+    def __getitem__(self, key, /):
+        return self.__class__(super().__getitem__(key))
+
+    def __iter__(self, /):
+        cls = self.__class__
+        return (cls(c) for c in super().__iter__())
+
+    def __reversed__(self, /):
+        return self[::-1]
+
+    def __add__(self, value, /):
+        return self.__class__(super().__add__(value))
+
+    def __radd__(self, value, /):
+        return self.__class__(value.__add__(self))
+
+    def __mul__(self, value, /):
+        return self.__class__(super().__mul__(value))
+
+    __rmul__ = __mul__
+
+    def __mod__(self, value, /):
+        return self.__class__(super().__mod__(value))
+
+    def __rmod__(self, value, /):
+        return self.__class__(super().__rmod__(value))
+
+    def capitalize(self, /):
+        return self.__class__(super().capitalize())
+
+    def casefold(self, /):
+        return self.__class__(super().casefold())
+
+    def center(self, width, fillchar=' ', /):
+        return self.__class__(super().center(width, fillchar))
+
+    def encode(self, /, encoding='utf-8', errors='strict'):
+        return AnsibleUnsafeBytes(super().encode(encoding=encoding, errors=errors))
+
+    def removeprefix(self, prefix, /):
+        return self.__class__(super().removeprefix(prefix))
+
+    def removesuffix(self, suffix, /):
+        return self.__class__(super().removesuffix(suffix))
+
+    def expandtabs(self, /, tabsize=8):
+        return self.__class__(super().expandtabs(tabsize))
+
+    def format(self, /, *args, **kwargs):
+        return self.__class__(super().format(*args, **kwargs))
+
+    def format_map(self, mapping, /):
+        return self.__class__(super().format_map(mapping))
+
+    def join(self, iterable, /):
+        return self.__class__(super().join(iterable))
+
+    def ljust(self, width, fillchar=' ', /):
+        return self.__class__(super().ljust(width, fillchar))
+
+    def lower(self, /):
+        return self.__class__(super().lower())
+
+    def lstrip(self, chars=None, /):
+        return self.__class__(super().lstrip(chars))
+
+    def partition(self, sep, /):
+        cls = self.__class__
+        return tuple(cls(e) for e in super().partition(sep))
+
+    def replace(self, old, new, count=-1, /):
+        return self.__class__(super().replace(old, new, count))
+
+    def rjust(self, width, fillchar=' ', /):
+        return self.__class__(super().rjust(width, fillchar))
+
+    def rpartition(self, sep, /):
+        cls = self.__class__
+        return tuple(cls(e) for e in super().rpartition(sep))
+
+    def rstrip(self, chars=None, /):
+        return self.__class__(super().rstrip(chars))
+
+    def split(self, /, sep=None, maxsplit=-1):
+        cls = self.__class__
+        return [cls(e) for e in super().split(sep=sep, maxsplit=maxsplit)]
+
+    def rsplit(self, /, sep=None, maxsplit=-1):
+        cls = self.__class__
+        return [cls(e) for e in super().rsplit(sep=sep, maxsplit=maxsplit)]
+
+    def splitlines(self, /, keepends=False):
+        cls = self.__class__
+        return [cls(e) for e in super().splitlines(keepends=keepends)]
+
+    def strip(self, chars=None, /):
+        return self.__class__(super().strip(chars))
+
+    def swapcase(self, /):
+        return self.__class__(super().swapcase())
+
+    def title(self, /):
+        return self.__class__(super().title())
+
+    def translate(self, table, /):
+        return self.__class__(super().translate(table))
+
+    def upper(self, /):
+        return self.__class__(super().upper())
+
+    def zfill(self, width, /):
+        return self.__class__(super().zfill(width))
 
 
 class NativeJinjaUnsafeText(NativeJinjaText, AnsibleUnsafeText):
@@ -111,9 +354,9 @@ def wrap_var(v):
         v = _wrap_sequence(v)
     elif isinstance(v, NativeJinjaText):
         v = NativeJinjaUnsafeText(v)
-    elif isinstance(v, binary_type):
+    elif isinstance(v, bytes):
         v = AnsibleUnsafeBytes(v)
-    elif isinstance(v, text_type):
+    elif isinstance(v, str):
         v = AnsibleUnsafeText(v)
 
     return v
@@ -125,3 +368,24 @@ def to_unsafe_bytes(*args, **kwargs):
 
 def to_unsafe_text(*args, **kwargs):
     return wrap_var(to_text(*args, **kwargs))
+
+
+def _is_unsafe(obj):
+    return getattr(obj, '__UNSAFE__', False) is True
+
+
+def _intern(string):
+    """This is a monkey patch for ``sys.intern`` that will strip
+    the unsafe wrapper prior to interning the string.
+
+    This will not exist in future versions.
+    """
+    if isinstance(string, AnsibleUnsafeText):
+        string = string._strip_unsafe()
+    return _sys_intern(string)
+
+
+if isinstance(sys.intern, types.BuiltinFunctionType):
+    sys.intern = _intern
+else:
+    warnings.warn("skipped sys.intern patch; appears to have already been patched", RuntimeWarning)

@@ -13,22 +13,22 @@
         in command.stdout_lines
       - >-
         "Installing 'namespace_1.collection_1:1.0.0' to
-        '{{ install_path }}/namespace_1/collection_1'"
+        '" ~ install_path ~ "/namespace_1/collection_1'"
         in command.stdout_lines
       - >-
         'Created collection for namespace_1.collection_1:1.0.0 at
-        {{ install_path }}/namespace_1/collection_1'
+        ' ~ install_path ~ '/namespace_1/collection_1'
         in command.stdout_lines
       - >-
         'namespace_1.collection_1:1.0.0 was installed successfully'
         in command.stdout_lines
       - >-
         "Installing 'namespace_2.collection_2:1.0.0' to
-        '{{ install_path }}/namespace_2/collection_2'"
+        '" ~ install_path ~ "/namespace_2/collection_2'"
         in command.stdout_lines
       - >-
         'Created collection for namespace_2.collection_2:1.0.0 at
-        {{ install_path }}/namespace_2/collection_2'
+        ' ~ install_path ~ '/namespace_2/collection_2'
         in command.stdout_lines
       - >-
         'namespace_2.collection_2:1.0.0 was installed successfully'
@@ -58,22 +58,22 @@
         in command.stdout_lines
       - >-
         "Installing 'namespace_1.collection_1:1.0.0' to
-        '{{ install_path }}/namespace_1/collection_1'"
+        '" ~ install_path ~ "/namespace_1/collection_1'"
         in command.stdout_lines
       - >-
         'Created collection for namespace_1.collection_1:1.0.0 at
-        {{ install_path }}/namespace_1/collection_1'
+        ' ~ install_path ~ '/namespace_1/collection_1'
         in command.stdout_lines
       - >-
         'namespace_1.collection_1:1.0.0 was installed successfully'
         in command.stdout_lines
       - >-
         "Installing 'namespace_2.collection_2:1.0.0' to
-        '{{ install_path }}/namespace_2/collection_2'"
+        '" ~ install_path ~ "/namespace_2/collection_2'"
         in command.stdout_lines
       - >-
         'Created collection for namespace_2.collection_2:1.0.0 at
-        {{ install_path }}/namespace_2/collection_2'
+        ' ~ install_path ~ '/namespace_2/collection_2'
         in command.stdout_lines
       - >-
         'namespace_2.collection_2:1.0.0 was installed successfully'

@@ -2,7 +2,7 @@
 - name: Assert that a embedded vault of a string with no newline works
   assert:
     that:
-      - '"{{ vault_encrypted_one_line_var }}" == "Setec Astronomy"'
+      - 'vault_encrypted_one_line_var == "Setec Astronomy"'
 
 - name: Assert that a multi line embedded vault works, including new line
   assert:

@@ -2,7 +2,7 @@
 - name: Assert that a vault encrypted file with embedded vault of a string with no newline works
   assert:
     that:
-      - '"{{ vault_file_encrypted_with_encrypted_one_line_var }}" == "Setec Astronomy"'
+      - 'vault_file_encrypted_with_encrypted_one_line_var == "Setec Astronomy"'
 
 - name: Assert that a vault encrypted file with multi line embedded vault works, including new line
   assert:

@@ -44,7 +44,7 @@
     that:
       - 'result.changed'
      - 'result.state == "present"'
-      - 'result.repo == "{{test_ppa_name}}"'
+      - 'result.repo == test_ppa_name'

- name: 'examine apt cache mtime'
  stat: path='/var/cache/apt/pkgcache.bin'
@@ -75,7 +75,7 @@
    that:
      - 'result.changed'
      - 'result.state == "present"'
-      - 'result.repo == "{{test_ppa_name}}"'
+      - 'result.repo == test_ppa_name'

- name: 'examine apt cache mtime'
  stat: path='/var/cache/apt/pkgcache.bin'
@@ -106,7 +106,7 @@
    that:
      - 'result.changed'
      - 'result.state == "present"'
-      - 'result.repo == "{{test_ppa_name}}"'
+      - 'result.repo == test_ppa_name'

- name: 'examine apt cache mtime'
  stat: path='/var/cache/apt/pkgcache.bin'
@@ -145,7 +145,7 @@
    that:
      - 'result.changed'
      - 'result.state == "present"'
-      - 'result.repo == "{{test_ppa_spec}}"'
+      - 'result.repo == test_ppa_spec'
      - '"sources_added" in result'
      - 'result.sources_added | length == 1'
      - '"git" in result.sources_added[0]'
@@ -170,7 +170,7 @@
    that:
      - 'result.changed'
      - 'result.state == "absent"'
-      - 'result.repo == "{{test_ppa_spec}}"'
+      - 'result.repo == test_ppa_spec'
      - '"sources_added" in result'
      - 'result.sources_added | length == 0'
      - '"sources_removed" in result'
@@ -201,7 +201,7 @@
    that:
      - 'result.changed'
      - 'result.state == "present"'
-      - 'result.repo == "{{test_ppa_spec}}"'
+      - 'result.repo == test_ppa_spec'

- name: 'examine source file'
  stat: path='/etc/apt/sources.list.d/{{test_ppa_filename}}.list'

@@ -0,0 +1,4 @@
++ ansible-playbook -i localhost, -c local nested_tmpl.yml
+++ set +x
+[WARNING]: conditional statements should not include jinja2 templating
+delimiters such as {{ }} or {% %}. Found: "{{ foo }}" == "bar"

@@ -0,0 +1,12 @@
+
+PLAY [localhost] ***************************************************************
+
+TASK [assert] ******************************************************************
+ok: [localhost] => {
+    "changed": false,
+    "msg": "All assertions passed"
+}
+
+PLAY RECAP *********************************************************************
+localhost                  : ok=1    changed=0    unreachable=0    failed=0    skipped=0    rescued=0    ignored=0
+

@@ -0,0 +1,9 @@
+- hosts: localhost
+  gather_facts: False
+  tasks:
+    - assert:
+        that:
+          - '"{{ foo }}" == "bar"'
+          - foo == "bar"
+      vars:
+        foo: bar

@@ -5,12 +5,12 @@
     item_A: yes
   tasks:
     - assert:
-        that: "{{ item }} is defined"
+        that: "item is defined"
        quiet: True
      with_items:
        - item_A

    - assert:
-        that: "{{ item }} is defined"
+        that: "item is defined"
        quiet: False
      with_items:
        - item_A

@@ -45,7 +45,7 @@ cleanup() {
     fi
 }
 
-BASEFILE=assert_quiet.out
+BASEFILE=assert.out
 
 ORIGFILE="${BASEFILE}"
 OUTFILE="${BASEFILE}.new"
@@ -69,3 +69,4 @@ export ANSIBLE_NOCOLOR=1
 export ANSIBLE_RETRY_FILES_ENABLED=0
 
 run_test quiet
+run_test nested_tmpl

@@ -320,7 +320,7 @@
   assert:
     that:
       - shell_result0 is changed
-      - shell_result0.cmd == '{{ remote_tmp_dir_test }}/test.sh'
+      - shell_result0.cmd == remote_tmp_dir_test ~ '/test.sh'
      - shell_result0.rc == 0
      - shell_result0.stderr == ''
      - shell_result0.stdout == 'win'

@@ -1262,7 +1262,7 @@
   assert:
     that:
       - "copy_result6.changed"
-      - "copy_result6.dest == '{{remote_dir_expanded}}/multiline.txt'"
+      - "copy_result6.dest == remote_dir_expanded ~ '/multiline.txt'"
      - "copy_result6.checksum == '9cd0697c6a9ff6689f0afb9136fa62e0b3fee903'"

# test overwriting a file as an unprivileged user (pull request #8624)
@@ -2165,26 +2165,26 @@
      assert:
        that:
          - testcase5 is changed
-          - "stat_new_dir_with_chown.stat.uid == {{ ansible_copy_test_user.uid }}"
-          - "stat_new_dir_with_chown.stat.gid == {{ ansible_copy_test_group.gid }}"
-          - "stat_new_dir_with_chown.stat.pw_name == '{{ ansible_copy_test_user_name }}'"
-          - "stat_new_dir_with_chown.stat.gr_name == '{{ ansible_copy_test_user_name }}'"
-          - "stat_new_dir_with_chown_file1.stat.uid == {{ ansible_copy_test_user.uid }}"
-          - "stat_new_dir_with_chown_file1.stat.gid == {{ ansible_copy_test_group.gid }}"
-          - "stat_new_dir_with_chown_file1.stat.pw_name == '{{ ansible_copy_test_user_name }}'"
-          - "stat_new_dir_with_chown_file1.stat.gr_name == '{{ ansible_copy_test_user_name }}'"
-          - "stat_new_dir_with_chown_subdir.stat.uid == {{ ansible_copy_test_user.uid }}"
-          - "stat_new_dir_with_chown_subdir.stat.gid == {{ ansible_copy_test_group.gid }}"
-          - "stat_new_dir_with_chown_subdir.stat.pw_name == '{{ ansible_copy_test_user_name }}'"
-          - "stat_new_dir_with_chown_subdir.stat.gr_name == '{{ ansible_copy_test_user_name }}'"
-          - "stat_new_dir_with_chown_subdir_file12.stat.uid == {{ ansible_copy_test_user.uid }}"
-          - "stat_new_dir_with_chown_subdir_file12.stat.gid == {{ ansible_copy_test_group.gid }}"
-          - "stat_new_dir_with_chown_subdir_file12.stat.pw_name == '{{ ansible_copy_test_user_name }}'"
-          - "stat_new_dir_with_chown_subdir_file12.stat.gr_name == '{{ ansible_copy_test_user_name }}'"
-          - "stat_new_dir_with_chown_link_file12.stat.uid == {{ ansible_copy_test_user.uid }}"
-          - "stat_new_dir_with_chown_link_file12.stat.gid == {{ ansible_copy_test_group.gid }}"
-          - "stat_new_dir_with_chown_link_file12.stat.pw_name == '{{ ansible_copy_test_user_name }}'"
-          - "stat_new_dir_with_chown_link_file12.stat.gr_name == '{{ ansible_copy_test_user_name }}'"
+          - "stat_new_dir_with_chown.stat.uid == ansible_copy_test_user.uid"
+          - "stat_new_dir_with_chown.stat.gid == ansible_copy_test_group.gid"
+          - "stat_new_dir_with_chown.stat.pw_name == ansible_copy_test_user_name"
+          - "stat_new_dir_with_chown.stat.gr_name == ansible_copy_test_user_name"
+          - "stat_new_dir_with_chown_file1.stat.uid == ansible_copy_test_user.uid"
+          - "stat_new_dir_with_chown_file1.stat.gid == ansible_copy_test_group.gid"
+          - "stat_new_dir_with_chown_file1.stat.pw_name == ansible_copy_test_user_name"
+          - "stat_new_dir_with_chown_file1.stat.gr_name == ansible_copy_test_user_name"
+          - "stat_new_dir_with_chown_subdir.stat.uid == ansible_copy_test_user.uid"
+          - "stat_new_dir_with_chown_subdir.stat.gid == ansible_copy_test_group.gid"
+          - "stat_new_dir_with_chown_subdir.stat.pw_name == ansible_copy_test_user_name"
+          - "stat_new_dir_with_chown_subdir.stat.gr_name == ansible_copy_test_user_name"
+          - "stat_new_dir_with_chown_subdir_file12.stat.uid == ansible_copy_test_user.uid"
+          - "stat_new_dir_with_chown_subdir_file12.stat.gid == ansible_copy_test_group.gid"
+          - "stat_new_dir_with_chown_subdir_file12.stat.pw_name == ansible_copy_test_user_name"
+          - "stat_new_dir_with_chown_subdir_file12.stat.gr_name == ansible_copy_test_user_name"
+          - "stat_new_dir_with_chown_link_file12.stat.uid == ansible_copy_test_user.uid"
+          - "stat_new_dir_with_chown_link_file12.stat.gid == ansible_copy_test_group.gid"
+          - "stat_new_dir_with_chown_link_file12.stat.pw_name == ansible_copy_test_user_name"
+          - "stat_new_dir_with_chown_link_file12.stat.gr_name == ansible_copy_test_user_name"

    always:
      - name: execute - remove the user for test

@@ -18,3 +18,5 @@ done
 
 # ensure debug does not set top level vars when looking at ansible_facts
 ansible-playbook nosetfacts.yml "$@"
+
+ansible-playbook unsafe.yml "$@"

@@ -0,0 +1,13 @@
+- hosts: localhost
+  gather_facts: false
+  vars:
+    unsafe_var: !unsafe undef()|mandatory
+  tasks:
+    - debug:
+        var: '{{ unsafe_var }}'
+      ignore_errors: true
+      register: result
+
+    - assert:
+        that:
+          - result is successful

@@ -117,7 +117,7 @@
 - name: assert chdir works
   assert:
     that:
-      - "'{{chdir_result.stdout | trim}}' == '{{remote_tmp_dir_real_path.stdout | trim}}'"
+      - "chdir_result.stdout | trim == remote_tmp_dir_real_path.stdout | trim"
 
 - name: test timeout option
   expect:

@@ -927,7 +927,7 @@
     that:
       - "file_error3 is failed"
      - "file_error3.msg == 'src does not exist'"
-      - "file_error3.dest == '{{ remote_tmp_dir_test }}/hard.txt' | expanduser"
+      - "file_error3.dest == remote_tmp_dir_test | expanduser ~ '/hard.txt'"
      - "file_error3.src == 'non-existing-file-that-does-not-exist.txt'"

- block:

@@ -199,7 +199,7 @@
       - "missing_dst_no_follow_enable_force_use_mode2 is changed"
      - "missing_dst_no_follow_enable_force_use_mode3 is not changed"
      - "soft3_result['stat'].islnk"
-      - "soft3_result['stat'].lnk_target == '{{ user.home }}/nonexistent'"
+      - "soft3_result['stat'].lnk_target == user.home ~ '/nonexistent'"

#
# Test creating a link to a directory https://github.com/ansible/ansible/issues/1369

@@ -19,6 +19,13 @@
       - "{'foo': 'bar', 'baz': 'buz'}|urlencode == 'foo=bar&baz=buz'"
       - "()|urlencode == ''"
 
+- name: verify urlencode works for unsafe strings
+  assert:
+    that:
+      - thing|urlencode == 'foo%3Abar'
+  vars:
+    thing: !unsafe foo:bar
+
 # Needed (temporarily) due to coverage reports not including the last task.
 - assert:
     that: true

@@ -313,7 +313,7 @@
 - name: assert we skipped the ogg file
   assert:
     that:
-      - '"{{ remote_tmp_dir_test }}/e/f/g/h/8.ogg" not in find_test3_list'
+      - 'remote_tmp_dir_test ~ "/e/f/g/h/8.ogg" not in find_test3_list'

- name: patterns with regex
  find:
@@ -363,7 +363,7 @@
  assert:
    that:
      - result.matched == 1
-      - '"{{ remote_tmp_dir_test }}/astest/old.txt" in astest_list'
+      - 'remote_tmp_dir_test ~ "/astest/old.txt" in astest_list'

- name: find files newer than 1 week
  find:
@@ -378,7 +378,7 @@
  assert:
    that:
      - result.matched == 1
-      - '"{{ remote_tmp_dir_test }}/astest/new.txt" in astest_list'
+      - 'remote_tmp_dir_test ~ "/astest/new.txt" in astest_list'

- name: add some content to the new file
  shell: "echo hello world > {{ remote_tmp_dir_test }}/astest/new.txt"
@@ -398,7 +398,7 @@
  assert:
    that:
      - result.matched == 1
-      - '"{{ remote_tmp_dir_test }}/astest/new.txt" in astest_list'
+      - 'remote_tmp_dir_test ~ "/astest/new.txt" in astest_list'
      - '"checksum" in result.files[0]'

- name: find ANY item with LESS than 5 bytes, also get checksums
@@ -417,8 +417,8 @@
  assert:
    that:
      - result.matched == 2
-      - '"{{ remote_tmp_dir_test }}/astest/old.txt" in astest_list'
-      - '"{{ remote_tmp_dir_test }}/astest/.hidden.txt" in astest_list'
+      - 'remote_tmp_dir_test ~ "/astest/old.txt" in astest_list'
+      - 'remote_tmp_dir_test ~ "/astest/.hidden.txt" in astest_list'
      - '"checksum" in result.files[0]'

# Test permission error is correctly handled by find module

@@ -61,7 +61,7 @@
 - assert:
     that:
       - exact_mode_0644.files == exact_mode_0644_symbolic.files
-      - exact_mode_0644.files[0].path == '{{ remote_tmp_dir_test }}/mode_0644'
+      - exact_mode_0644.files[0].path == remote_tmp_dir_test ~ '/mode_0644'
      - user_readable_octal.files == user_readable_symbolic.files
      - user_readable_octal.files|map(attribute='path')|map('basename')|sort == ['mode_0400', 'mode_0444', 'mode_0644', 'mode_0666', 'mode_0700']
      - other_readable_octal.files == other_readable_symbolic.files

@@ -433,7 +433,7 @@
     - name: Test reading facts from default fact_path
       assert:
         that:
-          - '"{{ ansible_local.testfact.fact_dir }}" == "default"'
+          - 'ansible_local.testfact.fact_dir == "default"'
 
 - hosts: facthost9
   tags: [ 'fact_local']
@@ -444,7 +444,7 @@
     - name: Test reading facts from custom fact_path
       assert:
         that:
-          - '"{{ ansible_local.testfact.fact_dir }}" == "custom"'
+          - 'ansible_local.testfact.fact_dir == "custom"'
 
 - hosts: facthost20
   tags: [ 'fact_facter_ohai' ]

@@ -172,7 +172,7 @@
 - name: DEPTH | check update arrived
   assert:
     that:
-      - "{{ a_file.content | b64decode | trim }} == 3"
+      - a_file.content | b64decode | trim == "3"
       - git_fetch is changed
 
 - name: DEPTH | clear checkout_dir

@@ -58,7 +58,7 @@
 - name: LOCALMODS | check update arrived
   assert:
     that:
-      - "{{ a_file.content | b64decode | trim }} == 2"
+      - a_file.content | b64decode | trim == "2"
       - git_fetch_force is changed
 
 - name: LOCALMODS | clear checkout_dir
@@ -127,7 +127,7 @@
 - name: LOCALMODS | check update arrived
   assert:
     that:
-      - "{{ a_file.content | b64decode | trim }} == 2"
+      - a_file.content | b64decode | trim == "2"
       - git_fetch_force is changed
 
 - name: LOCALMODS | clear checkout_dir

@@ -32,7 +32,7 @@
 - name: SUBMODULES | Ensure submodu1 is at the appropriate commit
   assert:
-    that: '{{ submodule1.stdout_lines | length }} == 2'
+    that: 'submodule1.stdout_lines | length == 2'

- name: SUBMODULES | clear checkout_dir
  file:
@@ -53,7 +53,7 @@
- name: SUBMODULES | Ensure submodule1 is at the appropriate commit
  assert:
-    that: '{{ submodule1.stdout_lines | length }} == 4'
+    that: 'submodule1.stdout_lines | length == 4'

- name: SUBMODULES | Copy the checkout so we can run several different tests on it
  command: 'cp -pr {{ checkout_dir }} {{ checkout_dir }}.bak'
@@ -84,8 +84,8 @@
- name: SUBMODULES | Ensure both submodules are at the appropriate commit
  assert:
    that:
-      - '{{ submodule1.stdout_lines|length }} == 4'
-      - '{{ submodule2.stdout_lines|length }} == 2'
+      - 'submodule1.stdout_lines|length == 4'
+      - 'submodule2.stdout_lines|length == 2'

- name: SUBMODULES | Remove checkout dir
@@ -112,7 +112,7 @@
- name: SUBMODULES | Ensure submodule1 is at the appropriate commit
  assert:
-    that: '{{ submodule1.stdout_lines | length }} == 5'
+    that: 'submodule1.stdout_lines | length == 5'

- name: SUBMODULES | Test that update with recursive found new submodules
@@ -121,7 +121,7 @@
- name: SUBMODULES | Enusre submodule2 is at the appropriate commit
  assert:
-    that: '{{ submodule2.stdout_lines | length }} == 4'
+    that: 'submodule2.stdout_lines | length == 4'

- name: SUBMODULES | clear checkout_dir
  file:
@@ -147,4 +147,4 @@
- name: SUBMODULES | Ensure submodule1 is at the appropriate commit
  assert:
-    that: '{{ submodule1.stdout_lines | length }} == 4'
+    that: 'submodule1.stdout_lines | length == 4'

@@ -22,7 +22,7 @@
 - name: Check that multiple duplicate lines collapse into a single commands
   assert:
     that:
-      - "{{ result.commands|length }} == 1"
+      - "result.commands|length == 1"
 
 - name: Check that set is correctly prepended
   assert:
@@ -58,6 +58,6 @@
 - assert:
     that:
-      - "{{ result.filtered|length }} == 2"
+      - "result.filtered|length == 2"
 
 - debug: msg="END cli/config_check.yaml on connection={{ ansible_connection }}"

@@ -16,17 +16,17 @@
     - name: Assert that the before dicts were correctly generated
       assert:
         that:
-          - "{{ populate | symmetric_difference(result['before']) |length == 0 }}"
+          - "populate | symmetric_difference(result['before']) |length == 0"
 
     - name: Assert that the correct set of commands were generated
       assert:
         that:
-          - "{{ deleted['commands'] | symmetric_difference(result['commands']) |length == 0 }}"
+          - "deleted['commands'] | symmetric_difference(result['commands']) |length == 0"
 
     - name: Assert that the after dicts were correctly generated
       assert:
         that:
-          - "{{ deleted['after'] | symmetric_difference(result['after']) |length == 0 }}"
+          - "deleted['after'] | symmetric_difference(result['after']) |length == 0"
 
     - name: Delete attributes of given interfaces (IDEMPOTENT)
       vyos.vyos.vyos_lldp_interfaces: *deleted
@@ -41,6 +41,6 @@
     - name: Assert that the before dicts were correctly generated
       assert:
         that:
-          - "{{ deleted['after'] | symmetric_difference(result['before']) |length == 0 }}"
+          - "deleted['after'] | symmetric_difference(result['before']) |length == 0"
 
   always:
     - include_tasks: _remove_config.yaml

@@ -28,17 +28,17 @@
     - name: Assert that before dicts were correctly generated
       assert:
-        that: "{{ merged['before'] | symmetric_difference(result['before']) |length == 0 }}"
+        that: "merged['before'] | symmetric_difference(result['before']) |length == 0"
 
     - name: Assert that correct set of commands were generated
       assert:
         that:
-          - "{{ merged['commands'] | symmetric_difference(result['commands']) |length == 0 }}"
+          - "merged['commands'] | symmetric_difference(result['commands']) |length == 0"
 
     - name: Assert that after dicts was correctly generated
       assert:
         that:
-          - "{{ merged['after'] | symmetric_difference(result['after']) |length == 0 }}"
+          - "merged['after'] | symmetric_difference(result['after']) |length == 0"
 
     - name: Merge the provided configuration with the existing running configuration (IDEMPOTENT)
       vyos.vyos.vyos_lldp_interfaces: *merged
@@ -52,7 +52,7 @@
     - name: Assert that before dicts were correctly generated
       assert:
         that:
-          - "{{ merged['after'] | symmetric_difference(result['before']) |length == 0 }}"
+          - "merged['after'] | symmetric_difference(result['before']) |length == 0"
 
   always:
     - include_tasks: _remove_config.yaml

@@ -19,17 +19,17 @@
     - name: Assert that before dicts were correctly generated
       assert:
         that:
-          - "{{ populate_intf | symmetric_difference(result['before']) |length == 0 }}"
+          - "populate_intf | symmetric_difference(result['before']) |length == 0"
 
     - name: Assert that correct commands were generated
       assert:
         that:
-          - "{{ overridden['commands'] | symmetric_difference(result['commands']) |length == 0 }}"
+          - "overridden['commands'] | symmetric_difference(result['commands']) |length == 0"
 
     - name: Assert that after dicts were correctly generated
       assert:
         that:
-          - "{{ overridden['after'] | symmetric_difference(result['after']) |length == 0 }}"
+          - "overridden['after'] | symmetric_difference(result['after']) |length == 0"
 
     - name: Overrides all device configuration with provided configurations (IDEMPOTENT)
       vyos.vyos.vyos_lldp_interfaces: *overridden
@@ -43,7 +43,7 @@
     - name: Assert that before dicts were correctly generated
       assert:
         that:
-          - "{{ overridden['after'] | symmetric_difference(result['before']) |length == 0 }}"
+          - "overridden['after'] | symmetric_difference(result['before']) |length == 0"
 
   always:
     - include_tasks: _remove_config.yaml

@@ -33,17 +33,17 @@
     - name: Assert that correct set of commands were generated
       assert:
         that:
-          - "{{ replaced['commands'] | symmetric_difference(result['commands']) |length == 0 }}"
+          - "replaced['commands'] | symmetric_difference(result['commands']) |length == 0"
 
     - name: Assert that before dicts are correctly generated
       assert:
         that:
-          - "{{ populate | symmetric_difference(result['before']) |length == 0 }}"
+          - "populate | symmetric_difference(result['before']) |length == 0"
 
     - name: Assert that after dict is correctly generated
       assert:
         that:
-          - "{{ replaced['after'] | symmetric_difference(result['after']) |length == 0 }}"
+          - "replaced['after'] | symmetric_difference(result['after']) |length == 0"
 
     - name: Replace device configurations of listed LLDP interfaces with provided configurarions (IDEMPOTENT)
       vyos.vyos.vyos_lldp_interfaces: *replaced
@@ -57,7 +57,7 @@
     - name: Assert that before dict is correctly generated
       assert:
         that:
-          - "{{ replaced['after'] | symmetric_difference(result['before']) |length == 0 }}"
+          - "replaced['after'] | symmetric_difference(result['before']) |length == 0"
 
   always:
     - include_tasks: _remove_config.yaml

@@ -15,7 +15,7 @@
     that:
       - "testing == 789"
       - "base_dir == 'environments/development'"
-      - "{{ included_one_file.ansible_included_var_files | length }} == 1"
+      - "included_one_file.ansible_included_var_files | length == 1"
       - "'vars/environments/development/all.yml' in included_one_file.ansible_included_var_files[0]"
 - name: include the vars/environments/development/all.yml and save results in all
@@ -51,7 +51,7 @@
   assert:
     that:
       - webapp_version is defined
-      - "'file_without_extension' in '{{ include_without_file_extension.ansible_included_var_files | join(' ') }}'"
+      - "'file_without_extension' in include_without_file_extension.ansible_included_var_files | join(' ')"
 - name: include every directory in vars
   include_vars:
@@ -67,7 +67,7 @@
       - "testing == 456"
       - "base_dir == 'services'"
       - "webapp_containers == 10"
-      - "{{ include_every_dir.ansible_included_var_files | length }} == 7"
+      - "include_every_dir.ansible_included_var_files | length == 7"
       - "'vars/all/all.yml' in include_every_dir.ansible_included_var_files[0]"
       - "'vars/environments/development/all.yml' in include_every_dir.ansible_included_var_files[1]"
       - "'vars/environments/development/services/webapp.yml' in include_every_dir.ansible_included_var_files[2]"
@@ -88,9 +88,9 @@
     that:
       - "testing == 789"
       - "base_dir == 'environments/development'"
-      - "{{ include_without_webapp.ansible_included_var_files | length }} == 4"
-      - "'webapp.yml' not in '{{ include_without_webapp.ansible_included_var_files | join(' ') }}'"
-      - "'file_without_extension' not in '{{ include_without_webapp.ansible_included_var_files | join(' ') }}'"
+      - "include_without_webapp.ansible_included_var_files | length == 4"
+      - "'webapp.yml' not in include_without_webapp.ansible_included_var_files | join(' ')"
+      - "'file_without_extension' not in include_without_webapp.ansible_included_var_files | join(' ')"
 - name: include only files matching webapp.yml
   include_vars:
@@ -104,9 +104,9 @@
       - "testing == 101112"
       - "base_dir == 'development/services'"
       - "webapp_containers == 20"
-      - "{{ include_match_webapp.ansible_included_var_files | length }} == 1"
+      - "include_match_webapp.ansible_included_var_files | length == 1"
       - "'vars/environments/development/services/webapp.yml' in include_match_webapp.ansible_included_var_files[0]"
-      - "'all.yml' not in '{{ include_match_webapp.ansible_included_var_files | join(' ') }}'"
+      - "'all.yml' not in include_match_webapp.ansible_included_var_files | join(' ')"
 - name: include only files matching webapp.yml and store results in webapp
   include_vars:
@@ -173,10 +173,10 @@
 - name: Verify the hash variable
   assert:
     that:
-      - "{{ config | length }} == 3"
+      - "config | length == 3"
       - "config.key0 == 0"
       - "config.key1 == 0"
-      - "{{ config.key2 | length }} == 1"
+      - "config.key2 | length == 1"
       - "config.key2.a == 21"
 - name: Include the second file to merge the hash variable
@@ -187,10 +187,10 @@
 - name: Verify that the hash is merged
   assert:
     that:
-      - "{{ config | length }} == 4"
+      - "config | length == 4"
       - "config.key0 == 0"
       - "config.key1 == 1"
-      - "{{ config.key2 | length }} == 2"
+      - "config.key2 | length == 2"
       - "config.key2.a == 21"
       - "config.key2.b == 22"
       - "config.key3 == 3"
@@ -202,9 +202,9 @@
 - name: Verify that the properties from the first file is cleared
   assert:
     that:
-      - "{{ config | length }} == 3"
+      - "config | length == 3"
       - "config.key1 == 1"
-      - "{{ config.key2 | length }} == 1"
+      - "config.key2 | length == 1"
       - "config.key2.b == 22"
       - "config.key3 == 3"
@@ -216,10 +216,10 @@
 - name: Verify that the hash is merged after vars files are accumulated
   assert:
     that:
-      - "{{ config | length }} == 3"
+      - "config | length == 3"
       - "config.key0 is undefined"
       - "config.key1 == 1"
-      - "{{ config.key2 | length }} == 1"
+      - "config.key2 | length == 1"
       - "config.key2.b == 22"
       - "config.key3 == 3"

@@ -109,8 +109,8 @@
 - name: Load variables specific for OS family
   assert:
     that:
-      - "{{item|quote}} is file"
-      - "{{item|basename == 'itworks.yml'}}"
+      - "item is file"
+      - "item|basename == 'itworks.yml'"
   with_first_found:
     - files:
         - "{{ansible_id}}-{{ansible_lsb.major_release}}.yml" # invalid var, should be skipped
@@ -124,8 +124,8 @@
 - name: Load variables specific for OS family, but now as list of dicts, same options as above
   assert:
     that:
-      - "{{item|quote}} is file"
-      - "{{item|basename == 'itworks.yml'}}"
+      - "item is file"
+      - "item|basename == 'itworks.yml'"
   with_first_found:
     - files:
         - "{{ansible_id}}-{{ansible_lsb.major_release}}.yml"

@@ -10,7 +10,7 @@
     field_with_space: "{{lookup('ini', 'field.with.space type=properties file=lookup.properties')}}"
 - assert:
-    that: "{{item}} is defined"
+    that: "item is defined"
   with_items: [ 'test1', 'test2', 'test_dot', 'field_with_space' ]
 - name: "read ini value"

@@ -133,7 +133,7 @@
 - assert:
     that:
-      - "'{{ item.0.name }}' != 'carol'"
+      - "item.0.name != 'carol'"
   with_subelements:
     - "{{ users }}"
     - mysql.privs
@@ -220,5 +220,5 @@
 - assert:
     that:
-      - "'{{ user_alice }}' == 'localhost'"
-      - "'{{ user_bob }}' == 'db1'"
+      - "user_alice == 'localhost'"
+      - "user_bob == 'db1'"

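The same hardening applies when a condition splices a variable into a quoted literal: ``'{{ user_alice }}' == 'localhost'`` renders the value into the expression text before evaluation, which both re-templates the value and breaks if it ever contains a quote. Comparing the variable directly keeps everything in a single evaluation. A short sketch, assuming ``user_alice`` and ``user_bob`` were set by earlier tasks:

    - assert:
        that:
          # instead of - "'{{ user_alice }}' == 'localhost'"
          - user_alice == 'localhost'
          - user_bob == 'db1'
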
@@ -3,7 +3,7 @@
     that:
       - ansible_loop.index == ansible_loop.index0 + 1
       - ansible_loop.revindex == ansible_loop.revindex0 + 1
-      - ansible_loop.first == {{ ansible_loop.index == 1 }}
-      - ansible_loop.last == {{ ansible_loop.index == ansible_loop.length }}
+      - ansible_loop.first == (ansible_loop.index == 1)
+      - ansible_loop.last == (ansible_loop.index == ansible_loop.length)
       - ansible_loop.length == 3
       - ansible_loop.allitems|join(',') == 'first,second,third'

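For boolean comparisons the replacement is plain parentheses rather than a nested ``{{ }}``; the sub-expression is evaluated in the same pass as the rest of the condition. A sketch using ``loop_control.extended`` so ``ansible_loop`` is available:

    - name: Compare extended loop facts without nested templating
      assert:
        that:
          - ansible_loop.first == (ansible_loop.index == 1)
          - ansible_loop.last == (ansible_loop.index == ansible_loop.length)
      loop: [first, second, third]
      loop_control:
        extended: true
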
@@ -14,4 +14,4 @@
 - assert:
     that:
       - '"location" in result'
-      - 'result["location"] == "{{ expected_location}}"'
+      - 'result["location"] == expected_location'

@@ -13,4 +13,4 @@
 - assert:
     that:
       - '"location" in result'
-      - 'result["location"] == "{{ expected_location}}"'
+      - 'result["location"] == expected_location'

@@ -7,4 +7,4 @@
   assert:
     that:
       - '"location" in result'
-      - 'result["location"] == "{{ expected_location }}"'
+      - 'result["location"] == expected_location'

@@ -7,4 +7,4 @@
   assert:
     that:
       - '"location" in result'
-      - 'result["location"] == "{{ expected_location }}"'
+      - 'result["location"] == expected_location'

@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+from ansible.plugins.action import ActionBase
+
+
+class ActionModule(ActionBase):
+    def run(self, tmp=None, task_vars=None):
+        return {"nca_executed": True}

@@ -37,3 +37,5 @@ ansible-playbook use_coll_name.yml -i ../../inventory -e 'ansible_connection=ans
 # test filter loading ignoring duplicate file basename
 ansible-playbook file_collision/play.yml "$@"
+
+ANSIBLE_COLLECTIONS_PATH=$PWD/collections ansible-playbook unsafe_plugin_name.yml "$@"

@@ -0,0 +1,9 @@
+- hosts: localhost
+  gather_facts: false
+  tasks:
+    - action: !unsafe n.c.a
+      register: r
+
+    - assert:
+        that:
+          - r.nca_executed

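The playbook above is the regression test for the ``ValueError: unmarshallable object`` failure noted in the changelog: the action name is tagged ``!unsafe`` so it reaches the plugin loader wrapped as unsafe text, and the task must still resolve and execute the ``n.c.a`` collection action. The same situation can arise without an explicit tag, for example when a connection plugin name is supplied through a variable that originated from an untrusted source. A hedged sketch of that case (hypothetical play, assuming the ``local`` connection is acceptable for the target host):

    - hosts: localhost
      gather_facts: false
      vars:
        # tagged unsafe to mimic a value that arrived from an untrusted source
        ansible_connection: !unsafe local
      tasks:
        - ping:
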
@@ -209,7 +209,7 @@
   assert:
     that:
       - _check_mode_test2 is skipped
-      - '_check_mode_test2.msg == "{{ remote_tmp_dir_test | expanduser }}/afile2.txt exists, matching creates option"'
+      - '_check_mode_test2.msg == remote_tmp_dir_test | expanduser ~ "/afile2.txt exists, matching creates option"'
 - name: Remove afile2.txt
   file:
@@ -231,7 +231,7 @@
   assert:
     that:
       - _check_mode_test3 is skipped
-      - '_check_mode_test3.msg == "{{ remote_tmp_dir_test | expanduser }}/afile2.txt does not exist, matching removes option"'
+      - '_check_mode_test3.msg == remote_tmp_dir_test | expanduser ~ "/afile2.txt does not exist, matching removes option"'
 # executable

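Where a condition needs a string built from a variable plus a literal, the embedded ``{{ }}`` is replaced with Jinja2's ``~`` concatenation operator. Filters bind tighter than ``~``, so ``remote_tmp_dir_test | expanduser ~ "/afile2.txt ..."`` applies ``expanduser`` first and then concatenates, all within the one templating pass. A sketch of the shape, assuming a hypothetical ``base_dir`` variable and a registered ``result``:

    - assert:
        that:
          # (base_dir | expanduser) is evaluated first, then ~ appends the literal
          - result.msg == base_dir | expanduser ~ "/afile2.txt exists, matching creates option"
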
@@ -33,7 +33,7 @@
       - 'slurp_existing.encoding == "base64"'
       - 'slurp_existing is not changed'
       - 'slurp_existing is not failed'
-      - '"{{ slurp_existing.content | b64decode }}" == "We are at the café"'
+      - 'slurp_existing.content | b64decode == "We are at the café"'
 - name: Create a binary file to test with
   copy:

@@ -357,7 +357,7 @@
 - assert:
     that:
       - "\"foo t'e~m\\plated\" in unusual_results.stdout_lines"
-      - "{{unusual_results.stdout_lines| length}} == 1"
+      - "unusual_results.stdout_lines| length == 1"
 - name: check that the unusual filename can be checked for changes
   template:

@@ -66,7 +66,7 @@
       - zip_success.changed
       # Verify that file list is generated
       - "'files' in zip_success"
-      - "{{zip_success['files']| length}} == 3"
+      - "zip_success['files']| length == 3"
       - "'foo-unarchive.txt' in zip_success['files']"
       - "'foo-unarchive-777.txt' in zip_success['files']"
       - "'FOO-UNAR.TXT' in zip_success['files']"

@@ -47,7 +47,7 @@
       - "unarchive06_stat.stat.mode == '0600'"
       # Verify that file list is generated
       - "'files' in unarchive06"
-      - "{{unarchive06['files']| length}} == 1"
+      - "unarchive06['files']| length == 1"
       - "'foo-unarchive.txt' in unarchive06['files']"
 - name: remove our tar.gz unarchive destination
@@ -97,7 +97,7 @@
       - "unarchive07.changed == false"
       # Verify that file list is generated
       - "'files' in unarchive07"
-      - "{{unarchive07['files']| length}} == 1"
+      - "unarchive07['files']| length == 1"
       - "'foo-unarchive.txt' in unarchive07['files']"
 - name: remove our tar.gz unarchive destination
@@ -131,7 +131,7 @@
       - "unarchive08_stat.stat.mode == '0601'"
       # Verify that file list is generated
       - "'files' in unarchive08"
-      - "{{unarchive08['files']| length}} == 3"
+      - "unarchive08['files']| length == 3"
       - "'foo-unarchive.txt' in unarchive08['files']"
       - "'foo-unarchive-777.txt' in unarchive08['files']"
       - "'FOO-UNAR.TXT' in unarchive08['files']"
@@ -163,7 +163,7 @@
       - "unarchive08_stat.stat.mode == '0601'"
       # Verify that file list is generated
       - "'files' in unarchive08"
-      - "{{unarchive08['files']| length}} == 3"
+      - "unarchive08['files']| length == 3"
       - "'foo-unarchive.txt' in unarchive08['files']"
       - "'foo-unarchive-777.txt' in unarchive08['files']"
       - "'FOO-UNAR.TXT' in unarchive08['files']"

@@ -40,7 +40,7 @@
       - unarchive10 is changed
       # Verify that file list is generated
       - "'files' in unarchive10"
-      - "{{unarchive10['files']| length}} == 1"
+      - "unarchive10['files']| length == 1"
       - "'foo-unarchive.txt' in unarchive10['files']"
       - archive_path.stat.exists

@@ -17,7 +17,7 @@
       - "unarchive03.changed == true"
       # Verify that file list is generated
       - "'files' in unarchive03"
-      - "{{unarchive03['files']| length}} == 3"
+      - "unarchive03['files']| length == 3"
       - "'foo-unarchive.txt' in unarchive03['files']"
       - "'foo-unarchive-777.txt' in unarchive03['files']"
       - "'FOO-UNAR.TXT' in unarchive03['files']"

@@ -40,7 +40,7 @@
   assert:
     that:
       - waitfor is successful
-      - waitfor.path == "{{ remote_tmp_dir | expanduser }}/wait_for_file"
+      - waitfor.path == remote_tmp_dir | expanduser ~ "/wait_for_file"
       - waitfor.elapsed >= 2
       - waitfor.elapsed <= 15
@@ -58,7 +58,7 @@
   assert:
     that:
       - waitfor is successful
-      - waitfor.path == "{{ remote_tmp_dir | expanduser }}/wait_for_file"
+      - waitfor.path == remote_tmp_dir | expanduser ~ "/wait_for_file"
       - waitfor.elapsed >= 2
       - waitfor.elapsed <= 15
@@ -163,7 +163,7 @@
     that:
      - waitfor is successful
      - waitfor is not changed
-      - "waitfor.port == {{ http_port }}"
+      - "waitfor.port == http_port"
 - name: install psutil using pip (non-Linux)
   pip:
@@ -191,7 +191,7 @@
     that:
      - waitfor is successful
      - waitfor is not changed
-      - "waitfor.port == {{ http_port }}"
+      - "waitfor.port == http_port"
 - name: test wait_for with delay
   wait_for:

@@ -541,6 +541,12 @@ def main():
             "ignore",
             "AnsibleCollectionFinder has already been configured")
+        # ansible.utils.unsafe_proxy attempts patching sys.intern generating a warning if it was already patched
+        warnings.filterwarnings(
+            "ignore",
+            "skipped sys.intern patch; appears to have already been patched"
+        )
         try:
             yield
         finally:

@@ -25,7 +25,6 @@ from ansible.parsing import vault
 from ansible.parsing.yaml import dumper, objects
 from ansible.parsing.yaml.loader import AnsibleLoader
 from ansible.template import AnsibleUndefined
-from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes
 from units.mock.yaml_helper import YamlTestUtils
 from units.mock.vault_helper import TextVaultSecret
@@ -65,8 +64,7 @@ class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
     def test_bytes(self):
         b_text = u'tréma'.encode('utf-8')
-        unsafe_object = AnsibleUnsafeBytes(b_text)
-        yaml_out = self._dump_string(unsafe_object, dumper=self.dumper)
+        yaml_out = self._dump_string(b_text, dumper=self.dumper)
         stream = self._build_stream(yaml_out)
         loader = self._loader(stream)
@@ -79,8 +77,7 @@ class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
     def test_unicode(self):
         u_text = u'nöel'
-        unsafe_object = AnsibleUnsafeText(u_text)
-        yaml_out = self._dump_string(unsafe_object, dumper=self.dumper)
+        yaml_out = self._dump_string(u_text, dumper=self.dumper)
         stream = self._build_stream(yaml_out)
         loader = self._loader(stream)

@@ -4,6 +4,9 @@
 from __future__ import annotations
+import pathlib
+import sys
 from ansible.utils.unsafe_proxy import AnsibleUnsafe, AnsibleUnsafeBytes, AnsibleUnsafeText, wrap_var
 from ansible.module_utils.common.text.converters import to_text, to_bytes
@@ -114,3 +117,10 @@ def test_to_text_unsafe():
 def test_to_bytes_unsafe():
     assert isinstance(to_bytes(AnsibleUnsafeText(u'foo')), AnsibleUnsafeBytes)
     assert to_bytes(AnsibleUnsafeText(u'foo')) == AnsibleUnsafeBytes(b'foo')
+
+
+def test_unsafe_with_sys_intern():
+    # Specifically this is actually about sys.intern, test of pathlib
+    # because that is a specific affected use
+    assert sys.intern(AnsibleUnsafeText('foo')) == 'foo'
+    assert pathlib.Path(AnsibleUnsafeText('/tmp')) == pathlib.Path('/tmp')
