Ensure that unsafe is more difficult to lose [stable-2.16] (#82293)

* Ensure that unsafe is more difficult to lose

* Add Task.untemplated_args, and switch assert over to use it
* Don't use re in first_found, switch to using native string methods
* If nested templating results in unsafe, just error, don't continue

* ci_complete
Matt Martz committed by GitHub
parent f302b2f592
commit 270b39f6ff

@ -0,0 +1,6 @@
security_fixes:
- templating - Address issues where internal templating can cause unsafe
variables to lose their unsafe designation (CVE-2023-5764)
breaking_changes:
- assert - Nested templating may result in an inability for the conditional
to be evaluated. See the porting guide for more information.

@ -30,7 +30,7 @@ def _preprocess_unsafe_encode(value):
Used in ``AnsibleJSONEncoder.iterencode``
"""
if _is_unsafe(value):
value = {'__ansible_unsafe': to_text(value, errors='surrogate_or_strict', nonstring='strict')}
value = {'__ansible_unsafe': to_text(value._strip_unsafe(), errors='surrogate_or_strict', nonstring='strict')}
elif is_sequence(value):
value = [_preprocess_unsafe_encode(v) for v in value]
elif isinstance(value, Mapping):
@ -63,7 +63,7 @@ class AnsibleJSONEncoder(json.JSONEncoder):
value = {'__ansible_vault': to_text(o._ciphertext, errors='surrogate_or_strict', nonstring='strict')}
elif getattr(o, '__UNSAFE__', False):
# unsafe object, this will never be triggered, see ``AnsibleJSONEncoder.iterencode``
value = {'__ansible_unsafe': to_text(o, errors='surrogate_or_strict', nonstring='strict')}
value = {'__ansible_unsafe': to_text(o._strip_unsafe(), errors='surrogate_or_strict', nonstring='strict')}
elif isinstance(o, Mapping):
# hostvars and other objects
value = dict(o)

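As a reading aid (not part of the patch): the two encoder changes above swap to_text(value) for to_text(value._strip_unsafe()), so the __ansible_unsafe tag dict carries a plain string rather than the AnsibleUnsafe subclass instance. A minimal standalone sketch of that idea, using a toy class instead of ansible-core imports:

import json


class ToyUnsafeText(str):
    # stand-in for AnsibleUnsafeText, for illustration only
    __UNSAFE__ = True

    def _strip_unsafe(self):
        return super().__str__()   # plain str copy, subclass wrapper dropped


def preprocess_unsafe_encode(value):
    # single-scalar analogue of _preprocess_unsafe_encode after this change
    if getattr(value, '__UNSAFE__', False):
        return {'__ansible_unsafe': value._strip_unsafe()}
    return value


tainted = ToyUnsafeText('{{ lookup("pipe", "id") }}')
print(json.dumps(preprocess_unsafe_encode(tainted)))
# {"__ansible_unsafe": "{{ lookup(\"pipe\", \"id\") }}"}

The tag dict is what allows the value to be recognized and re-wrapped as unsafe when it is decoded again.
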
@ -24,7 +24,7 @@ import yaml
from ansible.module_utils.six import text_type, binary_type
from ansible.module_utils.common.yaml import SafeDumper
from ansible.parsing.yaml.objects import AnsibleUnicode, AnsibleSequence, AnsibleMapping, AnsibleVaultEncryptedUnicode
from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes, NativeJinjaUnsafeText, NativeJinjaText
from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes, NativeJinjaUnsafeText, NativeJinjaText, _is_unsafe
from ansible.template import AnsibleUndefined
from ansible.vars.hostvars import HostVars, HostVarsVars
from ansible.vars.manager import VarsWithSources
@ -47,10 +47,14 @@ def represent_vault_encrypted_unicode(self, data):
def represent_unicode(self, data):
if _is_unsafe(data):
data = data._strip_unsafe()
return yaml.representer.SafeRepresenter.represent_str(self, text_type(data))
def represent_binary(self, data):
if _is_unsafe(data):
data = data._strip_unsafe()
return yaml.representer.SafeRepresenter.represent_binary(self, binary_type(data))

@ -21,7 +21,7 @@ __metaclass__ = type
import typing as t
from ansible.errors import AnsibleError, AnsibleUndefinedVariable
from ansible.errors import AnsibleError, AnsibleUndefinedVariable, AnsibleTemplateError
from ansible.module_utils.common.text.converters import to_native
from ansible.playbook.attribute import FieldAttribute
from ansible.template import Templar
@ -102,14 +102,14 @@ class Conditional:
return False
# If the result of the first-pass template render (to resolve inline templates) is marked unsafe,
# explicitly disable lookups on the final pass to prevent evaluation of untrusted content in the
# constructed template.
disable_lookups = hasattr(conditional, '__UNSAFE__')
# explicitly fail since the next templating operation would never evaluate
if hasattr(conditional, '__UNSAFE__'):
raise AnsibleTemplateError('Conditional is marked as unsafe, and cannot be evaluated.')
# NOTE The spaces around True and False are intentional to short-circuit literal_eval for
# jinja2_native=False and avoid its expensive calls.
return templar.template(
"{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional,
disable_lookups=disable_lookups).strip() == "True"
).strip() == "True"
except AnsibleUndefinedVariable as e:
raise AnsibleUndefinedVariable("error while evaluating conditional (%s): %s" % (original, e))

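The behavioural shift in Conditional above is that an unsafe first-pass result is no longer evaluated at all; previously it was still templated, just with lookups disabled. A small sketch of the new guard with toy stand-ins (the real code raises AnsibleTemplateError and hands safe strings on to Templar.template):

class ToyTemplateError(Exception):
    pass


class ToyUnsafeText(str):
    __UNSAFE__ = True


def check_conditional(conditional):
    # unsafe now means "refuse to evaluate", not "evaluate with lookups disabled"
    if hasattr(conditional, '__UNSAFE__'):
        raise ToyTemplateError('Conditional is marked as unsafe, and cannot be evaluated.')
    # the real implementation would template
    # "{% if <conditional> %} True {% else %} False {% endif %}" here
    return conditional


try:
    check_conditional(ToyUnsafeText('"{{ foo }}" == "bar"'))
except ToyTemplateError as exc:
    print(exc)   # Conditional is marked as unsafe, and cannot be evaluated.
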
@ -289,6 +289,30 @@ class Task(Base, Conditional, Taggable, CollectionSearch, Notifiable, Delegatabl
super(Task, self).post_validate(templar)
def _post_validate_args(self, attr, value, templar):
# smuggle an untemplated copy of the task args for actions that need more control over the templating of their
# input (eg, debug's var/msg, assert's "that" conditional expressions)
self.untemplated_args = value
# now recursively template the args dict
args = templar.template(value)
# FIXME: could we just nuke this entirely and/or wrap it up in ModuleArgsParser or something?
if '_variable_params' in args:
variable_params = args.pop('_variable_params')
if isinstance(variable_params, dict):
if C.INJECT_FACTS_AS_VARS:
display.warning("Using a variable for a task's 'args' is unsafe in some situations "
"(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)")
variable_params.update(args)
args = variable_params
else:
# if we didn't get a dict, it means there's garbage remaining after k=v parsing, just give up
# see https://github.com/ansible/ansible/issues/79862
raise AnsibleError(f"invalid or malformed argument: '{variable_params}'")
return args
def _post_validate_loop(self, attr, value, templar):
'''
Override post validation for the loop field, which is templated

@ -64,8 +64,29 @@ class ActionModule(ActionBase):
quiet = boolean(self._task.args.get('quiet', False), strict=False)
# make sure the 'that' items are a list
# directly access 'that' via untemplated args from the task so we can intelligently trust embedded
# templates and preserve the original inputs/locations for better messaging on assert failures and
# errors.
# FIXME: even in devel, things like `that: item` don't always work properly (truthy string value
# is not really an embedded expression)
# we could fix that by doing direct var lookups on the inputs
# FIXME: some form of this code should probably be shared between debug, assert, and
# Task.post_validate, since they
# have a lot of overlapping needs
try:
thats = self._task.untemplated_args['that']
except KeyError:
# in the case of "we got our entire args dict from a template", we can just consult the
# post-templated dict (the damage has likely already been done for embedded templates anyway)
thats = self._task.args['that']
# FIXME: this is a case where we only want to resolve indirections, NOT recurse containers
# (and even then, the leaf-most expression being wrapped is at least suboptimal
# (since its expression will be "eaten").
if isinstance(thats, str):
thats = self._templar.template(thats)
# make sure the 'that' items are a list
if not isinstance(thats, list):
thats = [thats]

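To make the lookup order above explicit, here is a sketch of how the assert action now resolves 'that' (plain dicts and a callable stand in for self._task.untemplated_args, self._task.args and self._templar.template; the example variable name is hypothetical):

def resolve_that(untemplated_args, args, template):
    # prefer the untemplated copy so embedded expressions stay under assert's control
    try:
        thats = untemplated_args['that']
    except KeyError:
        # the whole args dict came from a template; fall back to the templated copy
        thats = args['that']
    if isinstance(thats, str):
        # resolve a single indirection such as "{{ my_conditions }}"
        thats = template(thats)
    if not isinstance(thats, list):
        thats = [thats]
    return thats


conditions = resolve_that({'that': 'my_conditions'}, {}, lambda s: ['foo == "bar"'])
print(conditions)   # ['foo == "bar"']
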
@ -38,7 +38,7 @@ from ansible.parsing.yaml.objects import AnsibleUnicode
from ansible.plugins import AnsiblePlugin
from ansible.utils.color import stringc
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import AnsibleUnsafeText, NativeJinjaUnsafeText
from ansible.utils.unsafe_proxy import AnsibleUnsafeText, NativeJinjaUnsafeText, _is_unsafe
from ansible.vars.clean import strip_internal_keys, module_response_deepcopy
import yaml
@ -113,6 +113,8 @@ def _munge_data_for_lossy_yaml(scalar):
def _pretty_represent_str(self, data):
"""Uses block style for multi-line strings"""
if _is_unsafe(data):
data = data._strip_unsafe()
data = text_type(data)
if _should_use_block(data):
style = '|'

@ -37,6 +37,7 @@ from ansible.utils.display import Display
from ansible.utils.encrypt import do_encrypt, PASSLIB_AVAILABLE
from ansible.utils.hashing import md5s, checksum_s
from ansible.utils.unicode import unicode_wrap
from ansible.utils.unsafe_proxy import _is_unsafe
from ansible.utils.vars import merge_hash
display = Display()
@ -215,6 +216,8 @@ def from_yaml(data):
# The ``text_type`` call here strips any custom
# string wrapper class, so that CSafeLoader can
# read the data
if _is_unsafe(data):
data = data._strip_unsafe()
return yaml_load(text_type(to_text(data, errors='surrogate_or_strict')))
return data
@ -224,6 +227,8 @@ def from_yaml_all(data):
# The ``text_type`` call here strips any custom
# string wrapper class, so that CSafeLoader can
# read the data
if _is_unsafe(data):
data = data._strip_unsafe()
return yaml_load_all(text_type(to_text(data, errors='surrogate_or_strict')))
return data

@ -139,7 +139,6 @@ RETURN = """
elements: path
"""
import os
import re
from collections.abc import Mapping, Sequence
@ -150,10 +149,22 @@ from ansible.module_utils.six import string_types
from ansible.plugins.lookup import LookupBase
def _splitter(value, chars):
chars = set(chars)
v = ''
for c in value:
if c in chars:
yield v
v = ''
continue
v += c
yield v
def _split_on(terms, spliters=','):
termlist = []
if isinstance(terms, string_types):
termlist = re.split(r'[%s]' % ''.join(map(re.escape, spliters)), terms)
termlist = list(_splitter(terms, spliters))
else:
# added since options will already listify
for t in terms:

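The _splitter generator above replaces re.split(r'[%s]' % ''.join(map(re.escape, spliters)), terms), so no regular expression is ever constructed from the separator characters; for single-character separators the two should produce the same fields. A quick standalone check (generator copied verbatim from the hunk):

import re


def _splitter(value, chars):
    chars = set(chars)
    v = ''
    for c in value:
        if c in chars:
            yield v
            v = ''
            continue
        v += c
    yield v


terms = 'foo.yml,bar.yml;baz.yml'
assert list(_splitter(terms, ',;')) == re.split(r'[%s]' % ''.join(map(re.escape, ',;')), terms)
print(list(_splitter(terms, ',;')))   # ['foo.yml', 'bar.yml', 'baz.yml']
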
@ -31,7 +31,7 @@ from contextlib import contextmanager
from numbers import Number
from traceback import format_exc
from jinja2.exceptions import TemplateSyntaxError, UndefinedError
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError
from jinja2.loaders import FileSystemLoader
from jinja2.nativetypes import NativeEnvironment
from jinja2.runtime import Context, StrictUndefined
@ -55,7 +55,7 @@ from ansible.template.vars import AnsibleJ2Vars
from ansible.utils.display import Display
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.native_jinja import NativeJinjaText
from ansible.utils.unsafe_proxy import to_unsafe_text, wrap_var
from ansible.utils.unsafe_proxy import to_unsafe_text, wrap_var, AnsibleUnsafeText, AnsibleUnsafeBytes, NativeJinjaUnsafeText
display = Display()
@ -365,10 +365,21 @@ class AnsibleContext(Context):
flag is checked post-templating, and (when set) will result in the
final templated result being wrapped in AnsibleUnsafe.
'''
_disallowed_callables = frozenset({
AnsibleUnsafeText._strip_unsafe.__qualname__,
AnsibleUnsafeBytes._strip_unsafe.__qualname__,
NativeJinjaUnsafeText._strip_unsafe.__qualname__,
})
def __init__(self, *args, **kwargs):
super(AnsibleContext, self).__init__(*args, **kwargs)
self.unsafe = False
def call(self, obj, *args, **kwargs):
if getattr(obj, '__qualname__', None) in self._disallowed_callables or obj in self._disallowed_callables:
raise SecurityError(f"{obj!r} is not safely callable")
return super().call(obj, *args, **kwargs)
def _is_unsafe(self, val):
'''
Our helper function, which will also recursively check dict and

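The call override above relies on Jinja2 routing template-level calls through Context.call, which lets Ansible refuse to invoke _strip_unsafe from inside a template (an otherwise trivial way to launder an unsafe value). A minimal standalone illustration of the same hook, using plain Jinja2 and a hypothetical disallowed function name rather than the AnsibleContext qualnames:

from jinja2 import Environment
from jinja2.exceptions import SecurityError
from jinja2.runtime import Context


class GuardedContext(Context):
    # hypothetical name for illustration; Ansible lists the _strip_unsafe qualnames here
    _disallowed_callables = frozenset({'read_secret'})

    def call(self, obj, *args, **kwargs):
        if getattr(obj, '__qualname__', None) in self._disallowed_callables:
            raise SecurityError(f'{obj!r} is not safely callable')
        return super().call(obj, *args, **kwargs)


def read_secret():
    return 'hunter2'


env = Environment()
env.context_class = GuardedContext
try:
    env.from_string('{{ fn() }}').render(fn=read_secret)
except SecurityError as exc:
    print(exc)   # <function read_secret at 0x...> is not safely callable
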
@ -69,15 +69,264 @@ class AnsibleUnsafe(object):
class AnsibleUnsafeBytes(binary_type, AnsibleUnsafe):
def decode(self, *args, **kwargs):
"""Wrapper method to ensure type conversions maintain unsafe context"""
return AnsibleUnsafeText(super(AnsibleUnsafeBytes, self).decode(*args, **kwargs))
def _strip_unsafe(self):
return super().__bytes__()
def __str__(self, /): # pylint: disable=invalid-str-returned
return self.encode()
def __bytes__(self, /): # pylint: disable=invalid-bytes-returned
return self
def __repr__(self, /): # pylint: disable=invalid-repr-returned
return AnsibleUnsafeText(super().__repr__())
def __format__(self, format_spec, /): # pylint: disable=invalid-format-returned
return self.__class__(super().__format__(format_spec))
def __getitem__(self, key, /):
return self.__class__(super().__getitem__(key))
def __iter__(self, /):
cls = self.__class__
return (cls(c) for c in super().__iter__())
def __reversed__(self, /):
return self[::-1]
def __add__(self, value, /):
return self.__class__(super().__add__(value))
def __radd__(self, value, /):
return self.__class__(value.__add__(self))
def __mul__(self, value, /):
return self.__class__(super().__mul__(value))
__rmul__ = __mul__
def __mod__(self, value, /):
return self.__class__(super().__mod__(value))
def __rmod__(self, value, /):
return self.__class__(super().__rmod__(value))
def capitalize(self, /):
return self.__class__(super().capitalize())
def casefold(self, /):
return self.__class__(super().casefold())
def center(self, width, fillchar=b' ', /):
return self.__class__(super().center(width, fillchar))
def decode(self, /, encoding='utf-8', errors='strict'):
return AnsibleUnsafeText(super().decode(encoding=encoding, errors=errors))
def removeprefix(self, prefix, /):
return self.__class__(super().removeprefix(prefix))
def removesuffix(self, suffix, /):
return self.__class__(super().removesuffix(suffix))
def expandtabs(self, /, tabsize=8):
return self.__class__(super().expandtabs(tabsize))
def format(self, /, *args, **kwargs):
return self.__class__(super().format(*args, **kwargs))
def format_map(self, mapping, /):
return self.__class__(super().format_map(mapping))
def join(self, iterable_of_bytes, /):
return self.__class__(super().join(iterable_of_bytes))
def ljust(self, width, fillchar=b' ', /):
return self.__class__(super().ljust(width, fillchar))
def lower(self, /):
return self.__class__(super().lower())
def lstrip(self, bytes=None, /):
return self.__class__(super().lstrip(bytes))
def partition(self, sep, /):
cls = self.__class__
return tuple(cls(e) for e in super().partition(sep))
def replace(self, old, new, count=-1, /):
return self.__class__(super().replace(old, new, count))
def rjust(self, width, fillchar=b' ', /):
return self.__class__(super().rjust(width, fillchar))
def rpartition(self, sep, /):
cls = self.__class__
return tuple(cls(e) for e in super().rpartition(sep))
def rstrip(self, bytes=None, /):
return self.__class__(super().rstrip(bytes))
def split(self, /, sep=None, maxsplit=-1):
cls = self.__class__
return [cls(e) for e in super().split(sep=sep, maxsplit=maxsplit)]
def rsplit(self, /, sep=None, maxsplit=-1):
cls = self.__class__
return [cls(e) for e in super().rsplit(sep=sep, maxsplit=maxsplit)]
def splitlines(self, /, keepends=False):
cls = self.__class__
return [cls(e) for e in super().splitlines(keepends=keepends)]
def strip(self, bytes=None, /):
return self.__class__(super().strip(bytes))
def swapcase(self, /):
return self.__class__(super().swapcase())
def title(self, /):
return self.__class__(super().title())
def translate(self, table, /, delete=b''):
return self.__class__(super().translate(table, delete=delete))
def upper(self, /):
return self.__class__(super().upper())
def zfill(self, width, /):
return self.__class__(super().zfill(width))
class AnsibleUnsafeText(text_type, AnsibleUnsafe):
def encode(self, *args, **kwargs):
"""Wrapper method to ensure type conversions maintain unsafe context"""
return AnsibleUnsafeBytes(super(AnsibleUnsafeText, self).encode(*args, **kwargs))
# def __getattribute__(self, name):
# print(f'attr: {name}')
# return object.__getattribute__(self, name)
def _strip_unsafe(self, /):
return super().__str__()
def __str__(self, /): # pylint: disable=invalid-str-returned
return self
def __repr__(self, /): # pylint: disable=invalid-repr-returned
return self.__class__(super().__repr__())
def __format__(self, format_spec, /): # pylint: disable=invalid-format-returned
return self.__class__(super().__format__(format_spec))
def __getitem__(self, key, /):
return self.__class__(super().__getitem__(key))
def __iter__(self, /):
cls = self.__class__
return (cls(c) for c in super().__iter__())
def __reversed__(self, /):
return self[::-1]
def __add__(self, value, /):
return self.__class__(super().__add__(value))
def __radd__(self, value, /):
return self.__class__(value.__add__(self))
def __mul__(self, value, /):
return self.__class__(super().__mul__(value))
__rmul__ = __mul__
def __mod__(self, value, /):
return self.__class__(super().__mod__(value))
def __rmod__(self, value, /):
return self.__class__(super().__rmod__(value))
def capitalize(self, /):
return self.__class__(super().capitalize())
def casefold(self, /):
return self.__class__(super().casefold())
def center(self, width, fillchar=' ', /):
return self.__class__(super().center(width, fillchar))
def encode(self, /, encoding='utf-8', errors='strict'):
return AnsibleUnsafeBytes(super().encode(encoding=encoding, errors=errors))
def removeprefix(self, prefix, /):
return self.__class__(super().removeprefix(prefix))
def removesuffix(self, suffix, /):
return self.__class__(super().removesuffix(suffix))
def expandtabs(self, /, tabsize=8):
return self.__class__(super().expandtabs(tabsize))
def format(self, /, *args, **kwargs):
return self.__class__(super().format(*args, **kwargs))
def format_map(self, mapping, /):
return self.__class__(super().format_map(mapping))
def join(self, iterable, /):
return self.__class__(super().join(iterable))
def ljust(self, width, fillchar=' ', /):
return self.__class__(super().ljust(width, fillchar))
def lower(self, /):
return self.__class__(super().lower())
def lstrip(self, chars=None, /):
return self.__class__(super().lstrip(chars))
def partition(self, sep, /):
cls = self.__class__
return tuple(cls(e) for e in super().partition(sep))
def replace(self, old, new, count=-1, /):
return self.__class__(super().replace(old, new, count))
def rjust(self, width, fillchar=' ', /):
return self.__class__(super().rjust(width, fillchar))
def rpartition(self, sep, /):
cls = self.__class__
return tuple(cls(e) for e in super().rpartition(sep))
def rstrip(self, chars=None, /):
return self.__class__(super().rstrip(chars))
def split(self, /, sep=None, maxsplit=-1):
cls = self.__class__
return [cls(e) for e in super().split(sep=sep, maxsplit=maxsplit)]
def rsplit(self, /, sep=None, maxsplit=-1):
cls = self.__class__
return [cls(e) for e in super().rsplit(sep=sep, maxsplit=maxsplit)]
def splitlines(self, /, keepends=False):
cls = self.__class__
return [cls(e) for e in super().splitlines(keepends=keepends)]
def strip(self, chars=None, /):
return self.__class__(super().strip(chars))
def swapcase(self, /):
return self.__class__(super().swapcase())
def title(self, /):
return self.__class__(super().title())
def translate(self, table, /):
return self.__class__(super().translate(table))
def upper(self, /):
return self.__class__(super().upper())
def zfill(self, width, /):
return self.__class__(super().zfill(width))
class NativeJinjaUnsafeText(NativeJinjaText, AnsibleUnsafeText):
@ -126,3 +375,7 @@ def to_unsafe_bytes(*args, **kwargs):
def to_unsafe_text(*args, **kwargs):
return wrap_var(to_text(*args, **kwargs))
def _is_unsafe(obj):
return getattr(obj, '__UNSAFE__', False) is True

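The long run of method overrides above exists so that ordinary string operations on unsafe values return unsafe values again instead of silently decaying to plain str/bytes. A toy sketch of that taint-propagation pattern (one method shown; the real classes override essentially every value-producing method):

class ToyUnsafeText(str):
    __UNSAFE__ = True

    def upper(self, /):
        # re-wrap so the unsafe designation survives the operation
        return self.__class__(super().upper())


tainted = ToyUnsafeText('{{ danger }}')
print(type(tainted.upper()).__name__, getattr(tainted.upper(), '__UNSAFE__', False))
# ToyUnsafeText True
print(type('{{ danger }}'.upper()).__name__)
# str
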
@ -13,22 +13,22 @@
in command.stdout_lines
- >-
"Installing 'namespace_1.collection_1:1.0.0' to
'{{ install_path }}/namespace_1/collection_1'"
'" ~ install_path ~ "/namespace_1/collection_1'"
in command.stdout_lines
- >-
'Created collection for namespace_1.collection_1:1.0.0 at
{{ install_path }}/namespace_1/collection_1'
' ~ install_path ~ '/namespace_1/collection_1'
in command.stdout_lines
- >-
'namespace_1.collection_1:1.0.0 was installed successfully'
in command.stdout_lines
- >-
"Installing 'namespace_2.collection_2:1.0.0' to
'{{ install_path }}/namespace_2/collection_2'"
'" ~ install_path ~ "/namespace_2/collection_2'"
in command.stdout_lines
- >-
'Created collection for namespace_2.collection_2:1.0.0 at
{{ install_path }}/namespace_2/collection_2'
' ~ install_path ~ '/namespace_2/collection_2'
in command.stdout_lines
- >-
'namespace_2.collection_2:1.0.0 was installed successfully'
@ -58,22 +58,22 @@
in command.stdout_lines
- >-
"Installing 'namespace_1.collection_1:1.0.0' to
'{{ install_path }}/namespace_1/collection_1'"
'" ~ install_path ~ "/namespace_1/collection_1'"
in command.stdout_lines
- >-
'Created collection for namespace_1.collection_1:1.0.0 at
{{ install_path }}/namespace_1/collection_1'
' ~ install_path ~ '/namespace_1/collection_1'
in command.stdout_lines
- >-
'namespace_1.collection_1:1.0.0 was installed successfully'
in command.stdout_lines
- >-
"Installing 'namespace_2.collection_2:1.0.0' to
'{{ install_path }}/namespace_2/collection_2'"
'" ~ install_path ~ "/namespace_2/collection_2'"
in command.stdout_lines
- >-
'Created collection for namespace_2.collection_2:1.0.0 at
{{ install_path }}/namespace_2/collection_2'
' ~ install_path ~ '/namespace_2/collection_2'
in command.stdout_lines
- >-
'namespace_2.collection_2:1.0.0 was installed successfully'

@ -2,7 +2,7 @@
- name: Assert that an embedded vault of a string with no newline works
assert:
that:
- '"{{ vault_encrypted_one_line_var }}" == "Setec Astronomy"'
- 'vault_encrypted_one_line_var == "Setec Astronomy"'
- name: Assert that a multi line embedded vault works, including new line
assert:

@ -2,7 +2,7 @@
- name: Assert that a vault encrypted file with embedded vault of a string with no newline works
assert:
that:
- '"{{ vault_file_encrypted_with_encrypted_one_line_var }}" == "Setec Astronomy"'
- 'vault_file_encrypted_with_encrypted_one_line_var == "Setec Astronomy"'
- name: Assert that a vault encrypted file with multi line embedded vault works, including new line
assert:

@ -50,7 +50,7 @@
that:
- 'result.changed'
- 'result.state == "present"'
- 'result.repo == "{{test_ppa_name}}"'
- 'result.repo == test_ppa_name'
- name: 'examine apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
@ -81,7 +81,7 @@
that:
- 'result.changed'
- 'result.state == "present"'
- 'result.repo == "{{test_ppa_name}}"'
- 'result.repo == test_ppa_name'
- name: 'examine apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
@ -112,7 +112,7 @@
that:
- 'result.changed'
- 'result.state == "present"'
- 'result.repo == "{{test_ppa_name}}"'
- 'result.repo == test_ppa_name'
- name: 'examine apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
@ -151,7 +151,7 @@
that:
- 'result.changed'
- 'result.state == "present"'
- 'result.repo == "{{test_ppa_spec}}"'
- 'result.repo == test_ppa_spec'
- '"sources_added" in result'
- 'result.sources_added | length == 1'
- '"git" in result.sources_added[0]'
@ -176,7 +176,7 @@
that:
- 'result.changed'
- 'result.state == "absent"'
- 'result.repo == "{{test_ppa_spec}}"'
- 'result.repo == test_ppa_spec'
- '"sources_added" in result'
- 'result.sources_added | length == 0'
- '"sources_removed" in result'
@ -207,7 +207,7 @@
that:
- 'result.changed'
- 'result.state == "present"'
- 'result.repo == "{{test_ppa_spec}}"'
- 'result.repo == test_ppa_spec'
- name: 'examine source file'
stat: path='/etc/apt/sources.list.d/{{test_ppa_filename}}.list'

@ -0,0 +1,4 @@
+ ansible-playbook -i localhost, -c local nested_tmpl.yml
++ set +x
[WARNING]: conditional statements should not include jinja2 templating
delimiters such as {{ }} or {% %}. Found: "{{ foo }}" == "bar"

@ -0,0 +1,12 @@
PLAY [localhost] ***************************************************************
TASK [assert] ******************************************************************
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}
PLAY RECAP *********************************************************************
localhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0

@ -0,0 +1,9 @@
- hosts: localhost
gather_facts: False
tasks:
- assert:
that:
- '"{{ foo }}" == "bar"'
- foo == "bar"
vars:
foo: bar

@ -5,12 +5,12 @@
item_A: yes
tasks:
- assert:
that: "{{ item }} is defined"
that: "item is defined"
quiet: True
with_items:
- item_A
- assert:
that: "{{ item }} is defined"
that: "item is defined"
quiet: False
with_items:
- item_A

@ -45,7 +45,7 @@ cleanup() {
fi
}
BASEFILE=assert_quiet.out
BASEFILE=assert.out
ORIGFILE="${BASEFILE}"
OUTFILE="${BASEFILE}.new"
@ -69,3 +69,4 @@ export ANSIBLE_NOCOLOR=1
export ANSIBLE_RETRY_FILES_ENABLED=0
run_test quiet
run_test nested_tmpl

@ -320,7 +320,7 @@
assert:
that:
- shell_result0 is changed
- shell_result0.cmd == '{{ remote_tmp_dir_test }}/test.sh'
- shell_result0.cmd == remote_tmp_dir_test ~ '/test.sh'
- shell_result0.rc == 0
- shell_result0.stderr == ''
- shell_result0.stdout == 'win'

@ -1250,7 +1250,7 @@
assert:
that:
- "copy_result6.changed"
- "copy_result6.dest == '{{remote_dir_expanded}}/multiline.txt'"
- "copy_result6.dest == remote_dir_expanded ~ '/multiline.txt'"
- "copy_result6.checksum == '9cd0697c6a9ff6689f0afb9136fa62e0b3fee903'"
# test overwriting a file as an unprivileged user (pull request #8624)
@ -2153,26 +2153,26 @@
assert:
that:
- testcase5 is changed
- "stat_new_dir_with_chown.stat.uid == {{ ansible_copy_test_user.uid }}"
- "stat_new_dir_with_chown.stat.gid == {{ ansible_copy_test_group.gid }}"
- "stat_new_dir_with_chown.stat.pw_name == '{{ ansible_copy_test_user_name }}'"
- "stat_new_dir_with_chown.stat.gr_name == '{{ ansible_copy_test_user_name }}'"
- "stat_new_dir_with_chown_file1.stat.uid == {{ ansible_copy_test_user.uid }}"
- "stat_new_dir_with_chown_file1.stat.gid == {{ ansible_copy_test_group.gid }}"
- "stat_new_dir_with_chown_file1.stat.pw_name == '{{ ansible_copy_test_user_name }}'"
- "stat_new_dir_with_chown_file1.stat.gr_name == '{{ ansible_copy_test_user_name }}'"
- "stat_new_dir_with_chown_subdir.stat.uid == {{ ansible_copy_test_user.uid }}"
- "stat_new_dir_with_chown_subdir.stat.gid == {{ ansible_copy_test_group.gid }}"
- "stat_new_dir_with_chown_subdir.stat.pw_name == '{{ ansible_copy_test_user_name }}'"
- "stat_new_dir_with_chown_subdir.stat.gr_name == '{{ ansible_copy_test_user_name }}'"
- "stat_new_dir_with_chown_subdir_file12.stat.uid == {{ ansible_copy_test_user.uid }}"
- "stat_new_dir_with_chown_subdir_file12.stat.gid == {{ ansible_copy_test_group.gid }}"
- "stat_new_dir_with_chown_subdir_file12.stat.pw_name == '{{ ansible_copy_test_user_name }}'"
- "stat_new_dir_with_chown_subdir_file12.stat.gr_name == '{{ ansible_copy_test_user_name }}'"
- "stat_new_dir_with_chown_link_file12.stat.uid == {{ ansible_copy_test_user.uid }}"
- "stat_new_dir_with_chown_link_file12.stat.gid == {{ ansible_copy_test_group.gid }}"
- "stat_new_dir_with_chown_link_file12.stat.pw_name == '{{ ansible_copy_test_user_name }}'"
- "stat_new_dir_with_chown_link_file12.stat.gr_name == '{{ ansible_copy_test_user_name }}'"
- "stat_new_dir_with_chown.stat.uid == ansible_copy_test_user.uid"
- "stat_new_dir_with_chown.stat.gid == ansible_copy_test_group.gid"
- "stat_new_dir_with_chown.stat.pw_name == ansible_copy_test_user_name"
- "stat_new_dir_with_chown.stat.gr_name == ansible_copy_test_user_name"
- "stat_new_dir_with_chown_file1.stat.uid == ansible_copy_test_user.uid"
- "stat_new_dir_with_chown_file1.stat.gid == ansible_copy_test_group.gid"
- "stat_new_dir_with_chown_file1.stat.pw_name == ansible_copy_test_user_name"
- "stat_new_dir_with_chown_file1.stat.gr_name == ansible_copy_test_user_name"
- "stat_new_dir_with_chown_subdir.stat.uid == ansible_copy_test_user.uid"
- "stat_new_dir_with_chown_subdir.stat.gid == ansible_copy_test_group.gid"
- "stat_new_dir_with_chown_subdir.stat.pw_name == ansible_copy_test_user_name"
- "stat_new_dir_with_chown_subdir.stat.gr_name == ansible_copy_test_user_name"
- "stat_new_dir_with_chown_subdir_file12.stat.uid == ansible_copy_test_user.uid"
- "stat_new_dir_with_chown_subdir_file12.stat.gid == ansible_copy_test_group.gid"
- "stat_new_dir_with_chown_subdir_file12.stat.pw_name == ansible_copy_test_user_name"
- "stat_new_dir_with_chown_subdir_file12.stat.gr_name == ansible_copy_test_user_name"
- "stat_new_dir_with_chown_link_file12.stat.uid == ansible_copy_test_user.uid"
- "stat_new_dir_with_chown_link_file12.stat.gid == ansible_copy_test_group.gid"
- "stat_new_dir_with_chown_link_file12.stat.pw_name == ansible_copy_test_user_name"
- "stat_new_dir_with_chown_link_file12.stat.gr_name == ansible_copy_test_user_name"
always:
- name: execute - remove the user for test

@ -18,3 +18,5 @@ done
# ensure debug does not set top level vars when looking at ansible_facts
ansible-playbook nosetfacts.yml "$@"
ansible-playbook unsafe.yml "$@"

@ -0,0 +1,13 @@
- hosts: localhost
gather_facts: false
vars:
unsafe_var: !unsafe undef()|mandatory
tasks:
- debug:
var: '{{ unsafe_var }}'
ignore_errors: true
register: result
- assert:
that:
- result is successful

@ -117,7 +117,7 @@
- name: assert chdir works
assert:
that:
- "'{{chdir_result.stdout | trim}}' == '{{remote_tmp_dir_real_path.stdout | trim}}'"
- "chdir_result.stdout | trim == remote_tmp_dir_real_path.stdout | trim"
- name: test timeout option
expect:

@ -927,7 +927,7 @@
that:
- "file_error3 is failed"
- "file_error3.msg == 'src does not exist'"
- "file_error3.dest == '{{ remote_tmp_dir_test }}/hard.txt' | expanduser"
- "file_error3.dest == remote_tmp_dir_test | expanduser ~ '/hard.txt'"
- "file_error3.src == 'non-existing-file-that-does-not-exist.txt'"
- block:

@ -199,7 +199,7 @@
- "missing_dst_no_follow_enable_force_use_mode2 is changed"
- "missing_dst_no_follow_enable_force_use_mode3 is not changed"
- "soft3_result['stat'].islnk"
- "soft3_result['stat'].lnk_target == '{{ user.home }}/nonexistent'"
- "soft3_result['stat'].lnk_target == user.home ~ '/nonexistent'"
#
# Test creating a link to a directory https://github.com/ansible/ansible/issues/1369

@ -267,7 +267,7 @@
- name: assert we skipped the ogg file
assert:
that:
- '"{{ remote_tmp_dir_test }}/e/f/g/h/8.ogg" not in find_test3_list'
- 'remote_tmp_dir_test ~ "/e/f/g/h/8.ogg" not in find_test3_list'
- name: patterns with regex
find:
@ -317,7 +317,7 @@
assert:
that:
- result.matched == 1
- '"{{ remote_tmp_dir_test }}/astest/old.txt" in astest_list'
- 'remote_tmp_dir_test ~ "/astest/old.txt" in astest_list'
- name: find files newer than 1 week
find:
@ -332,7 +332,7 @@
assert:
that:
- result.matched == 1
- '"{{ remote_tmp_dir_test }}/astest/new.txt" in astest_list'
- 'remote_tmp_dir_test ~ "/astest/new.txt" in astest_list'
- name: add some content to the new file
shell: "echo hello world > {{ remote_tmp_dir_test }}/astest/new.txt"
@ -352,7 +352,7 @@
assert:
that:
- result.matched == 1
- '"{{ remote_tmp_dir_test }}/astest/new.txt" in astest_list'
- 'remote_tmp_dir_test ~ "/astest/new.txt" in astest_list'
- '"checksum" in result.files[0]'
- name: find ANY item with LESS than 5 bytes, also get checksums
@ -371,8 +371,8 @@
assert:
that:
- result.matched == 2
- '"{{ remote_tmp_dir_test }}/astest/old.txt" in astest_list'
- '"{{ remote_tmp_dir_test }}/astest/.hidden.txt" in astest_list'
- 'remote_tmp_dir_test ~ "/astest/old.txt" in astest_list'
- 'remote_tmp_dir_test ~ "/astest/.hidden.txt" in astest_list'
- '"checksum" in result.files[0]'
- name: Run mode tests

@ -61,7 +61,7 @@
- assert:
that:
- exact_mode_0644.files == exact_mode_0644_symbolic.files
- exact_mode_0644.files[0].path == '{{ remote_tmp_dir_test }}/mode_0644'
- exact_mode_0644.files[0].path == remote_tmp_dir_test ~ '/mode_0644'
- user_readable_octal.files == user_readable_symbolic.files
- user_readable_octal.files|map(attribute='path')|map('basename')|sort == ['mode_0400', 'mode_0444', 'mode_0644', 'mode_0666', 'mode_0700']
- other_readable_octal.files == other_readable_symbolic.files

@ -433,7 +433,7 @@
- name: Test reading facts from default fact_path
assert:
that:
- '"{{ ansible_local.testfact.fact_dir }}" == "default"'
- 'ansible_local.testfact.fact_dir == "default"'
- hosts: facthost9
tags: [ 'fact_local']
@ -444,7 +444,7 @@
- name: Test reading facts from custom fact_path
assert:
that:
- '"{{ ansible_local.testfact.fact_dir }}" == "custom"'
- 'ansible_local.testfact.fact_dir == "custom"'
- hosts: facthost20
tags: [ 'fact_facter_ohai' ]

@ -172,7 +172,7 @@
- name: DEPTH | check update arrived
assert:
that:
- "{{ a_file.content | b64decode | trim }} == 3"
- a_file.content | b64decode | trim == "3"
- git_fetch is changed
- name: DEPTH | clear checkout_dir

@ -58,7 +58,7 @@
- name: LOCALMODS | check update arrived
assert:
that:
- "{{ a_file.content | b64decode | trim }} == 2"
- a_file.content | b64decode | trim == "2"
- git_fetch_force is changed
- name: LOCALMODS | clear checkout_dir
@ -127,7 +127,7 @@
- name: LOCALMODS | check update arrived
assert:
that:
- "{{ a_file.content | b64decode | trim }} == 2"
- a_file.content | b64decode | trim == "2"
- git_fetch_force is changed
- name: LOCALMODS | clear checkout_dir

@ -32,7 +32,7 @@
- name: SUBMODULES | Ensure submodu1 is at the appropriate commit
assert:
that: '{{ submodule1.stdout_lines | length }} == 2'
that: 'submodule1.stdout_lines | length == 2'
- name: SUBMODULES | clear checkout_dir
file:
@ -53,7 +53,7 @@
- name: SUBMODULES | Ensure submodule1 is at the appropriate commit
assert:
that: '{{ submodule1.stdout_lines | length }} == 4'
that: 'submodule1.stdout_lines | length == 4'
- name: SUBMODULES | Copy the checkout so we can run several different tests on it
command: 'cp -pr {{ checkout_dir }} {{ checkout_dir }}.bak'
@ -84,8 +84,8 @@
- name: SUBMODULES | Ensure both submodules are at the appropriate commit
assert:
that:
- '{{ submodule1.stdout_lines|length }} == 4'
- '{{ submodule2.stdout_lines|length }} == 2'
- 'submodule1.stdout_lines|length == 4'
- 'submodule2.stdout_lines|length == 2'
- name: SUBMODULES | Remove checkout dir
@ -112,7 +112,7 @@
- name: SUBMODULES | Ensure submodule1 is at the appropriate commit
assert:
that: '{{ submodule1.stdout_lines | length }} == 5'
that: 'submodule1.stdout_lines | length == 5'
- name: SUBMODULES | Test that update with recursive found new submodules
@ -121,7 +121,7 @@
- name: SUBMODULES | Ensure submodule2 is at the appropriate commit
assert:
that: '{{ submodule2.stdout_lines | length }} == 4'
that: 'submodule2.stdout_lines | length == 4'
- name: SUBMODULES | clear checkout_dir
file:
@ -147,4 +147,4 @@
- name: SUBMODULES | Ensure submodule1 is at the appropriate commit
assert:
that: '{{ submodule1.stdout_lines | length }} == 4'
that: 'submodule1.stdout_lines | length == 4'

@ -22,7 +22,7 @@
- name: Check that multiple duplicate lines collapse into a single commands
assert:
that:
- "{{ result.commands|length }} == 1"
- "result.commands|length == 1"
- name: Check that set is correctly prepended
assert:
@ -58,6 +58,6 @@
- assert:
that:
- "{{ result.filtered|length }} == 2"
- "result.filtered|length == 2"
- debug: msg="END cli/config_check.yaml on connection={{ ansible_connection }}"

@ -16,17 +16,17 @@
- name: Assert that the before dicts were correctly generated
assert:
that:
- "{{ populate | symmetric_difference(result['before']) |length == 0 }}"
- "populate | symmetric_difference(result['before']) |length == 0"
- name: Assert that the correct set of commands were generated
assert:
that:
- "{{ deleted['commands'] | symmetric_difference(result['commands']) |length == 0 }}"
- "deleted['commands'] | symmetric_difference(result['commands']) |length == 0"
- name: Assert that the after dicts were correctly generated
assert:
that:
- "{{ deleted['after'] | symmetric_difference(result['after']) |length == 0 }}"
- "deleted['after'] | symmetric_difference(result['after']) |length == 0"
- name: Delete attributes of given interfaces (IDEMPOTENT)
vyos.vyos.vyos_lldp_interfaces: *deleted
@ -41,6 +41,6 @@
- name: Assert that the before dicts were correctly generated
assert:
that:
- "{{ deleted['after'] | symmetric_difference(result['before']) |length == 0 }}"
- "deleted['after'] | symmetric_difference(result['before']) |length == 0"
always:
- include_tasks: _remove_config.yaml

@ -28,17 +28,17 @@
- name: Assert that before dicts were correctly generated
assert:
that: "{{ merged['before'] | symmetric_difference(result['before']) |length == 0 }}"
that: "merged['before'] | symmetric_difference(result['before']) |length == 0"
- name: Assert that correct set of commands were generated
assert:
that:
- "{{ merged['commands'] | symmetric_difference(result['commands']) |length == 0 }}"
- "merged['commands'] | symmetric_difference(result['commands']) |length == 0"
- name: Assert that after dicts was correctly generated
assert:
that:
- "{{ merged['after'] | symmetric_difference(result['after']) |length == 0 }}"
- "merged['after'] | symmetric_difference(result['after']) |length == 0"
- name: Merge the provided configuration with the existing running configuration (IDEMPOTENT)
vyos.vyos.vyos_lldp_interfaces: *merged
@ -52,7 +52,7 @@
- name: Assert that before dicts were correctly generated
assert:
that:
- "{{ merged['after'] | symmetric_difference(result['before']) |length == 0 }}"
- "merged['after'] | symmetric_difference(result['before']) |length == 0"
always:
- include_tasks: _remove_config.yaml

@ -19,17 +19,17 @@
- name: Assert that before dicts were correctly generated
assert:
that:
- "{{ populate_intf | symmetric_difference(result['before']) |length == 0 }}"
- "populate_intf | symmetric_difference(result['before']) |length == 0"
- name: Assert that correct commands were generated
assert:
that:
- "{{ overridden['commands'] | symmetric_difference(result['commands']) |length == 0 }}"
- "overridden['commands'] | symmetric_difference(result['commands']) |length == 0"
- name: Assert that after dicts were correctly generated
assert:
that:
- "{{ overridden['after'] | symmetric_difference(result['after']) |length == 0 }}"
- "overridden['after'] | symmetric_difference(result['after']) |length == 0"
- name: Overrides all device configuration with provided configurations (IDEMPOTENT)
vyos.vyos.vyos_lldp_interfaces: *overridden
@ -43,7 +43,7 @@
- name: Assert that before dicts were correctly generated
assert:
that:
- "{{ overridden['after'] | symmetric_difference(result['before']) |length == 0 }}"
- "overridden['after'] | symmetric_difference(result['before']) |length == 0"
always:
- include_tasks: _remove_config.yaml

@ -33,17 +33,17 @@
- name: Assert that correct set of commands were generated
assert:
that:
- "{{ replaced['commands'] | symmetric_difference(result['commands']) |length == 0 }}"
- "replaced['commands'] | symmetric_difference(result['commands']) |length == 0"
- name: Assert that before dicts are correctly generated
assert:
that:
- "{{ populate | symmetric_difference(result['before']) |length == 0 }}"
- "populate | symmetric_difference(result['before']) |length == 0"
- name: Assert that after dict is correctly generated
assert:
that:
- "{{ replaced['after'] | symmetric_difference(result['after']) |length == 0 }}"
- "replaced['after'] | symmetric_difference(result['after']) |length == 0"
- name: Replace device configurations of listed LLDP interfaces with provided configurarions (IDEMPOTENT)
vyos.vyos.vyos_lldp_interfaces: *replaced
@ -57,7 +57,7 @@
- name: Assert that before dict is correctly generated
assert:
that:
- "{{ replaced['after'] | symmetric_difference(result['before']) |length == 0 }}"
- "replaced['after'] | symmetric_difference(result['before']) |length == 0"
always:
- include_tasks: _remove_config.yaml

@ -15,7 +15,7 @@
that:
- "testing == 789"
- "base_dir == 'environments/development'"
- "{{ included_one_file.ansible_included_var_files | length }} == 1"
- "included_one_file.ansible_included_var_files | length == 1"
- "'vars/environments/development/all.yml' in included_one_file.ansible_included_var_files[0]"
- name: include the vars/environments/development/all.yml and save results in all
@ -51,7 +51,7 @@
assert:
that:
- webapp_version is defined
- "'file_without_extension' in '{{ include_without_file_extension.ansible_included_var_files | join(' ') }}'"
- "'file_without_extension' in include_without_file_extension.ansible_included_var_files | join(' ')"
- name: include every directory in vars
include_vars:
@ -67,7 +67,7 @@
- "testing == 456"
- "base_dir == 'services'"
- "webapp_containers == 10"
- "{{ include_every_dir.ansible_included_var_files | length }} == 7"
- "include_every_dir.ansible_included_var_files | length == 7"
- "'vars/all/all.yml' in include_every_dir.ansible_included_var_files[0]"
- "'vars/environments/development/all.yml' in include_every_dir.ansible_included_var_files[1]"
- "'vars/environments/development/services/webapp.yml' in include_every_dir.ansible_included_var_files[2]"
@ -88,9 +88,9 @@
that:
- "testing == 789"
- "base_dir == 'environments/development'"
- "{{ include_without_webapp.ansible_included_var_files | length }} == 4"
- "'webapp.yml' not in '{{ include_without_webapp.ansible_included_var_files | join(' ') }}'"
- "'file_without_extension' not in '{{ include_without_webapp.ansible_included_var_files | join(' ') }}'"
- "include_without_webapp.ansible_included_var_files | length == 4"
- "'webapp.yml' not in include_without_webapp.ansible_included_var_files | join(' ')"
- "'file_without_extension' not in include_without_webapp.ansible_included_var_files | join(' ')"
- name: include only files matching webapp.yml
include_vars:
@ -104,9 +104,9 @@
- "testing == 101112"
- "base_dir == 'development/services'"
- "webapp_containers == 20"
- "{{ include_match_webapp.ansible_included_var_files | length }} == 1"
- "include_match_webapp.ansible_included_var_files | length == 1"
- "'vars/environments/development/services/webapp.yml' in include_match_webapp.ansible_included_var_files[0]"
- "'all.yml' not in '{{ include_match_webapp.ansible_included_var_files | join(' ') }}'"
- "'all.yml' not in include_match_webapp.ansible_included_var_files | join(' ')"
- name: include only files matching webapp.yml and store results in webapp
include_vars:
@ -173,10 +173,10 @@
- name: Verify the hash variable
assert:
that:
- "{{ config | length }} == 3"
- "config | length == 3"
- "config.key0 == 0"
- "config.key1 == 0"
- "{{ config.key2 | length }} == 1"
- "config.key2 | length == 1"
- "config.key2.a == 21"
- name: Include the second file to merge the hash variable
@ -187,10 +187,10 @@
- name: Verify that the hash is merged
assert:
that:
- "{{ config | length }} == 4"
- "config | length == 4"
- "config.key0 == 0"
- "config.key1 == 1"
- "{{ config.key2 | length }} == 2"
- "config.key2 | length == 2"
- "config.key2.a == 21"
- "config.key2.b == 22"
- "config.key3 == 3"
@ -202,9 +202,9 @@
- name: Verify that the properties from the first file is cleared
assert:
that:
- "{{ config | length }} == 3"
- "config | length == 3"
- "config.key1 == 1"
- "{{ config.key2 | length }} == 1"
- "config.key2 | length == 1"
- "config.key2.b == 22"
- "config.key3 == 3"
@ -216,10 +216,10 @@
- name: Verify that the hash is merged after vars files are accumulated
assert:
that:
- "{{ config | length }} == 3"
- "config | length == 3"
- "config.key0 is undefined"
- "config.key1 == 1"
- "{{ config.key2 | length }} == 1"
- "config.key2 | length == 1"
- "config.key2.b == 22"
- "config.key3 == 3"

@ -109,8 +109,8 @@
- name: Load variables specific for OS family
assert:
that:
- "{{item|quote}} is file"
- "{{item|basename == 'itworks.yml'}}"
- "item is file"
- "item|basename == 'itworks.yml'"
with_first_found:
- files:
- "{{ansible_id}}-{{ansible_lsb.major_release}}.yml" # invalid var, should be skipped
@ -124,8 +124,8 @@
- name: Load variables specific for OS family, but now as list of dicts, same options as above
assert:
that:
- "{{item|quote}} is file"
- "{{item|basename == 'itworks.yml'}}"
- "item is file"
- "item|basename == 'itworks.yml'"
with_first_found:
- files:
- "{{ansible_id}}-{{ansible_lsb.major_release}}.yml"

@ -10,7 +10,7 @@
field_with_space: "{{lookup('ini', 'field.with.space type=properties file=lookup.properties')}}"
- assert:
that: "{{item}} is defined"
that: "item is defined"
with_items: [ 'test1', 'test2', 'test_dot', 'field_with_space' ]
- name: "read ini value"

@ -133,7 +133,7 @@
- assert:
that:
- "'{{ item.0.name }}' != 'carol'"
- "item.0.name != 'carol'"
with_subelements:
- "{{ users }}"
- mysql.privs
@ -220,5 +220,5 @@
- assert:
that:
- "'{{ user_alice }}' == 'localhost'"
- "'{{ user_bob }}' == 'db1'"
- "user_alice == 'localhost'"
- "user_bob == 'db1'"

@ -3,7 +3,7 @@
that:
- ansible_loop.index == ansible_loop.index0 + 1
- ansible_loop.revindex == ansible_loop.revindex0 + 1
- ansible_loop.first == {{ ansible_loop.index == 1 }}
- ansible_loop.last == {{ ansible_loop.index == ansible_loop.length }}
- ansible_loop.first == (ansible_loop.index == 1)
- ansible_loop.last == (ansible_loop.index == ansible_loop.length)
- ansible_loop.length == 3
- ansible_loop.allitems|join(',') == 'first,second,third'

@ -14,4 +14,4 @@
- assert:
that:
- '"location" in result'
- 'result["location"] == "{{ expected_location}}"'
- 'result["location"] == expected_location'

@ -13,4 +13,4 @@
- assert:
that:
- '"location" in result'
- 'result["location"] == "{{ expected_location}}"'
- 'result["location"] == expected_location'

@ -7,4 +7,4 @@
assert:
that:
- '"location" in result'
- 'result["location"] == "{{ expected_location }}"'
- 'result["location"] == expected_location'

@ -7,4 +7,4 @@
assert:
that:
- '"location" in result'
- 'result["location"] == "{{ expected_location }}"'
- 'result["location"] == expected_location'

@ -209,7 +209,7 @@
assert:
that:
- _check_mode_test2 is skipped
- '_check_mode_test2.msg == "{{ remote_tmp_dir_test | expanduser }}/afile2.txt exists, matching creates option"'
- '_check_mode_test2.msg == remote_tmp_dir_test | expanduser ~ "/afile2.txt exists, matching creates option"'
- name: Remove afile2.txt
file:
@ -231,7 +231,7 @@
assert:
that:
- _check_mode_test3 is skipped
- '_check_mode_test3.msg == "{{ remote_tmp_dir_test | expanduser }}/afile2.txt does not exist, matching removes option"'
- '_check_mode_test3.msg == remote_tmp_dir_test | expanduser ~ "/afile2.txt does not exist, matching removes option"'
# executable

@ -33,7 +33,7 @@
- 'slurp_existing.encoding == "base64"'
- 'slurp_existing is not changed'
- 'slurp_existing is not failed'
- '"{{ slurp_existing.content | b64decode }}" == "We are at the café"'
- 'slurp_existing.content | b64decode == "We are at the café"'
- name: Create a binary file to test with
copy:

@ -357,7 +357,7 @@
- assert:
that:
- "\"foo t'e~m\\plated\" in unusual_results.stdout_lines"
- "{{unusual_results.stdout_lines| length}} == 1"
- "unusual_results.stdout_lines| length == 1"
- name: check that the unusual filename can be checked for changes
template:

@ -66,7 +66,7 @@
- zip_success.changed
# Verify that file list is generated
- "'files' in zip_success"
- "{{zip_success['files']| length}} == 3"
- "zip_success['files']| length == 3"
- "'foo-unarchive.txt' in zip_success['files']"
- "'foo-unarchive-777.txt' in zip_success['files']"
- "'FOO-UNAR.TXT' in zip_success['files']"

@ -47,7 +47,7 @@
- "unarchive06_stat.stat.mode == '0600'"
# Verify that file list is generated
- "'files' in unarchive06"
- "{{unarchive06['files']| length}} == 1"
- "unarchive06['files']| length == 1"
- "'foo-unarchive.txt' in unarchive06['files']"
- name: remove our tar.gz unarchive destination
@ -97,7 +97,7 @@
- "unarchive07.changed == false"
# Verify that file list is generated
- "'files' in unarchive07"
- "{{unarchive07['files']| length}} == 1"
- "unarchive07['files']| length == 1"
- "'foo-unarchive.txt' in unarchive07['files']"
- name: remove our tar.gz unarchive destination
@ -131,7 +131,7 @@
- "unarchive08_stat.stat.mode == '0601'"
# Verify that file list is generated
- "'files' in unarchive08"
- "{{unarchive08['files']| length}} == 3"
- "unarchive08['files']| length == 3"
- "'foo-unarchive.txt' in unarchive08['files']"
- "'foo-unarchive-777.txt' in unarchive08['files']"
- "'FOO-UNAR.TXT' in unarchive08['files']"
@ -163,7 +163,7 @@
- "unarchive08_stat.stat.mode == '0601'"
# Verify that file list is generated
- "'files' in unarchive08"
- "{{unarchive08['files']| length}} == 3"
- "unarchive08['files']| length == 3"
- "'foo-unarchive.txt' in unarchive08['files']"
- "'foo-unarchive-777.txt' in unarchive08['files']"
- "'FOO-UNAR.TXT' in unarchive08['files']"

@ -40,7 +40,7 @@
- unarchive10 is changed
# Verify that file list is generated
- "'files' in unarchive10"
- "{{unarchive10['files']| length}} == 1"
- "unarchive10['files']| length == 1"
- "'foo-unarchive.txt' in unarchive10['files']"
- archive_path.stat.exists

@ -17,7 +17,7 @@
- "unarchive03.changed == true"
# Verify that file list is generated
- "'files' in unarchive03"
- "{{unarchive03['files']| length}} == 3"
- "unarchive03['files']| length == 3"
- "'foo-unarchive.txt' in unarchive03['files']"
- "'foo-unarchive-777.txt' in unarchive03['files']"
- "'FOO-UNAR.TXT' in unarchive03['files']"

@ -40,7 +40,7 @@
assert:
that:
- waitfor is successful
- waitfor.path == "{{ remote_tmp_dir | expanduser }}/wait_for_file"
- waitfor.path == remote_tmp_dir | expanduser ~ "/wait_for_file"
- waitfor.elapsed >= 2
- waitfor.elapsed <= 15
@ -58,7 +58,7 @@
assert:
that:
- waitfor is successful
- waitfor.path == "{{ remote_tmp_dir | expanduser }}/wait_for_file"
- waitfor.path == remote_tmp_dir | expanduser ~ "/wait_for_file"
- waitfor.elapsed >= 2
- waitfor.elapsed <= 15
@ -165,7 +165,7 @@
that:
- waitfor is successful
- waitfor is not changed
- "waitfor.port == {{ http_port }}"
- "waitfor.port == http_port"
- name: install psutil using pip (non-Linux only)
pip:
@ -193,7 +193,7 @@
that:
- waitfor is successful
- waitfor is not changed
- "waitfor.port == {{ http_port }}"
- "waitfor.port == http_port"
- name: test wait_for with delay
wait_for:

@ -27,7 +27,6 @@ from ansible.parsing import vault
from ansible.parsing.yaml import dumper, objects
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.template import AnsibleUndefined
from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes
from units.mock.yaml_helper import YamlTestUtils
from units.mock.vault_helper import TextVaultSecret
@ -67,8 +66,7 @@ class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
def test_bytes(self):
b_text = u'tréma'.encode('utf-8')
unsafe_object = AnsibleUnsafeBytes(b_text)
yaml_out = self._dump_string(unsafe_object, dumper=self.dumper)
yaml_out = self._dump_string(b_text, dumper=self.dumper)
stream = self._build_stream(yaml_out)
loader = self._loader(stream)
@ -81,8 +79,7 @@ class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
def test_unicode(self):
u_text = u'nöel'
unsafe_object = AnsibleUnsafeText(u_text)
yaml_out = self._dump_string(unsafe_object, dumper=self.dumper)
yaml_out = self._dump_string(u_text, dumper=self.dumper)
stream = self._build_stream(yaml_out)
loader = self._loader(stream)
