diff --git a/.azure-pipelines/commands/generic.sh b/.azure-pipelines/commands/generic.sh index 28eb12688ed..74ef8bbb8c4 100755 --- a/.azure-pipelines/commands/generic.sh +++ b/.azure-pipelines/commands/generic.sh @@ -13,6 +13,9 @@ else target="shippable/generic/" fi +stage="${S:-prod}" + # shellcheck disable=SC2086 ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \ + --remote-terminate always --remote-stage "${stage}" \ --docker default --python "${python}" diff --git a/changelogs/fragments/81796-ansible-doc-roles.yml b/changelogs/fragments/81796-ansible-doc-roles.yml new file mode 100644 index 00000000000..3856d406427 --- /dev/null +++ b/changelogs/fragments/81796-ansible-doc-roles.yml @@ -0,0 +1,2 @@ +minor_changes: + - "ansible-doc - show ``notes``, ``seealso``, and top-level ``version_added`` for role entrypoints (https://github.com/ansible/ansible/pull/81796)." diff --git a/changelogs/fragments/85497-default-callback-indent.yml b/changelogs/fragments/85497-default-callback-indent.yml new file mode 100644 index 00000000000..624c851f5a8 --- /dev/null +++ b/changelogs/fragments/85497-default-callback-indent.yml @@ -0,0 +1,2 @@ +minor_changes: + - "default callback plugin - add option to configure indentation for JSON and YAML output (https://github.com/ansible/ansible/pull/85497)." diff --git a/changelogs/fragments/85524-resolve-task-resolved_action-early.yml b/changelogs/fragments/85524-resolve-task-resolved_action-early.yml new file mode 100644 index 00000000000..5516837d87b --- /dev/null +++ b/changelogs/fragments/85524-resolve-task-resolved_action-early.yml @@ -0,0 +1,2 @@ +bugfixes: + - callback plugins - improve consistency accessing the Task object's resolved_action attribute. 
diff --git a/changelogs/fragments/85632-setup-logical-volume-name-uniqueness.yml b/changelogs/fragments/85632-setup-logical-volume-name-uniqueness.yml new file mode 100644 index 00000000000..275a9db6292 --- /dev/null +++ b/changelogs/fragments/85632-setup-logical-volume-name-uniqueness.yml @@ -0,0 +1,6 @@ +minor_changes: +- >- + setup - added new subkey ``lvs`` within each entry of ``ansible_facts['vgs']`` + to provide complete logical volume data scoped by volume group. + The top level ``lvs`` fact by comparison, deduplicates logical volume names + across volume groups and may be incomplete. (https://github.com/ansible/ansible/issues/85632) diff --git a/changelogs/fragments/85638-ansible-test-validate-modules-doc-fragments.yml b/changelogs/fragments/85638-ansible-test-validate-modules-doc-fragments.yml new file mode 100644 index 00000000000..7f265d13e07 --- /dev/null +++ b/changelogs/fragments/85638-ansible-test-validate-modules-doc-fragments.yml @@ -0,0 +1,2 @@ +bugfixes: + - "validate-modules sanity test - fix handling of missing doc fragments (https://github.com/ansible/ansible/pull/85638)." 
diff --git a/changelogs/fragments/85682-rescue-flush_handlers.yml b/changelogs/fragments/85682-rescue-flush_handlers.yml new file mode 100644 index 00000000000..115dd4b5faf --- /dev/null +++ b/changelogs/fragments/85682-rescue-flush_handlers.yml @@ -0,0 +1,2 @@ +bugfixes: + - The ``ansible_failed_task`` variable is now correctly exposed in a rescue section, even when a failing handler is triggered by the ``flush_handlers`` task in the corresponding ``block`` (https://github.com/ansible/ansible/issues/85682) diff --git a/changelogs/fragments/85743-lazy-ternary.yml b/changelogs/fragments/85743-lazy-ternary.yml new file mode 100644 index 00000000000..e6e4872e668 --- /dev/null +++ b/changelogs/fragments/85743-lazy-ternary.yml @@ -0,0 +1,2 @@ +bugfixes: + - "``ternary`` filter - evaluate values lazily (https://github.com/ansible/ansible/issues/85743)" diff --git a/changelogs/fragments/ansible-test-auth-update.yml b/changelogs/fragments/ansible-test-auth-update.yml new file mode 100644 index 00000000000..bbb611f3153 --- /dev/null +++ b/changelogs/fragments/ansible-test-auth-update.yml @@ -0,0 +1,2 @@ +minor_changes: + - ansible-test - Implement new authentication methods for accessing the Ansible Core CI service. diff --git a/changelogs/fragments/ansible-test-coverage-upgrade.yml b/changelogs/fragments/ansible-test-coverage-upgrade.yml index 6f4b2062dd6..ea9163e6ee2 100644 --- a/changelogs/fragments/ansible-test-coverage-upgrade.yml +++ b/changelogs/fragments/ansible-test-coverage-upgrade.yml @@ -1,2 +1,2 @@ minor_changes: - - ansible-test - Upgrade to ``coverage`` version 7.10.0 for Python 3.9 and later. + - ansible-test - Upgrade to ``coverage`` version 7.10.5 for Python 3.9 and later. 
diff --git a/changelogs/fragments/ansible-test-missing-dir-fix.yml b/changelogs/fragments/ansible-test-missing-dir-fix.yml new file mode 100644 index 00000000000..cc7e9b21f5f --- /dev/null +++ b/changelogs/fragments/ansible-test-missing-dir-fix.yml @@ -0,0 +1,2 @@ +bugfixes: + - ansible-test - Fix a traceback that can occur when using delegation before the ansible-test temp directory is created. diff --git a/changelogs/fragments/apt_repo_debug.yml b/changelogs/fragments/apt_repo_debug.yml new file mode 100644 index 00000000000..1513ec2d944 --- /dev/null +++ b/changelogs/fragments/apt_repo_debug.yml @@ -0,0 +1,3 @@ +--- +minor_changes: + - apt_repository - use correct debug method to print debug message. diff --git a/changelogs/fragments/blockinfile-new-module-option-encoding.yml b/changelogs/fragments/blockinfile-new-module-option-encoding.yml new file mode 100644 index 00000000000..d81fa104497 --- /dev/null +++ b/changelogs/fragments/blockinfile-new-module-option-encoding.yml @@ -0,0 +1,2 @@ +minor_changes: + - blockinfile - add new module option ``encoding`` to support files in encodings other than UTF-8 (https://github.com/ansible/ansible/pull/85291). \ No newline at end of file diff --git a/changelogs/fragments/concat_coerce_none_to_empty.yml b/changelogs/fragments/concat_coerce_none_to_empty.yml new file mode 100644 index 00000000000..9fea388973a --- /dev/null +++ b/changelogs/fragments/concat_coerce_none_to_empty.yml @@ -0,0 +1,3 @@ +bugfixes: + - templating - Multi-node template results coerce embedded ``None`` nodes to empty string (instead of rendering literal ``None`` to the output). + - argspec validation - The ``str`` argspec type treats ``None`` values as empty string for better consistency with pre-2.19 templating conversions. 
diff --git a/changelogs/fragments/deprecate_inject.yml b/changelogs/fragments/deprecate_inject.yml new file mode 100644 index 00000000000..2036ef35b5e --- /dev/null +++ b/changelogs/fragments/deprecate_inject.yml @@ -0,0 +1,3 @@ +deprecated_features: +- INJECT_FACTS_AS_VARS configuration currently defaults to ``True``, this is now deprecated and it will switch to ``False`` by Ansible 2.24. + You will only get notified if you are accessing 'injected' facts (for example, ansible_os_distribution vs ansible_facts['os_distribution']). diff --git a/changelogs/fragments/dnf_installroot_dir.yml b/changelogs/fragments/dnf_installroot_dir.yml new file mode 100644 index 00000000000..dcb9bb2acdf --- /dev/null +++ b/changelogs/fragments/dnf_installroot_dir.yml @@ -0,0 +1,3 @@ +--- +bugfixes: + - dnf - Check if installroot is directory or not (https://github.com/ansible/ansible/issues/85680). diff --git a/changelogs/fragments/elide_broken_conditional_result.yml b/changelogs/fragments/elide_broken_conditional_result.yml new file mode 100644 index 00000000000..195db72155c --- /dev/null +++ b/changelogs/fragments/elide_broken_conditional_result.yml @@ -0,0 +1,3 @@ +bugfixes: + - conditionals - When displaying a broken conditional error or deprecation warning, + the origin of the non-boolean result is included (if available), and the raw result is omitted. diff --git a/changelogs/fragments/encrypt.yml b/changelogs/fragments/encrypt.yml new file mode 100644 index 00000000000..9507b166ed4 --- /dev/null +++ b/changelogs/fragments/encrypt.yml @@ -0,0 +1,3 @@ +--- +removed_features: + - encrypt - remove deprecated passlib_or_crypt API. 
diff --git a/changelogs/fragments/getattr_marker_access.yml b/changelogs/fragments/getattr_marker_access.yml new file mode 100644 index 00000000000..d23ec5f9341 --- /dev/null +++ b/changelogs/fragments/getattr_marker_access.yml @@ -0,0 +1,4 @@ +bugfixes: + - templating - Undefined marker values sourced from the Jinja ``getattr->getitem`` fallback are now accessed correctly, + raising AnsibleUndefinedVariable for user plugins that do not understand markers. + Previously, these values were erroneously returned to user plugin code that had not opted in to marker acceptance. diff --git a/changelogs/fragments/include_vars.yml b/changelogs/fragments/include_vars.yml new file mode 100644 index 00000000000..dd5d4764648 --- /dev/null +++ b/changelogs/fragments/include_vars.yml @@ -0,0 +1,6 @@ +--- +deprecated_features: + - include_vars - Specifying 'ignore_files' as a string is deprecated. +minor_changes: + - include_vars - Raise an error if 'ignore_files' is not specified as a list. + - include_vars - Raise an error if 'extensions' is not specified as a list. diff --git a/changelogs/fragments/known_issues_jinja_error.yml b/changelogs/fragments/known_issues_jinja_error.yml new file mode 100644 index 00000000000..8208996a90a --- /dev/null +++ b/changelogs/fragments/known_issues_jinja_error.yml @@ -0,0 +1,4 @@ +known_issues: + - templating - Exceptions raised in a Jinja ``set`` or ``with`` block which are not accessed by the template are ignored in the same manner as undefined values. + - templating - Passing a container created in a Jinja ``set`` or ``with`` block to a method results in a copy of that container. + Mutations to that container which are not returned by the method will be discarded. 
diff --git a/changelogs/fragments/lineinfile-new-module-option-encoding.yml b/changelogs/fragments/lineinfile-new-module-option-encoding.yml new file mode 100644 index 00000000000..6c198dd8328 --- /dev/null +++ b/changelogs/fragments/lineinfile-new-module-option-encoding.yml @@ -0,0 +1,2 @@ +minor_changes: + - lineinfile - add new module option ``encoding`` to support files in encodings other than UTF-8 (https://github.com/ansible/ansible/pull/84999). diff --git a/changelogs/fragments/powershell-quoting.yml b/changelogs/fragments/powershell-quoting.yml new file mode 100644 index 00000000000..b539f8ab7e2 --- /dev/null +++ b/changelogs/fragments/powershell-quoting.yml @@ -0,0 +1,4 @@ +breaking_changes: + - >- + powershell - Removed code that tried to remove quotes from paths when performing Windows operations like copying + and fetching files. This should not affect normal playbooks unless a value is quoted too many times. diff --git a/changelogs/fragments/regex_test.yml b/changelogs/fragments/regex_test.yml new file mode 100644 index 00000000000..64ec5e358aa --- /dev/null +++ b/changelogs/fragments/regex_test.yml @@ -0,0 +1,4 @@ +--- +minor_changes: + - regex - Document the match_type fullmatch. + - regex - Ensure that match_type is one of match, fullmatch, or search (https://github.com/ansible/ansible/pull/85629). 
diff --git a/changelogs/fragments/remove-role-entrypoint-attrs.yml b/changelogs/fragments/remove-role-entrypoint-attrs.yml new file mode 100644 index 00000000000..97168e884b8 --- /dev/null +++ b/changelogs/fragments/remove-role-entrypoint-attrs.yml @@ -0,0 +1,2 @@ +removed_features: + - "ansible-doc - role entrypoint attributes are no longer shown" diff --git a/changelogs/fragments/remove_hash_params.yml b/changelogs/fragments/remove_hash_params.yml new file mode 100644 index 00000000000..bc9e28c6f9c --- /dev/null +++ b/changelogs/fragments/remove_hash_params.yml @@ -0,0 +1,2 @@ +deprecated_features: + - hash_params function in roles/__init__ is being deprecated as it is not in use. diff --git a/changelogs/fragments/shell-wrap_for_exec_deprecation.yml b/changelogs/fragments/shell-wrap_for_exec_deprecation.yml new file mode 100644 index 00000000000..89453c44608 --- /dev/null +++ b/changelogs/fragments/shell-wrap_for_exec_deprecation.yml @@ -0,0 +1,5 @@ +deprecated_features: + - >- + Deprecated the shell plugin's ``wrap_for_exec`` function. This API is not used in Ansible or any known collection + and is being removed to simplify the plugin API. Plugin authors should wrap their command to execute within an + explicit shell or other known executable. diff --git a/changelogs/fragments/smart-transport-removal.yml b/changelogs/fragments/smart-transport-removal.yml new file mode 100644 index 00000000000..15f16fc2469 --- /dev/null +++ b/changelogs/fragments/smart-transport-removal.yml @@ -0,0 +1,4 @@ +removed_features: + - >- + Removed the option to set the ``DEFAULT_TRANSPORT`` configuration to ``smart`` that selects the default transport + as either ``ssh`` or ``paramiko`` based on the underlying platform configuration. 
diff --git a/changelogs/fragments/tag_u_it.yml b/changelogs/fragments/tag_u_it.yml new file mode 100644 index 00000000000..e32088e9e89 --- /dev/null +++ b/changelogs/fragments/tag_u_it.yml @@ -0,0 +1,2 @@ +minor_changes: + - tags now warn when using reserved keywords. diff --git a/changelogs/fragments/template-tuple-fix.yml b/changelogs/fragments/template-tuple-fix.yml new file mode 100644 index 00000000000..e82760976d8 --- /dev/null +++ b/changelogs/fragments/template-tuple-fix.yml @@ -0,0 +1,2 @@ +bugfixes: + - templating - Fix slicing of tuples in templating (https://github.com/ansible/ansible/issues/85606). diff --git a/changelogs/fragments/template_lookup_skip_finalize.yml b/changelogs/fragments/template_lookup_skip_finalize.yml new file mode 100644 index 00000000000..7cbc1dfd9cf --- /dev/null +++ b/changelogs/fragments/template_lookup_skip_finalize.yml @@ -0,0 +1,6 @@ +bugfixes: + - template lookup - Skip finalization on the internal templating operation to allow markers to be returned and handled by, e.g. the ``default`` filter. + Previously, finalization tripped markers, causing an exception to end processing of the current template pipeline. + (https://github.com/ansible/ansible/issues/85674) + - templating - Avoid tripping markers within Jinja generated code. + (https://github.com/ansible/ansible/issues/85674) diff --git a/changelogs/fragments/templating-filter-generators.yml b/changelogs/fragments/templating-filter-generators.yml new file mode 100644 index 00000000000..e0fcfe9116c --- /dev/null +++ b/changelogs/fragments/templating-filter-generators.yml @@ -0,0 +1,3 @@ +bugfixes: + - templating - Ensure filter plugin result processing occurs under the correct call context. 
+ (https://github.com/ansible/ansible/issues/85585) diff --git a/changelogs/fragments/to-text-to-bytes.yml b/changelogs/fragments/to-text-to-bytes.yml new file mode 100644 index 00000000000..2345539bb94 --- /dev/null +++ b/changelogs/fragments/to-text-to-bytes.yml @@ -0,0 +1,2 @@ +minor_changes: + - Python type hints applied to ``to_text`` and ``to_bytes`` functions for better type hint interactions with code utilizing these functions. diff --git a/changelogs/fragments/vars-remove-get_hostgroup_vars.yml b/changelogs/fragments/vars-remove-get_hostgroup_vars.yml new file mode 100644 index 00000000000..e5218bb41eb --- /dev/null +++ b/changelogs/fragments/vars-remove-get_hostgroup_vars.yml @@ -0,0 +1,4 @@ +removed_features: + - >- + vars plugins - removed the deprecated ``get_host_vars`` or ``get_group_vars`` fallback for vars plugins that do + not inherit from ``BaseVarsPlugin`` and define a ``get_vars`` method. diff --git a/changelogs/fragments/vars_begone.yml b/changelogs/fragments/vars_begone.yml new file mode 100644 index 00000000000..0bf8b8f43da --- /dev/null +++ b/changelogs/fragments/vars_begone.yml @@ -0,0 +1,2 @@ +deprecated_features: + - vars, the internal variable cache will be removed in 2.24. This cache, once used internally exposes variables in inconsistent states, the 'vars' and 'varnames' lookups should be used instead. diff --git a/changelogs/fragments/vault-vaultid-removal.yml b/changelogs/fragments/vault-vaultid-removal.yml new file mode 100644 index 00000000000..d61fad7716d --- /dev/null +++ b/changelogs/fragments/vault-vaultid-removal.yml @@ -0,0 +1,2 @@ +removed_features: + - "``vault``/``unvault`` filters - remove the deprecated ``vaultid`` parameter." 
diff --git a/changelogs/fragments/warn_reserved_tags.yml b/changelogs/fragments/warn_reserved_tags.yml new file mode 100644 index 00000000000..b21014cd330 --- /dev/null +++ b/changelogs/fragments/warn_reserved_tags.yml @@ -0,0 +1,2 @@ +minor_changes: + - ansible now warns if you use reserved tags that were only meant for selection and not for use in play. diff --git a/lib/ansible/_internal/_templating/_engine.py b/lib/ansible/_internal/_templating/_engine.py index 4beb1806291..a56288dac29 100644 --- a/lib/ansible/_internal/_templating/_engine.py +++ b/lib/ansible/_internal/_templating/_engine.py @@ -6,7 +6,6 @@ from __future__ import annotations import copy import dataclasses import enum -import textwrap import typing as t import collections.abc as c import re @@ -44,7 +43,7 @@ from ._jinja_bits import ( _finalize_template_result, FinalizeMode, ) -from ._jinja_common import _TemplateConfig, MarkerError, ExceptionMarker +from ._jinja_common import _TemplateConfig, MarkerError, ExceptionMarker, JinjaCallContext from ._lazy_containers import _AnsibleLazyTemplateMixin from ._marker_behaviors import MarkerBehavior, FAIL_ON_UNDEFINED from ._transform import _type_transform_mapping @@ -260,6 +259,7 @@ class TemplateEngine: with ( TemplateContext(template_value=variable, templar=self, options=options, stop_on_template=stop_on_template) as ctx, DeprecatedAccessAuditContext.when(ctx.is_top_level), + JinjaCallContext(accept_lazy_markers=True), # let default Jinja marker behavior apply, since we're descending into a new template ): try: if not value_is_str: @@ -559,9 +559,11 @@ class TemplateEngine: bool_result = bool(result) + result_origin = Origin.get_tag(result) or Origin.UNKNOWN + msg = ( - f'Conditional result was {textwrap.shorten(str(result), width=40)!r} of type {native_type_name(result)!r}, ' - f'which evaluates to {bool_result}. Conditionals must have a boolean result.' 
+ f'Conditional result ({bool_result}) was derived from value of type {native_type_name(result)!r} at {str(result_origin)!r}. ' + 'Conditionals must have a boolean result.' ) if _TemplateConfig.allow_broken_conditionals: diff --git a/lib/ansible/_internal/_templating/_jinja_bits.py b/lib/ansible/_internal/_templating/_jinja_bits.py index 1190bbef60f..34e777bac2f 100644 --- a/lib/ansible/_internal/_templating/_jinja_bits.py +++ b/lib/ansible/_internal/_templating/_jinja_bits.py @@ -811,7 +811,7 @@ class AnsibleEnvironment(SandboxedEnvironment): try: value = obj[attribute] except (TypeError, LookupError): - return self.undefined(obj=obj, name=attribute) if is_safe else self.unsafe_undefined(obj, attribute) + value = self.undefined(obj=obj, name=attribute) if is_safe else self.unsafe_undefined(obj, attribute) AnsibleAccessContext.current().access(value) @@ -891,6 +891,8 @@ def _flatten_nodes(nodes: t.Iterable[t.Any]) -> t.Iterable[t.Any]: else: if type(node) is TemplateModule: # pylint: disable=unidiomatic-typecheck yield from _flatten_nodes(node._body_stream) + elif node is None: + continue # avoid yielding `None`-valued nodes to avoid literal "None" in stringified template results else: yield node diff --git a/lib/ansible/_internal/_templating/_jinja_plugins.py b/lib/ansible/_internal/_templating/_jinja_plugins.py index a79d9b18067..d960c8f9ed3 100644 --- a/lib/ansible/_internal/_templating/_jinja_plugins.py +++ b/lib/ansible/_internal/_templating/_jinja_plugins.py @@ -114,7 +114,13 @@ class JinjaPluginIntercept(c.MutableMapping): try: with JinjaCallContext(accept_lazy_markers=instance.accept_lazy_markers): - return instance.j2_function(*lazify_container_args(args), **lazify_container_kwargs(kwargs)) + result = instance.j2_function(*lazify_container_args(args), **lazify_container_kwargs(kwargs)) + + if instance.plugin_type == 'filter': + # ensure list conversion occurs under the call context + result = _wrap_plugin_output(result) + + return result except MarkerError 
as ex: return ex.source except Exception as ex: @@ -155,7 +161,6 @@ class JinjaPluginIntercept(c.MutableMapping): @functools.wraps(instance.j2_function) def wrapper(*args, **kwargs) -> t.Any: result = self._invoke_plugin(instance, *args, **kwargs) - result = _wrap_plugin_output(result) return result diff --git a/lib/ansible/_internal/_templating/_lazy_containers.py b/lib/ansible/_internal/_templating/_lazy_containers.py index 1d19e88c645..0f4b41bde9e 100644 --- a/lib/ansible/_internal/_templating/_lazy_containers.py +++ b/lib/ansible/_internal/_templating/_lazy_containers.py @@ -229,8 +229,6 @@ class _AnsibleLazyTemplateDict(_AnsibleTaggedDict, _AnsibleLazyTemplateMixin): __slots__ = _AnsibleLazyTemplateMixin._SLOTS def __init__(self, contents: t.Iterable | _LazyValueSource, /, **kwargs) -> None: - _AnsibleLazyTemplateMixin.__init__(self, contents) - if isinstance(contents, _AnsibleLazyTemplateDict): super().__init__(dict.items(contents), **kwargs) elif isinstance(contents, _LazyValueSource): @@ -238,6 +236,8 @@ class _AnsibleLazyTemplateDict(_AnsibleTaggedDict, _AnsibleLazyTemplateMixin): else: raise UnsupportedConstructionMethodError() + _AnsibleLazyTemplateMixin.__init__(self, contents) + def get(self, key: t.Any, default: t.Any = None) -> t.Any: if (value := super().get(key, _NoKeySentinel)) is _NoKeySentinel: return default @@ -372,8 +372,6 @@ class _AnsibleLazyTemplateList(_AnsibleTaggedList, _AnsibleLazyTemplateMixin): __slots__ = _AnsibleLazyTemplateMixin._SLOTS def __init__(self, contents: t.Iterable | _LazyValueSource, /) -> None: - _AnsibleLazyTemplateMixin.__init__(self, contents) - if isinstance(contents, _AnsibleLazyTemplateList): super().__init__(list.__iter__(contents)) elif isinstance(contents, _LazyValueSource): @@ -381,6 +379,8 @@ class _AnsibleLazyTemplateList(_AnsibleTaggedList, _AnsibleLazyTemplateMixin): else: raise UnsupportedConstructionMethodError() + _AnsibleLazyTemplateMixin.__init__(self, contents) + def __getitem__(self, key: 
t.SupportsIndex | slice, /) -> t.Any: if type(key) is slice: # pylint: disable=unidiomatic-typecheck return _AnsibleLazyTemplateList(_LazyValueSource(source=super().__getitem__(key), templar=self._templar, lazy_options=self._lazy_options)) @@ -567,7 +567,7 @@ class _AnsibleLazyAccessTuple(_AnsibleTaggedTuple, _AnsibleLazyTemplateMixin): def __getitem__(self, key: t.SupportsIndex | slice, /) -> t.Any: if type(key) is slice: # pylint: disable=unidiomatic-typecheck - return _AnsibleLazyAccessTuple(super().__getitem__(key)) + return _AnsibleLazyAccessTuple(_LazyValueSource(source=super().__getitem__(key), templar=self._templar, lazy_options=self._lazy_options)) value = super().__getitem__(key) diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py index 2a4ca0f3a71..292639daa18 100644 --- a/lib/ansible/cli/__init__.py +++ b/lib/ansible/cli/__init__.py @@ -107,7 +107,6 @@ from ansible import context from ansible.utils import display as _display from ansible.cli.arguments import option_helpers as opt_help from ansible.inventory.manager import InventoryManager -from ansible.module_utils.six import string_types from ansible.module_utils.common.text.converters import to_bytes, to_text from ansible.module_utils.common.collections import is_sequence from ansible.module_utils.common.file import is_executable @@ -403,8 +402,8 @@ class CLI(ABC): options = super(MyCLI, self).post_process_args(options) if options.addition and options.subtraction: raise AnsibleOptionsError('Only one of --addition and --subtraction can be specified') - if isinstance(options.listofhosts, string_types): - options.listofhosts = string_types.split(',') + if isinstance(options.listofhosts, str): + options.listofhosts = options.listofhosts.split(',') return options """ @@ -440,7 +439,7 @@ class CLI(ABC): if options.inventory: # should always be list - if isinstance(options.inventory, string_types): + if isinstance(options.inventory, str): options.inventory = [options.inventory] # Ensure 
full paths when needed diff --git a/lib/ansible/cli/config.py b/lib/ansible/cli/config.py index ed42545df47..8d21e78a238 100755 --- a/lib/ansible/cli/config.py +++ b/lib/ansible/cli/config.py @@ -24,7 +24,6 @@ from ansible.config.manager import ConfigManager from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleRequiredOptionError from ansible.module_utils.common.text.converters import to_native, to_text, to_bytes from ansible._internal import _json -from ansible.module_utils.six import string_types from ansible.parsing.quoting import is_quoted from ansible.parsing.yaml.dumper import AnsibleDumper from ansible.utils.color import stringc @@ -288,21 +287,21 @@ class ConfigCLI(CLI): default = '0' elif default: if stype == 'list': - if not isinstance(default, string_types): + if not isinstance(default, str): # python lists are not valid env ones try: default = ', '.join(default) except Exception as e: # list of other stuff default = '%s' % to_native(default) - if isinstance(default, string_types) and not is_quoted(default): + if isinstance(default, str) and not is_quoted(default): default = shlex.quote(default) elif default is None: default = '' if subkey in settings[setting] and settings[setting][subkey]: entry = settings[setting][subkey][-1]['name'] - if isinstance(settings[setting]['description'], string_types): + if isinstance(settings[setting]['description'], str): desc = settings[setting]['description'] else: desc = '\n#'.join(settings[setting]['description']) @@ -343,7 +342,7 @@ class ConfigCLI(CLI): sections[s] = new_sections[s] continue - if isinstance(opt['description'], string_types): + if isinstance(opt['description'], str): desc = '# (%s) %s' % (opt.get('type', 'string'), opt['description']) else: desc = "# (%s) " % opt.get('type', 'string') @@ -361,7 +360,7 @@ class ConfigCLI(CLI): seen[entry['section']].append(entry['key']) default = self.config.template_default(opt.get('default', ''), get_constants()) - if opt.get('type', '') == 'list' 
and not isinstance(default, string_types): + if opt.get('type', '') == 'list' and not isinstance(default, str): # python lists are not valid ini ones default = ', '.join(default) elif default is None: diff --git a/lib/ansible/cli/doc.py b/lib/ansible/cli/doc.py index 6b3c27e3408..967ce4911d2 100755 --- a/lib/ansible/cli/doc.py +++ b/lib/ansible/cli/doc.py @@ -32,7 +32,6 @@ from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError from ansible.module_utils.common.text.converters import to_native, to_text from ansible.module_utils.common.collections import is_sequence from ansible.module_utils.common.yaml import yaml_dump -from ansible.module_utils.six import string_types from ansible.parsing.plugin_docs import read_docstub from ansible.parsing.yaml.dumper import AnsibleDumper from ansible.parsing.yaml.loader import AnsibleLoader @@ -1274,7 +1273,7 @@ class DocCLI(CLI, RoleMixin): sub_indent = inline_indent + extra_indent if is_sequence(opt['description']): for entry_idx, entry in enumerate(opt['description'], 1): - if not isinstance(entry, string_types): + if not isinstance(entry, str): raise AnsibleError("Expected string in description of %s at index %s, got %s" % (o, entry_idx, type(entry))) if entry_idx == 1: text.append(key + DocCLI.warp_fill(DocCLI.tty_ify(entry), limit, @@ -1282,7 +1281,7 @@ class DocCLI(CLI, RoleMixin): else: text.append(DocCLI.warp_fill(DocCLI.tty_ify(entry), limit, initial_indent=sub_indent, subsequent_indent=sub_indent)) else: - if not isinstance(opt['description'], string_types): + if not isinstance(opt['description'], str): raise AnsibleError("Expected string in description of %s, got %s" % (o, type(opt['description']))) text.append(key + DocCLI.warp_fill(DocCLI.tty_ify(opt['description']), limit, initial_indent=inline_indent, subsequent_indent=sub_indent, initial_extra=len(extra_indent))) @@ -1344,6 +1343,51 @@ class DocCLI(CLI, RoleMixin): text.append("%s%s:" % (opt_indent, subkey)) DocCLI.add_fields(text, 
subdata, limit, opt_indent + ' ', return_values, opt_indent) + @staticmethod + def _add_seealso(text: list[str], seealsos: list[dict[str, t.Any]], limit: int, opt_indent: str) -> None: + for item in seealsos: + if 'module' in item: + text.append(DocCLI.warp_fill(DocCLI.tty_ify('Module %s' % item['module']), + limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent)) + description = item.get('description') + if description is None and item['module'].startswith('ansible.builtin.'): + description = 'The official documentation on the %s module.' % item['module'] + if description is not None: + text.append(DocCLI.warp_fill(DocCLI.tty_ify(description), + limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' ')) + if item['module'].startswith('ansible.builtin.'): + relative_url = 'collections/%s_module.html' % item['module'].replace('.', '/', 2) + text.append(DocCLI.warp_fill(DocCLI.tty_ify(get_versioned_doclink(relative_url)), + limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent)) + elif 'plugin' in item and 'plugin_type' in item: + plugin_suffix = ' plugin' if item['plugin_type'] not in ('module', 'role') else '' + text.append(DocCLI.warp_fill(DocCLI.tty_ify('%s%s %s' % (item['plugin_type'].title(), plugin_suffix, item['plugin'])), + limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent)) + description = item.get('description') + if description is None and item['plugin'].startswith('ansible.builtin.'): + description = 'The official documentation on the %s %s%s.' 
% (item['plugin'], item['plugin_type'], plugin_suffix) + if description is not None: + text.append(DocCLI.warp_fill(DocCLI.tty_ify(description), + limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' ')) + if item['plugin'].startswith('ansible.builtin.'): + relative_url = 'collections/%s_%s.html' % (item['plugin'].replace('.', '/', 2), item['plugin_type']) + text.append(DocCLI.warp_fill(DocCLI.tty_ify(get_versioned_doclink(relative_url)), + limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent)) + elif 'name' in item and 'link' in item and 'description' in item: + text.append(DocCLI.warp_fill(DocCLI.tty_ify(item['name']), + limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent)) + text.append(DocCLI.warp_fill(DocCLI.tty_ify(item['description']), + limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' ')) + text.append(DocCLI.warp_fill(DocCLI.tty_ify(item['link']), + limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' ')) + elif 'ref' in item and 'description' in item: + text.append(DocCLI.warp_fill(DocCLI.tty_ify('Ansible documentation [%s]' % item['ref']), + limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent)) + text.append(DocCLI.warp_fill(DocCLI.tty_ify(item['description']), + limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' ')) + text.append(DocCLI.warp_fill(DocCLI.tty_ify(get_versioned_doclink('/#stq=%s&stp=1' % item['ref'])), + limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' ')) + def get_role_man_text(self, role, role_json): """Generate text for the supplied role suitable for display. 
@@ -1371,6 +1415,9 @@ class DocCLI(CLI, RoleMixin): text.append("ENTRY POINT: %s %s" % (_format(entry_point, "BOLD"), desc)) text.append('') + if version_added := doc.pop('version_added', None): + text.append(_format("ADDED IN:", 'bold') + " %s\n" % DocCLI._format_version_added(version_added)) + if doc.get('description'): if isinstance(doc['description'], list): descs = doc['description'] @@ -1384,29 +1431,24 @@ class DocCLI(CLI, RoleMixin): text.append(_format("Options", 'bold') + " (%s indicates it is required):" % ("=" if C.ANSIBLE_NOCOLOR else 'red')) DocCLI.add_fields(text, doc.pop('options'), limit, opt_indent) - if doc.get('attributes', False): - display.deprecated( - f'The role {role}\'s argument spec {entry_point} contains the key "attributes", ' - 'which will not be displayed by ansible-doc in the future. ' - 'This was unintentionally allowed when plugin attributes were added, ' - 'but the feature does not map well to role argument specs.', - version='2.20', - ) + if notes := doc.pop('notes', False): text.append("") - text.append(_format("ATTRIBUTES:", 'bold')) - for k in doc['attributes'].keys(): - text.append('') - text.append(DocCLI.warp_fill(DocCLI.tty_ify(_format('%s:' % k, 'UNDERLINE')), limit - 6, initial_indent=opt_indent, - subsequent_indent=opt_indent)) - text.append(DocCLI._indent_lines(DocCLI._dump_yaml(doc['attributes'][k]), opt_indent)) - del doc['attributes'] + text.append(_format("NOTES:", 'bold')) + for note in notes: + text.append(DocCLI.warp_fill(DocCLI.tty_ify(note), limit - 6, + initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent)) + + if seealso := doc.pop('seealso', False): + text.append("") + text.append(_format("SEE ALSO:", 'bold')) + DocCLI._add_seealso(text, seealso, limit=limit, opt_indent=opt_indent) # generic elements we will handle identically for k in ('author',): if k not in doc: continue text.append('') - if isinstance(doc[k], string_types): + if isinstance(doc[k], str): text.append('%s: %s' % (k.upper(), 
DocCLI.warp_fill(DocCLI.tty_ify(doc[k]), limit - (len(k) + 2), subsequent_indent=opt_indent))) elif isinstance(doc[k], (list, tuple)): @@ -1418,7 +1460,7 @@ class DocCLI(CLI, RoleMixin): if doc.get('examples', False): text.append('') text.append(_format("EXAMPLES:", 'bold')) - if isinstance(doc['examples'], string_types): + if isinstance(doc['examples'], str): text.append(doc.pop('examples').strip()) else: try: @@ -1497,49 +1539,7 @@ class DocCLI(CLI, RoleMixin): if doc.get('seealso', False): text.append("") text.append(_format("SEE ALSO:", 'bold')) - for item in doc['seealso']: - if 'module' in item: - text.append(DocCLI.warp_fill(DocCLI.tty_ify('Module %s' % item['module']), - limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent)) - description = item.get('description') - if description is None and item['module'].startswith('ansible.builtin.'): - description = 'The official documentation on the %s module.' % item['module'] - if description is not None: - text.append(DocCLI.warp_fill(DocCLI.tty_ify(description), - limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' ')) - if item['module'].startswith('ansible.builtin.'): - relative_url = 'collections/%s_module.html' % item['module'].replace('.', '/', 2) - text.append(DocCLI.warp_fill(DocCLI.tty_ify(get_versioned_doclink(relative_url)), - limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent)) - elif 'plugin' in item and 'plugin_type' in item: - plugin_suffix = ' plugin' if item['plugin_type'] not in ('module', 'role') else '' - text.append(DocCLI.warp_fill(DocCLI.tty_ify('%s%s %s' % (item['plugin_type'].title(), plugin_suffix, item['plugin'])), - limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent)) - description = item.get('description') - if description is None and item['plugin'].startswith('ansible.builtin.'): - description = 'The official documentation on the %s %s%s.' 
% (item['plugin'], item['plugin_type'], plugin_suffix) - if description is not None: - text.append(DocCLI.warp_fill(DocCLI.tty_ify(description), - limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' ')) - if item['plugin'].startswith('ansible.builtin.'): - relative_url = 'collections/%s_%s.html' % (item['plugin'].replace('.', '/', 2), item['plugin_type']) - text.append(DocCLI.warp_fill(DocCLI.tty_ify(get_versioned_doclink(relative_url)), - limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent)) - elif 'name' in item and 'link' in item and 'description' in item: - text.append(DocCLI.warp_fill(DocCLI.tty_ify(item['name']), - limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent)) - text.append(DocCLI.warp_fill(DocCLI.tty_ify(item['description']), - limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' ')) - text.append(DocCLI.warp_fill(DocCLI.tty_ify(item['link']), - limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' ')) - elif 'ref' in item and 'description' in item: - text.append(DocCLI.warp_fill(DocCLI.tty_ify('Ansible documentation [%s]' % item['ref']), - limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent)) - text.append(DocCLI.warp_fill(DocCLI.tty_ify(item['description']), - limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' ')) - text.append(DocCLI.warp_fill(DocCLI.tty_ify(get_versioned_doclink('/#stq=%s&stp=1' % item['ref'])), - limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' ')) - + DocCLI._add_seealso(text, doc['seealso'], limit=limit, opt_indent=opt_indent) del doc['seealso'] if doc.get('requirements', False): @@ -1554,7 +1554,7 @@ class DocCLI(CLI, RoleMixin): continue text.append('') header = _format(k.upper(), 'bold') - if isinstance(doc[k], string_types): + if isinstance(doc[k], str): text.append('%s: %s' % (header, DocCLI.warp_fill(DocCLI.tty_ify(doc[k]), 
limit - (len(k) + 2), subsequent_indent=opt_indent))) elif isinstance(doc[k], (list, tuple)): text.append('%s: %s' % (header, ', '.join(doc[k]))) @@ -1566,7 +1566,7 @@ class DocCLI(CLI, RoleMixin): if doc.get('plainexamples', False): text.append('') text.append(_format("EXAMPLES:", 'bold')) - if isinstance(doc['plainexamples'], string_types): + if isinstance(doc['plainexamples'], str): text.append(doc.pop('plainexamples').strip()) else: try: @@ -1603,7 +1603,7 @@ def _do_yaml_snippet(doc): for o in sorted(doc['options'].keys()): opt = doc['options'][o] - if isinstance(opt['description'], string_types): + if isinstance(opt['description'], str): desc = DocCLI.tty_ify(opt['description']) else: desc = DocCLI.tty_ify(" ".join(opt['description'])) diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py index 981b02c7128..6fc310ea6b1 100755 --- a/lib/ansible/cli/galaxy.py +++ b/lib/ansible/cli/galaxy.py @@ -54,7 +54,6 @@ from ansible.module_utils.common.collections import is_iterable from ansible.module_utils.common.yaml import yaml_dump, yaml_load from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text from ansible._internal._datatag._tags import TrustedAsTemplate -from ansible.module_utils import six from ansible.parsing.dataloader import DataLoader from ansible.playbook.role.requirement import RoleRequirement from ansible._internal._templating._engine import TemplateEngine @@ -65,7 +64,6 @@ from ansible.utils.plugin_docs import get_versioned_doclink from ansible.utils.vars import load_extra_vars display = Display() -urlparse = six.moves.urllib.parse.urlparse def with_collection_artifacts_manager(wrapped_method): diff --git a/lib/ansible/config/base.yml b/lib/ansible/config/base.yml index 56dca21bbc0..67e000bb17e 100644 --- a/lib/ansible/config/base.yml +++ b/lib/ansible/config/base.yml @@ -1215,7 +1215,6 @@ DEFAULT_TRANSPORT: default: ssh description: - Can be any connection plugin available to your ansible installation. 
- - There is also a (DEPRECATED) special 'smart' option, that will toggle between 'ssh' and 'paramiko' depending on controller OS and ssh versions. env: [{name: ANSIBLE_TRANSPORT}] ini: - {key: transport, section: defaults} diff --git a/lib/ansible/executor/play_iterator.py b/lib/ansible/executor/play_iterator.py index 69d0b00b0e7..de0c5f78d1b 100644 --- a/lib/ansible/executor/play_iterator.py +++ b/lib/ansible/executor/play_iterator.py @@ -574,7 +574,7 @@ class PlayIterator: Given the current HostState state, determines if the current block, or any child blocks, are in rescue mode. """ - if state.run_state == IteratingStates.TASKS and state.get_current_block().rescue: + if state.run_state in (IteratingStates.TASKS, IteratingStates.HANDLERS) and state.get_current_block().rescue: return True if state.tasks_child_state is not None: return self.is_any_block_rescuing(state.tasks_child_state) diff --git a/lib/ansible/executor/playbook_executor.py b/lib/ansible/executor/playbook_executor.py index 78329df342f..9bd7317615c 100644 --- a/lib/ansible/executor/playbook_executor.py +++ b/lib/ansible/executor/playbook_executor.py @@ -31,7 +31,6 @@ from ansible.utils.helpers import pct_to_int from ansible.utils.collection_loader import AnsibleCollectionConfig from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path, _get_collection_playbook_path from ansible.utils.path import makedirs_safe -from ansible.utils.ssh_functions import set_default_transport from ansible.utils.display import Display @@ -65,14 +64,6 @@ class PlaybookExecutor: forks=context.CLIARGS.get('forks'), ) - # Note: We run this here to cache whether the default ansible ssh - # executable supports control persist. Sometime in the future we may - # need to enhance this to check that ansible_ssh_executable specified - # in inventory is also cached. 
We can't do this caching at the point - # where it is used (in task_executor) because that is post-fork and - # therefore would be discarded after every task. - set_default_transport() - def run(self): """ Run the given playbook, based on the settings in the play which diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py index 223abfc3ac8..98b6b3f0017 100644 --- a/lib/ansible/executor/task_executor.py +++ b/lib/ansible/executor/task_executor.py @@ -27,7 +27,6 @@ from ansible._internal._datatag._tags import TrustedAsTemplate from ansible.module_utils.parsing.convert_bool import boolean from ansible.module_utils.common.text.converters import to_text, to_native from ansible.module_utils.connection import write_to_stream -from ansible.module_utils.six import string_types from ansible.playbook.task import Task from ansible.plugins import get_plugin_class from ansible.plugins.loader import become_loader, cliconf_loader, connection_loader, httpapi_loader, netconf_loader, terminal_loader @@ -48,6 +47,7 @@ display = Display() RETURN_VARS = [x for x in C.MAGIC_VARIABLE_MAPPING.items() if 'become' not in x and '_pass' not in x] +_INJECT_FACTS, _INJECT_FACTS_ORIGIN = C.config.get_config_value_and_origin('INJECT_FACTS_AS_VARS') __all__ = ['TaskExecutor'] @@ -340,7 +340,7 @@ class TaskExecutor: }) # if plugin is loaded, get resolved name, otherwise leave original task connection - if self._connection and not isinstance(self._connection, string_types): + if self._connection and not isinstance(self._connection, str): task_fields['connection'] = getattr(self._connection, 'ansible_name') tr = _RawTaskResult( @@ -664,8 +664,11 @@ class TaskExecutor: # TODO: cleaning of facts should eventually become part of taskresults instead of vars af = result['ansible_facts'] vars_copy['ansible_facts'] = combine_vars(vars_copy.get('ansible_facts', {}), namespace_facts(af)) - if C.INJECT_FACTS_AS_VARS: - cleaned_toplevel = {k: _deprecate_top_level_fact(v) for 
k, v in clean_facts(af).items()} + if _INJECT_FACTS: + if _INJECT_FACTS_ORIGIN == 'default': + cleaned_toplevel = {k: _deprecate_top_level_fact(v) for k, v in clean_facts(af).items()} + else: + cleaned_toplevel = clean_facts(af) vars_copy.update(cleaned_toplevel) # set the failed property if it was missing. @@ -759,9 +762,13 @@ class TaskExecutor: # TODO: cleaning of facts should eventually become part of taskresults instead of vars af = result['ansible_facts'] variables['ansible_facts'] = combine_vars(variables.get('ansible_facts', {}), namespace_facts(af)) - if C.INJECT_FACTS_AS_VARS: - # DTFIX-FUTURE: why is this happening twice, esp since we're post-fork and these will be discarded? - cleaned_toplevel = {k: _deprecate_top_level_fact(v) for k, v in clean_facts(af).items()} + if _INJECT_FACTS: + if _INJECT_FACTS_ORIGIN == 'default': + # This happens x2 due to loops and being able to use values in subsequent iterations + # these copies are later discarded in favor of 'total/final' one on loop end. + cleaned_toplevel = {k: _deprecate_top_level_fact(v) for k, v in clean_facts(af).items()} + else: + cleaned_toplevel = clean_facts(af) + variables.update(cleaned_toplevel) # save the notification target in the result, if it was specified, as @@ -960,9 +967,6 @@ class TaskExecutor: self._play_context.connection = current_connection - # TODO: play context has logic to update the connection for 'smart' - # (default value, will chose between ssh and paramiko) and 'persistent' - # (really paramiko), eventually this should move to task object itself.
conn_type = self._play_context.connection connection, plugin_load_context = self._shared_loader_obj.connection_loader.get_with_context( diff --git a/lib/ansible/galaxy/api.py b/lib/ansible/galaxy/api.py index 60689f44670..b34be950f8c 100644 --- a/lib/ansible/galaxy/api.py +++ b/lib/ansible/galaxy/api.py @@ -25,7 +25,6 @@ from ansible.errors import AnsibleError from ansible.galaxy.user_agent import user_agent from ansible.module_utils.api import retry_with_delays_and_condition from ansible.module_utils.api import generate_jittered_backoff -from ansible.module_utils.six import string_types from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text from ansible.module_utils.urls import open_url, prepare_multipart from ansible.utils.display import Display @@ -595,11 +594,11 @@ class GalaxyAPI: page_size = kwargs.get('page_size', None) author = kwargs.get('author', None) - if tags and isinstance(tags, string_types): + if tags and isinstance(tags, str): tags = tags.split(',') search_url += '&tags_autocomplete=' + '+'.join(tags) - if platforms and isinstance(platforms, string_types): + if platforms and isinstance(platforms, str): platforms = platforms.split(',') search_url += '&platforms_autocomplete=' + '+'.join(platforms) diff --git a/lib/ansible/galaxy/collection/__init__.py b/lib/ansible/galaxy/collection/__init__.py index 38737468dcd..4056f0c177e 100644 --- a/lib/ansible/galaxy/collection/__init__.py +++ b/lib/ansible/galaxy/collection/__init__.py @@ -339,12 +339,12 @@ def verify_local_collection(local_collection, remote_collection, artifacts_manag ] # Find any paths not in the FILES.json - for root, dirs, files in os.walk(b_collection_path): - for name in files: + for root, dirs, filenames in os.walk(b_collection_path): + for name in filenames: full_path = os.path.join(root, name) path = to_text(full_path[len(b_collection_path) + 1::], errors='surrogate_or_strict') if any(fnmatch.fnmatch(full_path, b_pattern) for b_pattern in 
b_ignore_patterns): - display.v("Ignoring verification for %s" % full_path) + display.v("Ignoring verification for %s" % to_text(full_path)) continue if full_path not in collection_files: diff --git a/lib/ansible/galaxy/dependency_resolution/providers.py b/lib/ansible/galaxy/dependency_resolution/providers.py index 5f602b8242c..8cfb14b0b15 100644 --- a/lib/ansible/galaxy/dependency_resolution/providers.py +++ b/lib/ansible/galaxy/dependency_resolution/providers.py @@ -24,7 +24,6 @@ from ansible.galaxy.dependency_resolution.versioning import ( is_pre_release, meets_requirements, ) -from ansible.module_utils.six import string_types from ansible.utils.version import SemanticVersion, LooseVersion try: @@ -278,7 +277,7 @@ class CollectionDependencyProviderBase(AbstractProvider): # NOTE: Another known mistake is setting a minor part of the SemVer notation # NOTE: skipping the "patch" bit like "1.0" which is assumed non-compliant even # NOTE: after the conversion to string. - if not isinstance(version, string_types): + if not isinstance(version, str): raise ValueError(version_err) elif version != '*': try: diff --git a/lib/ansible/inventory/manager.py b/lib/ansible/inventory/manager.py index e6183ccd095..615b5a6d2eb 100644 --- a/lib/ansible/inventory/manager.py +++ b/lib/ansible/inventory/manager.py @@ -33,7 +33,6 @@ from ansible._internal import _json, _wrapt from ansible._internal._json import EncryptedStringBehavior from ansible.errors import AnsibleError, AnsibleOptionsError from ansible.inventory.data import InventoryData -from ansible.module_utils.six import string_types from ansible.module_utils.common.text.converters import to_bytes, to_text from ansible.parsing.utils.addresses import parse_address from ansible.plugins.loader import inventory_loader @@ -112,7 +111,7 @@ def split_host_pattern(pattern): results = (split_host_pattern(p) for p in pattern) # flatten the results return list(itertools.chain.from_iterable(results)) - elif not isinstance(pattern, 
string_types): + elif not isinstance(pattern, str): pattern = to_text(pattern, errors='surrogate_or_strict') # If it's got commas in it, we'll treat it as a straightforward @@ -162,7 +161,7 @@ class InventoryManager(object): # the inventory dirs, files, script paths or lists of hosts if sources is None: self._sources = [] - elif isinstance(sources, string_types): + elif isinstance(sources, str): self._sources = [sources] else: self._sources = sources diff --git a/lib/ansible/module_utils/_internal/_no_six.py b/lib/ansible/module_utils/_internal/_no_six.py new file mode 100644 index 00000000000..93263f94a83 --- /dev/null +++ b/lib/ansible/module_utils/_internal/_no_six.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +import sys +import types + +from ansible.module_utils.common import warnings + + +# INLINED FROM THE SIX LIBRARY, see lib/ansible/module_utils/six/__init__.py +# Copyright (c) 2010-2024 Benjamin Peterson +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(type): + + def __new__(cls, name, this_bases, d): + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). 
+ resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d['__orig_bases__'] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + if hasattr(cls, '__qualname__'): + orig_vars['__qualname__'] = cls.__qualname__ + return metaclass(cls.__name__, cls.__bases__, orig_vars) + + return wrapper + + +def iteritems(d, **kw): + return iter(d.items(**kw)) + + +_mini_six = { + "PY2": False, + "PY3": True, + "text_type": str, + "binary_type": bytes, + "string_types": (str,), + "integer_types": (int,), + "iteritems": iteritems, + "add_metaclass": add_metaclass, + "with_metaclass": with_metaclass, +} +# INLINED SIX END + + +def deprecate(importable_name: str, module_name: str, *deprecated_args) -> object: + """Inject import-time deprecation warnings.""" + if not (importable_name in deprecated_args and (importable := _mini_six.get(importable_name, ...) 
) is not ...): + raise AttributeError(f"module {module_name!r} has no attribute {importable_name!r}") + + # TODO Inspect and remove all calls to this function in 2.24 + warnings.deprecate( + msg=f"Importing {importable_name!r} from {module_name!r} is deprecated.", + version="2.24", + ) + + return importable diff --git a/lib/ansible/module_utils/_text.py b/lib/ansible/module_utils/_text.py index b6dd62074f6..e73361b6cd1 100644 --- a/lib/ansible/module_utils/_text.py +++ b/lib/ansible/module_utils/_text.py @@ -1,15 +1,35 @@ # Copyright (c), Toshio Kuratomi 2016 # Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) -""" -.. warn:: Use ansible.module_utils.common.text.converters instead. -""" from __future__ import annotations -# Backwards compat for people still calling it from this package -# pylint: disable=unused-import -import codecs +from ansible.module_utils.common import warnings as _warnings -from ansible.module_utils.six import PY3, text_type, binary_type -from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text +_mini_six = { + "binary_type": bytes, + "text_type": str, + "PY3": True, +} + + +def __getattr__(importable_name: str) -> object: + """Inject import-time deprecation warnings.""" + help_text: str | None = None + importable: object + if importable_name == "codecs": + import codecs + importable = codecs + elif importable_name in {"to_bytes", "to_native", "to_text"}: + from ansible.module_utils.common.text import converters + importable = getattr(converters, importable_name) + help_text = "Use ansible.module_utils.common.text.converters instead."
+ elif (importable := _mini_six.get(importable_name, ...)) is ...: + raise AttributeError(f"module {__name__!r} has no attribute {importable_name!r}") + + _warnings.deprecate( + msg=f"Importing {importable_name!r} from {__name__!r} is deprecated.", + version="2.24", + help_text=help_text, + ) + return importable diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py index ba87c7b3850..7f9742d836d 100644 --- a/lib/ansible/module_utils/basic.py +++ b/lib/ansible/module_utils/basic.py @@ -46,6 +46,15 @@ import tempfile import time import traceback +from collections.abc import ( + KeysView, + Mapping, + MutableMapping, + Sequence, + MutableSequence, + Set, + MutableSet, +) from functools import reduce try: @@ -123,13 +132,6 @@ def _get_available_hash_algorithms(): AVAILABLE_HASH_ALGORITHMS = _get_available_hash_algorithms() from ansible.module_utils.common import json as _json - -from ansible.module_utils.six.moves.collections_abc import ( - KeysView, - Mapping, MutableMapping, - Sequence, MutableSequence, - Set, MutableSet, -) from ansible.module_utils.common.locale import get_best_parsable_locale from ansible.module_utils.common.process import get_bin_path from ansible.module_utils.common.file import ( @@ -2186,6 +2188,18 @@ def get_module_path(): return os.path.dirname(os.path.realpath(__file__)) +_mini_six = { + "b": lambda s: s.encode("latin-1"), + "PY2": False, + "PY3": True, + "text_type": str, + "binary_type": bytes, + "string_types": (str,), + "integer_types": (int,), + "iteritems": lambda d, **kw: iter(d.items(**kw)), +} + + def __getattr__(importable_name): """Inject import-time deprecation warnings.""" if importable_name == 'datetime': @@ -2203,24 +2217,12 @@ def __getattr__(importable_name): elif importable_name == 'repeat': from itertools import repeat importable = repeat - elif importable_name in { - 'PY2', 'PY3', 'b', 'binary_type', 'integer_types', - 'iteritems', 'string_types', 'text_type', - }: - import importlib - 
importable = getattr( - importlib.import_module('ansible.module_utils.six'), - importable_name - ) elif importable_name == 'map': importable = map elif importable_name == 'shlex_quote': importable = shlex.quote - else: - raise AttributeError( - f'cannot import name {importable_name !r} ' - f"from '{__name__}' ({__file__ !s})" - ) + elif (importable := _mini_six.get(importable_name, ...)) is ...: + raise AttributeError(f"module {__name__!r} has no attribute {importable_name!r}") deprecate( msg=f"Importing '{importable_name}' from '{__name__}' is deprecated.", diff --git a/lib/ansible/module_utils/common/_collections_compat.py b/lib/ansible/module_utils/common/_collections_compat.py index 25f7889d8ef..684c33753e6 100644 --- a/lib/ansible/module_utils/common/_collections_compat.py +++ b/lib/ansible/module_utils/common/_collections_compat.py @@ -2,7 +2,7 @@ # Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) """Collections ABC import shim. -Use `ansible.module_utils.six.moves.collections_abc` instead, which has been available since ansible-core 2.11. +Use `collections.abc` instead. This module exists only for backwards compatibility. """ @@ -10,7 +10,7 @@ from __future__ import annotations # Although this was originally intended for internal use only, it has wide adoption in collections. # This is due in part to sanity tests previously recommending its use over `collections` imports. 
-from ansible.module_utils.six.moves.collections_abc import ( # pylint: disable=unused-import +from collections.abc import ( # pylint: disable=unused-import MappingView, ItemsView, KeysView, @@ -25,3 +25,12 @@ from ansible.module_utils.six.moves.collections_abc import ( # pylint: disable= Iterable, Iterator, ) + +from ansible.module_utils.common import warnings as _warnings + + +_warnings.deprecate( + msg="The `ansible.module_utils.common._collections_compat` module is deprecated.", + help_text="Use `collections.abc` from the Python standard library instead.", + version="2.24", +) diff --git a/lib/ansible/module_utils/common/collections.py b/lib/ansible/module_utils/common/collections.py index 9f4dfb9b4d0..4fdc874269b 100644 --- a/lib/ansible/module_utils/common/collections.py +++ b/lib/ansible/module_utils/common/collections.py @@ -6,9 +6,10 @@ from __future__ import annotations +from collections.abc import Hashable, Mapping, MutableMapping, Sequence # pylint: disable=unused-import + +from ansible.module_utils._internal import _no_six from ansible.module_utils.common import warnings as _warnings -from ansible.module_utils.six import binary_type, text_type -from ansible.module_utils.six.moves.collections_abc import Hashable, Mapping, MutableMapping, Sequence # pylint: disable=unused-import class ImmutableDict(Hashable, Mapping): @@ -67,7 +68,7 @@ class ImmutableDict(Hashable, Mapping): def is_string(seq): """Identify whether the input has a string-like type (including bytes).""" - return isinstance(seq, (text_type, binary_type)) + return isinstance(seq, (str, bytes)) def is_iterable(seq, include_strings=False): @@ -114,3 +115,7 @@ def count(seq): for elem in seq: counters[elem] = counters.get(elem, 0) + 1 return counters + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type") diff --git a/lib/ansible/module_utils/common/dict_transformations.py b/lib/ansible/module_utils/common/dict_transformations.py 
index 8d318f5ef63..3e6510c4b78 100644 --- a/lib/ansible/module_utils/common/dict_transformations.py +++ b/lib/ansible/module_utils/common/dict_transformations.py @@ -7,10 +7,9 @@ from __future__ import annotations import re +from collections.abc import MutableMapping from copy import deepcopy -from ansible.module_utils.six.moves.collections_abc import MutableMapping - def camel_dict_to_snake_dict(camel_dict, reversible=False, ignore_list=()): """ diff --git a/lib/ansible/module_utils/common/network.py b/lib/ansible/module_utils/common/network.py index a85fc1ce4ab..a5c2148c06f 100644 --- a/lib/ansible/module_utils/common/network.py +++ b/lib/ansible/module_utils/common/network.py @@ -6,11 +6,13 @@ from __future__ import annotations import re + +# backward compat +from builtins import zip # pylint: disable=unused-import + from struct import pack from socket import inet_ntoa -from ansible.module_utils.six.moves import zip - VALID_MASKS = [2**8 - 2**i for i in range(0, 9)] diff --git a/lib/ansible/module_utils/common/parameters.py b/lib/ansible/module_utils/common/parameters.py index fc886463c94..129bb05178a 100644 --- a/lib/ansible/module_utils/common/parameters.py +++ b/lib/ansible/module_utils/common/parameters.py @@ -9,9 +9,19 @@ import os import typing as t from collections import deque -from itertools import chain +from collections.abc import ( + KeysView, + Set, + Sequence, + Mapping, + MutableMapping, + MutableSet, + MutableSequence, +) +from itertools import chain # pylint: disable=unused-import from ansible.module_utils.common.collections import is_iterable +from ansible.module_utils._internal import _no_six from ansible.module_utils._internal._datatag import AnsibleSerializable, AnsibleTagHelper from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text from ansible.module_utils.common.warnings import warn @@ -33,26 +43,6 @@ from ansible.module_utils.errors import ( SubParameterTypeError, ) from 
ansible.module_utils.parsing.convert_bool import BOOLEANS_FALSE, BOOLEANS_TRUE - -from ansible.module_utils.six.moves.collections_abc import ( - KeysView, - Set, - Sequence, - Mapping, - MutableMapping, - MutableSet, - MutableSequence, -) - -from ansible.module_utils.six import ( - binary_type, - integer_types, - string_types, - text_type, - PY2, - PY3, -) - from ansible.module_utils.common.validation import ( check_mutually_exclusive, check_required_arguments, @@ -243,7 +233,7 @@ def _handle_aliases(argument_spec, parameters, alias_warnings=None, alias_deprec if aliases is None: continue - if not is_iterable(aliases) or isinstance(aliases, (binary_type, text_type)): + if not is_iterable(aliases) or isinstance(aliases, (bytes, str)): raise TypeError('internal error: aliases must be a list or tuple') for alias in aliases: @@ -346,7 +336,7 @@ def _list_no_log_values(argument_spec, params): for sub_param in sub_parameters: # Validate dict fields in case they came in as strings - if isinstance(sub_param, string_types): + if isinstance(sub_param, str): sub_param = check_type_dict(sub_param) if not isinstance(sub_param, Mapping): @@ -362,7 +352,7 @@ def _return_datastructure_name(obj): """ Return native stringified values from datastructures. 
For use with removing sensitive values pre-jsonification.""" - if isinstance(obj, (text_type, binary_type)): + if isinstance(obj, (str, bytes)): if obj: yield to_native(obj, errors='surrogate_or_strict') return @@ -375,7 +365,7 @@ def _return_datastructure_name(obj): elif obj is None or isinstance(obj, bool): # This must come before int because bools are also ints return - elif isinstance(obj, tuple(list(integer_types) + [float])): + elif isinstance(obj, (int, float)): yield to_native(obj, nonstring='simplerepr') else: raise TypeError('Unknown parameter type: %s' % (type(obj))) @@ -413,26 +403,23 @@ def _remove_values_conditions(value, no_log_strings, deferred_removals): """ original_value = value - if isinstance(value, (text_type, binary_type)): + if isinstance(value, (str, bytes)): # Need native str type native_str_value = value - if isinstance(value, text_type): + if isinstance(value, str): value_is_text = True - if PY2: - native_str_value = to_bytes(value, errors='surrogate_or_strict') - elif isinstance(value, binary_type): + elif isinstance(value, bytes): value_is_text = False - if PY3: - native_str_value = to_text(value, errors='surrogate_or_strict') + native_str_value = to_text(value, errors='surrogate_or_strict') if native_str_value in no_log_strings: return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER' for omit_me in no_log_strings: native_str_value = native_str_value.replace(omit_me, '*' * 8) - if value_is_text and isinstance(native_str_value, binary_type): + if value_is_text and isinstance(native_str_value, bytes): value = to_text(native_str_value, encoding='utf-8', errors='surrogate_then_replace') - elif not value_is_text and isinstance(native_str_value, text_type): + elif not value_is_text and isinstance(native_str_value, str): value = to_bytes(native_str_value, encoding='utf-8', errors='surrogate_then_replace') else: value = native_str_value @@ -514,7 +501,7 @@ def _set_defaults(argument_spec, parameters, set_default=True): def 
_sanitize_keys_conditions(value, no_log_strings, ignore_keys, deferred_removals): """ Helper method to :func:`sanitize_keys` to build ``deferred_removals`` and avoid deep recursion. """ - if isinstance(value, (text_type, binary_type)): + if isinstance(value, (str, bytes)): return value if isinstance(value, Sequence): @@ -541,7 +528,7 @@ def _sanitize_keys_conditions(value, no_log_strings, ignore_keys, deferred_remov deferred_removals.append((value, new_value)) return new_value - if isinstance(value, tuple(chain(integer_types, (float, bool, NoneType)))): + if isinstance(value, (int, float, bool, NoneType)): return value if isinstance(value, (datetime.datetime, datetime.date, datetime.time)): @@ -560,8 +547,8 @@ def _validate_elements(wanted_type, parameter, values, options_context=None, err # Get param name for strings so we can later display this value in a useful error message if needed # Only pass 'kwargs' to our checkers and ignore custom callable checkers kwargs = {} - if wanted_element_type == 'str' and isinstance(wanted_type, string_types): - if isinstance(parameter, string_types): + if wanted_element_type == 'str' and isinstance(wanted_type, str): + if isinstance(parameter, str): kwargs['param'] = parameter elif isinstance(parameter, dict): kwargs['param'] = list(parameter.keys())[0] @@ -620,7 +607,7 @@ def _validate_argument_types(argument_spec, parameters, prefix='', options_conte # Get param name for strings so we can later display this value in a useful error message if needed # Only pass 'kwargs' to our checkers and ignore custom callable checkers kwargs = {} - if wanted_name == 'str' and isinstance(wanted_type, string_types): + if wanted_name == 'str' and isinstance(wanted_type, str): kwargs['param'] = list(parameters.keys())[0] # Get the name of the parent key if this is a nested option @@ -659,7 +646,7 @@ def _validate_argument_values(argument_spec, parameters, options_context=None, e if choices is None: continue - if isinstance(choices, (frozenset, 
KeysView, Sequence)) and not isinstance(choices, (binary_type, text_type)): + if isinstance(choices, (frozenset, KeysView, Sequence)) and not isinstance(choices, (bytes, str)): if param in parameters: # Allow one or more when type='list' param with choices if isinstance(parameters[param], list): @@ -745,7 +732,7 @@ def _validate_sub_spec( options_context.append(param) # Make sure we can iterate over the elements - if not isinstance(parameters[param], Sequence) or isinstance(parameters[param], string_types): + if not isinstance(parameters[param], Sequence) or isinstance(parameters[param], str): elements = [parameters[param]] else: elements = parameters[param] @@ -940,3 +927,7 @@ def remove_values(value, no_log_strings): raise TypeError('Unknown container type encountered when removing private values from output') return new_value + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type", "integer_types", "string_types", "PY2", "PY3") diff --git a/lib/ansible/module_utils/common/text/converters.py b/lib/ansible/module_utils/common/text/converters.py index 057d06bdbea..0bc0cd4f252 100644 --- a/lib/ansible/module_utils/common/text/converters.py +++ b/lib/ansible/module_utils/common/text/converters.py @@ -8,11 +8,8 @@ from __future__ import annotations import codecs import json -from ansible.module_utils.six import ( - binary_type, - iteritems, - text_type, -) +from ansible.module_utils.compat import typing as _t +from ansible.module_utils._internal import _no_six try: codecs.lookup_error('surrogateescape') @@ -25,8 +22,54 @@ _COMPOSED_ERROR_HANDLERS = frozenset((None, 'surrogate_or_replace', 'surrogate_or_strict', 'surrogate_then_replace')) +_T = _t.TypeVar('_T') -def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): +_NonStringPassthru: _t.TypeAlias = _t.Literal['passthru'] +_NonStringOther: _t.TypeAlias = _t.Literal['simplerepr', 'empty', 'strict'] +_NonStringAll: _t.TypeAlias = 
_t.Union[_NonStringPassthru, _NonStringOther] + + +@_t.overload +def to_bytes( + obj: object, + encoding: str = 'utf-8', + errors: str | None = None, +) -> bytes: ... + + +@_t.overload +def to_bytes( + obj: bytes | str, + encoding: str = 'utf-8', + errors: str | None = None, + nonstring: _NonStringPassthru = 'passthru', +) -> bytes: ... + + +@_t.overload +def to_bytes( + obj: _T, + encoding: str = 'utf-8', + errors: str | None = None, + nonstring: _NonStringPassthru = 'passthru', +) -> _T: ... + + +@_t.overload +def to_bytes( + obj: object, + encoding: str = 'utf-8', + errors: str | None = None, + nonstring: _NonStringOther = 'simplerepr', +) -> bytes: ... + + +def to_bytes( + obj: _T, + encoding: str = 'utf-8', + errors: str | None = None, + nonstring: _NonStringAll = 'simplerepr' +) -> _T | bytes: """Make sure that a string is a byte string :arg obj: An object to make sure is a byte string. In most cases this @@ -84,13 +127,13 @@ def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): string is valid in the specified encoding. If it's important that the byte string is in the specified encoding do:: - encoded_string = to_bytes(to_text(input_string, 'latin-1'), 'utf-8') + encoded_string = to_bytes(to_text(input_string, encoding='latin-1'), encoding='utf-8') .. version_changed:: 2.3 Added the ``surrogate_then_replace`` error handler and made it the default error handler. 
""" - if isinstance(obj, binary_type): + if isinstance(obj, bytes): return obj # We're given a text string @@ -104,7 +147,7 @@ def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): else: errors = 'replace' - if isinstance(obj, text_type): + if isinstance(obj, str): try: # Try this first as it's the fastest return obj.encode(encoding, errors) @@ -129,21 +172,60 @@ def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): value = repr(obj) except UnicodeError: # Giving up - return to_bytes('') + return b'' elif nonstring == 'passthru': return obj elif nonstring == 'empty': - # python2.4 doesn't have b'' - return to_bytes('') + return b'' elif nonstring == 'strict': raise TypeError('obj must be a string type') else: raise TypeError('Invalid value %s for to_bytes\' nonstring parameter' % nonstring) - return to_bytes(value, encoding, errors) + return to_bytes(value, encoding=encoding, errors=errors) -def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): +@_t.overload +def to_text( + obj: object, + encoding: str = 'utf-8', + errors: str | None = None, +) -> str: ... + + +@_t.overload +def to_text( + obj: str | bytes, + encoding: str = 'utf-8', + errors: str | None = None, + nonstring: _NonStringPassthru = 'passthru', +) -> str: ... + + +@_t.overload +def to_text( + obj: _T, + encoding: str = 'utf-8', + errors: str | None = None, + nonstring: _NonStringPassthru = 'passthru', +) -> _T: ... + + +@_t.overload +def to_text( + obj: object, + encoding: str = 'utf-8', + errors: str | None = None, + nonstring: _NonStringOther = 'simplerepr', +) -> str: ... + + +def to_text( + obj: _T, + encoding: str = 'utf-8', + errors: str | None = None, + nonstring: _NonStringAll = 'simplerepr' +) -> _T | str: """Make sure that a string is a text string :arg obj: An object to make sure is a text string. 
In most cases this @@ -194,7 +276,7 @@ def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): Added the surrogate_then_replace error handler and made it the default error handler. """ - if isinstance(obj, text_type): + if isinstance(obj, str): return obj if errors in _COMPOSED_ERROR_HANDLERS: @@ -205,7 +287,7 @@ def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): else: errors = 'replace' - if isinstance(obj, binary_type): + if isinstance(obj, bytes): # Note: We don't need special handling for surrogate_then_replace # because all bytes will either be made into surrogates or are valid # to decode. @@ -221,17 +303,17 @@ def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): value = repr(obj) except UnicodeError: # Giving up - return u'' + return '' elif nonstring == 'passthru': return obj elif nonstring == 'empty': - return u'' + return '' elif nonstring == 'strict': raise TypeError('obj must be a string type') else: raise TypeError('Invalid value %s for to_text\'s nonstring parameter' % nonstring) - return to_text(value, encoding, errors) + return to_text(value, encoding=encoding, errors=errors) to_native = to_text @@ -259,10 +341,10 @@ def container_to_bytes(d, encoding='utf-8', errors='surrogate_or_strict'): """ # DTFIX-FUTURE: deprecate - if isinstance(d, text_type): + if isinstance(d, str): return to_bytes(d, encoding=encoding, errors=errors) elif isinstance(d, dict): - return dict(container_to_bytes(o, encoding, errors) for o in iteritems(d)) + return dict(container_to_bytes(o, encoding, errors) for o in d.items()) elif isinstance(d, list): return [container_to_bytes(o, encoding, errors) for o in d] elif isinstance(d, tuple): @@ -279,14 +361,18 @@ def container_to_text(d, encoding='utf-8', errors='surrogate_or_strict'): """ # DTFIX-FUTURE: deprecate - if isinstance(d, binary_type): + if isinstance(d, bytes): # Warning, can traceback return to_text(d, encoding=encoding, errors=errors) elif isinstance(d, 
dict): - return dict(container_to_text(o, encoding, errors) for o in iteritems(d)) + return dict(container_to_text(o, encoding, errors) for o in d.items()) elif isinstance(d, list): return [container_to_text(o, encoding, errors) for o in d] elif isinstance(d, tuple): return tuple(container_to_text(o, encoding, errors) for o in d) else: return d + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type", "iteritems") diff --git a/lib/ansible/module_utils/common/text/formatters.py b/lib/ansible/module_utils/common/text/formatters.py index d548085c57f..1eeae7c10f4 100644 --- a/lib/ansible/module_utils/common/text/formatters.py +++ b/lib/ansible/module_utils/common/text/formatters.py @@ -6,7 +6,7 @@ from __future__ import annotations import re -from ansible.module_utils.six import iteritems +from ansible.module_utils._internal import _no_six SIZE_RANGES = { 'Y': 1 << 80, @@ -117,7 +117,7 @@ def bytes_to_human(size, isbits=False, unit=None): base = 'bits' suffix = '' - for suffix, limit in sorted(iteritems(SIZE_RANGES), key=lambda item: -item[1]): + for suffix, limit in sorted(SIZE_RANGES.items(), key=lambda item: -item[1]): if (unit is None and size >= limit) or unit is not None and unit.upper() == suffix[0]: break @@ -127,3 +127,7 @@ def bytes_to_human(size, isbits=False, unit=None): suffix = base return '%.2f %s' % (size / limit, suffix) + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "iteritems") diff --git a/lib/ansible/module_utils/common/validation.py b/lib/ansible/module_utils/common/validation.py index f5d5f5a061f..81100cd5bce 100644 --- a/lib/ansible/module_utils/common/validation.py +++ b/lib/ansible/module_utils/common/validation.py @@ -10,15 +10,13 @@ import os import re from ast import literal_eval +from ansible.module_utils._internal import _no_six from ansible.module_utils.common import json as _common_json from 
ansible.module_utils.common.text.converters import to_native from ansible.module_utils.common.collections import is_iterable from ansible.module_utils.common.text.formatters import human_to_bytes from ansible.module_utils.common.warnings import deprecate from ansible.module_utils.parsing.convert_bool import boolean -from ansible.module_utils.six import ( - string_types, -) def count_terms(terms, parameters): @@ -43,7 +41,7 @@ def safe_eval(value, locals=None, include_exceptions=False): version="2.21", ) # do not allow method calls to modules - if not isinstance(value, string_types): + if not isinstance(value, str): # already templated to a datavaluestructure, perhaps? if include_exceptions: return (value, None) @@ -194,7 +192,7 @@ def check_required_by(requirements, parameters, options_context=None): if key not in parameters or parameters[key] is None: continue # Support strings (single-item lists) - if isinstance(value, string_types): + if isinstance(value, str): value = [value] if missing := [required for required in value if required not in parameters or parameters[required] is None]: @@ -373,10 +371,13 @@ def check_type_str(value, allow_conversion=True, param=None, prefix=''): :returns: Original value if it is a string, the value converted to a string if allow_conversion=True, or raises a TypeError if allow_conversion=False. 
""" - if isinstance(value, string_types): + if isinstance(value, str): return value - if allow_conversion and value is not None: + if value is None: + return '' # approximate pre-2.19 templating None->empty str equivalency here for backward compatibility + + if allow_conversion: return to_native(value, errors='surrogate_or_strict') msg = "'{0!r}' is not a string and conversion is not allowed".format(value) @@ -403,7 +404,7 @@ def check_type_list(value): return value # DTFIX-FUTURE: deprecate legacy comma split functionality, eventually replace with `_check_type_list_strict` - if isinstance(value, string_types): + if isinstance(value, str): return value.split(",") elif isinstance(value, int) or isinstance(value, float): return [str(value)] @@ -431,7 +432,7 @@ def check_type_dict(value): if isinstance(value, dict): return value - if isinstance(value, string_types): + if isinstance(value, str): if value.startswith("{"): try: return json.loads(value) @@ -494,7 +495,7 @@ def check_type_bool(value): if isinstance(value, bool): return value - if isinstance(value, string_types) or isinstance(value, (int, float)): + if isinstance(value, str) or isinstance(value, (int, float)): return boolean(value) raise TypeError('%s cannot be converted to a bool' % type(value)) @@ -594,3 +595,7 @@ def check_type_jsonarg(value): return json.dumps(value, cls=_common_json._get_legacy_encoder(), _decode_bytes=True) raise TypeError('%s cannot be converted to a json string' % type(value)) + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "string_types") diff --git a/lib/ansible/module_utils/connection.py b/lib/ansible/module_utils/connection.py index aa81095d93d..f40aceffa5c 100644 --- a/lib/ansible/module_utils/connection.py +++ b/lib/ansible/module_utils/connection.py @@ -36,9 +36,10 @@ import struct import uuid from functools import partial + +from ansible.module_utils._internal import _no_six from ansible.module_utils.common.text.converters import 
to_bytes, to_text from ansible.module_utils.common.json import _get_legacy_encoder -from ansible.module_utils.six import iteritems def write_to_stream(stream, obj): @@ -95,7 +96,7 @@ class ConnectionError(Exception): def __init__(self, message, *args, **kwargs): super(ConnectionError, self).__init__(message) - for k, v in iteritems(kwargs): + for k, v in kwargs.items(): setattr(self, k, v) @@ -149,7 +150,7 @@ class Connection(object): raise ConnectionError( "Unable to decode JSON from response to {0}. Received '{1}'.".format(name, out) ) - params = [repr(arg) for arg in args] + ['{0}={1!r}'.format(k, v) for k, v in iteritems(kwargs)] + params = [repr(arg) for arg in args] + ['{0}={1!r}'.format(k, v) for k, v in kwargs.items()] params = ', '.join(params) raise ConnectionError( "Unable to decode JSON from response to {0}({1}). Received '{2}'.".format(name, params, out) @@ -200,3 +201,7 @@ class Connection(object): sf.close() return to_text(response, errors='surrogate_or_strict') + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "iteritems") diff --git a/lib/ansible/module_utils/facts/hardware/linux.py b/lib/ansible/module_utils/facts/hardware/linux.py index 62fdb896f0f..a28ea9e48da 100644 --- a/lib/ansible/module_utils/facts/hardware/linux.py +++ b/lib/ansible/module_utils/facts/hardware/linux.py @@ -24,13 +24,13 @@ import re import sys import time +from ansible.module_utils._internal import _no_six from ansible.module_utils._internal._concurrent import _futures from ansible.module_utils.common.locale import get_best_parsable_locale from ansible.module_utils.common.text.converters import to_text from ansible.module_utils.common.text.formatters import bytes_to_human from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector from ansible.module_utils.facts.utils import get_file_content, get_file_lines, get_mount_size -from ansible.module_utils.six import iteritems # import this as a module to ensure we 
get the same module instance from ansible.module_utils.facts import timeout @@ -653,7 +653,7 @@ class LinuxHardware(Hardware): retval[target].add(entry) except OSError: continue - return dict((k, list(sorted(v))) for (k, v) in iteritems(retval)) + return dict((k, list(sorted(v))) for (k, v) in retval.items()) except OSError: return {} @@ -665,7 +665,7 @@ class LinuxHardware(Hardware): device = elements[3] target = elements[5] retval[target].add(device) - return dict((k, list(sorted(v))) for (k, v) in iteritems(retval)) + return dict((k, list(sorted(v))) for (k, v) in retval.items()) except OSError: return {} @@ -750,7 +750,7 @@ class LinuxHardware(Hardware): d = {} d['virtual'] = virtual d['links'] = {} - for (link_type, link_values) in iteritems(links): + for (link_type, link_values) in links.items(): d['links'][link_type] = link_values.get(block, []) diskname = os.path.basename(sysdir) for key in ['vendor', 'model', 'sas_address', 'sas_device_handle']: @@ -801,7 +801,7 @@ class LinuxHardware(Hardware): part_sysdir = sysdir + "/" + partname part['links'] = {} - for (link_type, link_values) in iteritems(links): + for (link_type, link_values) in links.items(): part['links'][link_type] = link_values.get(partname, []) part['start'] = get_file_content(part_sysdir + "/start", 0) @@ -890,7 +890,8 @@ class LinuxHardware(Hardware): 'size_g': items[-2], 'free_g': items[-1], 'num_lvs': items[2], - 'num_pvs': items[1] + 'num_pvs': items[1], + 'lvs': {}, } lvs_path = self.module.get_bin_path('lvs') @@ -901,7 +902,18 @@ class LinuxHardware(Hardware): rc, lv_lines, err = self.module.run_command('%s %s' % (lvs_path, lvm_util_options)) for lv_line in lv_lines.splitlines(): items = lv_line.strip().split(',') - lvs[items[0]] = {'size_g': items[3], 'vg': items[1]} + vg_name = items[1] + lv_name = items[0] + # The LV name is only unique per VG, so the top level fact lvs can be misleading. 
+ # TODO: deprecate lvs in favor of vgs + lvs[lv_name] = {'size_g': items[3], 'vg': vg_name} + try: + vgs[vg_name]['lvs'][lv_name] = {'size_g': items[3]} + except KeyError: + self.module.warn( + "An LVM volume group was created while gathering LVM facts, " + "and is not included in ansible_facts['vgs']." + ) pvs_path = self.module.get_bin_path('pvs') # pvs fields: PV VG #Fmt #Attr PSize PFree @@ -925,3 +937,7 @@ class LinuxHardwareCollector(HardwareCollector): _fact_class = LinuxHardware required_facts = set(['platform']) + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "iteritems") diff --git a/lib/ansible/module_utils/facts/hardware/netbsd.py b/lib/ansible/module_utils/facts/hardware/netbsd.py index 69ac583df64..226da7ffd54 100644 --- a/lib/ansible/module_utils/facts/hardware/netbsd.py +++ b/lib/ansible/module_utils/facts/hardware/netbsd.py @@ -19,7 +19,7 @@ import os import re import time -from ansible.module_utils.six.moves import reduce +from functools import reduce from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector from ansible.module_utils.facts.timeout import TimeoutError, timeout diff --git a/lib/ansible/module_utils/facts/hardware/sunos.py b/lib/ansible/module_utils/facts/hardware/sunos.py index 134e59a8c2c..26a26865dd6 100644 --- a/lib/ansible/module_utils/facts/hardware/sunos.py +++ b/lib/ansible/module_utils/facts/hardware/sunos.py @@ -18,12 +18,13 @@ from __future__ import annotations import re import time +from functools import reduce + from ansible.module_utils.common.locale import get_best_parsable_locale from ansible.module_utils.common.text.formatters import bytes_to_human from ansible.module_utils.facts.utils import get_file_content, get_mount_size from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector from ansible.module_utils.facts import timeout -from ansible.module_utils.six.moves import reduce class SunOSHardware(Hardware): diff --git 
a/lib/ansible/module_utils/facts/packages.py b/lib/ansible/module_utils/facts/packages.py index b5b9bcb35ef..fafdf5a67ec 100644 --- a/lib/ansible/module_utils/facts/packages.py +++ b/lib/ansible/module_utils/facts/packages.py @@ -7,7 +7,7 @@ import ansible.module_utils.compat.typing as t from abc import ABCMeta, abstractmethod -from ansible.module_utils.six import with_metaclass +from ansible.module_utils._internal import _no_six from ansible.module_utils.basic import missing_required_lib from ansible.module_utils.common.process import get_bin_path from ansible.module_utils.common.respawn import has_respawned, probe_interpreters_for_module, respawn_module @@ -19,7 +19,7 @@ def get_all_pkg_managers(): return {obj.__name__.lower(): obj for obj in get_all_subclasses(PkgMgr) if obj not in (CLIMgr, LibMgr, RespawningLibMgr)} -class PkgMgr(with_metaclass(ABCMeta, object)): # type: ignore[misc] +class PkgMgr(metaclass=ABCMeta): @abstractmethod def is_available(self, handle_exceptions): @@ -125,3 +125,7 @@ class CLIMgr(PkgMgr): if not handle_exceptions: raise return found + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "with_metaclass") diff --git a/lib/ansible/module_utils/facts/system/env.py b/lib/ansible/module_utils/facts/system/env.py index cf6a22457a9..1327ffd9d37 100644 --- a/lib/ansible/module_utils/facts/system/env.py +++ b/lib/ansible/module_utils/facts/system/env.py @@ -18,8 +18,7 @@ from __future__ import annotations import os import typing as t -from ansible.module_utils.six import iteritems - +from ansible.module_utils._internal import _no_six from ansible.module_utils.facts.collector import BaseFactCollector @@ -31,7 +30,11 @@ class EnvFactCollector(BaseFactCollector): env_facts = {} env_facts['env'] = {} - for k, v in iteritems(os.environ): + for k, v in os.environ.items(): env_facts['env'][k] = v return env_facts + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, 
"iteritems") diff --git a/lib/ansible/module_utils/facts/system/local.py b/lib/ansible/module_utils/facts/system/local.py index 7cf0f144d26..50abe123126 100644 --- a/lib/ansible/module_utils/facts/system/local.py +++ b/lib/ansible/module_utils/facts/system/local.py @@ -3,16 +3,18 @@ from __future__ import annotations +import configparser import glob import json import os import stat import typing as t +from io import StringIO + from ansible.module_utils.common.text.converters import to_text from ansible.module_utils.facts.utils import get_file_content from ansible.module_utils.facts.collector import BaseFactCollector -from ansible.module_utils.six.moves import configparser, StringIO class LocalFactCollector(BaseFactCollector): diff --git a/lib/ansible/module_utils/parsing/convert_bool.py b/lib/ansible/module_utils/parsing/convert_bool.py index b97a6d05780..e6f1405af97 100644 --- a/lib/ansible/module_utils/parsing/convert_bool.py +++ b/lib/ansible/module_utils/parsing/convert_bool.py @@ -5,7 +5,7 @@ from __future__ import annotations import collections.abc as c -from ansible.module_utils.six import binary_type, text_type +from ansible.module_utils._internal import _no_six from ansible.module_utils.common.text.converters import to_text @@ -20,7 +20,7 @@ def boolean(value, strict=True): normalized_value = value - if isinstance(value, (text_type, binary_type)): + if isinstance(value, (str, bytes)): normalized_value = to_text(value, errors='surrogate_or_strict').lower().strip() if not isinstance(value, c.Hashable): @@ -32,3 +32,7 @@ def boolean(value, strict=True): return False raise TypeError("The value '%s' is not a valid boolean. 
Valid booleans include: %s" % (to_text(value), ', '.join(repr(i) for i in BOOLEANS))) + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type") diff --git a/lib/ansible/module_utils/service.py b/lib/ansible/module_utils/service.py index 013ec0435f6..a75668e3f24 100644 --- a/lib/ansible/module_utils/service.py +++ b/lib/ansible/module_utils/service.py @@ -36,7 +36,6 @@ import select import shlex import subprocess -from ansible.module_utils.six import b from ansible.module_utils.common.text.converters import to_bytes, to_text @@ -200,7 +199,7 @@ def daemonize(module, cmd): fds = [p.stdout, p.stderr] # loop reading output till it is done - output = {p.stdout: b(""), p.stderr: b("")} + output = {p.stdout: b"", p.stderr: b""} while fds: rfd, wfd, efd = select.select(fds, [], fds, 1) if (rfd + wfd + efd) or p.poll() is None: @@ -234,7 +233,7 @@ def daemonize(module, cmd): os.waitpid(pid, 0) # Grab response data after child finishes - return_data = b("") + return_data = b"" while True: rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]]) if pipe[0] in rfd: diff --git a/lib/ansible/modules/apt.py b/lib/ansible/modules/apt.py index 53c403133a2..97452f03106 100644 --- a/lib/ansible/modules/apt.py +++ b/lib/ansible/modules/apt.py @@ -383,7 +383,6 @@ from ansible.module_utils.common.file import S_IRWXU_RXG_RXO from ansible.module_utils.common.locale import get_best_parsable_locale from ansible.module_utils.common.respawn import has_respawned, probe_interpreters_for_module, respawn_module from ansible.module_utils.common.text.converters import to_native, to_text -from ansible.module_utils.six import string_types from ansible.module_utils.urls import fetch_file DPKG_OPTIONS = 'force-confdef,force-confold' @@ -633,7 +632,7 @@ def expand_pkgspec_from_fnmatches(m, pkgspec, cache): if pkgspec: for pkgspec_pattern in pkgspec: - if not isinstance(pkgspec_pattern, string_types): + if not isinstance(pkgspec_pattern, str): 
m.fail_json(msg="Invalid type for package name, expected string but got %s" % type(pkgspec_pattern)) pkgname_pattern, version_cmp, version = package_split(pkgspec_pattern) diff --git a/lib/ansible/modules/apt_repository.py b/lib/ansible/modules/apt_repository.py index 2f701d8c429..5be21c2b0c5 100644 --- a/lib/ansible/modules/apt_repository.py +++ b/lib/ansible/modules/apt_repository.py @@ -508,7 +508,7 @@ class UbuntuSourcesList(SourcesList): try: rc, out, err = self.module.run_command([self.gpg_bin, '--list-packets', key_file]) except OSError as ex: - self.debug(f"Could check key against file {key_file!r}: {ex}") + self.module.debug(f"Could check key against file {key_file!r}: {ex}") continue if key_fingerprint in out: diff --git a/lib/ansible/modules/assemble.py b/lib/ansible/modules/assemble.py index b1329496d96..b28b696ea85 100644 --- a/lib/ansible/modules/assemble.py +++ b/lib/ansible/modules/assemble.py @@ -131,7 +131,6 @@ import re import tempfile from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.six import b, indexbytes from ansible.module_utils.common.text.converters import to_native @@ -141,6 +140,7 @@ def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, igno tmp = os.fdopen(tmpfd, 'wb') delimit_me = False add_newline = False + b_linesep = os.linesep.encode() for f in sorted(os.listdir(src_path)): if compiled_regexp and not compiled_regexp.search(f): @@ -153,7 +153,7 @@ def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, igno # always put a newline between fragments if the previous fragment didn't end with a newline. 
if add_newline: - tmp.write(b('\n')) + tmp.write(b_linesep) # delimiters should only appear between fragments if delimit_me: @@ -163,16 +163,12 @@ def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, igno tmp.write(delimiter) # always make sure there's a newline after the # delimiter, so lines don't run together - - # byte indexing differs on Python 2 and 3, - # use indexbytes for compat - # chr(10) == '\n' - if indexbytes(delimiter, -1) != 10: - tmp.write(b('\n')) + if not delimiter.endswith(b_linesep): + tmp.write(b_linesep) tmp.write(fragment_content) delimit_me = True - if fragment_content.endswith(b('\n')): + if fragment_content.endswith(b_linesep): add_newline = False else: add_newline = True diff --git a/lib/ansible/modules/blockinfile.py b/lib/ansible/modules/blockinfile.py index e5240a0cc4f..3394ffe7715 100644 --- a/lib/ansible/modules/blockinfile.py +++ b/lib/ansible/modules/blockinfile.py @@ -102,6 +102,13 @@ options: type: bool default: no version_added: '2.16' + encoding: + description: + - The character set in which the target file is encoded. + - For a list of available built-in encodings, see U(https://docs.python.org/3/library/codecs.html#standard-encodings) + type: str + default: utf-8 + version_added: '2.20' notes: - When using C(with_*) loops be aware that if you do not set a unique mark the block will be overwritten on each iteration. - As of Ansible 2.3, the O(dest) option has been changed to O(path) as default, but O(dest) still works as well. 
@@ -192,15 +199,16 @@ EXAMPLES = r""" import re import os import tempfile -from ansible.module_utils.six import b + from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.common.text.converters import to_bytes, to_native +from ansible.module_utils.common.text.converters import to_native -def write_changes(module, contents, path): +def write_changes(module, contents, path, encoding=None): tmpfd, tmpfile = tempfile.mkstemp(dir=module.tmpdir) - with os.fdopen(tmpfd, 'wb') as tf: + # newline param set to translate newline sequences with system default line separator + with os.fdopen(tmpfd, 'w', encoding=encoding, newline=None) as tf: tf.write(contents) validate = module.params.get('validate', None) @@ -246,6 +254,7 @@ def main(): marker_end=dict(type='str', default='END'), append_newline=dict(type='bool', default=False), prepend_newline=dict(type='bool', default=False), + encoding=dict(type='str', default='utf-8'), ), mutually_exclusive=[['insertbefore', 'insertafter']], add_file_common_args=True, @@ -254,6 +263,8 @@ def main(): params = module.params path = params['path'] + encoding = module.params.get('encoding', None) + if os.path.isdir(path): module.fail_json(rc=256, msg='Path %s is a directory !' 
% path) @@ -274,7 +285,8 @@ def main(): original = None lines = [] else: - with open(path, 'rb') as f: + # newline param set to preserve newline sequences read from file + with open(path, 'r', encoding=encoding, newline='') as f: original = f.read() lines = original.splitlines(True) @@ -288,10 +300,12 @@ def main(): insertbefore = params['insertbefore'] insertafter = params['insertafter'] - block = to_bytes(params['block']) - marker = to_bytes(params['marker']) + block = params['block'] + marker = params['marker'] present = params['state'] == 'present' - blank_line = [b(os.linesep)] + + line_separator = os.linesep + blank_line = [line_separator] if not present and not path_exists: module.exit_json(changed=False, msg="File %s not present" % path) @@ -300,17 +314,19 @@ def main(): insertafter = 'EOF' if insertafter not in (None, 'EOF'): - insertre = re.compile(to_bytes(insertafter, errors='surrogate_or_strict')) + insertre = re.compile(insertafter) elif insertbefore not in (None, 'BOF'): - insertre = re.compile(to_bytes(insertbefore, errors='surrogate_or_strict')) + insertre = re.compile(insertbefore) else: insertre = None - marker0 = re.sub(b(r'{mark}'), b(params['marker_begin']), marker) + b(os.linesep) - marker1 = re.sub(b(r'{mark}'), b(params['marker_end']), marker) + b(os.linesep) + marker0 = re.sub(r'{mark}', params['marker_begin'], marker) + os.linesep + marker1 = re.sub(r'{mark}', params['marker_end'], marker) + os.linesep + if present and block: - if not block.endswith(b(os.linesep)): - block += b(os.linesep) + if not block.endswith(os.linesep): + block += os.linesep + blocklines = [marker0] + block.splitlines(True) + [marker1] else: blocklines = [] @@ -329,9 +345,9 @@ def main(): match = insertre.search(original) if match: if insertafter: - n0 = to_native(original).count('\n', 0, match.end()) + n0 = original.count('\n', 0, match.end()) elif insertbefore: - n0 = to_native(original).count('\n', 0, match.start()) + n0 = original.count('\n', 0, match.start()) 
else: for i, line in enumerate(lines): if insertre.search(line): @@ -352,15 +368,15 @@ def main(): # Ensure there is a line separator before the block of lines to be inserted if n0 > 0: - if not lines[n0 - 1].endswith(b(os.linesep)): - lines[n0 - 1] += b(os.linesep) + if not lines[n0 - 1].endswith(os.linesep): + lines[n0 - 1] += os.linesep # Before the block: check if we need to prepend a blank line # If yes, we need to add the blank line if we are not at the beginning of the file # and the previous line is not a blank line # In both cases, we need to shift by one on the right the inserting position of the block if params['prepend_newline'] and present: - if n0 != 0 and lines[n0 - 1] != b(os.linesep): + if n0 != 0 and lines[n0 - 1] != os.linesep: lines[n0:n0] = blank_line n0 += 1 @@ -372,13 +388,13 @@ def main(): # and the line right after is not a blank line if params['append_newline'] and present: line_after_block = n0 + len(blocklines) - if line_after_block < len(lines) and lines[line_after_block] != b(os.linesep): + if line_after_block < len(lines) and lines[line_after_block] != os.linesep: lines[line_after_block:line_after_block] = blank_line if lines: - result = b''.join(lines) + result = ''.join(lines) else: - result = b'' + result = '' if module._diff: diff['after'] = result @@ -402,7 +418,7 @@ def main(): backup_file = module.backup_local(path) # We should always follow symlinks so that we change the real file real_path = os.path.realpath(params['path']) - write_changes(module, result, real_path) + write_changes(module, result, real_path, encoding) if module.check_mode and not path_exists: module.exit_json(changed=changed, msg=msg, diff=diff) diff --git a/lib/ansible/modules/cron.py b/lib/ansible/modules/cron.py index e64be5d7b9f..325aa524beb 100644 --- a/lib/ansible/modules/cron.py +++ b/lib/ansible/modules/cron.py @@ -219,20 +219,20 @@ import os import platform import pwd import re +import shlex import sys import tempfile from ansible.module_utils.basic 
import AnsibleModule from ansible.module_utils.common.file import S_IRWU_RWG_RWO from ansible.module_utils.common.text.converters import to_bytes, to_native -from ansible.module_utils.six.moves import shlex_quote class CronTabError(Exception): pass -class CronTab(object): +class CronTab: """ CronTab object to write time based crontab file @@ -243,8 +243,8 @@ class CronTab(object): def __init__(self, module, user=None, cron_file=None): self.module = module self.user = user - self.root = (os.getuid() == 0) - self.lines = None + self.root = os.getuid() == 0 + self.lines = [] self.ansible = "#Ansible: " self.n_existing = '' self.cron_cmd = self.module.get_bin_path('crontab', required=True) @@ -264,7 +264,6 @@ class CronTab(object): def read(self): # Read in the crontab from the system - self.lines = [] if self.cron_file: # read the cronfile try: @@ -280,7 +279,7 @@ class CronTab(object): # FIXME: using safely quoted shell for now, but this really should be two non-shell calls instead. (rc, out, err) = self.module.run_command(self._read_user_execute(), use_unsafe_shell=True) - if rc != 0 and rc != 1: # 1 can mean that there are no jobs. + if rc not in (0, 1): # 1 can mean that there are no jobs. 
raise CronTabError("Unable to read crontab") self.n_existing = out @@ -300,11 +299,10 @@ class CronTab(object): def is_empty(self): if len(self.lines) == 0: return True - else: - for line in self.lines: - if line.strip(): - return False - return True + for line in self.lines: + if line.strip(): + return False + return True def write(self, backup_file=None): """ @@ -451,13 +449,10 @@ class CronTab(object): if special: if self.cron_file: return "%s@%s %s %s" % (disable_prefix, special, self.user, job) - else: - return "%s@%s %s" % (disable_prefix, special, job) - else: - if self.cron_file: - return "%s%s %s %s %s %s %s %s" % (disable_prefix, minute, hour, day, month, weekday, self.user, job) - else: - return "%s%s %s %s %s %s %s" % (disable_prefix, minute, hour, day, month, weekday, job) + return "%s@%s %s" % (disable_prefix, special, job) + if self.cron_file: + return "%s%s %s %s %s %s %s %s" % (disable_prefix, minute, hour, day, month, weekday, self.user, job) + return "%s%s %s %s %s %s %s" % (disable_prefix, minute, hour, day, month, weekday, job) def get_jobnames(self): jobnames = [] @@ -495,8 +490,7 @@ class CronTab(object): if len(newlines) == 0: return True - else: - return False # TODO add some more error testing + return False # TODO add some more error testing def _update_env(self, name, decl, addenvfunction): newlines = [] @@ -529,13 +523,13 @@ class CronTab(object): user = '' if self.user: if platform.system() == 'SunOS': - return "su %s -c '%s -l'" % (shlex_quote(self.user), shlex_quote(self.cron_cmd)) - elif platform.system() == 'AIX': - return "%s -l %s" % (shlex_quote(self.cron_cmd), shlex_quote(self.user)) - elif platform.system() == 'HP-UX': - return "%s %s %s" % (self.cron_cmd, '-l', shlex_quote(self.user)) - elif pwd.getpwuid(os.getuid())[0] != self.user: - user = '-u %s' % shlex_quote(self.user) + return "su %s -c '%s -l'" % (shlex.quote(self.user), shlex.quote(self.cron_cmd)) + if platform.system() == 'AIX': + return "%s -l %s" % 
(shlex.quote(self.cron_cmd), shlex.quote(self.user)) + if platform.system() == 'HP-UX': + return "%s %s %s" % (self.cron_cmd, '-l', shlex.quote(self.user)) + if pwd.getpwuid(os.getuid())[0] != self.user: + user = '-u %s' % shlex.quote(self.user) return "%s %s %s" % (self.cron_cmd, user, '-l') def _write_execute(self, path): @@ -546,10 +540,10 @@ class CronTab(object): if self.user: if platform.system() in ['SunOS', 'HP-UX', 'AIX']: return "chown %s %s ; su '%s' -c '%s %s'" % ( - shlex_quote(self.user), shlex_quote(path), shlex_quote(self.user), self.cron_cmd, shlex_quote(path)) - elif pwd.getpwuid(os.getuid())[0] != self.user: - user = '-u %s' % shlex_quote(self.user) - return "%s %s %s" % (self.cron_cmd, user, shlex_quote(path)) + shlex.quote(self.user), shlex.quote(path), shlex.quote(self.user), self.cron_cmd, shlex.quote(path)) + if pwd.getpwuid(os.getuid())[0] != self.user: + user = '-u %s' % shlex.quote(self.user) + return "%s %s %s" % (self.cron_cmd, user, shlex.quote(path)) def main(): @@ -668,7 +662,7 @@ def main(): # if requested make a backup before making a change if backup and not module.check_mode: - (backuph, backup_file) = tempfile.mkstemp(prefix='crontab') + (dummy, backup_file) = tempfile.mkstemp(prefix='crontab') crontab.write(backup_file) if env: @@ -763,9 +757,6 @@ def main(): module.exit_json(**res_args) - # --- should never get here - module.exit_json(msg="Unable to execute cron task.") - if __name__ == '__main__': main() diff --git a/lib/ansible/modules/deb822_repository.py b/lib/ansible/modules/deb822_repository.py index 00278fb0342..68b08d41a4e 100644 --- a/lib/ansible/modules/deb822_repository.py +++ b/lib/ansible/modules/deb822_repository.py @@ -250,7 +250,6 @@ from ansible.module_utils.common.file import S_IRWXU_RXG_RXO, S_IRWU_RG_RO from ansible.module_utils.common.respawn import has_respawned, probe_interpreters_for_module, respawn_module from ansible.module_utils.common.text.converters import to_bytes from 
ansible.module_utils.common.text.converters import to_native -from ansible.module_utils.six import raise_from # type: ignore[attr-defined] from ansible.module_utils.urls import generic_urlparse from ansible.module_utils.urls import open_url from ansible.module_utils.urls import get_user_agent @@ -339,7 +338,7 @@ def write_signed_by_key(module, v, slug): try: r = open_url(v, http_agent=get_user_agent()) except Exception as exc: - raise_from(RuntimeError(to_native(exc)), exc) + raise RuntimeError('Could not fetch signed_by key.') from exc else: b_data = r.read() else: @@ -587,14 +586,9 @@ def main(): elif is_sequence(value): value = format_list(value) elif key == 'signed_by': - try: - key_changed, signed_by_filename, signed_by_data = write_signed_by_key(module, value, slug) - value = signed_by_filename or signed_by_data - changed |= key_changed - except RuntimeError as exc: - module.fail_json( - msg='Could not fetch signed_by key: %s' % to_native(exc) - ) + key_changed, signed_by_filename, signed_by_data = write_signed_by_key(module, value, slug) + value = signed_by_filename or signed_by_data + changed |= key_changed if value.count('\n') > 0: value = format_multiline(value) diff --git a/lib/ansible/modules/dnf.py b/lib/ansible/modules/dnf.py index 9d14a90dc15..3c07ffd52af 100644 --- a/lib/ansible/modules/dnf.py +++ b/lib/ansible/modules/dnf.py @@ -537,6 +537,9 @@ class DnfModule(YumDnf): conf.sslverify = sslverify # Set installroot + if not os.path.isdir(installroot): + self.module.fail_json(msg=f"Installroot {installroot} must be a directory") + conf.installroot = installroot # Load substitutions from the filesystem diff --git a/lib/ansible/modules/dnf5.py b/lib/ansible/modules/dnf5.py index dc2d0eb5e13..b469c975882 100644 --- a/lib/ansible/modules/dnf5.py +++ b/lib/ansible/modules/dnf5.py @@ -595,6 +595,10 @@ class Dnf5Module(YumDnf): conf.localpkg_gpgcheck = not self.disable_gpg_check conf.sslverify = self.sslverify conf.clean_requirements_on_remove = 
self.autoremove + + if not os.path.isdir(self.installroot): + self.module.fail_json(msg=f"Installroot {self.installroot} must be a directory") + conf.installroot = self.installroot conf.use_host_config = True # needed for installroot conf.cacheonly = "all" if self.cacheonly else "none" diff --git a/lib/ansible/modules/find.py b/lib/ansible/modules/find.py index b30f6c71697..970eb0cf929 100644 --- a/lib/ansible/modules/find.py +++ b/lib/ansible/modules/find.py @@ -291,7 +291,6 @@ import time from ansible.module_utils.common.text.converters import to_text, to_native from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.six import string_types class _Object: @@ -496,7 +495,7 @@ def main(): params = module.params - if params['mode'] and not isinstance(params['mode'], string_types): + if params['mode'] and not isinstance(params['mode'], str): module.fail_json( msg="argument 'mode' is not a string and conversion is not allowed, value is of type %s" % params['mode'].__class__.__name__ ) diff --git a/lib/ansible/modules/get_url.py b/lib/ansible/modules/get_url.py index f25743b9a41..b679d08d9c4 100644 --- a/lib/ansible/modules/get_url.py +++ b/lib/ansible/modules/get_url.py @@ -374,9 +374,9 @@ import shutil import tempfile from datetime import datetime, timezone +from urllib.parse import urlsplit from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.six.moves.urllib.parse import urlsplit from ansible.module_utils.common.text.converters import to_native from ansible.module_utils.urls import fetch_url, url_argument_spec diff --git a/lib/ansible/modules/git.py b/lib/ansible/modules/git.py index f4c1100fda1..4564277cec5 100644 --- a/lib/ansible/modules/git.py +++ b/lib/ansible/modules/git.py @@ -343,7 +343,6 @@ from ansible.module_utils.common.text.converters import to_native, to_text from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.common.locale import get_best_parsable_locale from 
ansible.module_utils.common.process import get_bin_path -from ansible.module_utils.six import b, string_types def relocate_repo(module, result, repo_dir, old_repo_dir, worktree_dir): @@ -443,12 +442,12 @@ def write_ssh_wrapper(module): fd, wrapper_path = tempfile.mkstemp() # use existing git_ssh/ssh_command, fallback to 'ssh' - template = b("""#!/bin/sh + template = """#!/bin/sh %s $GIT_SSH_OPTS "$@" -""" % os.environ.get('GIT_SSH', os.environ.get('GIT_SSH_COMMAND', 'ssh'))) +""" % os.environ.get('GIT_SSH', os.environ.get('GIT_SSH_COMMAND', 'ssh')) # write it - with os.fdopen(fd, 'w+b') as fh: + with os.fdopen(fd, 'w') as fh: fh.write(template) # set execute @@ -1257,7 +1256,7 @@ def main(): # evaluate and set the umask before doing anything else if umask is not None: - if not isinstance(umask, string_types): + if not isinstance(umask, str): module.fail_json(msg="umask must be defined as a quoted octal integer") try: umask = int(umask, 8) diff --git a/lib/ansible/modules/include_vars.py b/lib/ansible/modules/include_vars.py index b2e3c44e386..3c3f1cb7082 100644 --- a/lib/ansible/modules/include_vars.py +++ b/lib/ansible/modules/include_vars.py @@ -66,7 +66,7 @@ options: description: - Ignore unknown file extensions within the directory. - This allows users to specify a directory containing vars files that are intermingled with non-vars files extension types - (e.g. a directory with a README in it and vars files). + (for example, a directory with a README in it and vars files). type: bool default: no version_added: "2.7" diff --git a/lib/ansible/modules/lineinfile.py b/lib/ansible/modules/lineinfile.py index 0ef882f4840..9b820096d3d 100644 --- a/lib/ansible/modules/lineinfile.py +++ b/lib/ansible/modules/lineinfile.py @@ -123,6 +123,13 @@ options: type: bool default: no version_added: "2.5" + encoding: + description: + - The character set in which the target file is encoded. 
+ - For a list of available built-in encodings, see U(https://docs.python.org/3/library/codecs.html#standard-encodings) + type: str + default: utf-8 + version_added: "2.20" extends_documentation_fragment: - action_common_attributes - action_common_attributes.files @@ -250,11 +257,11 @@ from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text -def write_changes(module, b_lines, dest): +def write_changes(module, lines, dest, encoding=None): tmpfd, tmpfile = tempfile.mkstemp(dir=module.tmpdir) - with os.fdopen(tmpfd, 'wb') as f: - f.writelines(b_lines) + with os.fdopen(tmpfd, 'w', encoding=encoding) as f: + f.writelines(lines) validate = module.params.get('validate', None) valid = not validate @@ -293,6 +300,7 @@ def present(module, dest, regexp, search_string, line, insertafter, insertbefore 'before_header': '%s (content)' % dest, 'after_header': '%s (content)' % dest} + encoding = module.params.get('encoding', None) b_dest = to_bytes(dest, errors='surrogate_or_strict') if not os.path.exists(b_dest): if not create: @@ -304,30 +312,29 @@ def present(module, dest, regexp, search_string, line, insertafter, insertbefore except Exception as e: module.fail_json(msg='Error creating %s (%s)' % (to_text(b_destpath), to_text(e))) - b_lines = [] + lines = [] else: - with open(b_dest, 'rb') as f: - b_lines = f.readlines() + with open(b_dest, 'r', encoding=encoding) as f: + lines = f.readlines() if module._diff: - diff['before'] = to_native(b''.join(b_lines)) + diff['before'] = ''.join(lines) if regexp is not None: - bre_m = re.compile(to_bytes(regexp, errors='surrogate_or_strict')) + re_m = re.compile(regexp) if insertafter not in (None, 'BOF', 'EOF'): - bre_ins = re.compile(to_bytes(insertafter, errors='surrogate_or_strict')) + re_ins = re.compile(insertafter) elif insertbefore not in (None, 'BOF'): - bre_ins = re.compile(to_bytes(insertbefore, errors='surrogate_or_strict')) + re_ins = 
re.compile(insertbefore) else: - bre_ins = None + re_ins = None # index[0] is the line num where regexp has been found # index[1] is the line num where insertafter/insertbefore has been found index = [-1, -1] match = None exact_line_match = False - b_line = to_bytes(line, errors='surrogate_or_strict') # The module's doc says # "If regular expressions are passed to both regexp and @@ -339,8 +346,8 @@ def present(module, dest, regexp, search_string, line, insertafter, insertbefore # Given the above: # 1. First check that there is no match for regexp: if regexp is not None: - for lineno, b_cur_line in enumerate(b_lines): - match_found = bre_m.search(b_cur_line) + for lineno, cur_line in enumerate(lines): + match_found = re_m.search(cur_line) if match_found: index[0] = lineno match = match_found @@ -349,8 +356,8 @@ def present(module, dest, regexp, search_string, line, insertafter, insertbefore # 2. Second check that there is no match for search_string: if search_string is not None: - for lineno, b_cur_line in enumerate(b_lines): - match_found = to_bytes(search_string, errors='surrogate_or_strict') in b_cur_line + for lineno, cur_line in enumerate(lines): + match_found = search_string in cur_line if match_found: index[0] = lineno match = match_found @@ -360,12 +367,12 @@ def present(module, dest, regexp, search_string, line, insertafter, insertbefore # 3. 
When no match found on the previous step, # parse for searching insertafter/insertbefore: if not match: - for lineno, b_cur_line in enumerate(b_lines): - if b_line == b_cur_line.rstrip(b'\r\n'): + for lineno, cur_line in enumerate(lines): + if line == cur_line.rstrip('\r\n'): index[0] = lineno exact_line_match = True - elif bre_ins is not None and bre_ins.search(b_cur_line): + elif re_ins is not None and re_ins.search(cur_line): if insertafter: # + 1 for the next line index[1] = lineno + 1 @@ -380,17 +387,17 @@ def present(module, dest, regexp, search_string, line, insertafter, insertbefore msg = '' changed = False - b_linesep = to_bytes(os.linesep, errors='surrogate_or_strict') + linesep = os.linesep # Exact line or Regexp matched a line in the file if index[0] != -1: if backrefs and match: - b_new_line = match.expand(b_line) + new_line = match.expand(line) else: # Don't do backref expansion if not asked. - b_new_line = b_line + new_line = line - if not b_new_line.endswith(b_linesep): - b_new_line += b_linesep + if not new_line.endswith(linesep): + new_line += linesep # If no regexp or search_string was given and no line match is found anywhere in the file, # insert the line appropriately if using insertbefore or insertafter @@ -400,18 +407,18 @@ def present(module, dest, regexp, search_string, line, insertafter, insertbefore if insertafter and insertafter != 'EOF': # Ensure there is a line separator after the found string # at the end of the file. - if b_lines and not b_lines[-1][-1:] in (b'\n', b'\r'): - b_lines[-1] = b_lines[-1] + b_linesep + if lines and not lines[-1][-1:] in ('\n', '\r'): + lines[-1] = lines[-1] + linesep # If the line to insert after is at the end of the file # use the appropriate index value. 
- if len(b_lines) == index[1]: - if b_lines[index[1] - 1].rstrip(b'\r\n') != b_line: - b_lines.append(b_line + b_linesep) + if len(lines) == index[1]: + if lines[index[1] - 1].rstrip('\r\n') != line: + lines.append(line + linesep) msg = 'line added' changed = True - elif b_lines[index[1]].rstrip(b'\r\n') != b_line: - b_lines.insert(index[1], b_line + b_linesep) + elif lines[index[1]].rstrip('\r\n') != line: + lines.insert(index[1], line + linesep) msg = 'line added' changed = True @@ -419,18 +426,18 @@ def present(module, dest, regexp, search_string, line, insertafter, insertbefore # If the line to insert before is at the beginning of the file # use the appropriate index value. if index[1] <= 0: - if b_lines[index[1]].rstrip(b'\r\n') != b_line: - b_lines.insert(index[1], b_line + b_linesep) + if lines[index[1]].rstrip('\r\n') != line: + lines.insert(index[1], line + linesep) msg = 'line added' changed = True - elif b_lines[index[1] - 1].rstrip(b'\r\n') != b_line: - b_lines.insert(index[1], b_line + b_linesep) + elif lines[index[1] - 1].rstrip('\r\n') != line: + lines.insert(index[1], line + linesep) msg = 'line added' changed = True - elif b_lines[index[0]] != b_new_line: - b_lines[index[0]] = b_new_line + elif lines[index[0]] != new_line: + lines[index[0]] = new_line msg = 'line replaced' changed = True @@ -440,7 +447,7 @@ def present(module, dest, regexp, search_string, line, insertafter, insertbefore pass # Add it to the beginning of the file elif insertbefore == 'BOF' or insertafter == 'BOF': - b_lines.insert(0, b_line + b_linesep) + lines.insert(0, line + linesep) msg = 'line added' changed = True # Add it to the end of the file if requested or @@ -449,10 +456,10 @@ def present(module, dest, regexp, search_string, line, insertafter, insertbefore elif insertafter == 'EOF' or index[1] == -1: # If the file is not empty then ensure there's a newline before the added line - if b_lines and not b_lines[-1][-1:] in (b'\n', b'\r'): - b_lines.append(b_linesep) + if 
lines and not lines[-1][-1:] in ('\n', '\r'): + lines.append(linesep) - b_lines.append(b_line + b_linesep) + lines.append(line + linesep) msg = 'line added' changed = True @@ -460,30 +467,30 @@ def present(module, dest, regexp, search_string, line, insertafter, insertbefore # Don't insert the line if it already matches at the index. # If the line to insert after is at the end of the file use the appropriate index value. - if len(b_lines) == index[1]: - if b_lines[index[1] - 1].rstrip(b'\r\n') != b_line: - b_lines.append(b_line + b_linesep) + if len(lines) == index[1]: + if lines[index[1] - 1].rstrip('\r\n') != line: + lines.append(line + linesep) msg = 'line added' changed = True - elif b_line != b_lines[index[1]].rstrip(b'\n\r'): - b_lines.insert(index[1], b_line + b_linesep) + elif line != lines[index[1]].rstrip('\n\r'): + lines.insert(index[1], line + linesep) msg = 'line added' changed = True # insert matched, but not the regexp or search_string else: - b_lines.insert(index[1], b_line + b_linesep) + lines.insert(index[1], line + linesep) msg = 'line added' changed = True if module._diff: - diff['after'] = to_native(b''.join(b_lines)) + diff['after'] = ''.join(lines) backupdest = "" if changed and not module.check_mode: if backup and os.path.exists(b_dest): backupdest = module.backup_local(dest) - write_changes(module, b_lines, dest) + write_changes(module, lines, dest, encoding) if module.check_mode and not os.path.exists(b_dest): module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=diff) @@ -510,40 +517,40 @@ def absent(module, dest, regexp, search_string, line, backup): 'before_header': '%s (content)' % dest, 'after_header': '%s (content)' % dest} - with open(b_dest, 'rb') as f: - b_lines = f.readlines() + encoding = module.params['encoding'] + + with open(b_dest, 'r', encoding=encoding) as f: + lines = f.readlines() if module._diff: - diff['before'] = to_native(b''.join(b_lines)) + diff['before'] = ''.join(lines) if regexp is not None: - bre_c 
= re.compile(to_bytes(regexp, errors='surrogate_or_strict')) + re_c = re.compile(regexp) found = [] - b_line = to_bytes(line, errors='surrogate_or_strict') - - def matcher(b_cur_line): + def matcher(cur_line): if regexp is not None: - match_found = bre_c.search(b_cur_line) + match_found = re_c.search(cur_line) elif search_string is not None: - match_found = to_bytes(search_string, errors='surrogate_or_strict') in b_cur_line + match_found = search_string in cur_line else: - match_found = b_line == b_cur_line.rstrip(b'\r\n') + match_found = line == cur_line.rstrip('\r\n') if match_found: - found.append(b_cur_line) + found.append(cur_line) return not match_found - b_lines = [l for l in b_lines if matcher(l)] + lines = [l for l in lines if matcher(l)] changed = len(found) > 0 if module._diff: - diff['after'] = to_native(b''.join(b_lines)) + diff['after'] = ''.join(lines) backupdest = "" if changed and not module.check_mode: if backup: backupdest = module.backup_local(dest) - write_changes(module, b_lines, dest) + write_changes(module, lines, dest, encoding) if changed: msg = "%s line(s) removed" % len(found) @@ -567,6 +574,7 @@ def main(): regexp=dict(type='str', aliases=['regex']), search_string=dict(type='str'), line=dict(type='str', aliases=['value']), + encoding=dict(type='str', default='utf-8'), insertafter=dict(type='str'), insertbefore=dict(type='str'), backrefs=dict(type='bool', default=False), diff --git a/lib/ansible/modules/package_facts.py b/lib/ansible/modules/package_facts.py index 595d3f58465..c6dc5b0b98a 100644 --- a/lib/ansible/modules/package_facts.py +++ b/lib/ansible/modules/package_facts.py @@ -18,7 +18,7 @@ options: This is a list and can support multiple package managers per system, since version 2.8. - The V(portage) and V(pkg) options were added in version 2.8. - The V(apk) option was added in version 2.11. - - The V(pkg_info)' option was added in version 2.13. + - The V(pkg_info) option was added in version 2.13. 
- Aliases were added in 2.18, to support using C(manager={{ansible_facts['pkg_mgr']}}) default: ['auto'] choices: diff --git a/lib/ansible/modules/service.py b/lib/ansible/modules/service.py index 438aeb0e1a4..f430903dffe 100644 --- a/lib/ansible/modules/service.py +++ b/lib/ansible/modules/service.py @@ -180,7 +180,6 @@ from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.common.locale import get_best_parsable_locale from ansible.module_utils.common.sys_info import get_platform_subclass from ansible.module_utils.service import fail_if_missing, is_systemd_managed -from ansible.module_utils.six import b class Service(object): @@ -292,8 +291,8 @@ class Service(object): # chkconfig localizes messages and we're screen scraping so make # sure we use the C locale p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=lang_env, preexec_fn=lambda: os.close(pipe[1])) - stdout = b("") - stderr = b("") + stdout = b"" + stderr = b"" fds = [p.stdout, p.stderr] # Wait for all output, or until the main process is dead and its output is done. while fds: @@ -322,7 +321,7 @@ class Service(object): os.close(pipe[1]) os.waitpid(pid, 0) # Wait for data from daemon process and process it. 
- data = b("") + data = b"" while True: rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]]) if pipe[0] in rfd: diff --git a/lib/ansible/modules/uri.py b/lib/ansible/modules/uri.py index e19450b358d..ceb6bcae764 100644 --- a/lib/ansible/modules/uri.py +++ b/lib/ansible/modules/uri.py @@ -438,13 +438,12 @@ import os import re import shutil import tempfile +from collections.abc import Mapping, Sequence from datetime import datetime, timezone +from urllib.parse import urlencode, urljoin from ansible.module_utils.basic import AnsibleModule, sanitize_keys -from ansible.module_utils.six import binary_type, iteritems, string_types -from ansible.module_utils.six.moves.urllib.parse import urlencode, urljoin from ansible.module_utils.common.text.converters import to_native, to_text -from ansible.module_utils.six.moves.collections_abc import Mapping, Sequence from ansible.module_utils.urls import ( fetch_url, get_response_filename, @@ -479,7 +478,7 @@ def write_file(module, dest, content, resp): try: fd, tmpsrc = tempfile.mkstemp(dir=module.tmpdir) with os.fdopen(fd, 'wb') as f: - if isinstance(content, binary_type): + if isinstance(content, bytes): f.write(content) else: shutil.copyfileobj(content, f) @@ -521,14 +520,14 @@ def kv_list(data): def form_urlencoded(body): """ Convert data into a form-urlencoded string """ - if isinstance(body, string_types): + if isinstance(body, str): return body if isinstance(body, (Mapping, Sequence)): result = [] # Turn a list of lists into a list of tuples that urlencode accepts for key, values in kv_list(body): - if isinstance(values, string_types) or not isinstance(values, (Mapping, Sequence)): + if isinstance(values, str) or not isinstance(values, (Mapping, Sequence)): values = [values] for value in values: if value is not None: @@ -641,12 +640,12 @@ def main(): if body_format == 'json': # Encode the body unless its a string, then assume it is pre-formatted JSON - if not isinstance(body, string_types): + if not isinstance(body, str): 
body = json.dumps(body) if 'content-type' not in [header.lower() for header in dict_headers]: dict_headers['Content-Type'] = 'application/json' elif body_format == 'form-urlencoded': - if not isinstance(body, string_types): + if not isinstance(body, str): try: body = form_urlencoded(body) except ValueError as e: @@ -747,7 +746,7 @@ def main(): # In python3, the headers are title cased. Lowercase them to be # compatible with the python2 behaviour. uresp = {} - for key, value in iteritems(resp): + for key, value in resp.items(): ukey = key.replace("-", "_").lower() uresp[ukey] = value @@ -755,7 +754,7 @@ def main(): uresp['location'] = urljoin(url, uresp['location']) # Default content_encoding to try - if isinstance(content, binary_type): + if isinstance(content, bytes): u_content = to_text(content, encoding=content_encoding) if maybe_json: try: diff --git a/lib/ansible/parsing/dataloader.py b/lib/ansible/parsing/dataloader.py index d8ce2a95ef9..22deaa606cd 100644 --- a/lib/ansible/parsing/dataloader.py +++ b/lib/ansible/parsing/dataloader.py @@ -19,7 +19,6 @@ from ansible._internal._errors import _error_utils from ansible.module_utils.basic import is_executable from ansible._internal._datatag._tags import Origin, TrustedAsTemplate, SourceWasEncrypted from ansible.module_utils._internal._datatag import AnsibleTagHelper -from ansible.module_utils.six import binary_type, text_type from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text from ansible.parsing.quoting import unquote from ansible.parsing.utils.yaml import from_yaml @@ -32,7 +31,7 @@ display = Display() # Tries to determine if a path is inside a role, last dir must be 'tasks' # this is not perfect but people should really avoid 'tasks' dirs outside roles when using Ansible. 
-RE_TASKS = re.compile(u'(?:^|%s)+tasks%s?$' % (os.path.sep, os.path.sep)) +RE_TASKS = re.compile('(?:^|%s)+tasks%s?$' % (os.path.sep, os.path.sep)) class DataLoader: @@ -54,23 +53,22 @@ class DataLoader: ds = dl.load_from_file('/path/to/file') """ - def __init__(self): + def __init__(self) -> None: - self._basedir = '.' + self._basedir: str = '.' # NOTE: not effective with forks as the main copy does not get updated. # avoids rereading files - self._FILE_CACHE = dict() + self._FILE_CACHE: dict[str, object] = {} # NOTE: not thread safe, also issues with forks not returning data to main proc # so they need to be cleaned independently. See WorkerProcess for example. # used to keep track of temp files for cleaning - self._tempfiles = set() + self._tempfiles: set[str] = set() # initialize the vault stuff with an empty password # TODO: replace with a ref to something that can get the password # a creds/auth provider - self._vaults = {} self._vault = VaultLib() self.set_vault_secrets(None) @@ -230,23 +228,19 @@ class DataLoader: def set_basedir(self, basedir: str) -> None: """ sets the base directory, used to find files when a relative path is given """ - - if basedir is not None: - self._basedir = to_text(basedir) + self._basedir = basedir def path_dwim(self, given: str) -> str: """ make relative paths work like folks expect. 
""" - given = to_text(given, errors='surrogate_or_strict') given = unquote(given) - if given.startswith(to_text(os.path.sep)) or given.startswith(u'~'): + if given.startswith(os.path.sep) or given.startswith('~'): path = given else: - basedir = to_text(self._basedir, errors='surrogate_or_strict') - path = os.path.join(basedir, given) + path = os.path.join(self._basedir, given) return unfrackpath(path, follow=False) @@ -294,10 +288,9 @@ class DataLoader: """ search = [] - source = to_text(source, errors='surrogate_or_strict') # I have full path, nothing else needs to be looked at - if source.startswith(to_text(os.path.sep)) or source.startswith(u'~'): + if source.startswith(os.path.sep) or source.startswith('~'): search.append(unfrackpath(source, follow=False)) else: # base role/play path + templates/files/vars + relative filename @@ -364,7 +357,7 @@ class DataLoader: if os.path.exists(to_bytes(test_path, errors='surrogate_or_strict')): result = test_path else: - display.debug(u'evaluation_path:\n\t%s' % '\n\t'.join(paths)) + display.debug('evaluation_path:\n\t%s' % '\n\t'.join(paths)) for path in paths: upath = unfrackpath(path, follow=False) b_upath = to_bytes(upath, errors='surrogate_or_strict') @@ -385,9 +378,9 @@ class DataLoader: search.append(os.path.join(to_bytes(self.get_basedir(), errors='surrogate_or_strict'), b_dirname, b_source)) search.append(os.path.join(to_bytes(self.get_basedir(), errors='surrogate_or_strict'), b_source)) - display.debug(u'search_path:\n\t%s' % to_text(b'\n\t'.join(search))) + display.debug('search_path:\n\t%s' % to_text(b'\n\t'.join(search))) for b_candidate in search: - display.vvvvv(u'looking for "%s" at "%s"' % (source, to_text(b_candidate))) + display.vvvvv('looking for "%s" at "%s"' % (source, to_text(b_candidate))) if os.path.exists(b_candidate): result = to_text(b_candidate) break @@ -418,11 +411,10 @@ class DataLoader: Temporary files are cleanup in the destructor """ - if not file_path or not isinstance(file_path, 
(binary_type, text_type)): + if not file_path or not isinstance(file_path, (bytes, str)): raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path)) - b_file_path = to_bytes(file_path, errors='surrogate_or_strict') - if not self.path_exists(b_file_path) or not self.is_file(b_file_path): + if not self.path_exists(file_path) or not self.is_file(file_path): raise AnsibleFileNotFound(file_name=file_path) real_path = self.path_dwim(file_path) @@ -480,7 +472,7 @@ class DataLoader: """ b_path = to_bytes(os.path.join(path, name)) - found = [] + found: list[str] = [] if extensions is None: # Look for file with no extension first to find dir before file @@ -489,27 +481,29 @@ class DataLoader: for ext in extensions: if '.' in ext: - full_path = b_path + to_bytes(ext) + b_full_path = b_path + to_bytes(ext) elif ext: - full_path = b'.'.join([b_path, to_bytes(ext)]) + b_full_path = b'.'.join([b_path, to_bytes(ext)]) else: - full_path = b_path + b_full_path = b_path + + full_path = to_text(b_full_path) if self.path_exists(full_path): if self.is_directory(full_path): if allow_dir: - found.extend(self._get_dir_vars_files(to_text(full_path), extensions)) + found.extend(self._get_dir_vars_files(full_path, extensions)) else: continue else: - found.append(to_text(full_path)) + found.append(full_path) break return found def _get_dir_vars_files(self, path: str, extensions: list[str]) -> list[str]: found = [] for spath in sorted(self.list_directory(path)): - if not spath.startswith(u'.') and not spath.endswith(u'~'): # skip hidden and backups + if not spath.startswith('.') and not spath.endswith('~'): # skip hidden and backups ext = os.path.splitext(spath)[-1] full_spath = os.path.join(path, spath) diff --git a/lib/ansible/parsing/mod_args.py b/lib/ansible/parsing/mod_args.py index 09823d59dd5..90aee815d06 100644 --- a/lib/ansible/parsing/mod_args.py +++ b/lib/ansible/parsing/mod_args.py @@ -130,6 +130,7 @@ class ModuleArgsParser: # HACK: why are these not FieldAttributes 
on task with a post-validate to check usage? self._task_attrs.update(['local_action', 'static']) self._task_attrs = frozenset(self._task_attrs) + self._resolved_action = None def _split_module_string(self, module_string: str) -> tuple[str, str]: """ @@ -344,6 +345,8 @@ class ModuleArgsParser: raise e is_action_candidate = context.resolved and bool(context.redirect_list) + if is_action_candidate: + self._resolved_action = context.resolved_fqcn if is_action_candidate: # finding more than one module name is a problem diff --git a/lib/ansible/parsing/vault/__init__.py b/lib/ansible/parsing/vault/__init__.py index 0322dd7b2e0..3128be8b060 100644 --- a/lib/ansible/parsing/vault/__init__.py +++ b/lib/ansible/parsing/vault/__init__.py @@ -59,7 +59,6 @@ except ImportError: from ansible.errors import AnsibleError, AnsibleAssertionError from ansible import constants as C -from ansible.module_utils.six import binary_type from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native from ansible.utils.display import Display from ansible.utils.path import makedirs_safe, unfrackpath @@ -1237,7 +1236,7 @@ class VaultAES256: It would be nice if there were a library for this but hey. 
""" - if not (isinstance(b_a, binary_type) and isinstance(b_b, binary_type)): + if not (isinstance(b_a, bytes) and isinstance(b_b, bytes)): raise TypeError('_is_equal can only be used to compare two byte strings') # http://codahale.com/a-lesson-in-timing-attacks/ diff --git a/lib/ansible/playbook/base.py b/lib/ansible/playbook/base.py index 955962ea324..5a166929540 100644 --- a/lib/ansible/playbook/base.py +++ b/lib/ansible/playbook/base.py @@ -19,7 +19,6 @@ from ansible import context from ansible.errors import AnsibleError, AnsibleParserError, AnsibleAssertionError, AnsibleValueOmittedError, AnsibleFieldAttributeError from ansible.module_utils.datatag import native_type_name from ansible._internal._datatag._tags import Origin -from ansible.module_utils.six import string_types from ansible.module_utils.parsing.convert_bool import boolean from ansible.module_utils.common.sentinel import Sentinel from ansible.module_utils.common.text.converters import to_text @@ -37,7 +36,7 @@ display = Display() def _validate_action_group_metadata(action, found_group_metadata, fq_group_name): valid_metadata = { 'extend_group': { - 'types': (list, string_types,), + 'types': (list, str,), 'errortype': 'list', }, } @@ -204,7 +203,7 @@ class FieldAttributeBase: value = self.set_to_context(attr.name) valid_values = frozenset(('always', 'on_failed', 'on_unreachable', 'on_skipped', 'never')) - if value and isinstance(value, string_types) and value not in valid_values: + if value and isinstance(value, str) and value not in valid_values: raise AnsibleParserError("'%s' is not a valid value for debugger. 
Must be one of %s" % (value, ', '.join(valid_values)), obj=self.get_ds()) return value @@ -350,14 +349,14 @@ class FieldAttributeBase: found_group_metadata = False for action in action_group: # Everything should be a string except the metadata entry - if not isinstance(action, string_types): + if not isinstance(action, str): _validate_action_group_metadata(action, found_group_metadata, fq_group_name) if isinstance(action['metadata'], dict): found_group_metadata = True include_groups = action['metadata'].get('extend_group', []) - if isinstance(include_groups, string_types): + if isinstance(include_groups, str): include_groups = [include_groups] if not isinstance(include_groups, list): # Bad entries may be a warning above, but prevent tracebacks by setting it back to the acceptable type. @@ -472,7 +471,7 @@ class FieldAttributeBase: elif attribute.isa == 'percent': # special value, which may be an integer or float # with an optional '%' at the end - if isinstance(value, string_types) and '%' in value: + if isinstance(value, str) and '%' in value: value = value.replace('%', '') value = float(value) elif attribute.isa == 'list': diff --git a/lib/ansible/playbook/collectionsearch.py b/lib/ansible/playbook/collectionsearch.py index d5bc9450ef2..b0036a5b9e6 100644 --- a/lib/ansible/playbook/collectionsearch.py +++ b/lib/ansible/playbook/collectionsearch.py @@ -3,7 +3,6 @@ from __future__ import annotations -from ansible.module_utils.six import string_types from ansible.playbook.attribute import FieldAttribute from ansible.utils.collection_loader import AnsibleCollectionConfig from ansible.utils.display import Display @@ -32,7 +31,7 @@ def _ensure_default_collection(collection_list=None): class CollectionSearch: # this needs to be populated before we can resolve tasks/roles/etc - collections = FieldAttribute(isa='list', listof=string_types, priority=100, default=_ensure_default_collection, always_post_validate=True, static=True) + collections = FieldAttribute(isa='list', 
listof=(str,), priority=100, default=_ensure_default_collection, always_post_validate=True, static=True) def _load_collections(self, attr, ds): # We are always a mixin with Base, so we can validate this untemplated diff --git a/lib/ansible/playbook/handler.py b/lib/ansible/playbook/handler.py index 125a9cddc75..5a212c3f1d4 100644 --- a/lib/ansible/playbook/handler.py +++ b/lib/ansible/playbook/handler.py @@ -20,12 +20,11 @@ from __future__ import annotations from ansible.errors import AnsibleAssertionError from ansible.playbook.attribute import NonInheritableFieldAttribute from ansible.playbook.task import Task -from ansible.module_utils.six import string_types class Handler(Task): - listen = NonInheritableFieldAttribute(isa='list', default=list, listof=string_types, static=True) + listen = NonInheritableFieldAttribute(isa='list', default=list, listof=(str,), static=True) def __init__(self, block=None, role=None, task_include=None): self.notified_hosts = [] diff --git a/lib/ansible/playbook/play.py b/lib/ansible/playbook/play.py index 461a0a39258..032716e90b4 100644 --- a/lib/ansible/playbook/play.py +++ b/lib/ansible/playbook/play.py @@ -22,7 +22,6 @@ from ansible import context from ansible.errors import AnsibleError from ansible.errors import AnsibleParserError, AnsibleAssertionError from ansible.module_utils.common.collections import is_sequence -from ansible.module_utils.six import binary_type, string_types, text_type from ansible.playbook.attribute import NonInheritableFieldAttribute from ansible.playbook.base import Base from ansible.playbook.block import Block @@ -53,11 +52,11 @@ class Play(Base, Taggable, CollectionSearch): """ # ================================================================================= - hosts = NonInheritableFieldAttribute(isa='list', required=True, listof=string_types, always_post_validate=True, priority=-2) + hosts = NonInheritableFieldAttribute(isa='list', required=True, listof=(str,), always_post_validate=True, priority=-2) # 
Facts gather_facts = NonInheritableFieldAttribute(isa='bool', default=None, always_post_validate=True) - gather_subset = NonInheritableFieldAttribute(isa='list', default=None, listof=string_types, always_post_validate=True) + gather_subset = NonInheritableFieldAttribute(isa='list', default=None, listof=(str,), always_post_validate=True) gather_timeout = NonInheritableFieldAttribute(isa='int', default=None, always_post_validate=True) fact_path = NonInheritableFieldAttribute(isa='string', default=None) @@ -120,10 +119,10 @@ class Play(Base, Taggable, CollectionSearch): for entry in value: if entry is None: raise AnsibleParserError("Hosts list cannot contain values of 'None'. Please check your playbook") - elif not isinstance(entry, (binary_type, text_type)): + elif not isinstance(entry, (bytes, str)): raise AnsibleParserError("Hosts list contains an invalid host value: '{host!s}'".format(host=entry)) - elif not isinstance(value, (binary_type, text_type, EncryptedString)): + elif not isinstance(value, (bytes, str, EncryptedString)): raise AnsibleParserError("Hosts list must be a sequence or string. 
Please check your playbook.") def get_name(self): @@ -303,7 +302,7 @@ class Play(Base, Taggable, CollectionSearch): t = Task(block=flush_block) t.action = 'meta' - t.resolved_action = 'ansible.builtin.meta' + t._resolved_action = 'ansible.builtin.meta' t.args['_raw_params'] = 'flush_handlers' t.implicit = True t.set_loader(self._loader) diff --git a/lib/ansible/playbook/role/__init__.py b/lib/ansible/playbook/role/__init__.py index e6794741be9..9e2c68f9cda 100644 --- a/lib/ansible/playbook/role/__init__.py +++ b/lib/ansible/playbook/role/__init__.py @@ -28,7 +28,6 @@ from ansible import constants as C from ansible.errors import AnsibleError, AnsibleParserError, AnsibleAssertionError from ansible.module_utils.common.sentinel import Sentinel from ansible.module_utils.common.text.converters import to_text -from ansible.module_utils.six import binary_type, text_type from ansible.playbook.base import Base from ansible.playbook.collectionsearch import CollectionSearch from ansible.playbook.conditional import Conditional @@ -38,6 +37,7 @@ from ansible.playbook.role.metadata import RoleMetadata from ansible.playbook.taggable import Taggable from ansible.plugins.loader import add_all_plugin_dirs from ansible.utils.collection_loader import AnsibleCollectionConfig +from ansible.utils.display import Display from ansible.utils.path import is_subpath from ansible.utils.vars import combine_vars @@ -53,14 +53,12 @@ if _t.TYPE_CHECKING: __all__ = ['Role', 'hash_params'] -# TODO: this should be a utility function, but can't be a member of -# the role due to the fact that it would require the use of self -# in a static method. This is also used in the base class for -# strategies (ansible/plugins/strategy/__init__.py) +_display = Display() def hash_params(params): """ + DEPRECATED Construct a data structure of parameters that is hashable. This requires changing any mutable data structures into immutable ones. 
@@ -72,10 +70,16 @@ def hash_params(params): 1) There shouldn't be any unhashable scalars specified in the yaml 2) Our only choice would be to return an error anyway. """ + + _display.deprecated( + msg="The hash_params function is deprecated as its consumers have moved to internal alternatives", + version='2.24', + help_text='Contact the plugin author to update their code', + ) # Any container is unhashable if it contains unhashable items (for # instance, tuple() is a Hashable subclass but if it contains a dict, it # cannot be hashed) - if isinstance(params, Container) and not isinstance(params, (text_type, binary_type)): + if isinstance(params, Container) and not isinstance(params, (str, bytes)): if isinstance(params, Mapping): try: # Optimistically hope the contents are all hashable diff --git a/lib/ansible/playbook/role/definition.py b/lib/ansible/playbook/role/definition.py index 670a4e101ca..017344062eb 100644 --- a/lib/ansible/playbook/role/definition.py +++ b/lib/ansible/playbook/role/definition.py @@ -22,7 +22,6 @@ import os from ansible import constants as C from ansible.errors import AnsibleError, AnsibleAssertionError from ansible.module_utils._internal._datatag import AnsibleTagHelper -from ansible.module_utils.six import string_types from ansible.playbook.attribute import NonInheritableFieldAttribute from ansible.playbook.base import Base from ansible.playbook.collectionsearch import CollectionSearch @@ -70,7 +69,7 @@ class RoleDefinition(Base, Conditional, Taggable, CollectionSearch): if isinstance(ds, int): ds = "%s" % ds - if not isinstance(ds, dict) and not isinstance(ds, string_types): + if not isinstance(ds, dict) and not isinstance(ds, str): raise AnsibleAssertionError() if isinstance(ds, dict): @@ -113,11 +112,11 @@ class RoleDefinition(Base, Conditional, Taggable, CollectionSearch): string), just that string """ - if isinstance(ds, string_types): + if isinstance(ds, str): return ds role_name = ds.get('role', ds.get('name')) - if not role_name 
or not isinstance(role_name, string_types): + if not role_name or not isinstance(role_name, str): raise AnsibleError('role definitions must contain a role name', obj=ds) # if we have the required datastructures, and if the role_name diff --git a/lib/ansible/playbook/role/include.py b/lib/ansible/playbook/role/include.py index 3ab3d153a39..a9eaeb9f12f 100644 --- a/lib/ansible/playbook/role/include.py +++ b/lib/ansible/playbook/role/include.py @@ -18,7 +18,6 @@ from __future__ import annotations from ansible.errors import AnsibleError, AnsibleParserError -from ansible.module_utils.six import string_types from ansible.playbook.delegatable import Delegatable from ansible.playbook.role.definition import RoleDefinition @@ -40,10 +39,10 @@ class RoleInclude(RoleDefinition, Delegatable): @staticmethod def load(data, play, current_role_path=None, parent_role=None, variable_manager=None, loader=None, collection_list=None): - if not (isinstance(data, string_types) or isinstance(data, dict)): + if not (isinstance(data, str) or isinstance(data, dict)): raise AnsibleParserError("Invalid role definition.", obj=data) - if isinstance(data, string_types) and ',' in data: + if isinstance(data, str) and ',' in data: raise AnsibleError("Invalid old style role requirement: %s" % data) ri = RoleInclude(play=play, role_basedir=current_role_path, variable_manager=variable_manager, loader=loader, collection_list=collection_list) diff --git a/lib/ansible/playbook/role/metadata.py b/lib/ansible/playbook/role/metadata.py index 0125ae2e084..5b07d35ec27 100644 --- a/lib/ansible/playbook/role/metadata.py +++ b/lib/ansible/playbook/role/metadata.py @@ -20,7 +20,6 @@ from __future__ import annotations import os from ansible.errors import AnsibleParserError, AnsibleError -from ansible.module_utils.six import string_types from ansible.playbook.attribute import NonInheritableFieldAttribute from ansible.playbook.base import Base from ansible.playbook.collectionsearch import CollectionSearch @@ -70,7 
+69,7 @@ class RoleMetadata(Base, CollectionSearch): for role_def in ds: # FIXME: consolidate with ansible-galaxy to keep this in sync - if isinstance(role_def, string_types) or 'role' in role_def or 'name' in role_def: + if isinstance(role_def, str) or 'role' in role_def or 'name' in role_def: roles.append(role_def) continue try: diff --git a/lib/ansible/playbook/role/requirement.py b/lib/ansible/playbook/role/requirement.py index d68f686e013..716ad51b233 100644 --- a/lib/ansible/playbook/role/requirement.py +++ b/lib/ansible/playbook/role/requirement.py @@ -18,7 +18,6 @@ from __future__ import annotations from ansible.errors import AnsibleError -from ansible.module_utils.six import string_types from ansible.playbook.role.definition import RoleDefinition from ansible.utils.display import Display from ansible.utils.galaxy import scm_archive_resource @@ -65,7 +64,7 @@ class RoleRequirement(RoleDefinition): @staticmethod def role_yaml_parse(role): - if isinstance(role, string_types): + if isinstance(role, str): name = None scm = None src = None diff --git a/lib/ansible/playbook/role_include.py b/lib/ansible/playbook/role_include.py index 48003db7dff..e9a6d7072b2 100644 --- a/lib/ansible/playbook/role_include.py +++ b/lib/ansible/playbook/role_include.py @@ -23,7 +23,6 @@ from ansible.playbook.task_include import TaskInclude from ansible.playbook.role import Role from ansible.playbook.role.include import RoleInclude from ansible.utils.display import Display -from ansible.module_utils.six import string_types from ansible._internal._templating._engine import TemplateEngine __all__ = ['IncludeRole'] @@ -137,7 +136,7 @@ class IncludeRole(TaskInclude): for key in my_arg_names.intersection(IncludeRole.FROM_ARGS): from_key = key.removesuffix('_from') args_value = ir.args.get(key) - if not isinstance(args_value, string_types): + if not isinstance(args_value, str): raise AnsibleParserError('Expected a string for %s but got %s instead' % (key, type(args_value))) 
ir._from_files[from_key] = args_value diff --git a/lib/ansible/playbook/taggable.py b/lib/ansible/playbook/taggable.py index 5823b775947..98d91cac65e 100644 --- a/lib/ansible/playbook/taggable.py +++ b/lib/ansible/playbook/taggable.py @@ -19,11 +19,14 @@ from __future__ import annotations import typing as t +from ansible._internal._templating._engine import TemplateEngine from ansible.errors import AnsibleError from ansible.module_utils.common.sentinel import Sentinel from ansible.module_utils._internal._datatag import AnsibleTagHelper from ansible.playbook.attribute import FieldAttribute -from ansible._internal._templating._engine import TemplateEngine +from ansible.utils.display import Display + +_display = Display() def _flatten_tags(tags: list[str | int]) -> list[str | int]: @@ -38,17 +41,25 @@ def _flatten_tags(tags: list[str | int]) -> list[str | int]: class Taggable: + _RESERVED = frozenset(['tagged', 'all', 'untagged']) untagged = frozenset(['untagged']) tags = FieldAttribute(isa='list', default=list, listof=(str, int), extend=True) def _load_tags(self, attr, ds): + + tags = None if isinstance(ds, list): - return ds + tags = ds + elif isinstance(ds, str): + tags = [AnsibleTagHelper.tag_copy(ds, item.strip()) for item in ds.split(',')] + + if tags is None: + raise AnsibleError('tags must be specified as a list', obj=ds) - if isinstance(ds, str): - return [AnsibleTagHelper.tag_copy(ds, item.strip()) for item in ds.split(',')] + if found := self._RESERVED.intersection(tags): + _display.warning(f"Found reserved tagnames in tags: {list(found)!r}, we do not recommend doing this as it might give unexpected results", obj=ds) - raise AnsibleError('tags must be specified as a list', obj=ds) + return tags def _get_all_taggable_objects(self) -> t.Iterable[Taggable]: obj = self diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py index 4f97e268194..91e410ebc86 100644 --- a/lib/ansible/playbook/task.py +++ b/lib/ansible/playbook/task.py @@ -25,7 +25,6 
@@ from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVar from ansible.executor.module_common import _get_action_arg_defaults from ansible.module_utils.common.text.converters import to_native from ansible.module_utils._internal._datatag import AnsibleTagHelper -from ansible.module_utils.six import string_types from ansible.parsing.mod_args import ModuleArgsParser, RAW_PARAM_MODULES from ansible.plugins.action import ActionBase from ansible.plugins.loader import action_loader, module_loader, lookup_loader @@ -41,7 +40,7 @@ from ansible.playbook.role import Role from ansible.playbook.taggable import Taggable from ansible._internal import _task from ansible._internal._templating import _marker_behaviors -from ansible._internal._templating._jinja_bits import is_possibly_all_template +from ansible._internal._templating._jinja_bits import is_possibly_all_template, is_possibly_template from ansible._internal._templating._engine import TemplateEngine, TemplateOptions from ansible.utils.collection_loader import AnsibleCollectionConfig from ansible.utils.display import Display @@ -101,7 +100,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch, Notifiable, Delegatabl self._role = role self._parent = None self.implicit = False - self.resolved_action: str | None = None + self._resolved_action: str | None = None if task_include: self._parent = task_include @@ -110,6 +109,38 @@ class Task(Base, Conditional, Taggable, CollectionSearch, Notifiable, Delegatabl super(Task, self).__init__() + _resolved_action_warning = ( + "A plugin is sampling the task's resolved_action when it is not resolved. " + "This can be caused by callback plugins using the resolved_action attribute too " + "early (such as in v2_playbook_on_task_start for a task using the action/local_action " + "keyword), or too late (such as in v2_runner_on_ok for a task with a loop). 
" + "To maximize compatibility with user features, callback plugins should " + "only use this attribute in v2_runner_on_ok/v2_runner_on_failed for tasks " + "without a loop, and v2_runner_item_on_ok/v2_runner_item_on_failed otherwise." + ) + + @property + def resolved_action(self) -> str | None: + """The templated and resolved FQCN of the task action or None. + + If the action is a template, callback plugins can only use this value in certain methods. + - v2_runner_on_ok and v2_runner_on_failed if there's no task loop + - v2_runner_item_on_ok and v2_runner_item_on_failed if there is a task loop + """ + # Consider deprecating this because it's difficult to use? + # Moving it to the task result would improve the no-loop limitation on v2_runner_on_ok + # but then wouldn't be accessible to v2_playbook_on_task_start, *_on_skipped, etc. + if self._resolved_action is not None: + return self._resolved_action + if not is_possibly_template(self.action): + try: + return self._resolve_action(self.action) + except AnsibleParserError: + display.warning(self._resolved_action_warning, obj=self.action) + else: + display.warning(self._resolved_action_warning, obj=self.action) + return None + def get_name(self, include_role_fqcn=True): """ return the name of the task """ @@ -129,7 +160,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch, Notifiable, Delegatabl def _merge_kv(self, ds): if ds is None: return "" - elif isinstance(ds, string_types): + elif isinstance(ds, str): return ds elif isinstance(ds, dict): buf = "" @@ -168,7 +199,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch, Notifiable, Delegatabl else: module_or_action_context = action_context.plugin_load_context - self.resolved_action = module_or_action_context.resolved_fqcn + self._resolved_action = module_or_action_context.resolved_fqcn action_type: type[ActionBase] = action_context.object @@ -282,6 +313,9 @@ class Task(Base, Conditional, Taggable, CollectionSearch, Notifiable, Delegatabl # But if it 
wasn't, we can add the yaml object now to get more detail raise AnsibleParserError("Error parsing task arguments.", obj=ds) from ex + if args_parser._resolved_action is not None: + self._resolved_action = args_parser._resolved_action + new_ds['action'] = action new_ds['args'] = args new_ds['delegate_to'] = delegate_to @@ -465,7 +499,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch, Notifiable, Delegatabl new_me._role = self._role new_me.implicit = self.implicit - new_me.resolved_action = self.resolved_action + new_me._resolved_action = self._resolved_action new_me._uuid = self._uuid return new_me @@ -482,7 +516,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch, Notifiable, Delegatabl data['role'] = self._role.serialize() data['implicit'] = self.implicit - data['resolved_action'] = self.resolved_action + data['_resolved_action'] = self._resolved_action return data @@ -513,7 +547,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch, Notifiable, Delegatabl del data['role'] self.implicit = data.get('implicit', False) - self.resolved_action = data.get('resolved_action') + self._resolved_action = data.get('_resolved_action') super(Task, self).deserialize(data) @@ -591,7 +625,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch, Notifiable, Delegatabl def dump_attrs(self): """Override to smuggle important non-FieldAttribute values back to the controller.""" attrs = super().dump_attrs() - attrs.update(resolved_action=self.resolved_action) + attrs.update(_resolved_action=self._resolved_action) return attrs def _resolve_conditional( diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py index b719000f66a..dd1981f3843 100644 --- a/lib/ansible/plugins/action/__init__.py +++ b/lib/ansible/plugins/action/__init__.py @@ -29,7 +29,6 @@ from ansible.module_utils.common.arg_spec import ArgumentSpecValidator from ansible.module_utils.errors import UnsupportedError from ansible.module_utils.json_utils 
import _filter_non_json_lines from ansible.module_utils.common.json import Direction, get_module_encoder, get_module_decoder -from ansible.module_utils.six import binary_type, string_types, text_type from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text from ansible.release import __version__ from ansible.utils.collection_loader import resource_from_fqcr @@ -52,7 +51,7 @@ if t.TYPE_CHECKING: def _validate_utf8_json(d): - if isinstance(d, text_type): + if isinstance(d, str): # Purposefully not using to_bytes here for performance reasons d.encode(encoding='utf-8', errors='strict') elif isinstance(d, dict): @@ -288,14 +287,6 @@ class ActionBase(ABC, _AnsiblePluginInfoMixin): elif leaf_module_name == 'async_status' and collection_name in rewrite_collection_names: module_name = '%s.%s' % (win_collection, leaf_module_name) - # TODO: move this tweak down to the modules, not extensible here - # Remove extra quotes surrounding path parameters before sending to module. 
- if leaf_module_name in ['win_stat', 'win_file', 'win_copy', 'slurp'] and module_args and \ - hasattr(self._connection._shell, '_unquote'): - for key in ('src', 'dest', 'path'): - if key in module_args: - module_args[key] = self._connection._shell._unquote(module_args[key]) - result = self._shared_loader_obj.module_loader.find_plugin_with_context(module_name, mod_type, collection_list=self._task.collections) if not result.resolved: @@ -874,7 +865,7 @@ class ActionBase(ABC, _AnsiblePluginInfoMixin): # happens sometimes when it is a dir and not on bsd if 'checksum' not in mystat['stat']: mystat['stat']['checksum'] = '' - elif not isinstance(mystat['stat']['checksum'], string_types): + elif not isinstance(mystat['stat']['checksum'], str): raise AnsibleError("Invalid checksum returned by stat: expected a string type but got %s" % type(mystat['stat']['checksum'])) return mystat['stat'] @@ -1084,7 +1075,7 @@ class ActionBase(ABC, _AnsiblePluginInfoMixin): # the remote system, which can be read and parsed by the module args_data = "" for k, v in module_args.items(): - args_data += '%s=%s ' % (k, shlex.quote(text_type(v))) + args_data += '%s=%s ' % (k, shlex.quote(str(v))) self._transfer_data(args_file_path, args_data) elif module_style in ('non_native_want_json', 'binary'): profile_encoder = get_module_encoder(module_bits.serialization_profile, Direction.CONTROLLER_TO_MODULE) @@ -1169,7 +1160,7 @@ class ActionBase(ABC, _AnsiblePluginInfoMixin): self._cleanup_remote_tmp = False # NOTE: dnf returns results .. 
but that made it 'compatible' with squashing, so we allow mappings, for now - if 'results' in data and (not isinstance(data['results'], Sequence) or isinstance(data['results'], string_types)): + if 'results' in data and (not isinstance(data['results'], Sequence) or isinstance(data['results'], str)): data['ansible_module_results'] = data['results'] del data['results'] display.warning("Found internal 'results' key in module return, renamed to 'ansible_module_results'.") @@ -1322,16 +1313,16 @@ class ActionBase(ABC, _AnsiblePluginInfoMixin): # stdout and stderr may be either a file-like or a bytes object. # Convert either one to a text type - if isinstance(stdout, binary_type): + if isinstance(stdout, bytes): out = to_text(stdout, errors=encoding_errors) - elif not isinstance(stdout, text_type): + elif not isinstance(stdout, str): out = to_text(b''.join(stdout.readlines()), errors=encoding_errors) else: out = stdout - if isinstance(stderr, binary_type): + if isinstance(stderr, bytes): err = to_text(stderr, errors=encoding_errors) - elif not isinstance(stderr, text_type): + elif not isinstance(stderr, str): err = to_text(b''.join(stderr.readlines()), errors=encoding_errors) else: err = stderr diff --git a/lib/ansible/plugins/action/add_host.py b/lib/ansible/plugins/action/add_host.py index 1e80fa68e24..827d7b71012 100644 --- a/lib/ansible/plugins/action/add_host.py +++ b/lib/ansible/plugins/action/add_host.py @@ -21,7 +21,6 @@ from __future__ import annotations from collections.abc import Mapping from ansible.errors import AnsibleActionFail -from ansible.module_utils.six import string_types from ansible.plugins.action import ActionBase from ansible.parsing.utils.addresses import parse_address from ansible.utils.display import Display @@ -74,7 +73,7 @@ class ActionModule(ActionBase): if groups: if isinstance(groups, list): group_list = groups - elif isinstance(groups, string_types): + elif isinstance(groups, str): group_list = groups.split(",") else: raise 
AnsibleActionFail("Groups must be specified as a list.", obj=groups) diff --git a/lib/ansible/plugins/action/fetch.py b/lib/ansible/plugins/action/fetch.py index 3fb21fbe3c5..4978f029994 100644 --- a/lib/ansible/plugins/action/fetch.py +++ b/lib/ansible/plugins/action/fetch.py @@ -20,7 +20,6 @@ import os import base64 from ansible.errors import AnsibleConnectionFailure, AnsibleError, AnsibleActionFail, AnsibleActionSkip from ansible.module_utils.common.text.converters import to_bytes, to_text -from ansible.module_utils.six import string_types from ansible.module_utils.parsing.convert_bool import boolean from ansible.plugins.action import ActionBase from ansible.utils.display import Display @@ -52,10 +51,10 @@ class ActionModule(ActionBase): msg = '' # FIXME: validate source and dest are strings; use basic.py and module specs - if not isinstance(source, string_types): + if not isinstance(source, str): msg = "Invalid type supplied for source option, it must be a string" - if not isinstance(dest, string_types): + if not isinstance(dest, str): msg = "Invalid type supplied for dest option, it must be a string" if source is None or dest is None: @@ -131,7 +130,6 @@ class ActionModule(ActionBase): # calculate the destination name if os.path.sep not in self._connection._shell.join_path('a', ''): - source = self._connection._shell._unquote(source) source_local = source.replace('\\', '/') else: source_local = source diff --git a/lib/ansible/plugins/action/group_by.py b/lib/ansible/plugins/action/group_by.py index 27c433ac69c..1b9997c0b79 100644 --- a/lib/ansible/plugins/action/group_by.py +++ b/lib/ansible/plugins/action/group_by.py @@ -17,7 +17,6 @@ from __future__ import annotations from ansible.plugins.action import ActionBase -from ansible.module_utils.six import string_types class ActionModule(ActionBase): @@ -42,7 +41,7 @@ class ActionModule(ActionBase): group_name = self._task.args.get('key') parent_groups = self._task.args.get('parents', ['all']) - if 
isinstance(parent_groups, string_types): + if isinstance(parent_groups, str): parent_groups = [parent_groups] result['changed'] = False diff --git a/lib/ansible/plugins/action/include_vars.py b/lib/ansible/plugins/action/include_vars.py index 3eeef2d9c8d..5b3cd4b6c3c 100644 --- a/lib/ansible/plugins/action/include_vars.py +++ b/lib/ansible/plugins/action/include_vars.py @@ -10,7 +10,6 @@ import pathlib import ansible.constants as C from ansible.errors import AnsibleError from ansible._internal._datatag._tags import SourceWasEncrypted -from ansible.module_utils.six import string_types from ansible.module_utils.common.text.converters import to_native from ansible.plugins.action import ActionBase from ansible.utils.vars import combine_vars @@ -38,14 +37,17 @@ class ActionModule(ActionBase): if not self.ignore_files: self.ignore_files = list() - if isinstance(self.ignore_files, string_types): + if isinstance(self.ignore_files, str): + self._display.deprecated( + msg="Specifying 'ignore_files' as a string is deprecated.", + version="2.24", + help_text="Use a list of strings instead.", + obj=self.ignore_files, + ) self.ignore_files = self.ignore_files.split() - elif isinstance(self.ignore_files, dict): - return { - 'failed': True, - 'message': '{0} must be a list'.format(self.ignore_files) - } + if not isinstance(self.ignore_files, list): + raise AnsibleError("The 'ignore_files' option must be a list.", obj=self.ignore_files) def _set_args(self): """ Set instance variables based on the arguments that were passed """ @@ -65,11 +67,8 @@ class ActionModule(ActionBase): self.ignore_files = self._task.args.get('ignore_files', None) self.valid_extensions = self._task.args.get('extensions', self.VALID_FILE_EXTENSIONS) - # convert/validate extensions list - if isinstance(self.valid_extensions, string_types): - self.valid_extensions = list(self.valid_extensions) if not isinstance(self.valid_extensions, list): - raise AnsibleError('Invalid type for "extensions" option, it must be 
a list') + raise AnsibleError("The 'extensions' option must be a list.", obj=self.valid_extensions) def run(self, tmp=None, task_vars=None): """ Load yml files recursively from a directory. @@ -93,10 +92,10 @@ class ActionModule(ActionBase): elif arg in self.VALID_ALL: pass else: - raise AnsibleError('{0} is not a valid option in include_vars'.format(to_native(arg))) + raise AnsibleError(f'{arg} is not a valid option in include_vars', obj=arg) if dirs and files: - raise AnsibleError("You are mixing file only and dir only arguments, these are incompatible") + raise AnsibleError("You are mixing file only and dir only arguments, these are incompatible", obj=self._task.args) # set internal vars from args self._set_args() @@ -108,13 +107,13 @@ class ActionModule(ActionBase): self._set_root_dir() if not path.exists(self.source_dir): failed = True - err_msg = ('{0} directory does not exist'.format(to_native(self.source_dir))) + err_msg = f"{self.source_dir} directory does not exist" elif not path.isdir(self.source_dir): failed = True - err_msg = ('{0} is not a directory'.format(to_native(self.source_dir))) + err_msg = f"{self.source_dir} is not a directory" else: for root_dir, filenames in self._traverse_dir_depth(): - failed, err_msg, updated_results = (self._load_files_in_dir(root_dir, filenames)) + failed, err_msg, updated_results = self._load_files_in_dir(root_dir, filenames) if failed: break results.update(updated_results) @@ -175,7 +174,7 @@ class ActionModule(ActionBase): self.source_dir = path.join(current_dir, self.source_dir) def _log_walk(self, error): - self._display.vvv('Issue with walking through "%s": %s' % (to_native(error.filename), to_native(error))) + self._display.vvv(f"Issue with walking through {error.filename}: {error}") def _traverse_dir_depth(self): """ Recursively iterate over a directory and sort the files in @@ -204,9 +203,8 @@ class ActionModule(ActionBase): try: if re.search(r'{0}$'.format(file_type), filename): return True - except 
Exception: - err_msg = 'Invalid regular expression: {0}'.format(file_type) - raise AnsibleError(err_msg) + except Exception as ex: + raise AnsibleError(f'Invalid regular expression: {file_type!r}', obj=file_type) from ex return False def _is_valid_file_ext(self, source_file): @@ -232,7 +230,7 @@ class ActionModule(ActionBase): err_msg = '' if validate_extensions and not self._is_valid_file_ext(filename): failed = True - err_msg = ('{0} does not have a valid extension: {1}'.format(to_native(filename), ', '.join(self.valid_extensions))) + err_msg = f"{filename!r} does not have a valid extension: {', '.join(self.valid_extensions)}" else: data = self._loader.load_from_file(filename, cache='none', trusted_as_template=True) @@ -243,7 +241,7 @@ class ActionModule(ActionBase): if not isinstance(data, dict): failed = True - err_msg = ('{0} must be stored as a dictionary/hash'.format(to_native(filename))) + err_msg = f"{filename!r} must be stored as a dictionary/hash" else: self.included_files.append(filename) results.update(data) diff --git a/lib/ansible/plugins/action/script.py b/lib/ansible/plugins/action/script.py index 0f2b2d49892..2149bef91ca 100644 --- a/lib/ansible/plugins/action/script.py +++ b/lib/ansible/plugins/action/script.py @@ -139,8 +139,6 @@ class ActionModule(ActionBase): else: script_cmd = ' '.join([env_string, target_command]) - script_cmd = self._connection._shell.wrap_for_exec(script_cmd) - exec_data = None # PowerShell runs the script in a special wrapper to enable things # like become and environment args @@ -149,7 +147,7 @@ class ActionModule(ActionBase): pc = self._task exec_data = ps_manifest._create_powershell_wrapper( name=f"ansible.builtin.script.{pathlib.Path(source).stem}", - module_data=to_bytes(script_cmd), + module_data=to_bytes(f"& {script_cmd}; exit $LASTEXITCODE"), module_path=source, module_args={}, environment=env_dict, diff --git a/lib/ansible/plugins/action/template.py b/lib/ansible/plugins/action/template.py index 
19844827341..e8cc2bbbf8b 100644 --- a/lib/ansible/plugins/action/template.py +++ b/lib/ansible/plugins/action/template.py @@ -23,7 +23,6 @@ from ansible.config.manager import ensure_type from ansible.errors import AnsibleError, AnsibleActionFail from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native from ansible.module_utils.parsing.convert_bool import boolean -from ansible.module_utils.six import string_types from ansible.plugins.action import ActionBase from ansible.template import trust_as_template from ansible._internal._templating import _template_vars @@ -49,7 +48,7 @@ class ActionModule(ActionBase): 'block_end_string', 'comment_start_string', 'comment_end_string'): if s_type in self._task.args: value = ensure_type(self._task.args[s_type], 'string') - if value is not None and not isinstance(value, string_types): + if value is not None and not isinstance(value, str): raise AnsibleActionFail("%s is expected to be a string, but got %s instead" % (s_type, type(value))) self._task.args[s_type] = value diff --git a/lib/ansible/plugins/callback/__init__.py b/lib/ansible/plugins/callback/__init__.py index ea675ee444f..944606f26ff 100644 --- a/lib/ansible/plugins/callback/__init__.py +++ b/lib/ansible/plugins/callback/__init__.py @@ -290,7 +290,11 @@ class CallbackBase(AnsiblePlugin): ) if not indent and any(indent_conditions): - indent = 4 + try: + indent = self.get_option('result_indentation') + except KeyError: + # Callback does not declare result_indentation nor extend result_format_callback + indent = 4 if pretty_results is False: # pretty_results=False overrides any specified indentation indent = None @@ -391,8 +395,14 @@ class CallbackBase(AnsiblePlugin): # Callback does not declare pretty_results nor extend result_format_callback pretty_results = None + try: + indent = self.get_option('result_indentation') + except KeyError: + # Callback does not declare result_indentation nor extend result_format_callback + indent = 4 + if 
result_format == 'json': - return json.dumps(diff, sort_keys=True, indent=4, separators=(u',', u': ')) + u'\n' + return json.dumps(diff, sort_keys=True, indent=indent, separators=(u',', u': ')) + u'\n' if result_format == 'yaml': # None is a sentinel in this case that indicates default behavior @@ -404,7 +414,7 @@ class CallbackBase(AnsiblePlugin): allow_unicode=True, Dumper=functools.partial(_AnsibleCallbackDumper, lossy=lossy), default_flow_style=False, - indent=4, + indent=indent, # sort_keys=sort_keys # This requires PyYAML>=5.1 ), ' ' diff --git a/lib/ansible/plugins/connection/local.py b/lib/ansible/plugins/connection/local.py index 0e650fd14f0..6b7581a2f45 100644 --- a/lib/ansible/plugins/connection/local.py +++ b/lib/ansible/plugins/connection/local.py @@ -47,7 +47,6 @@ import typing as t import ansible.constants as C from ansible.errors import AnsibleError, AnsibleFileNotFound, AnsibleConnectionFailure -from ansible.module_utils.six import text_type, binary_type from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text from ansible.plugins.connection import ConnectionBase from ansible.utils.display import Display @@ -100,7 +99,7 @@ class Connection(ConnectionBase): display.vvv(u"EXEC {0}".format(to_text(cmd)), host=self._play_context.remote_addr) display.debug("opening command with Popen()") - if isinstance(cmd, (text_type, binary_type)): + if isinstance(cmd, (str, bytes)): cmd = to_text(cmd) else: cmd = map(to_text, cmd) @@ -119,7 +118,7 @@ class Connection(ConnectionBase): p = subprocess.Popen( cmd, - shell=isinstance(cmd, (text_type, binary_type)), + shell=isinstance(cmd, (str, bytes)), executable=executable, cwd=self.cwd, stdin=stdin, diff --git a/lib/ansible/plugins/connection/psrp.py b/lib/ansible/plugins/connection/psrp.py index cef9b4346d7..7a7ddbbaa2f 100644 --- a/lib/ansible/plugins/connection/psrp.py +++ b/lib/ansible/plugins/connection/psrp.py @@ -489,7 +489,6 @@ class Connection(ConnectionBase): def put_file(self, 
in_path: str, out_path: str) -> None: super(Connection, self).put_file(in_path, out_path) - out_path = self._shell._unquote(out_path) display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._psrp_host) script, in_data = _bootstrap_powershell_script('psrp_put_file.ps1', { @@ -549,7 +548,6 @@ class Connection(ConnectionBase): display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self._psrp_host) - in_path = self._shell._unquote(in_path) out_path = out_path.replace('\\', '/') b_out_path = to_bytes(out_path, errors='surrogate_or_strict') diff --git a/lib/ansible/plugins/connection/ssh.py b/lib/ansible/plugins/connection/ssh.py index b9b6c1356a1..36b4fdcc377 100644 --- a/lib/ansible/plugins/connection/ssh.py +++ b/lib/ansible/plugins/connection/ssh.py @@ -441,7 +441,6 @@ from ansible.errors import ( AnsibleError, AnsibleFileNotFound, ) -from ansible.module_utils.six import text_type, binary_type from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text from ansible.plugins.connection import ConnectionBase, BUFSIZE from ansible.plugins.shell.powershell import _replace_stderr_clixml @@ -1122,7 +1121,7 @@ class Connection(ConnectionBase): p = None - if isinstance(cmd, (text_type, binary_type)): + if isinstance(cmd, (str, bytes)): cmd = to_bytes(cmd) else: cmd = list(map(to_bytes, cmd)) diff --git a/lib/ansible/plugins/connection/winrm.py b/lib/ansible/plugins/connection/winrm.py index 179d848fe51..571bfece8d1 100644 --- a/lib/ansible/plugins/connection/winrm.py +++ b/lib/ansible/plugins/connection/winrm.py @@ -768,7 +768,6 @@ class Connection(ConnectionBase): def put_file(self, in_path: str, out_path: str) -> None: super(Connection, self).put_file(in_path, out_path) - out_path = self._shell._unquote(out_path) display.vvv('PUT "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host) if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')): raise AnsibleFileNotFound('file or module does not exist: "%s"' % to_native(in_path)) 
@@ -806,7 +805,6 @@ class Connection(ConnectionBase): def fetch_file(self, in_path: str, out_path: str) -> None: super(Connection, self).fetch_file(in_path, out_path) - in_path = self._shell._unquote(in_path) out_path = out_path.replace('\\', '/') # consistent with other connection plugins, we assume the caller has created the target dir display.vvv('FETCH "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host) diff --git a/lib/ansible/plugins/doc_fragments/result_format_callback.py b/lib/ansible/plugins/doc_fragments/result_format_callback.py index 88e37b8c344..220a311bab0 100644 --- a/lib/ansible/plugins/doc_fragments/result_format_callback.py +++ b/lib/ansible/plugins/doc_fragments/result_format_callback.py @@ -26,6 +26,21 @@ class ModuleDocFragment(object): - json - yaml version_added: '2.13' + result_indentation: + name: Indentation of the result + description: + - Allows to configure indentation for YAML and verbose/pretty JSON. + - Please note that for O(result_format=yaml), only values between 2 and 9 will be handled as expected by PyYAML. + If indentation is set to 1, or to 10 or larger, the first level of indentation will be used, + but all further indentations will be by 2 spaces. 
+ type: int + default: 4 + env: + - name: ANSIBLE_CALLBACK_RESULT_INDENTATION + ini: + - key: callback_result_indentation + section: defaults + version_added: '2.20' pretty_results: name: Configure output for readability description: diff --git a/lib/ansible/plugins/filter/core.py b/lib/ansible/plugins/filter/core.py index f9f9da73a00..96329d5b5b2 100644 --- a/lib/ansible/plugins/filter/core.py +++ b/lib/ansible/plugins/filter/core.py @@ -29,7 +29,6 @@ from ansible._internal._templating import _lazy_containers from ansible.errors import AnsibleFilterError, AnsibleTypeError, AnsibleTemplatePluginError from ansible.module_utils.datatag import native_type_name from ansible.module_utils.common.json import get_encoder, get_decoder -from ansible.module_utils.six import string_types, integer_types, text_type from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text from ansible.module_utils.common.collections import is_sequence from ansible.parsing.yaml.dumper import AnsibleDumper @@ -221,6 +220,7 @@ def regex_search(value, regex, *args, **kwargs): return items +@accept_args_markers def ternary(value, true_val, false_val, none_val=None): """ value ? 
true_val : false_val """ if value is None and none_val is not None: @@ -278,7 +278,7 @@ def rand(environment, end, start=None, step=None, seed=None): r = SystemRandom() else: r = Random(seed) - if isinstance(end, integer_types): + if isinstance(end, int): if not start: start = 0 if not step: @@ -555,7 +555,7 @@ def subelements(obj, subelements, skip_missing=False): if isinstance(subelements, list): subelement_list = subelements[:] - elif isinstance(subelements, string_types): + elif isinstance(subelements, str): subelement_list = subelements.split('.') else: raise AnsibleTypeError('subelements must be a list or a string') @@ -617,7 +617,7 @@ def list_of_dict_key_value_elements_to_dict(mylist, key_name='key', value_name=' def path_join(paths): """ takes a sequence or a string, and return a concatenation of the different members """ - if isinstance(paths, string_types): + if isinstance(paths, str): return os.path.join(paths) if is_sequence(paths): return os.path.join(*paths) @@ -809,7 +809,7 @@ class FilterModule(object): 'dict2items': dict_to_list_of_dict_key_value_elements, 'items2dict': list_of_dict_key_value_elements_to_dict, 'subelements': subelements, - 'split': partial(unicode_wrap, text_type.split), + 'split': partial(unicode_wrap, str.split), # FDI038 - replace this with a standard type compat shim 'groupby': _cleansed_groupby, diff --git a/lib/ansible/plugins/filter/encryption.py b/lib/ansible/plugins/filter/encryption.py index 78c50422c1c..c6d1d732146 100644 --- a/lib/ansible/plugins/filter/encryption.py +++ b/lib/ansible/plugins/filter/encryption.py @@ -13,25 +13,13 @@ from ansible.utils.display import Display display = Display() -def do_vault(data, secret, salt=None, vault_id='filter_default', wrap_object=False, vaultid=None): +def do_vault(data, secret, salt=None, vault_id='filter_default', wrap_object=False): if not isinstance(secret, (str, bytes)): raise TypeError(f"Secret passed is required to be a string, instead we got {type(secret)}.") if not 
isinstance(data, (str, bytes)): raise TypeError(f"Can only vault strings, instead we got {type(data)}.") - if vaultid is not None: - display.deprecated( - msg="Use of undocumented `vaultid`.", - version="2.20", - help_text="Use `vault_id` instead.", - ) - - if vault_id == 'filter_default': - vault_id = vaultid - else: - display.warning("Ignoring vaultid as vault_id is already set.") - vs = VaultSecret(to_bytes(secret)) vl = VaultLib() try: @@ -48,11 +36,11 @@ def do_vault(data, secret, salt=None, vault_id='filter_default', wrap_object=Fal @_template.accept_args_markers -def do_unvault(vault, secret, vault_id='filter_default', vaultid=None): +def do_unvault(vault, secret, vault_id='filter_default'): if isinstance(vault, VaultExceptionMarker): vault = vault._disarm() - if (first_marker := _template.get_first_marker_arg((vault, secret, vault_id, vaultid), {})) is not None: + if (first_marker := _template.get_first_marker_arg((vault, secret, vault_id), {})) is not None: return first_marker if not isinstance(secret, (str, bytes)): @@ -61,18 +49,6 @@ def do_unvault(vault, secret, vault_id='filter_default', vaultid=None): if not isinstance(vault, (str, bytes)): raise TypeError(f"Vault should be in the form of a string, instead we got {type(vault)}.") - if vaultid is not None: - display.deprecated( - msg="Use of undocumented `vaultid`.", - version="2.20", - help_text="Use `vault_id` instead.", - ) - - if vault_id == 'filter_default': - vault_id = vaultid - else: - display.warning("Ignoring vaultid as vault_id is already set.") - vs = VaultSecret(to_bytes(secret)) vl = VaultLib([(vault_id, vs)]) diff --git a/lib/ansible/plugins/filter/mathstuff.py b/lib/ansible/plugins/filter/mathstuff.py index a9247a2c984..c2fe8e8b3e3 100644 --- a/lib/ansible/plugins/filter/mathstuff.py +++ b/lib/ansible/plugins/filter/mathstuff.py @@ -29,7 +29,6 @@ from jinja2.filters import pass_environment from ansible.errors import AnsibleError from ansible.module_utils.common.text import formatters 
-from ansible.module_utils.six import binary_type, text_type from ansible.utils.display import Display try: @@ -180,7 +179,7 @@ def rekey_on_member(data, key, duplicates='error'): if isinstance(data, Mapping): iterate_over = data.values() - elif isinstance(data, Iterable) and not isinstance(data, (text_type, binary_type)): + elif isinstance(data, Iterable) and not isinstance(data, (str, bytes)): iterate_over = data else: raise AnsibleError("Type is not a valid list, set, or dict") diff --git a/lib/ansible/plugins/inventory/__init__.py b/lib/ansible/plugins/inventory/__init__.py index 348e8dc8834..b6f70b3c44f 100644 --- a/lib/ansible/plugins/inventory/__init__.py +++ b/lib/ansible/plugins/inventory/__init__.py @@ -34,7 +34,6 @@ from ansible.parsing.dataloader import DataLoader from ansible.plugins import AnsiblePlugin, _ConfigurablePlugin from ansible.plugins.cache import CachePluginAdjudicator from ansible.module_utils.common.text.converters import to_bytes, to_native -from ansible.module_utils.six import string_types from ansible.utils.display import Display from ansible.utils.vars import combine_vars, load_extra_vars @@ -439,7 +438,7 @@ class Constructable(_BaseInventoryPlugin): new_raw_group_names = [] if use_default: new_raw_group_names.append(default_value_name) - elif isinstance(key, string_types): + elif isinstance(key, str): new_raw_group_names.append(key) elif isinstance(key, list): for name in key: diff --git a/lib/ansible/plugins/inventory/toml.py b/lib/ansible/plugins/inventory/toml.py index f0b62a85a2a..eb38b5f9556 100644 --- a/lib/ansible/plugins/inventory/toml.py +++ b/lib/ansible/plugins/inventory/toml.py @@ -89,8 +89,6 @@ import tomllib from collections.abc import MutableMapping, MutableSequence from ansible.errors import AnsibleFileNotFound, AnsibleParserError -from ansible.module_utils.common.text.converters import to_bytes, to_native -from ansible.module_utils.six import string_types from ansible.plugins.inventory import BaseFileInventoryPlugin 
from ansible.utils.display import Display @@ -147,11 +145,10 @@ class InventoryModule(BaseFileInventoryPlugin): ) def _load_file(self, file_name): - if not file_name or not isinstance(file_name, string_types): - raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_name)) + if not file_name or not isinstance(file_name, str): + raise AnsibleParserError("Invalid filename: '%s'" % file_name) - b_file_name = to_bytes(self.loader.path_dwim(file_name)) - if not self.loader.path_exists(b_file_name): + if not self.loader.path_exists(file_name): raise AnsibleFileNotFound("Unable to retrieve file contents", file_name=file_name) try: diff --git a/lib/ansible/plugins/inventory/yaml.py b/lib/ansible/plugins/inventory/yaml.py index c822c6ad5a9..3b7ac16911a 100644 --- a/lib/ansible/plugins/inventory/yaml.py +++ b/lib/ansible/plugins/inventory/yaml.py @@ -70,7 +70,6 @@ import os from collections.abc import MutableMapping from ansible.errors import AnsibleError, AnsibleParserError -from ansible.module_utils.six import string_types from ansible.module_utils.common.text.converters import to_native, to_text from ansible.plugins.inventory import BaseFileInventoryPlugin @@ -136,7 +135,7 @@ class InventoryModule(BaseFileInventoryPlugin): for section in ['vars', 'children', 'hosts']: if section in group_data: # convert strings to dicts as these are allowed - if isinstance(group_data[section], string_types): + if isinstance(group_data[section], str): group_data[section] = {group_data[section]: None} if not isinstance(group_data[section], (MutableMapping, NoneType)): # type: ignore[misc] diff --git a/lib/ansible/plugins/loader.py b/lib/ansible/plugins/loader.py index bad7f88991b..e8a39a095fa 100644 --- a/lib/ansible/plugins/loader.py +++ b/lib/ansible/plugins/loader.py @@ -27,7 +27,6 @@ from ansible import _internal, constants as C from ansible.errors import AnsibleError, AnsiblePluginCircularRedirect, AnsiblePluginRemovedError, AnsibleCollectionUnsupportedVersionError from 
ansible.module_utils.common.text.converters import to_bytes, to_text, to_native from ansible.module_utils.datatag import deprecator_from_collection_name -from ansible.module_utils.six import string_types from ansible.parsing.yaml.loader import AnsibleLoader from ansible._internal._yaml._loader import AnsibleInstrumentedLoader from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE, AnsibleJinja2Plugin @@ -96,7 +95,7 @@ def get_shell_plugin(shell_type=None, executable=None): # mostly for backwards compat if executable: - if isinstance(executable, string_types): + if isinstance(executable, str): shell_filename = os.path.basename(executable) try: shell = shell_loader.get(shell_filename) diff --git a/lib/ansible/plugins/lookup/password.py b/lib/ansible/plugins/lookup/password.py index 86afb2ae8f6..a0718fbf18f 100644 --- a/lib/ansible/plugins/lookup/password.py +++ b/lib/ansible/plugins/lookup/password.py @@ -134,7 +134,6 @@ import hashlib from ansible.errors import AnsibleError, AnsibleAssertionError from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text -from ansible.module_utils.six import string_types from ansible.parsing.splitter import parse_kv from ansible.plugins.lookup import LookupBase from ansible.utils.encrypt import BaseHash, do_encrypt, random_password, random_salt @@ -335,7 +334,7 @@ class LookupModule(LookupBase): # chars still might need more chars = params.get('chars', self.get_option('chars')) - if chars and isinstance(chars, string_types): + if chars and isinstance(chars, str): tmp_chars = [] if u',,' in chars: tmp_chars.append(u',') diff --git a/lib/ansible/plugins/lookup/subelements.py b/lib/ansible/plugins/lookup/subelements.py index a08d14ec912..7b1a475c2ae 100644 --- a/lib/ansible/plugins/lookup/subelements.py +++ b/lib/ansible/plugins/lookup/subelements.py @@ -83,7 +83,6 @@ _list: """ from ansible.errors import AnsibleError -from ansible.module_utils.six import string_types from 
ansible.module_utils.parsing.convert_bool import boolean from ansible.plugins.lookup import LookupBase @@ -104,7 +103,7 @@ class LookupModule(LookupBase): _raise_terms_error() # first term should be a list (or dict), second a string holding the subkey - if not isinstance(terms[0], (list, dict)) or not isinstance(terms[1], string_types): + if not isinstance(terms[0], (list, dict)) or not isinstance(terms[1], str): _raise_terms_error("first a dict or a list, second a string pointing to the subkey") subelements = terms[1].split(".") @@ -122,7 +121,7 @@ class LookupModule(LookupBase): flags = {} if len(terms) == 3: flags = terms[2] - if not isinstance(flags, dict) and not all(isinstance(key, string_types) and key in FLAGS for key in flags): + if not isinstance(flags, dict) and not all(isinstance(key, str) and key in FLAGS for key in flags): _raise_terms_error("the optional third item must be a dict with flags %s" % FLAGS) # build_items diff --git a/lib/ansible/plugins/lookup/template.py b/lib/ansible/plugins/lookup/template.py index 141d6684746..76cd8a9ceec 100644 --- a/lib/ansible/plugins/lookup/template.py +++ b/lib/ansible/plugins/lookup/template.py @@ -107,6 +107,7 @@ from ansible.errors import AnsibleError from ansible.plugins.lookup import LookupBase from ansible.template import trust_as_template from ansible._internal._templating import _template_vars +from ansible._internal._templating._engine import TemplateOptions, TemplateOverrides from ansible.utils.display import Display @@ -174,7 +175,11 @@ class LookupModule(LookupBase): ) data_templar = templar.copy_with_new_env(available_variables=vars, searchpath=searchpath) - res = data_templar.template(template_data, escape_backslashes=False, overrides=overrides) + # use the internal template API to avoid forced top-level finalization behavior imposed by the public API + res = data_templar._engine.template(template_data, options=TemplateOptions( + escape_backslashes=False, + 
overrides=TemplateOverrides.from_kwargs(overrides), + )) ret.append(res) else: diff --git a/lib/ansible/plugins/lookup/varnames.py b/lib/ansible/plugins/lookup/varnames.py index ef6159f3902..6eeef66f62f 100644 --- a/lib/ansible/plugins/lookup/varnames.py +++ b/lib/ansible/plugins/lookup/varnames.py @@ -52,7 +52,6 @@ import re from ansible.errors import AnsibleError from ansible.module_utils.common.text.converters import to_native -from ansible.module_utils.six import string_types from ansible.plugins.lookup import LookupBase @@ -69,7 +68,7 @@ class LookupModule(LookupBase): variable_names = list(variables.keys()) for term in terms: - if not isinstance(term, string_types): + if not isinstance(term, str): raise AnsibleError('Invalid setting identifier, "%s" is not a string, it is a %s' % (term, type(term))) try: diff --git a/lib/ansible/plugins/shell/__init__.py b/lib/ansible/plugins/shell/__init__.py index 57e0e930b71..a633f1d20de 100644 --- a/lib/ansible/plugins/shell/__init__.py +++ b/lib/ansible/plugins/shell/__init__.py @@ -24,12 +24,12 @@ import secrets import shlex import time -from collections.abc import Mapping, Sequence - from ansible.errors import AnsibleError from ansible.module_utils.common.text.converters import to_native -from ansible.module_utils.six import text_type, string_types from ansible.plugins import AnsiblePlugin +from ansible.utils.display import Display + +display = Display() _USER_HOME_PATH_RE = re.compile(r'^~[_.A-Za-z0-9][-_.A-Za-z0-9]*$') @@ -84,7 +84,7 @@ class ShellBase(AnsiblePlugin): return 'ansible-tmp-%s-%s-%s' % (time.time(), os.getpid(), secrets.randbelow(2**48)) def env_prefix(self, **kwargs): - return ' '.join(['%s=%s' % (k, self.quote(text_type(v))) for k, v in kwargs.items()]) + return ' '.join(['%s=%s' % (k, self.quote(str(v))) for k, v in kwargs.items()]) def join_path(self, *args): return os.path.join(*args) @@ -272,6 +272,11 @@ class ShellBase(AnsiblePlugin): def wrap_for_exec(self, cmd): """wrap script execution with 
any necessary decoration (eg '&' for quoted powershell script paths)""" + display.deprecated( + msg='The Shell.wrap_for_exec method is deprecated.', + help_text="Contact plugin author to update their plugin to not use this method.", + version='2.24', + ) return cmd def quote(self, cmd): diff --git a/lib/ansible/plugins/shell/powershell.py b/lib/ansible/plugins/shell/powershell.py index 27a65bdeb91..3f1936c62d2 100644 --- a/lib/ansible/plugins/shell/powershell.py +++ b/lib/ansible/plugins/shell/powershell.py @@ -192,7 +192,7 @@ class ShellModule(ShellBase): def join_path(self, *args): # use normpath() to remove doubled slashed and convert forward to backslashes - parts = [ntpath.normpath(self._unquote(arg)) for arg in args] + parts = [ntpath.normpath(arg) for arg in args] # Because ntpath.join treats any component that begins with a backslash as an absolute path, # we have to strip slashes from at least the beginning, otherwise join will ignore all previous @@ -210,7 +210,6 @@ class ShellModule(ShellBase): def path_has_trailing_slash(self, path): # Allow Windows paths to be specified using either slash. - path = self._unquote(path) return path.endswith('/') or path.endswith('\\') def chmod(self, paths, mode): @@ -223,11 +222,11 @@ class ShellModule(ShellBase): raise NotImplementedError('set_user_facl is not implemented for Powershell') def remove(self, path, recurse=False): - path = self._escape(self._unquote(path)) + quoted_path = self._escape(path) if recurse: - return self._encode_script("""Remove-Item '%s' -Force -Recurse;""" % path) + return self._encode_script("""Remove-Item '%s' -Force -Recurse;""" % quoted_path) else: - return self._encode_script("""Remove-Item '%s' -Force;""" % path) + return self._encode_script("""Remove-Item '%s' -Force;""" % quoted_path) def mkdtemp( self, @@ -240,7 +239,6 @@ class ShellModule(ShellBase): # compatibility in case other action plugins outside Ansible calls this. 
if not basefile: basefile = self.__class__._generate_temp_dir_name() - basefile = self._escape(self._unquote(basefile)) basetmpdir = self._escape(tmpdir if tmpdir else self.get_option('remote_tmp')) script = f""" @@ -263,7 +261,6 @@ class ShellModule(ShellBase): if not basefile: basefile = self.__class__._generate_temp_dir_name() - basefile = self._unquote(basefile) basetmpdir = tmpdir if tmpdir else self.get_option('remote_tmp') script, stdin = _bootstrap_powershell_script("powershell_mkdtemp.ps1", { @@ -283,7 +280,6 @@ class ShellModule(ShellBase): ) -> str: # This is not called in Ansible anymore but it is kept for backwards # compatibility in case other actions plugins outside Ansible called this. - user_home_path = self._unquote(user_home_path) if user_home_path == '~': script = 'Write-Output (Get-Location).Path' elif user_home_path.startswith('~\\'): @@ -297,7 +293,6 @@ class ShellModule(ShellBase): user_home_path: str, username: str = '', ) -> _ShellCommand: - user_home_path = self._unquote(user_home_path) script, stdin = _bootstrap_powershell_script("powershell_expand_user.ps1", { 'Path': user_home_path, }) @@ -308,7 +303,7 @@ class ShellModule(ShellBase): ) def exists(self, path): - path = self._escape(self._unquote(path)) + path = self._escape(path) script = """ If (Test-Path '%s') { @@ -329,7 +324,7 @@ class ShellModule(ShellBase): version="2.23", help_text="Use `ActionBase._execute_remote_stat()` instead.", ) - path = self._escape(self._unquote(path)) + path = self._escape(path) script = """ If (Test-Path -PathType Leaf '%(path)s') { @@ -364,7 +359,7 @@ class ShellModule(ShellBase): if arg_path: # Running a module without the exec_wrapper and with an argument # file. 
- script_path = self._unquote(cmd_parts[0]) + script_path = cmd_parts[0] if not script_path.lower().endswith('.ps1'): script_path += '.ps1' @@ -387,7 +382,6 @@ class ShellModule(ShellBase): cmd_parts.insert(0, shebang[2:]) elif not shebang: # The module is assumed to be a binary - cmd_parts[0] = self._unquote(cmd_parts[0]) cmd_parts.append(arg_path) script = """ Try @@ -428,19 +422,9 @@ class ShellModule(ShellBase): return self._encode_script(script, preserve_rc=False) def wrap_for_exec(self, cmd): + super().wrap_for_exec(cmd) return '& %s; exit $LASTEXITCODE' % cmd - def _unquote(self, value): - """Remove any matching quotes that wrap the given value.""" - value = to_text(value or '') - m = re.match(r'^\s*?\'(.*?)\'\s*?$', value) - if m: - return m.group(1) - m = re.match(r'^\s*?"(.*?)"\s*?$', value) - if m: - return m.group(1) - return value - def _escape(self, value): """Return value escaped for use in PowerShell single quotes.""" # There are 5 chars that need to be escaped in a single quote. 
diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py index 848aa366137..7563de66797 100644 --- a/lib/ansible/plugins/strategy/__init__.py +++ b/lib/ansible/plugins/strategy/__init__.py @@ -903,7 +903,7 @@ class StrategyBase: display.warning("%s task does not support when conditional" % task_name) def _execute_meta(self, task: Task, play_context, iterator, target_host: Host): - task.resolved_action = 'ansible.builtin.meta' # _post_validate_args is never called for meta actions, so resolved_action hasn't been set + task._resolved_action = 'ansible.builtin.meta' # _post_validate_args is never called for meta actions, so resolved_action hasn't been set # meta tasks store their args in the _raw_params field of args, # since they do not use k=v pairs, so get that diff --git a/lib/ansible/plugins/test/core.py b/lib/ansible/plugins/test/core.py index 8bfa46d882d..33436ed2f7d 100644 --- a/lib/ansible/plugins/test/core.py +++ b/lib/ansible/plugins/test/core.py @@ -142,6 +142,9 @@ def regex(value='', pattern='', ignorecase=False, multiline=False, match_type='s This is likely only useful for `search` and `match` which already have their own filters. """ + valid_match_types = ('search', 'match', 'fullmatch') + if match_type not in valid_match_types: + raise errors.AnsibleTemplatePluginError(f"Invalid match_type specified. 
Expected one of: {', '.join(valid_match_types)}.", obj=match_type) value = to_text(value, errors='surrogate_or_strict') flags = 0 if ignorecase: @@ -149,7 +152,7 @@ def regex(value='', pattern='', ignorecase=False, multiline=False, match_type='s if multiline: flags |= re.M _re = re.compile(pattern, flags=flags) - return bool(getattr(_re, match_type, 'search')(value)) + return bool(getattr(_re, match_type)(value)) @accept_args_markers diff --git a/lib/ansible/plugins/test/regex.yml b/lib/ansible/plugins/test/regex.yml index d80ca850a2e..98f512cf3d5 100644 --- a/lib/ansible/plugins/test/regex.yml +++ b/lib/ansible/plugins/test/regex.yml @@ -3,7 +3,7 @@ DOCUMENTATION: author: Ansible Core short_description: Does string match regular expression from the start description: - - Compare string against regular expression using Python's match or search functions. + - Compare string against regular expression using Python's match, fullmatch or search functions. options: _input: description: String to match. @@ -22,14 +22,26 @@ DOCUMENTATION: type: boolean default: False match_type: - description: Decide which function to be used to do the matching. + description: + - Decide which function to be used to do the matching. 
type: string - choices: [match, search] + choices: [match, search, fullmatch] default: search -EXAMPLES: | - url: "https://example.com/users/foo/resources/bar" - foundmatch: url is regex("example\.com/\w+/foo") +EXAMPLES: + - name: check if string matches regex + assert: + that: + - 'url is regex("example\.com/\w+/foo")' + vars: + url: "https://example.com/users/foo/resources/bar" + + - name: check if string matches regex ignoring case + assert: + that: + - 'url is regex("EXAMPLE\.COM/\w+/foo", ignorecase=True)' + vars: + url: "https://Example.com/users/foo/resources/bar" RETURN: _value: diff --git a/lib/ansible/utils/context_objects.py b/lib/ansible/utils/context_objects.py index 02db666b0a4..9f67827b7ea 100644 --- a/lib/ansible/utils/context_objects.py +++ b/lib/ansible/utils/context_objects.py @@ -9,14 +9,14 @@ from __future__ import annotations from abc import ABCMeta from collections.abc import Container, Mapping, Sequence, Set +from ansible.module_utils._internal import _no_six from ansible.module_utils.common.collections import ImmutableDict -from ansible.module_utils.six import add_metaclass, binary_type, text_type from ansible.utils.singleton import Singleton def _make_immutable(obj): """Recursively convert a container and objects inside of it into immutable data types""" - if isinstance(obj, (text_type, binary_type)): + if isinstance(obj, (str, bytes)): # Strings first because they are also sequences return obj elif isinstance(obj, Mapping): @@ -79,11 +79,14 @@ class CLIArgs(ImmutableDict): return cls(vars(options)) -@add_metaclass(_ABCSingleton) -class GlobalCLIArgs(CLIArgs): +class GlobalCLIArgs(CLIArgs, metaclass=_ABCSingleton): """ Globally hold a parsed copy of cli arguments. 
Only one of these exist per program as it is for global context """ pass + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type", "add_metaclass") diff --git a/lib/ansible/utils/display.py b/lib/ansible/utils/display.py index a0abd5dccd5..b5e2ff134a8 100644 --- a/lib/ansible/utils/display.py +++ b/lib/ansible/utils/display.py @@ -53,11 +53,10 @@ from ansible.constants import config from ansible.errors import AnsibleAssertionError, AnsiblePromptInterrupt, AnsiblePromptNoninteractive, AnsibleError from ansible._internal._errors import _error_utils, _error_factory from ansible._internal import _event_formatting -from ansible.module_utils._internal import _ambient_context, _deprecator, _messages +from ansible.module_utils._internal import _ambient_context, _deprecator, _messages, _no_six from ansible.module_utils.common.text.converters import to_bytes, to_text from ansible.module_utils.datatag import deprecator_from_collection_name from ansible._internal._datatag._tags import TrustedAsTemplate -from ansible.module_utils.six import text_type from ansible.module_utils._internal import _traceback, _errors from ansible.utils.color import stringc from ansible.utils.multiprocessing import context as multiprocessing_context @@ -106,7 +105,7 @@ def get_text_width(text: str) -> int: character and using wcwidth individually, falling back to a value of 0 for non-printable wide characters. 
""" - if not isinstance(text, text_type): + if not isinstance(text, str): raise TypeError('get_text_width requires text, not %s' % type(text)) try: @@ -1282,3 +1281,7 @@ def _report_config_warnings(deprecator: _messages.PluginInfo) -> None: # emit any warnings or deprecations # in the event config fails before display is up, we'll lose warnings -- but that's OK, since everything is broken anyway _report_config_warnings(_deprecator.ANSIBLE_CORE_DEPRECATOR) + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "text_type") diff --git a/lib/ansible/utils/encrypt.py b/lib/ansible/utils/encrypt.py index 016ab76a466..b4034276404 100644 --- a/lib/ansible/utils/encrypt.py +++ b/lib/ansible/utils/encrypt.py @@ -176,11 +176,6 @@ class PasslibHash(BaseHash): return to_text(result, errors='strict') -def passlib_or_crypt(secret, algorithm, salt=None, salt_size=None, rounds=None, ident=None): - display.deprecated("passlib_or_crypt API is deprecated in favor of do_encrypt", version='2.20') - return do_encrypt(secret, algorithm, salt=salt, salt_size=salt_size, rounds=rounds, ident=ident) - - def do_encrypt(result, encrypt, salt_size=None, salt=None, ident=None, rounds=None): if PASSLIB_AVAILABLE: return PasslibHash(encrypt).hash(result, salt=salt, salt_size=salt_size, rounds=rounds, ident=ident) diff --git a/lib/ansible/utils/helpers.py b/lib/ansible/utils/helpers.py index 97f34acd0e8..5714c885f74 100644 --- a/lib/ansible/utils/helpers.py +++ b/lib/ansible/utils/helpers.py @@ -17,7 +17,7 @@ from __future__ import annotations -from ansible.module_utils.six import string_types +from ansible.module_utils._internal import _no_six def pct_to_int(value, num_items, min_value=1): @@ -25,7 +25,7 @@ def pct_to_int(value, num_items, min_value=1): Converts a given value to a percentage if specified as "x%", otherwise converts the given value to an integer. 
""" - if isinstance(value, string_types) and value.endswith('%'): + if isinstance(value, str) and value.endswith('%'): value_pct = int(value.replace("%", "")) return int((value_pct / 100.0) * num_items) or min_value else: @@ -47,3 +47,7 @@ def deduplicate_list(original_list): """ seen = set() return [x for x in original_list if x not in seen and not seen.add(x)] + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "string_types") diff --git a/lib/ansible/utils/jsonrpc.py b/lib/ansible/utils/jsonrpc.py index 82d1c02ea12..e6e5e950824 100644 --- a/lib/ansible/utils/jsonrpc.py +++ b/lib/ansible/utils/jsonrpc.py @@ -7,9 +7,9 @@ import json import pickle import traceback +from ansible.module_utils._internal import _no_six from ansible.module_utils.common.text.converters import to_text from ansible.module_utils.connection import ConnectionError -from ansible.module_utils.six import binary_type, text_type from ansible.utils.display import Display display = Display() @@ -79,9 +79,9 @@ class JsonRpcServer(object): def response(self, result=None): response = self.header() - if isinstance(result, binary_type): + if isinstance(result, bytes): result = to_text(result) - if not isinstance(result, text_type): + if not isinstance(result, str): response["result_type"] = "pickle" result = to_text(pickle.dumps(result), errors='surrogateescape') response['result'] = result @@ -110,3 +110,7 @@ class JsonRpcServer(object): def internal_error(self, data=None): return self.error(-32603, 'Internal error', data) + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type") diff --git a/lib/ansible/utils/plugin_docs.py b/lib/ansible/utils/plugin_docs.py index 8c1d9b46658..3bff309f4d9 100644 --- a/lib/ansible/utils/plugin_docs.py +++ b/lib/ansible/utils/plugin_docs.py @@ -11,7 +11,7 @@ import yaml from ansible import constants as C from ansible.release import __version__ as ansible_version from 
ansible.errors import AnsibleError, AnsibleParserError, AnsiblePluginNotFound -from ansible.module_utils.six import string_types +from ansible.module_utils._internal import _no_six from ansible.module_utils.common.text.converters import to_native from ansible.parsing.plugin_docs import read_docstring from ansible.parsing.yaml.loader import AnsibleLoader @@ -126,6 +126,10 @@ def remove_current_collection_from_versions_and_dates(fragment, collection_name, _process_versions_and_dates(fragment, is_module, return_docs, remove) +class AnsibleFragmentError(AnsibleError): + pass + + def add_fragments(doc, filename, fragment_loader, is_module=False, section='DOCUMENTATION'): if section not in _FRAGMENTABLE: @@ -133,7 +137,7 @@ def add_fragments(doc, filename, fragment_loader, is_module=False, section='DOCU fragments = doc.pop('extends_documentation_fragment', []) - if isinstance(fragments, string_types): + if isinstance(fragments, str): fragments = fragments.split(',') unknown_fragments = [] @@ -185,7 +189,7 @@ def add_fragments(doc, filename, fragment_loader, is_module=False, section='DOCU doc[doc_key].extend(entries) if 'options' not in fragment and 'attributes' not in fragment: - raise Exception("missing options or attributes in fragment (%s), possibly misformatted?: %s" % (fragment_name, filename)) + raise AnsibleFragmentError("missing options or attributes in fragment (%s), possibly misformatted?: %s" % (fragment_name, filename)) # ensure options themselves are directly merged for doc_key in ['options', 'attributes']: @@ -194,7 +198,7 @@ def add_fragments(doc, filename, fragment_loader, is_module=False, section='DOCU try: merge_fragment(doc[doc_key], fragment.pop(doc_key)) except Exception as e: - raise AnsibleError("%s %s (%s) of unknown type: %s" % (to_native(e), doc_key, fragment_name, filename)) + raise AnsibleFragmentError("%s %s (%s) of unknown type: %s" % (to_native(e), doc_key, fragment_name, filename)) else: doc[doc_key] = fragment.pop(doc_key) @@ -202,10 
+206,10 @@ def add_fragments(doc, filename, fragment_loader, is_module=False, section='DOCU try: merge_fragment(doc, fragment) except Exception as e: - raise AnsibleError("%s (%s) of unknown type: %s" % (to_native(e), fragment_name, filename)) + raise AnsibleFragmentError("%s (%s) of unknown type: %s" % (to_native(e), fragment_name, filename)) if unknown_fragments: - raise AnsibleError('unknown doc_fragment(s) in file {0}: {1}'.format(filename, to_native(', '.join(unknown_fragments)))) + raise AnsibleFragmentError('unknown doc_fragment(s) in file {0}: {1}'.format(filename, to_native(', '.join(unknown_fragments)))) def get_docstring(filename, fragment_loader, verbose=False, ignore_errors=False, collection_name=None, is_module=None, plugin_type=None): @@ -355,3 +359,7 @@ def get_plugin_docs(plugin, plugin_type, loader, fragment_loader, verbose): docs[0]['plugin_name'] = context.resolved_fqcn return docs + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "string_types") diff --git a/lib/ansible/utils/ssh_functions.py b/lib/ansible/utils/ssh_functions.py index bbe8982d630..ed8e086ae39 100644 --- a/lib/ansible/utils/ssh_functions.py +++ b/lib/ansible/utils/ssh_functions.py @@ -20,9 +20,7 @@ from __future__ import annotations import subprocess -from ansible import constants as C from ansible.module_utils.common.text.converters import to_bytes -from ansible.module_utils.compat.paramiko import _paramiko as paramiko from ansible.utils.display import Display display = Display() @@ -50,20 +48,3 @@ def check_for_controlpersist(ssh_executable): _HAS_CONTROLPERSIST[ssh_executable] = has_cp return has_cp - - -def set_default_transport(): - - # deal with 'smart' connection .. one time .. 
- if C.DEFAULT_TRANSPORT == 'smart': - display.deprecated( - msg="The `smart` option for connections is deprecated.", - version="2.20", - help_text="Set the connection plugin directly instead.", - ) - - # see if SSH can support ControlPersist if not use paramiko - if not check_for_controlpersist('ssh') and paramiko is not None: - C.DEFAULT_TRANSPORT = "paramiko" - else: - C.DEFAULT_TRANSPORT = "ssh" diff --git a/lib/ansible/utils/unsafe_proxy.py b/lib/ansible/utils/unsafe_proxy.py index 1a2c6d04b24..07695a6cc9b 100644 --- a/lib/ansible/utils/unsafe_proxy.py +++ b/lib/ansible/utils/unsafe_proxy.py @@ -7,10 +7,10 @@ from __future__ import annotations from collections.abc import Mapping, Set +from ansible.module_utils._internal import _no_six from ansible.module_utils.common.text.converters import to_bytes, to_text from ansible.module_utils.common.collections import is_sequence from ansible._internal._datatag._tags import TrustedAsTemplate -from ansible.module_utils.six import binary_type, text_type __all__ = ['AnsibleUnsafe', 'wrap_var'] @@ -62,9 +62,9 @@ def wrap_var(v): v = _wrap_set(v) elif is_sequence(v): v = _wrap_sequence(v) - elif isinstance(v, binary_type): + elif isinstance(v, bytes): v = AnsibleUnsafeBytes(v) - elif isinstance(v, text_type): + elif isinstance(v, str): v = AnsibleUnsafeText(v) return v @@ -76,3 +76,7 @@ def to_unsafe_bytes(*args, **kwargs): def to_unsafe_text(*args, **kwargs): return wrap_var(to_text(*args, **kwargs)) + + +def __getattr__(importable_name): + return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type") diff --git a/lib/ansible/vars/clean.py b/lib/ansible/vars/clean.py index 8be64b2679a..f9a0722ec59 100644 --- a/lib/ansible/vars/clean.py +++ b/lib/ansible/vars/clean.py @@ -10,7 +10,6 @@ from collections.abc import MutableMapping, MutableSequence from ansible import constants as C from ansible.errors import AnsibleError -from ansible.module_utils import six from ansible.plugins.loader import connection_loader 
from ansible.utils.display import Display @@ -48,7 +47,7 @@ def module_response_deepcopy(v): """ if isinstance(v, dict): ret = v.copy() - items = six.iteritems(ret) + items = ret.items() elif isinstance(v, list): ret = v[:] items = enumerate(ret) @@ -80,7 +79,7 @@ def strip_internal_keys(dirty, exceptions=None): # listify to avoid updating dict while iterating over it for k in list(dirty.keys()): - if isinstance(k, six.string_types): + if isinstance(k, str): if k.startswith('_ansible_') and k not in exceptions: del dirty[k] continue diff --git a/lib/ansible/vars/manager.py b/lib/ansible/vars/manager.py index c48668863a5..0f8054b04e1 100644 --- a/lib/ansible/vars/manager.py +++ b/lib/ansible/vars/manager.py @@ -33,7 +33,6 @@ from ansible.inventory.host import Host from ansible.inventory.helpers import sort_groups, get_group_vars from ansible.inventory.manager import InventoryManager from ansible.module_utils.datatag import native_type_name -from ansible.module_utils.six import text_type from ansible.parsing.dataloader import DataLoader from ansible._internal._templating._engine import TemplateEngine from ansible.plugins.loader import cache_loader @@ -51,12 +50,18 @@ if t.TYPE_CHECKING: display = Display() # deprecated: description='enable top-level facts deprecation' core_version='2.20' -# _DEPRECATE_TOP_LEVEL_FACT_TAG = _tags.Deprecated( -# msg='Top-level facts are deprecated.', -# version='2.24', -# deprecator=_deprecator.ANSIBLE_CORE_DEPRECATOR, -# help_text='Use `ansible_facts` instead.', -# ) +_DEPRECATE_TOP_LEVEL_FACT_TAG = _tags.Deprecated( + msg='INJECT_FACTS_AS_VARS default to `True` is deprecated, top-level facts will not be auto injected after the change.', + version='2.24', + deprecator=_deprecator.ANSIBLE_CORE_DEPRECATOR, + help_text='Use `ansible_facts["fact_name"]` (no `ansible_` prefix) instead.', +) +_DEPRECATE_VARS = _tags.Deprecated( + msg='The internal "vars" dictionary is deprecated.', + version='2.24', + 
deprecator=_deprecator.ANSIBLE_CORE_DEPRECATOR, + help_text='Use the `vars` and `varnames` lookups instead.', +) def _deprecate_top_level_fact(value: t.Any) -> t.Any: @@ -66,8 +71,7 @@ def _deprecate_top_level_fact(value: t.Any) -> t.Any: Unique tag instances are required to achieve the correct de-duplication within a top-level templating operation. """ # deprecated: description='enable top-level facts deprecation' core_version='2.20' - # return _DEPRECATE_TOP_LEVEL_FACT_TAG.tag(value) - return value + return _DEPRECATE_TOP_LEVEL_FACT_TAG.tag(value) def preprocess_vars(a): @@ -289,8 +293,7 @@ class VariableManager: all_vars = _combine_and_track(all_vars, _plugins_inventory([host]), "inventory host_vars for '%s'" % host) all_vars = _combine_and_track(all_vars, _plugins_play([host]), "playbook host_vars for '%s'" % host) - # finally, the facts caches for this host, if it exists - # TODO: cleaning of facts should eventually become part of taskresults instead of vars + # finally, the facts caches for this host, if they exist try: try: facts = self._fact_cache.get(host.name) @@ -299,12 +302,16 @@ class VariableManager: all_vars |= namespace_facts(facts) + inject, origin = C.config.get_config_value_and_origin('INJECT_FACTS_AS_VARS') # push facts to main namespace - if C.INJECT_FACTS_AS_VARS: - deprecated_facts_vars = {k: _deprecate_top_level_fact(v) for k, v in clean_facts(facts).items()} - all_vars = _combine_and_track(all_vars, deprecated_facts_vars, "facts") + if inject: + if origin == 'default': + clean_top = {k: _deprecate_top_level_fact(v) for k, v in clean_facts(facts).items()} + else: + clean_top = clean_facts(facts) + all_vars = _combine_and_track(all_vars, clean_top, "facts") else: - # always 'promote' ansible_local + # always 'promote' ansible_local, even if empty all_vars = _combine_and_track(all_vars, {'ansible_local': facts.get('ansible_local', {})}, "facts") except KeyError: pass @@ -423,8 +430,10 @@ class VariableManager: # 'vars' magic var if task or 
play: - # has to be copy, otherwise recursive ref - all_vars['vars'] = all_vars.copy() + all_vars['vars'] = _DEPRECATE_VARS.tag({}) + for k, v in all_vars.items(): + # has to be copy, otherwise recursive ref + all_vars['vars'][k] = _DEPRECATE_VARS.tag(v) display.debug("done with get_vars()") return all_vars @@ -471,7 +480,7 @@ class VariableManager: if task._role: variables['role_name'] = task._role.get_name(include_role_fqcn=False) variables['role_path'] = task._role._role_path - variables['role_uuid'] = text_type(task._role._uuid) + variables['role_uuid'] = str(task._role._uuid) variables['ansible_collection_name'] = task._role._role_collection variables['ansible_role_name'] = task._role.get_name() diff --git a/lib/ansible/vars/plugins.py b/lib/ansible/vars/plugins.py index 79dca153a81..d00a87cdb1c 100644 --- a/lib/ansible/vars/plugins.py +++ b/lib/ansible/vars/plugins.py @@ -8,8 +8,6 @@ import os from functools import lru_cache from ansible import constants as C -from ansible.errors import AnsibleError -from ansible.inventory.group import InventoryObjectType from ansible.plugins.loader import vars_loader from ansible.utils.display import Display from ansible.utils.vars import combine_vars @@ -26,34 +24,6 @@ def _prime_vars_loader(): vars_loader.get(plugin_name) -def get_plugin_vars(loader, plugin, path, entities): - - data = {} - try: - data = plugin.get_vars(loader, path, entities) - except AttributeError: - if hasattr(plugin, 'get_host_vars') or hasattr(plugin, 'get_group_vars'): - display.deprecated( - msg=f"The vars plugin {plugin.ansible_name} from {plugin._original_path} is relying " - "on the deprecated entrypoints `get_host_vars` and `get_group_vars`.", - version="2.20", - help_text="This plugin should be updated to inherit from `BaseVarsPlugin` and define " - "a `get_vars` method as the main entrypoint instead.", - ) - try: - for entity in entities: - if entity.base_type is InventoryObjectType.HOST: - data |= plugin.get_host_vars(entity.name) - else: - 
data |= plugin.get_group_vars(entity.name) - except AttributeError: - if hasattr(plugin, 'run'): - raise AnsibleError("Cannot use v1 type vars plugin %s from %s" % (plugin._load_name, plugin._original_path)) - else: - raise AnsibleError("Invalid vars plugin %s from %s" % (plugin._load_name, plugin._original_path)) - return data - - # optimized for stateless plugins; non-stateless plugin instances will fall out quickly @lru_cache(maxsize=10) def _plugin_should_run(plugin, stage): @@ -99,7 +69,7 @@ def get_vars_from_path(loader, path, entities, stage): if not _plugin_should_run(plugin, stage): continue - if (new_vars := get_plugin_vars(loader, plugin, path, entities)) != {}: + if (new_vars := plugin.get_vars(loader, path, entities)) != {}: data = combine_vars(data, new_vars) return data diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/roles/testrole/meta/main.yml b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/roles/testrole/meta/main.yml index 5ec7177f2c4..fefe8913df7 100644 --- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/roles/testrole/meta/main.yml +++ b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/roles/testrole/meta/main.yml @@ -13,16 +13,14 @@ argument_specs: description: opt1 description type: "str" required: true + version_added: 1.2.0 alternate: short_description: testns.testcol.testrole short description for alternate entry point description: - Longer description for testns.testcol.testrole alternate entry point. 
author: Ansible Core (@ansible) - attributes: - check_mode: - description: Can run in check_mode and return changed status prediction without modifying target - support: full + version_added: 2.1.0 options: altopt1: description: altopt1 description diff --git a/test/integration/targets/ansible-doc/fakecollrole.output b/test/integration/targets/ansible-doc/fakecollrole.output index efbdee4f63f..2c6f53cb341 100644 --- a/test/integration/targets/ansible-doc/fakecollrole.output +++ b/test/integration/targets/ansible-doc/fakecollrole.output @@ -2,6 +2,8 @@ ENTRY POINT: *alternate* - testns.testcol.testrole short description for alternate entry point +ADDED IN: version 2.1.0 + Longer description for testns.testcol.testrole alternate entry point. @@ -10,13 +12,6 @@ Options (= indicates it is required): = altopt1 altopt1 description type: int -ATTRIBUTES: - - `check_mode:` - description: Can run in check_mode and return changed status prediction without modifying - target - support: full - AUTHOR: Ansible Core (@ansible) EXAMPLES: diff --git a/test/integration/targets/ansible-doc/fakerole.output b/test/integration/targets/ansible-doc/fakerole.output index 3f4302a04e1..e5cc1008a1e 100644 --- a/test/integration/targets/ansible-doc/fakerole.output +++ b/test/integration/targets/ansible-doc/fakerole.output @@ -2,6 +2,8 @@ ENTRY POINT: *main* - test_role1 from roles subdir +ADDED IN: version 1.2.0 + In to am attended desirous raptures *declared* diverted confined at. Collected instantly remaining up certainly to `necessary' as. Over walk dull into son boy door went new. @@ -25,12 +27,17 @@ Options (= indicates it is required): default: null type: str -ATTRIBUTES: - - `diff_mode:` - description: Will return details on what has changed (or possibly needs changing in - check_mode), when in diff mode - details: Not all modules used support this - support: partial +NOTES: + * This is a role. + * More text. 
+ +SEE ALSO: + * Module ansible.builtin.file + The official documentation on the + ansible.builtin.file module. + https://docs.ansible.com/ansible-core/devel/collections/ansible/builtin/file_module.html + * Lookup plugin ansible.builtin.file + Reads a file from the controller. + https://docs.ansible.com/ansible-core/devel/collections/ansible/builtin/file_lookup.html AUTHOR: John Doe (@john), Jane Doe (@jane) diff --git a/test/integration/targets/ansible-doc/roles/test_role1/meta/argument_specs.yml b/test/integration/targets/ansible-doc/roles/test_role1/meta/argument_specs.yml index 42857cd7ff1..eb782da9585 100644 --- a/test/integration/targets/ansible-doc/roles/test_role1/meta/argument_specs.yml +++ b/test/integration/targets/ansible-doc/roles/test_role1/meta/argument_specs.yml @@ -11,11 +11,15 @@ argument_specs: author: - John Doe (@john) - Jane Doe (@jane) - attributes: - diff_mode: - description: Will return details on what has changed (or possibly needs changing in check_mode), when in diff mode - support: partial - details: Not all modules used support this + version_added: 1.2.0 + notes: + - This is a role. + - More text. + seealso: + - module: ansible.builtin.file + - plugin: ansible.builtin.file + plugin_type: lookup + description: Reads a file from the controller. 
options: myopt1: description: diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/doc_fragments/return_doc_fragment.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/doc_fragments/return_doc_fragment.py new file mode 100644 index 00000000000..1c885b4b35b --- /dev/null +++ b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/doc_fragments/return_doc_fragment.py @@ -0,0 +1,19 @@ +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import annotations + + +class ModuleDocFragment: + DOCUMENTATION = r""" +options: {} +""" + + RETURN = r""" +bar: + description: + - Some foo bar. + - P(a.b.asfd#dfsa) this is an error. + returned: success + type: int + sample: 42 +""" diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/doc_fragments_not_exist.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/doc_fragments_not_exist.py new file mode 100644 index 00000000000..6116d869693 --- /dev/null +++ b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/doc_fragments_not_exist.py @@ -0,0 +1,28 @@ +#!/usr/bin/python +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import annotations + +DOCUMENTATION = """ +module: doc_fragments_not_exist +short_description: Non-existing doc fragment +description: A module with a non-existing doc fragment +author: + - Ansible Core Team +extends_documentation_fragment: + - does.not.exist +""" + +EXAMPLES = """#""" + +RETURN = """""" + +from ansible.module_utils.basic import AnsibleModule + + +def main(): + AnsibleModule().exit_json() + + +if __name__ == '__main__': + main() diff --git 
a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/return_fragments.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/return_fragments.py new file mode 100644 index 00000000000..963d368c32c --- /dev/null +++ b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/return_fragments.py @@ -0,0 +1,29 @@ +#!/usr/bin/python +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import annotations + +DOCUMENTATION = """ +module: return_fragments +short_description: Uses return fragments +description: A module with a return doc fragment. +author: + - Ansible Core Team +""" + +EXAMPLES = """#""" + +RETURN = """ +extends_documentation_fragment: + - ns.col.return_doc_fragment +""" + +from ansible.module_utils.basic import AnsibleModule + + +def main(): + AnsibleModule().exit_json(bar=42) + + +if __name__ == '__main__': + main() diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/return_fragments_not_exist.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/return_fragments_not_exist.py new file mode 100644 index 00000000000..69879b3156f --- /dev/null +++ b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/return_fragments_not_exist.py @@ -0,0 +1,29 @@ +#!/usr/bin/python +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import annotations + +DOCUMENTATION = """ +module: return_fragments_not_exist +short_description: Non-existing return doc fragment +description: A module with a non-existing return doc fragment. 
+author: + - Ansible Core Team +""" + +EXAMPLES = """#""" + +RETURN = """ +extends_documentation_fragment: + - does.not.exist +""" + +from ansible.module_utils.basic import AnsibleModule + + +def main(): + AnsibleModule().exit_json(bar=42) + + +if __name__ == '__main__': + main() diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/expected.txt b/test/integration/targets/ansible-test-sanity-validate-modules/expected.txt index d3a1ffa70ba..d178a8b9207 100644 --- a/test/integration/targets/ansible-test-sanity-validate-modules/expected.txt +++ b/test/integration/targets/ansible-test-sanity-validate-modules/expected.txt @@ -3,6 +3,7 @@ plugins/modules/check_mode_attribute_1.py:0:0: attributes-check-mode: The module plugins/modules/check_mode_attribute_2.py:0:0: attributes-check-mode: The module does not declare support for check mode, but the check_mode attribute's support value is 'partial' and not 'none' plugins/modules/check_mode_attribute_3.py:0:0: attributes-check-mode: The module does declare support for check mode, but the check_mode attribute's support value is 'none' plugins/modules/check_mode_attribute_4.py:0:0: attributes-check-mode-details: The module declares it does not fully support check mode, but has no details on what exactly that means +plugins/modules/doc_fragments_not_exist.py:0:0: doc-fragment-error: Error while adding fragments: unknown doc_fragment(s) in file plugins/modules/doc_fragments_not_exist.py: does.not.exist plugins/modules/import_order.py:7:0: import-before-documentation: Import found before documentation variables. All imports must appear below DOCUMENTATION/EXAMPLES/RETURN. plugins/modules/invalid_argument_spec_extra_key.py:0:0: invalid-ansiblemodule-schema: AnsibleModule.argument_spec.foo.extra_key: extra keys not allowed @ data['argument_spec']['foo']['extra_key']. 
Got 'bar' plugins/modules/invalid_argument_spec_incorrect_context.py:0:0: invalid-ansiblemodule-schema: AnsibleModule.argument_spec.foo.context: expected dict for dictionary value @ data['argument_spec']['foo']['context']. Got 'bar' @@ -14,6 +15,8 @@ plugins/modules/invalid_yaml_syntax.py:11:15: invalid-examples: EXAMPLES is not plugins/modules/invalid_yaml_syntax.py:15:15: return-syntax-error: RETURN is not valid YAML plugins/modules/option_name_casing.py:0:0: option-equal-up-to-casing: Multiple options/aliases are equal up to casing: option 'Bar', alias 'baR' of option 'bam', alias 'bar' of option 'foo' plugins/modules/option_name_casing.py:0:0: option-equal-up-to-casing: Multiple options/aliases are equal up to casing: option 'Foo', option 'foo' +plugins/modules/return_fragments.py:0:0: invalid-documentation-markup: RETURN.bar.description.1: Directive "P(a.b.asfd#dfsa)" must contain a valid plugin type; found "dfsa" @ data['bar']['description'][1]. Got 'P(a.b.asfd#dfsa) this is an error.' +plugins/modules/return_fragments_not_exist.py:0:0: return-fragment-error: Error while adding fragments: unknown doc_fragment(s) in file plugins/modules/return_fragments_not_exist.py: does.not.exist plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a11.suboptions.b1.description.0: While parsing "V(C\(" at index 1: Unnecessarily escaped "(" @ data['options']['a11']['suboptions']['b1']['description'][0]. Got 'V(C\\(foo\\)).' plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a11.suboptions.b1.description.2: While parsing "P(foo.bar#baz)" at index 1: Plugin name "foo.bar" is not a FQCN @ data['options']['a11']['suboptions']['b1']['description'][2]. Got 'P(foo.bar#baz).' 
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a11.suboptions.b1.description.3: While parsing "P(foo.bar.baz)" at index 1: Parameter "foo.bar.baz" is not of the form FQCN#type @ data['options']['a11']['suboptions']['b1']['description'][3]. Got 'P(foo.bar.baz).' diff --git a/test/integration/targets/apt_repository/tasks/apt.yml b/test/integration/targets/apt_repository/tasks/apt.yml index 9d51e16e4bd..f1706ea0302 100644 --- a/test/integration/targets/apt_repository/tasks/apt.yml +++ b/test/integration/targets/apt_repository/tasks/apt.yml @@ -301,7 +301,7 @@ - assert: that: - result is failed - - result.msg.startswith("argument 'repo' is of type NoneType and we were unable to convert to str") + - result.msg == 'Please set argument \'repo\' to a non-empty value' - name: Test apt_repository with an empty value for repo apt_repository: diff --git a/test/integration/targets/blockinfile/tasks/encoding.yml b/test/integration/targets/blockinfile/tasks/encoding.yml new file mode 100644 index 00000000000..575dac163ae --- /dev/null +++ b/test/integration/targets/blockinfile/tasks/encoding.yml @@ -0,0 +1,110 @@ +- name: Create a new file and add block + ansible.builtin.blockinfile: + path: "{{ remote_tmp_dir_test }}/encoding_file.txt" + block: | + This is a block added to the beginning of the file. + Line BOF 1 + Line BOF 2 + marker: "# {mark} ANSIBLE MANAGED BLOCK FOR BOF" + insertbefore: BOF + create: yes + encoding: cp273 + register: add_block_bof + +- name: add block at end of file + ansible.builtin.blockinfile: + path: "{{ remote_tmp_dir_test }}/encoding_file.txt" + block: | + This is a block added to the end of the file. 
+ Line EOF 1 + Line EOF 2 + marker: "# {mark} ANSIBLE MANAGED BLOCK FOR EOF" + insertafter: EOF + encoding: cp273 + register: add_block_eof + +- name: stat the new file + stat: + path: "{{ remote_tmp_dir_test }}/encoding_file.txt" + register: result1 + +- name: check idempotency by adding same block at end of file again + ansible.builtin.blockinfile: + path: "{{ remote_tmp_dir_test }}/encoding_file.txt" + block: | + This is a block added to the end of the file. + Line EOF 1 + Line EOF 2 + marker: "# {mark} ANSIBLE MANAGED BLOCK FOR EOF" + insertafter: EOF + encoding: cp273 + register: add_block_eof_1 + +- name: assert the results for adding block EOF and BOF + assert: + that: + - add_block_bof is changed and add_block_eof is changed + - 'add_block_bof.msg == "File created"' + - 'add_block_eof.msg == "Block inserted"' + - result1.stat.exists + - result1.stat.checksum == '724f92d56c2bdaf8e701359e71091bce898af988' + - add_block_eof_1 is not changed + +- name: Add block after Line + blockinfile: + path: "{{ remote_tmp_dir_test }}/encoding_file.txt" + insertafter: Line BOF 1 + block: | + This is block added after Line BOF 1 + Line Added After BOF 1 1 + Line Added After BOF 1 2 + marker: "# {mark} ANSIBLE MANAGED BLOCK FOR AFTER_LINE" + encoding: cp273 + register: insert_after_line + +- name: Add block Before Line + blockinfile: + path: "{{ remote_tmp_dir_test }}/encoding_file.txt" + insertbefore: Line EOF 2 + block: | + This is block added Before Line EOF 2 + Line Added Before EOF 1 1 + Line Added Before EOF 1 2 + marker: "# {mark} ANSIBLE MANAGED BLOCK FOR BEFORE_LINE" + encoding: cp273 + register: insert_before_line + +- name: stat the new file + stat: + path: "{{ remote_tmp_dir_test }}/encoding_file.txt" + register: result1 + +- name: assert the results for Insert After and Before line + assert: + that: + - insert_after_line is changed and insert_before_line is changed + - 'insert_after_line.msg == "Block inserted"' + - 'insert_before_line.msg == "Block inserted"' + 
- result1.stat.exists + - result1.stat.checksum == '11af61de9ed9e9182eee8a2c271921d0dd1992c9' + +- name: Delete the custom Block + ansible.builtin.blockinfile: + path: "{{ remote_tmp_dir_test }}/encoding_file.txt" + marker: "# {mark} ANSIBLE MANAGED BLOCK FOR EOF" + state: absent + encoding: cp273 + register: delete_custom_block + +- name: stat the new file + stat: + path: "{{ remote_tmp_dir_test }}/encoding_file.txt" + register: result1 + +- name: assert the results for Insert After and Before with Regexp + assert: + that: + - delete_custom_block is changed + - 'delete_custom_block.msg == "Block removed"' + - result1.stat.exists + - result1.stat.checksum == '6e192ae0a60a7f0e6299a2918b6e6708a59b8183' diff --git a/test/integration/targets/blockinfile/tasks/main.yml b/test/integration/targets/blockinfile/tasks/main.yml index f26cb165e9c..f6f39534b29 100644 --- a/test/integration/targets/blockinfile/tasks/main.yml +++ b/test/integration/targets/blockinfile/tasks/main.yml @@ -42,3 +42,4 @@ - import_tasks: multiline_search.yml - import_tasks: append_newline.yml - import_tasks: prepend_newline.yml +- import_tasks: encoding.yml diff --git a/test/integration/targets/builtin_vars_prompt/test-vars_prompt.py b/test/integration/targets/builtin_vars_prompt/test-vars_prompt.py index 435a7eb979a..e85c39e987b 100644 --- a/test/integration/targets/builtin_vars_prompt/test-vars_prompt.py +++ b/test/integration/targets/builtin_vars_prompt/test-vars_prompt.py @@ -6,12 +6,6 @@ import os import pexpect import sys -from ansible.module_utils.six import PY2 - -if PY2: - log_buffer = sys.stdout -else: - log_buffer = sys.stdout.buffer env_vars = { 'ANSIBLE_ROLES_PATH': './roles', @@ -36,7 +30,7 @@ def run_test(playbook, test_spec, args=None, timeout=10, env=None): env=env, ) - vars_prompt_test.logfile = log_buffer + vars_prompt_test.logfile = sys.stdout.buffer for item in test_spec[0]: vars_prompt_test.expect(item[0]) if item[1]: diff --git 
a/test/integration/targets/callback_default/callback_default.out.result_format_yaml_indent_2.stderr b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_indent_2.stderr new file mode 100644 index 00000000000..d3e07d472db --- /dev/null +++ b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_indent_2.stderr @@ -0,0 +1,2 @@ ++ ansible-playbook -i inventory test.yml +++ set +x diff --git a/test/integration/targets/callback_default/callback_default.out.result_format_yaml_indent_2.stdout b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_indent_2.stdout new file mode 100644 index 00000000000..50743cc6d7c --- /dev/null +++ b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_indent_2.stdout @@ -0,0 +1,132 @@ + +PLAY [testhost] **************************************************************** + +TASK [Changed task] ************************************************************ +changed: [testhost] + +TASK [Ok task] ***************************************************************** +ok: [testhost] + +TASK [Failed task] ************************************************************* +[ERROR]: Task failed: Action failed: no reason +Origin: TEST_PATH/test.yml:16:7 + +14 changed_when: false +15 +16 - name: Failed task + ^ column 7 + +fatal: [testhost]: FAILED! 
=> + changed: false + msg: no reason +...ignoring + +TASK [Skipped task] ************************************************************ +skipping: [testhost] + +TASK [Task with var in name (foo bar)] ***************************************** +changed: [testhost] + +TASK [Loop task] *************************************************************** +changed: [testhost] => (item=foo-1) +changed: [testhost] => (item=foo-2) +changed: [testhost] => (item=foo-3) + +TASK [debug loop] ************************************************************** +changed: [testhost] => (item=debug-1) => + msg: debug-1 +[ERROR]: Task failed: Action failed: debug-2 +Origin: TEST_PATH/test.yml:38:7 + +36 +37 # detect "changed" debug tasks being hidden with display_ok_tasks=false +38 - name: debug loop + ^ column 7 + +failed: [testhost] (item=debug-2) => + msg: debug-2 +ok: [testhost] => (item=debug-3) => + msg: debug-3 +skipping: [testhost] => (item=debug-4) +fatal: [testhost]: FAILED! => + msg: One or more items failed +...ignoring + +TASK [EXPECTED FAILURE Failed task to be rescued] ****************************** +[ERROR]: Task failed: Action failed: Failed as requested from task +Origin: TEST_PATH/test.yml:54:11 + +52 +53 - block: +54 - name: EXPECTED FAILURE Failed task to be rescued + ^ column 11 + +fatal: [testhost]: FAILED! 
=> + changed: false + msg: Failed as requested from task + +TASK [Rescue task] ************************************************************* +changed: [testhost] + +TASK [include_tasks] *********************************************************** +included: .../test/integration/targets/callback_default/include_me.yml for testhost => (item=1) + +TASK [debug] ******************************************************************* +ok: [testhost] => + item: 1 + +TASK [copy] ******************************************************************** +changed: [testhost] + +TASK [replace] ***************************************************************** +--- before: .../test_diff.txt ++++ after: .../test_diff.txt +@@ -1 +1 @@ +-foo +\ No newline at end of file ++bar +\ No newline at end of file + +changed: [testhost] + +TASK [replace] ***************************************************************** +ok: [testhost] + +TASK [debug] ******************************************************************* +skipping: [testhost] + +TASK [debug] ******************************************************************* +skipping: [testhost] + +TASK [debug] ******************************************************************* +skipping: [testhost] => (item=1) +skipping: [testhost] => (item=2) +skipping: [testhost] + +RUNNING HANDLER [Test handler 1] *********************************************** +changed: [testhost] + +RUNNING HANDLER [Test handler 2] *********************************************** +ok: [testhost] + +RUNNING HANDLER [Test handler 3] *********************************************** +changed: [testhost] + +PLAY [testhost] **************************************************************** + +TASK [First free task] ********************************************************* +changed: [testhost] + +TASK [Second free task] ******************************************************** +changed: [testhost] + +TASK [Include some tasks] ****************************************************** +included: 
.../test/integration/targets/callback_default/include_me.yml for testhost => (item=1) + +TASK [debug] ******************************************************************* +ok: [testhost] => + item: 1 + +PLAY RECAP ********************************************************************* +testhost : ok=19 changed=11 unreachable=0 failed=0 skipped=4 rescued=1 ignored=2 + diff --git a/test/integration/targets/callback_default/runme.sh b/test/integration/targets/callback_default/runme.sh index 473518b4af9..9936b9ad664 100755 --- a/test/integration/targets/callback_default/runme.sh +++ b/test/integration/targets/callback_default/runme.sh @@ -203,7 +203,10 @@ export ANSIBLE_DISPLAY_FAILED_STDERR=0 export ANSIBLE_CALLBACK_RESULT_FORMAT=yaml run_test result_format_yaml test.yml +export ANSIBLE_CALLBACK_RESULT_INDENTATION=2 +run_test result_format_yaml_indent_2 test.yml export ANSIBLE_CALLBACK_RESULT_FORMAT=json +unset ANSIBLE_CALLBACK_RESULT_INDENTATION export ANSIBLE_CALLBACK_RESULT_FORMAT=yaml export ANSIBLE_CALLBACK_FORMAT_PRETTY=1 diff --git a/test/integration/targets/collections/test_task_resolved_plugin.sh b/test/integration/targets/collections/test_task_resolved_plugin.sh index 444b4f11094..dd293e5f6b2 100755 --- a/test/integration/targets/collections/test_task_resolved_plugin.sh +++ b/test/integration/targets/collections/test_task_resolved_plugin.sh @@ -15,6 +15,22 @@ for result in "${action_resolution[@]}"; do grep -q out.txt -e "$result" done +# Test local_action/action warning +export ANSIBLE_TEST_ON_TASK_START=True +ansible-playbook -i debug, test_task_resolved_plugin/dynamic_action.yml "$@" 2>&1 | tee out.txt +grep -q out.txt -e "A plugin is sampling the task's resolved_action when it is not resolved" +grep -q out.txt -e "v2_playbook_on_task_start: {{ inventory_hostname }} == None" +grep -q out.txt -e "v2_runner_on_ok: debug == ansible.builtin.debug" +grep -q out.txt -e "v2_runner_item_on_ok: debug == ansible.builtin.debug" + +# Test static actions don't cause a 
warning +ansible-playbook test_task_resolved_plugin/unqualified.yml "$@" 2>&1 | tee out.txt +grep -v out.txt -e "A plugin is sampling the task's resolved_action when it is not resolved" +for result in "${action_resolution[@]}"; do + grep -q out.txt -e "v2_playbook_on_task_start: $result" +done +unset ANSIBLE_TEST_ON_TASK_START + ansible-playbook test_task_resolved_plugin/unqualified_and_collections_kw.yml "$@" | tee out.txt action_resolution=( "legacy_action == legacy_action" diff --git a/test/integration/targets/collections/test_task_resolved_plugin/callback_plugins/display_resolved_action.py b/test/integration/targets/collections/test_task_resolved_plugin/callback_plugins/display_resolved_action.py index d8f84824a6f..db235cab7d1 100644 --- a/test/integration/targets/collections/test_task_resolved_plugin/callback_plugins/display_resolved_action.py +++ b/test/integration/targets/collections/test_task_resolved_plugin/callback_plugins/display_resolved_action.py @@ -9,6 +9,12 @@ DOCUMENTATION = """ short_description: Displays the requested and resolved actions at the end of a playbook. description: - Displays the requested and resolved actions in the format "requested == resolved". + options: + test_on_task_start: + description: Test using task.resolved_action before it is reliably resolved. + default: False + env: + - name: ANSIBLE_TEST_ON_TASK_START requirements: - Enable in configuration. 
""" @@ -25,11 +31,14 @@ class CallbackModule(CallbackBase): def __init__(self, *args, **kwargs): super(CallbackModule, self).__init__(*args, **kwargs) - self.requested_to_resolved = {} - def v2_runner_on_ok(self, result): - self.requested_to_resolved[result.task.action] = result.task.resolved_action + def v2_playbook_on_task_start(self, task, is_conditional): + if self.get_option("test_on_task_start"): + self._display.display(f"v2_playbook_on_task_start: {task.action} == {task.resolved_action}") + + def v2_runner_item_on_ok(self, result): + self._display.display(f"v2_runner_item_on_ok: {result.task.action} == {result.task.resolved_action}") - def v2_playbook_on_stats(self, stats): - for requested, resolved in self.requested_to_resolved.items(): - self._display.display("%s == %s" % (requested, resolved), screen_only=True) + def v2_runner_on_ok(self, result): + if not result.task.loop: + self._display.display(f"v2_runner_on_ok: {result.task.action} == {result.task.resolved_action}") diff --git a/test/integration/targets/collections/test_task_resolved_plugin/dynamic_action.yml b/test/integration/targets/collections/test_task_resolved_plugin/dynamic_action.yml new file mode 100644 index 00000000000..067be0810ab --- /dev/null +++ b/test/integration/targets/collections/test_task_resolved_plugin/dynamic_action.yml @@ -0,0 +1,10 @@ +--- +- hosts: all + gather_facts: no + tasks: + - name: Run dynamic action + action: "{{ inventory_hostname }}" + + - name: Run dynamic action in loop + action: "{{ inventory_hostname }}" + loop: [1] diff --git a/test/integration/targets/collections/test_task_resolved_plugin/unqualified.yml b/test/integration/targets/collections/test_task_resolved_plugin/unqualified.yml index 076b8cc7123..d0b042866be 100644 --- a/test/integration/targets/collections/test_task_resolved_plugin/unqualified.yml +++ b/test/integration/targets/collections/test_task_resolved_plugin/unqualified.yml @@ -4,5 +4,5 @@ tasks: - legacy_action: - legacy_module: - - debug: - - 
ping: + - local_action: debug + - action: ping diff --git a/test/integration/targets/deprecations/injectfacts.yml b/test/integration/targets/deprecations/injectfacts.yml new file mode 100644 index 00000000000..7e356e2e3a2 --- /dev/null +++ b/test/integration/targets/deprecations/injectfacts.yml @@ -0,0 +1,5 @@ +- hosts: localhost + gather_facts: true + tasks: + - debug: + msg: '{{ansible_distribution}}' diff --git a/test/integration/targets/deprecations/runme.sh b/test/integration/targets/deprecations/runme.sh index 1d291ce8873..ac7b209730d 100755 --- a/test/integration/targets/deprecations/runme.sh +++ b/test/integration/targets/deprecations/runme.sh @@ -55,3 +55,6 @@ export ANSIBLE_CACHE_PLUGIN=notjsonfile # check for plugin deprecation [ "$(ansible-doc -t cache notjsonfile --playbook-dir ./ | grep -c 'DEPRECATED:')" -eq "1" ] + +# Injection default is deprecated +[ "$(ANSIBLE_INJECT_FACT_VARS=1 ansible-playbook injectfacts.yml 2>&1 | grep -c 'INJECT_FACTS_AS_VARS')" -eq "0" ] diff --git a/test/integration/targets/dnf/tasks/dnfinstallroot.yml b/test/integration/targets/dnf/tasks/dnfinstallroot.yml index 19f67069096..7ec646b0fea 100644 --- a/test/integration/targets/dnf/tasks/dnfinstallroot.yml +++ b/test/integration/targets/dnf/tasks/dnfinstallroot.yml @@ -33,3 +33,33 @@ file: path: "/{{ dnfroot.stdout }}/" state: absent + +- block: + - name: Clean setup + file: + path: "{{ remote_tmp_dir }}/file_root" + state: absent + + - name: Setup - create invalid installroot file (not a dir) + copy: + content: '' + dest: "{{ remote_tmp_dir }}/file_root" + + - name: Try with invalid installroot + dnf: + name: bash + state: present + installroot: "{{ remote_tmp_dir }}/file_root" + ignore_errors: yes + register: invalid_install_root + + - name: Check if invalid installroot failed + assert: + that: + - invalid_install_root.failed + - "'Installroot ' ~ remote_tmp_dir ~ '/file_root must be a directory' in invalid_install_root.msg" + always: + - name: Cleanup invalid installroot + 
file: + path: "{{ remote_tmp_dir }}/file_root" + state: absent diff --git a/test/integration/targets/filter_core/tasks/main.yml b/test/integration/targets/filter_core/tasks/main.yml index c11b21c40c6..0d982a7736d 100644 --- a/test/integration/targets/filter_core/tasks/main.yml +++ b/test/integration/targets/filter_core/tasks/main.yml @@ -430,6 +430,13 @@ - '123|ternary("seven", "eight") == "seven"' - '"haha"|ternary("seven", "eight") == "seven"' +- name: Verify ternary does not evaluate unused values + assert: + that: + - (false | ternary(undefined_variable, 'seven')) == (false | ternary(d.no_such_key, 'seven')) + vars: + d: {} + - name: Verify regex_escape raises on posix_extended (failure expected) set_fact: foo: '{{"]]^"|regex_escape(re_type="posix_extended")}}' diff --git a/test/integration/targets/handlers/rescue_flush_handlers.yml b/test/integration/targets/handlers/rescue_flush_handlers.yml new file mode 100644 index 00000000000..065743654a8 --- /dev/null +++ b/test/integration/targets/handlers/rescue_flush_handlers.yml @@ -0,0 +1,16 @@ +- hosts: localhost + gather_facts: false + tasks: + - block: + - debug: + changed_when: true + notify: h1 + + - meta: flush_handlers + rescue: + - assert: + that: + - ansible_failed_task is defined + handlers: + - name: h1 + fail: diff --git a/test/integration/targets/handlers/runme.sh b/test/integration/targets/handlers/runme.sh index 0cc7b3c36ca..648eb87bb91 100755 --- a/test/integration/targets/handlers/runme.sh +++ b/test/integration/targets/handlers/runme.sh @@ -230,3 +230,5 @@ ansible-playbook handler_notify_earlier_handler.yml "$@" 2>&1 | tee out.txt ANSIBLE_DEBUG=1 ansible-playbook tagged_play.yml --skip-tags the_whole_play "$@" 2>&1 | tee out.txt [ "$(grep out.txt -ce 'META: triggered running handlers')" = "0" ] [ "$(grep out.txt -ce 'handler_ran')" = "0" ] + +ansible-playbook rescue_flush_handlers.yml "$@" diff --git a/test/integration/targets/hardware_facts/tasks/Linux.yml 
b/test/integration/targets/hardware_facts/tasks/Linux.yml index 885aa0ec930..c15cce30036 100644 --- a/test/integration/targets/hardware_facts/tasks/Linux.yml +++ b/test/integration/targets/hardware_facts/tasks/Linux.yml @@ -52,7 +52,25 @@ - ansible_lvm.lvs.two.vg == 'first' - ansible_lvm.lvs.uno.vg == 'second' - ansible_lvm.vgs.first.num_lvs == "2" + - ansible_facts['lvm']['vgs']['first']['lvs'] | sort == ['one', 'two'] - ansible_lvm.vgs.second.num_lvs == "1" + - ansible_facts['lvm']['vgs']['second']['lvs'] | sort == ['uno'] + + - name: Create another lv using duplicate name + command: lvcreate -L 4M second --name two + + - name: Gather facts + setup: + + - assert: + that: + - ansible_facts['lvm']['vgs']['first']['lvs'] | sort == ['one', 'two'] + - ansible_facts['lvm']['vgs']['second']['lvs'] | sort == ['two', 'uno'] + # only one lv named 'two' is represented in the top level lvs fact + - ansible_facts['lvm']['lvs']['two']['vg'] == 'second' + - (vgs_lvs | unique | sort) == (ansible_facts['lvm']['lvs'] | sort) + vars: + vgs_lvs: "{{ ansible_facts['lvm']['vgs'].values() | map(attribute='lvs') | map('list') | flatten }}" always: - name: remove lvs diff --git a/test/integration/targets/include_vars/tasks/main.yml b/test/integration/targets/include_vars/tasks/main.yml index cfb6e4bfd22..072675430e8 100644 --- a/test/integration/targets/include_vars/tasks/main.yml +++ b/test/integration/targets/include_vars/tasks/main.yml @@ -61,6 +61,61 @@ - no_auto_unsafe.yml register: include_every_dir +- name: invalid type for ignore_files + include_vars: + dir: vars + ignore_files: 123 + register: invalid_ignore_files + ignore_errors: True + +- name: non-existent directory + include_vars: + dir: non_existent_dir + register: non_existent_dir_results + ignore_errors: True + +- name: invalid option + include_vars: + blah: 123 + ignore_errors: True + register: invalid_option_results + +- name: provide non-compatible files and dir parameter together + include_vars: + dir: vars + file: 
main.yml + ignore_errors: True + register: invalid_files_dir_results + +- name: provide file value to a dir param + include_vars: + dir: environments/development/all.yml + register: non_file_dir_results + ignore_errors: True + +- name: invalid extensions value + include_vars: + dir: vars + extensions: 123 + register: invalid_extensions_results + ignore_errors: True + +- name: verify all parameters are validated + assert: + that: + - "invalid_ignore_files is failed" + - "'option must be a list' in invalid_ignore_files.msg" + - "non_existent_dir_results is failed" + - "'directory does not exist' in non_existent_dir_results.message" + - "invalid_option_results is failed" + - "'is not a valid option' in invalid_option_results.msg" + - "invalid_files_dir_results is failed" + - "'these are incompatible' in invalid_files_dir_results.msg" + - "non_file_dir_results is failed" + - "'is not a directory' in non_file_dir_results.message" + - "invalid_extensions_results is failed" + - "'option must be a list' in invalid_extensions_results.msg" + - name: verify that the correct files have been loaded and overwrite based on alphabetical order assert: that: diff --git a/test/integration/targets/lineinfile/tasks/main.yml b/test/integration/targets/lineinfile/tasks/main.yml index 752e96dff6b..a21ffa70e6a 100644 --- a/test/integration/targets/lineinfile/tasks/main.yml +++ b/test/integration/targets/lineinfile/tasks/main.yml @@ -1416,6 +1416,134 @@ - testend2 is changed - testend_file.stat.checksum == 'ef36116966836ce04f6b249fd1837706acae4e19' +# Test lineinfile operations in a non-UTF8 encoding + +- name: encoding - create a file + lineinfile: + path: "{{ remote_tmp_dir }}/test-encoding-file.txt" + create: True + line: "Test line encoded in cp273" + state: present + encoding: cp273 + register: result1 + +- name: assert that the file was created + assert: + that: + - result1 is changed + +- name: stat the new file + stat: + path: "{{ remote_tmp_dir }}/test-encoding-file.txt" + 
register: result2 + +- name: assert the file content is as expected. + assert: + that: + - result2.stat.exists + - result2.stat.checksum == 'e0bf2bdca94727d03483a9adab66543bcec4b99a' + + +- name: encoding - write a line + lineinfile: + line: "Another test line." + state: present + path: "{{ remote_tmp_dir }}/test-encoding-file.txt" + encoding: cp273 + register: result1 + +- name: assert that the file was changed. + assert: + that: + - result1 is changed + +- name: stat the file. + stat: + path: "{{ remote_tmp_dir }}/test-encoding-file.txt" + register: result2 + +- name: assert the file content is as expected. + assert: + that: + - result2.stat.exists + - result2.stat.checksum == '1ac77f83a69c53b54fc16111694abe07e5a6703b' + + +- name: encoding - attempt to write the same line again. + lineinfile: + line: "Another test line." + state: present + path: "{{ remote_tmp_dir }}/test-encoding-file.txt" + encoding: cp273 + register: result1 + +- name: assert that the file was not changed. + assert: + that: + - result1 is not changed + +- name: stat the file. + stat: + path: "{{ remote_tmp_dir }}/test-encoding-file.txt" + register: result2 + +- name: assert the file content is as expected. + assert: + that: + - result2.stat.exists + - result2.stat.checksum == '1ac77f83a69c53b54fc16111694abe07e5a6703b' + + +- name: encoding - write a line before regex match + lineinfile: + line: "In between test line." + state: present + insertbefore: "^Another test line.$" + path: "{{ remote_tmp_dir }}/test-encoding-file.txt" + encoding: cp273 + register: result1 + +- name: assert that the file was changed. + assert: + that: + - result1 is changed + +- name: stat the file. + stat: + path: "{{ remote_tmp_dir }}/test-encoding-file.txt" + register: result2 + +- name: assert the file content is as expected. 
+ assert: + that: + - result2.stat.exists + - result2.stat.checksum == 'e48e3177e79d16871dcfc25a92333fa92d4f40bd' + +- name: encoding - remove a line + lineinfile: + regexp: "^Another test line.$" + state: absent + path: "{{ remote_tmp_dir }}/test-encoding-file.txt" + encoding: cp273 + register: result1 + +- name: assert that the file was changed. + assert: + that: + - result1 is changed + +- name: stat the file. + stat: + path: "{{ remote_tmp_dir }}/test-encoding-file.txt" + register: result2 + +- name: assert the file content is as expected. + assert: + that: + - result2.stat.exists + - result2.stat.checksum == 'fd917ecb76b768a9e87d1a5c65f7fae4f0f2b4ea' + + - name: Integration test for issue 76727 block: - name: Create a symbolic link for the test file diff --git a/test/integration/targets/lookup_ini/test_errors.yml b/test/integration/targets/lookup_ini/test_errors.yml index 8ce7c1aaeaf..161ee0c5435 100644 --- a/test/integration/targets/lookup_ini/test_errors.yml +++ b/test/integration/targets/lookup_ini/test_errors.yml @@ -2,28 +2,25 @@ hosts: testhost tasks: - - name: Test for failure on Python 3 - when: ansible_facts.python.version_info[0] >= 3 - block: - - name: Lookup a file with duplicate keys - debug: - msg: "{{ lookup('ini', 'name', file='duplicate.ini', section='reggae') }}" - ignore_errors: yes - register: duplicate - - - name: Lookup a file with keys that differ only in case - debug: - msg: "{{ lookup('ini', 'name', file='duplicate_case_check.ini', section='reggae') }}" - ignore_errors: yes - register: duplicate_case_sensitive - - - name: Ensure duplicate key errors were handled properly - assert: - that: - - duplicate is failed - - "'Duplicate option in' in duplicate.msg" - - duplicate_case_sensitive is failed - - "'Duplicate option in' in duplicate_case_sensitive.msg" + - name: Lookup a file with duplicate keys + debug: + msg: "{{ lookup('ini', 'name', file='duplicate.ini', section='reggae') }}" + ignore_errors: yes + register: duplicate + + - name: 
Lookup a file with keys that differ only in case + debug: + msg: "{{ lookup('ini', 'name', file='duplicate_case_check.ini', section='reggae') }}" + ignore_errors: yes + register: duplicate_case_sensitive + + - name: Ensure duplicate key errors were handled properly + assert: + that: + - duplicate is failed + - "'Duplicate option in' in duplicate.msg" + - duplicate_case_sensitive is failed + - "'Duplicate option in' in duplicate_case_sensitive.msg" - name: Lookup a file with a missing section debug: diff --git a/test/integration/targets/old_style_vars_plugins/deprecation_warning/v2_vars_plugin.py b/test/integration/targets/old_style_vars_plugins/deprecation_warning/v2_vars_plugin.py deleted file mode 100644 index 723e0a40cd1..00000000000 --- a/test/integration/targets/old_style_vars_plugins/deprecation_warning/v2_vars_plugin.py +++ /dev/null @@ -1,9 +0,0 @@ -from __future__ import annotations - - -class VarsModule: - def get_host_vars(self, entity): - return {} - - def get_group_vars(self, entity): - return {} diff --git a/test/integration/targets/old_style_vars_plugins/runme.sh b/test/integration/targets/old_style_vars_plugins/runme.sh index 2f56e9d0777..7064e932acf 100755 --- a/test/integration/targets/old_style_vars_plugins/runme.sh +++ b/test/integration/targets/old_style_vars_plugins/runme.sh @@ -12,13 +12,6 @@ export ANSIBLE_VARS_PLUGINS=./vars_plugins export ANSIBLE_VARS_ENABLED=require_enabled [ "$(ansible-inventory -i localhost, --list --yaml all "$@" | grep -c 'require_enabled')" = "1" ] -# Test deprecated features -export ANSIBLE_VARS_PLUGINS=./deprecation_warning -WARNING="The vars plugin v2_vars_plugin .* is relying on the deprecated entrypoints \`get_host_vars\` and \`get_group_vars\`" -ANSIBLE_DEPRECATION_WARNINGS=True ANSIBLE_NOCOLOR=True ANSIBLE_FORCE_COLOR=False \ - ansible-inventory -i localhost, --list all "$@" 2> err.txt -ansible localhost -m debug -a "msg={{ lookup('file', 'err.txt') | regex_replace('\n', '') }}" | grep "$WARNING" - # Test how 
many times vars plugins are loaded for a simple play containing a task # host_group_vars is stateless, so we can load it once and reuse it, every other vars plugin should be instantiated before it runs cat << EOF > "test_task_vars.yml" diff --git a/test/integration/targets/pause/test-pause.py b/test/integration/targets/pause/test-pause.py index 6fcb5bf10a2..bdde0a47380 100755 --- a/test/integration/targets/pause/test-pause.py +++ b/test/integration/targets/pause/test-pause.py @@ -7,7 +7,6 @@ import pexpect import sys import termios -from ansible.module_utils.six import PY2 args = sys.argv[1:] @@ -22,11 +21,6 @@ try: except Exception: backspace = b'\x7f' -if PY2: - log_buffer = sys.stdout -else: - log_buffer = sys.stdout.buffer - os.environ.update(env_vars) # -- Plain pause -- # @@ -40,7 +34,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r'Press enter to continue, Ctrl\+C to interrupt:') pause_test.send('\r') pause_test.expect('Task after pause') @@ -56,7 +50,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r'Press enter to continue, Ctrl\+C to interrupt:') pause_test.send('\x03') pause_test.expect("Press 'C' to continue the play or 'A' to abort") @@ -74,7 +68,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r'Press enter to continue, Ctrl\+C to interrupt:') pause_test.send('\x03') pause_test.expect("Press 'C' to continue the play or 'A' to abort") @@ -94,7 +88,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r'Custom prompt:') pause_test.send('\r') pause_test.expect('Task after pause') @@ -110,7 +104,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile 
= sys.stdout.buffer pause_test.expect(r'Custom prompt:') pause_test.send('\x03') pause_test.expect("Press 'C' to continue the play or 'A' to abort") @@ -128,7 +122,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r'Custom prompt:') pause_test.send('\x03') pause_test.expect("Press 'C' to continue the play or 'A' to abort") @@ -149,7 +143,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r'Pausing for \d+ seconds') pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)") pause_test.expect('Task after pause') @@ -164,7 +158,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r'Pausing for \d+ seconds') pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)") pause_test.send('\n') # test newline does not stop the prompt - waiting for a timeout or ctrl+C @@ -184,7 +178,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r'Pausing for \d+ seconds') pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)") pause_test.send('\x03') @@ -206,7 +200,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r'Pausing for \d+ seconds') pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)") pause_test.expect(r"Waiting for two seconds:") @@ -222,7 +216,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r'Pausing for \d+ seconds') pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)") pause_test.expect(r"Waiting 
for two seconds:") @@ -242,7 +236,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r'Pausing for \d+ seconds') pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)") pause_test.expect(r"Waiting for two seconds:") @@ -264,7 +258,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r'Enter some text:') pause_test.send('hello there') pause_test.send('\r') @@ -290,7 +284,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r'Wait for three seconds:') pause_test.send('ignored user input') pause_test.expect('Task after pause') @@ -307,7 +301,7 @@ pause_test = pexpect.spawn( env=os.environ ) -pause_test.logfile = log_buffer +pause_test.logfile = sys.stdout.buffer pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)") pause_test.send('\r') pause_test.expect(pexpect.EOF) diff --git a/test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py b/test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py index c068681292d..f5c0abbfe0c 100644 --- a/test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py +++ b/test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py @@ -62,7 +62,6 @@ import os from collections.abc import MutableMapping from ansible.errors import AnsibleError, AnsibleParserError -from ansible.module_utils.six import string_types from ansible.module_utils.common.text.converters import to_native, to_text from ansible.plugins.inventory import BaseFileInventoryPlugin @@ -126,7 +125,7 @@ class InventoryModule(BaseFileInventoryPlugin): for section in ['vars', 'children', 'hosts']: if section in group_data: # convert strings to dicts as these are allowed - if 
isinstance(group_data[section], string_types): + if isinstance(group_data[section], str): group_data[section] = {group_data[section]: None} if not isinstance(group_data[section], (MutableMapping, NoneType)): diff --git a/test/integration/targets/roles_arg_spec/test.yml b/test/integration/targets/roles_arg_spec/test.yml index 73f797140e4..2c24fc481d3 100644 --- a/test/integration/targets/roles_arg_spec/test.yml +++ b/test/integration/targets/roles_arg_spec/test.yml @@ -188,29 +188,6 @@ c_list: [] c_raw: ~ tasks: - - name: test type coercion fails on None for required str - block: - - name: "Test import_role of role C (missing a_str)" - import_role: - name: c - vars: - a_str: ~ - - fail: - msg: "Should not get here" - rescue: - - debug: - var: ansible_failed_result - - name: "Validate import_role failure" - assert: - that: - # NOTE: a bug here that prevents us from getting ansible_failed_task - - ansible_failed_result.argument_errors == [error] - - ansible_failed_result.argument_spec_data == a_main_spec - vars: - error: >- - argument 'a_str' is of type NoneType and we were unable to convert to str: - 'None' is not a string and conversion is not allowed - - name: test type coercion fails on None for required int block: - name: "Test import_role of role C (missing c_int)" diff --git a/test/integration/targets/tags/runme.sh b/test/integration/targets/tags/runme.sh index 3e0828f5d2c..fb7951f5594 100755 --- a/test/integration/targets/tags/runme.sh +++ b/test/integration/targets/tags/runme.sh @@ -99,4 +99,8 @@ ansible-playbook test_template_role_tags.yml --tags tag1 "$@" 2>&1 | tee out.txt [ "$(grep out.txt -ce 'Tagged_task')" = "1" ]; rm out.txt ansible-playbook test_template_role_tags.yml --skip-tags tag1 "$@" 2>&1 | tee out.txt -[ "$(grep out.txt -ce 'Tagged_task')" = "0" ]; rm out.txt +[ "$(grep out.txt -ce 'Tagged_task')" = "0" ]; +[ "$(grep out.txt -ce 'Found reserved tagsnames')" = "0" ]; rm out.txt + +ansible-playbook warn_reserved.yml "$@" 2>&1 | tee out.txt +[ 
"$(grep out.txt -ce 'Found reserved tagnames')" = "1" ]; rm out.txt diff --git a/test/integration/targets/tags/warn_reserved.yml b/test/integration/targets/tags/warn_reserved.yml new file mode 100644 index 00000000000..1adee2cf505 --- /dev/null +++ b/test/integration/targets/tags/warn_reserved.yml @@ -0,0 +1,6 @@ +- hosts: all + gather_facts: false + tasks: + - debug: msg=not tagged + - debug: msg=taggged + tags: tagged diff --git a/test/integration/targets/test_core/tasks/main.yml b/test/integration/targets/test_core/tasks/main.yml index 0e173ac5c0a..dfbd844c22f 100644 --- a/test/integration/targets/test_core/tasks/main.yml +++ b/test/integration/targets/test_core/tasks/main.yml @@ -218,6 +218,18 @@ - "'hello' is not regex('.L')" - "'hello\nAnsible' is not regex('^Ansible')" +- name: Try regex test with an invalid match_type + set_fact: + result: "{{ '1.0' is regex('.l', match_type='invalid') }}" + ignore_errors: yes + register: regex_invalid_match_type + +- name: Assert regex test raises an error when a match_type is invalid + assert: + that: + - regex_invalid_match_type is failed + - "'Invalid match_type specified.' 
in regex_invalid_match_type.msg" + - name: Try version tests with bad operator set_fact: result: "{{ '1.0' is version('1.0', 'equals') }}" diff --git a/test/integration/targets/var_templating/runme.sh b/test/integration/targets/var_templating/runme.sh index 69782f112a0..591de012461 100755 --- a/test/integration/targets/var_templating/runme.sh +++ b/test/integration/targets/var_templating/runme.sh @@ -13,3 +13,6 @@ ansible-playbook task_vars_templating.yml -v "$@" # there should be an attempt to use 'sudo' in the connection debug output ANSIBLE_BECOME_ALLOW_SAME_USER=true ansible-playbook test_connection_vars.yml -vvvv "$@" | tee /dev/stderr | grep 'sudo \-H \-S' + +# test vars deprecation +ANSIBLE_DEPRECATION_WARNINGS=1 ansible-playbook vars_deprecation.yml "$@" diff --git a/test/integration/targets/var_templating/vars_deprecation.yml b/test/integration/targets/var_templating/vars_deprecation.yml new file mode 100644 index 00000000000..fe0d463fe9d --- /dev/null +++ b/test/integration/targets/var_templating/vars_deprecation.yml @@ -0,0 +1,16 @@ +- hosts: localhost + gather_facts: false + vars: + deprecation_message: 'The internal "vars" dictionary is deprecated' + tasks: + - shell: !unsafe ansible -m debug -a "msg='{{vars}}'" localhost + register: just_vars + + - shell: !unsafe ansible -m debug -a 'msg="{{vars["'"ansible_python_interpreter"'"]}}"' localhost + register: sub_vars + + - name: verify we got deprecation + assert: + that: + - deprecation_message in just_vars.stderr + - deprecation_message in sub_vars.stderr diff --git a/test/lib/ansible_test/_data/requirements/ansible-test.txt b/test/lib/ansible_test/_data/requirements/ansible-test.txt index 404cc49b2c0..7a80f968b6c 100644 --- a/test/lib/ansible_test/_data/requirements/ansible-test.txt +++ b/test/lib/ansible_test/_data/requirements/ansible-test.txt @@ -1,2 +1,2 @@ # The test-constraints sanity test verifies this file, but changes must be made manually to keep it in up-to-date. 
-coverage == 7.10.0 ; python_version >= '3.9' and python_version <= '3.14' +coverage == 7.10.5 ; python_version >= '3.9' and python_version <= '3.14' diff --git a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt index 8a7226bee8f..ea1088f13ca 100644 --- a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt +++ b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt @@ -6,4 +6,4 @@ PyYAML==6.0.2 rstcheck==5.0.0 semantic-version==2.10.0 types-docutils==0.18.3 -typing_extensions==4.14.1 +typing_extensions==4.15.0 diff --git a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt index bf28bb62b59..8ad125663eb 100644 --- a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt +++ b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt @@ -3,7 +3,7 @@ astroid==3.3.11 dill==0.4.0 isort==6.0.1 mccabe==0.7.0 -platformdirs==4.3.8 -pylint==3.3.7 +platformdirs==4.4.0 +pylint==3.3.8 PyYAML==6.0.2 tomlkit==0.13.3 diff --git a/test/lib/ansible_test/_internal/ci/__init__.py b/test/lib/ansible_test/_internal/ci/__init__.py index 44cc64e84a4..25bfa524463 100644 --- a/test/lib/ansible_test/_internal/ci/__init__.py +++ b/test/lib/ansible_test/_internal/ci/__init__.py @@ -3,22 +3,13 @@ from __future__ import annotations import abc -import base64 +import dataclasses +import datetime import json -import os +import pathlib import tempfile import typing as t -from ..encoding import ( - to_bytes, - to_text, -) - -from ..io import ( - read_text_file, - write_text_file, -) - from ..config import ( CommonConfig, TestConfig, @@ -34,6 +25,65 @@ from ..util import ( ) +@dataclasses.dataclass(frozen=True, kw_only=True) +class AuthContext: + """Information about the request to which authentication will be applied.""" + + stage: str + provider: str + request_id: str + + +class AuthHelper: + """Authentication helper.""" + + 
NAMESPACE: t.ClassVar = 'ci@core.ansible.com' + + def __init__(self, key_file: pathlib.Path) -> None: + self.private_key_file = pathlib.Path(str(key_file).removesuffix('.pub')) + self.public_key_file = pathlib.Path(f'{self.private_key_file}.pub') + + def sign_request(self, request: dict[str, object], context: AuthContext) -> None: + """Sign the given auth request using the provided context.""" + request.update( + stage=context.stage, + provider=context.provider, + request_id=context.request_id, + timestamp=datetime.datetime.now(tz=datetime.timezone.utc).replace(microsecond=0).isoformat(), + ) + + with tempfile.TemporaryDirectory() as temp_dir: + payload_path = pathlib.Path(temp_dir) / 'auth.json' + payload_path.write_text(json.dumps(request, sort_keys=True)) + + cmd = ['ssh-keygen', '-q', '-Y', 'sign', '-f', str(self.private_key_file), '-n', self.NAMESPACE, str(payload_path)] + raw_command(cmd, capture=False, interactive=True) + + signature_path = pathlib.Path(f'{payload_path}.sig') + signature = signature_path.read_text() + + request.update(signature=signature) + + +class GeneratingAuthHelper(AuthHelper, metaclass=abc.ABCMeta): + """Authentication helper which generates a key pair on demand.""" + + def __init__(self) -> None: + super().__init__(pathlib.Path('~/.ansible/test/ansible-core-ci').expanduser()) + + def sign_request(self, request: dict[str, object], context: AuthContext) -> None: + if not self.private_key_file.exists(): + self.generate_key_pair() + + super().sign_request(request, context) + + def generate_key_pair(self) -> None: + """Generate key pair.""" + self.private_key_file.parent.mkdir(parents=True, exist_ok=True) + + raw_command(['ssh-keygen', '-q', '-f', str(self.private_key_file), '-N', ''], capture=True) + + class ChangeDetectionNotSupported(ApplicationError): """Exception for cases where change detection is not supported.""" @@ -75,8 +125,8 @@ class CIProvider(metaclass=abc.ABCMeta): """Return True if Ansible Core CI is supported.""" 
@abc.abstractmethod - def prepare_core_ci_auth(self) -> dict[str, t.Any]: - """Return authentication details for Ansible Core CI.""" + def prepare_core_ci_request(self, config: dict[str, object], context: AuthContext) -> dict[str, object]: + """Prepare an Ansible Core CI request using the given config and context.""" @abc.abstractmethod def get_git_details(self, args: CommonConfig) -> t.Optional[dict[str, t.Any]]: @@ -101,119 +151,3 @@ def get_ci_provider() -> CIProvider: display.info('Detected CI provider: %s' % provider.name) return provider - - -class AuthHelper(metaclass=abc.ABCMeta): - """Public key based authentication helper for Ansible Core CI.""" - - def sign_request(self, request: dict[str, t.Any]) -> None: - """Sign the given auth request and make the public key available.""" - payload_bytes = to_bytes(json.dumps(request, sort_keys=True)) - signature_raw_bytes = self.sign_bytes(payload_bytes) - signature = to_text(base64.b64encode(signature_raw_bytes)) - - request.update(signature=signature) - - def initialize_private_key(self) -> str: - """ - Initialize and publish a new key pair (if needed) and return the private key. - The private key is cached across ansible-test invocations, so it is only generated and published once per CI job. 
- """ - path = os.path.expanduser('~/.ansible-core-ci-private.key') - - if os.path.exists(to_bytes(path)): - private_key_pem = read_text_file(path) - else: - private_key_pem = self.generate_private_key() - write_text_file(path, private_key_pem) - - return private_key_pem - - @abc.abstractmethod - def sign_bytes(self, payload_bytes: bytes) -> bytes: - """Sign the given payload and return the signature, initializing a new key pair if required.""" - - @abc.abstractmethod - def publish_public_key(self, public_key_pem: str) -> None: - """Publish the given public key.""" - - @abc.abstractmethod - def generate_private_key(self) -> str: - """Generate a new key pair, publishing the public key and returning the private key.""" - - -class CryptographyAuthHelper(AuthHelper, metaclass=abc.ABCMeta): - """Cryptography based public key based authentication helper for Ansible Core CI.""" - - def sign_bytes(self, payload_bytes: bytes) -> bytes: - """Sign the given payload and return the signature, initializing a new key pair if required.""" - # import cryptography here to avoid overhead and failures in environments which do not use/provide it - from cryptography.hazmat.backends import default_backend - from cryptography.hazmat.primitives import hashes - from cryptography.hazmat.primitives.asymmetric import ec - from cryptography.hazmat.primitives.serialization import load_pem_private_key - - private_key_pem = self.initialize_private_key() - private_key = load_pem_private_key(to_bytes(private_key_pem), None, default_backend()) - - assert isinstance(private_key, ec.EllipticCurvePrivateKey) - - signature_raw_bytes = private_key.sign(payload_bytes, ec.ECDSA(hashes.SHA256())) - - return signature_raw_bytes - - def generate_private_key(self) -> str: - """Generate a new key pair, publishing the public key and returning the private key.""" - # import cryptography here to avoid overhead and failures in environments which do not use/provide it - from cryptography.hazmat.backends import 
default_backend - from cryptography.hazmat.primitives import serialization - from cryptography.hazmat.primitives.asymmetric import ec - - private_key = ec.generate_private_key(ec.SECP384R1(), default_backend()) - public_key = private_key.public_key() - - private_key_pem = to_text(private_key.private_bytes( # type: ignore[attr-defined] # documented method, but missing from type stubs - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.NoEncryption(), - )) - - public_key_pem = to_text(public_key.public_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PublicFormat.SubjectPublicKeyInfo, - )) - - self.publish_public_key(public_key_pem) - - return private_key_pem - - -class OpenSSLAuthHelper(AuthHelper, metaclass=abc.ABCMeta): - """OpenSSL based public key based authentication helper for Ansible Core CI.""" - - def sign_bytes(self, payload_bytes: bytes) -> bytes: - """Sign the given payload and return the signature, initializing a new key pair if required.""" - private_key_pem = self.initialize_private_key() - - with tempfile.NamedTemporaryFile() as private_key_file: - private_key_file.write(to_bytes(private_key_pem)) - private_key_file.flush() - - with tempfile.NamedTemporaryFile() as payload_file: - payload_file.write(payload_bytes) - payload_file.flush() - - with tempfile.NamedTemporaryFile() as signature_file: - raw_command(['openssl', 'dgst', '-sha256', '-sign', private_key_file.name, '-out', signature_file.name, payload_file.name], capture=True) - signature_raw_bytes = signature_file.read() - - return signature_raw_bytes - - def generate_private_key(self) -> str: - """Generate a new key pair, publishing the public key and returning the private key.""" - private_key_pem = raw_command(['openssl', 'ecparam', '-genkey', '-name', 'secp384r1', '-noout'], capture=True)[0] - public_key_pem = raw_command(['openssl', 'ec', '-pubout'], data=private_key_pem, capture=True)[0] - - 
self.publish_public_key(public_key_pem) - - return private_key_pem diff --git a/test/lib/ansible_test/_internal/ci/azp.py b/test/lib/ansible_test/_internal/ci/azp.py index c2a9e004c3a..556859dc586 100644 --- a/test/lib/ansible_test/_internal/ci/azp.py +++ b/test/lib/ansible_test/_internal/ci/azp.py @@ -31,9 +31,10 @@ from ..util import ( ) from . import ( + AuthContext, ChangeDetectionNotSupported, CIProvider, - CryptographyAuthHelper, + GeneratingAuthHelper, ) CODE = 'azp' @@ -112,10 +113,11 @@ class AzurePipelines(CIProvider): """Return True if Ansible Core CI is supported.""" return True - def prepare_core_ci_auth(self) -> dict[str, t.Any]: - """Return authentication details for Ansible Core CI.""" + def prepare_core_ci_request(self, config: dict[str, object], context: AuthContext) -> dict[str, object]: try: - request = dict( + request: dict[str, object] = dict( + type="azp:ssh", + config=config, org_name=os.environ['SYSTEM_COLLECTIONURI'].strip('/').split('/')[-1], project_name=os.environ['SYSTEM_TEAMPROJECT'], build_id=int(os.environ['BUILD_BUILDID']), @@ -124,13 +126,9 @@ class AzurePipelines(CIProvider): except KeyError as ex: raise MissingEnvironmentVariable(name=ex.args[0]) from None - self.auth.sign_request(request) + self.auth.sign_request(request, context) - auth = dict( - azp=request, - ) - - return auth + return request def get_git_details(self, args: CommonConfig) -> t.Optional[dict[str, t.Any]]: """Return details about git in the current environment.""" @@ -144,14 +142,14 @@ class AzurePipelines(CIProvider): return details -class AzurePipelinesAuthHelper(CryptographyAuthHelper): - """ - Authentication helper for Azure Pipelines. - Based on cryptography since it is provided by the default Azure Pipelines environment. 
- """ +class AzurePipelinesAuthHelper(GeneratingAuthHelper): + """Authentication helper for Azure Pipelines.""" + + def generate_key_pair(self) -> None: + super().generate_key_pair() + + public_key_pem = self.public_key_file.read_text() - def publish_public_key(self, public_key_pem: str) -> None: - """Publish the given public key.""" try: agent_temp_directory = os.environ['AGENT_TEMPDIRECTORY'] except KeyError as ex: diff --git a/test/lib/ansible_test/_internal/ci/local.py b/test/lib/ansible_test/_internal/ci/local.py index 90fad430b8f..fc8ab52d640 100644 --- a/test/lib/ansible_test/_internal/ci/local.py +++ b/test/lib/ansible_test/_internal/ci/local.py @@ -2,10 +2,12 @@ from __future__ import annotations -import os +import abc +import inspect import platform import random import re +import pathlib import typing as t from ..config import ( @@ -24,11 +26,14 @@ from ..git import ( from ..util import ( ApplicationError, display, + get_subclasses, is_binary_file, SubprocessError, ) from . import ( + AuthContext, + AuthHelper, CIProvider, ) @@ -120,34 +125,20 @@ class Local(CIProvider): def supports_core_ci_auth(self) -> bool: """Return True if Ansible Core CI is supported.""" - path = self._get_aci_key_path() - return os.path.exists(path) + return Authenticator.available() - def prepare_core_ci_auth(self) -> dict[str, t.Any]: - """Return authentication details for Ansible Core CI.""" - path = self._get_aci_key_path() - auth_key = read_text_file(path).strip() + def prepare_core_ci_request(self, config: dict[str, object], context: AuthContext) -> dict[str, object]: + if not (authenticator := Authenticator.load()): + raise ApplicationError('Ansible Core CI authentication has not been configured.') - request = dict( - key=auth_key, - nonce=None, - ) + display.info(f'Using {authenticator} for Ansible Core CI.', verbosity=1) - auth = dict( - remote=request, - ) - - return auth + return authenticator.prepare_auth_request(config, context) def get_git_details(self, args: 
CommonConfig) -> t.Optional[dict[str, t.Any]]: """Return details about git in the current environment.""" return None # not yet implemented for local - @staticmethod - def _get_aci_key_path() -> str: - path = os.path.expanduser('~/.ansible-core-ci.key') - return path - class InvalidBranch(ApplicationError): """Exception for invalid branch specification.""" @@ -214,3 +205,108 @@ class LocalChanges: return True return False + + +class Authenticator(metaclass=abc.ABCMeta): + """Base class for authenticators.""" + + @staticmethod + def list() -> list[type[Authenticator]]: + """List all authenticators in priority order.""" + return sorted((sc for sc in get_subclasses(Authenticator) if not inspect.isabstract(sc)), key=lambda obj: obj.priority()) + + @staticmethod + def load() -> Authenticator | None: + """Load an authenticator instance, returning None if not configured.""" + for implementation in Authenticator.list(): + if implementation.config_file().exists(): + return implementation() + + return None + + @staticmethod + def available() -> bool: + """Return True if an authenticator is available, otherwise False.""" + return bool(Authenticator.load()) + + @classmethod + @abc.abstractmethod + def priority(cls) -> int: + """Priority used to determine which authenticator is tried first, from lowest to highest.""" + + @classmethod + @abc.abstractmethod + def config_file(cls) -> pathlib.Path: + """Path to the config file for this authenticator.""" + + @abc.abstractmethod + def prepare_auth_request(self, config: dict[str, object], context: AuthContext) -> dict[str, object]: + """Prepare an authenticated Ansible Core CI request using the given config and context.""" + + def __str__(self) -> str: + return self.__class__.__name__ + + +class PasswordAuthenticator(Authenticator): + """Authenticate using a password.""" + + @classmethod + def priority(cls) -> int: + return 200 + + @classmethod + def config_file(cls) -> pathlib.Path: + return 
pathlib.Path('~/.ansible-core-ci.key').expanduser() + + def prepare_auth_request(self, config: dict[str, object], context: AuthContext) -> dict[str, object]: + parts = self.config_file().read_text().strip().split(maxsplit=1) + + if len(parts) == 1: # temporary backward compatibility for legacy API keys + request = dict( + config=config, + auth=dict( + remote=dict( + key=parts[0], + ), + ), + ) + + return request + + username, password = parts + + request = dict( + type="remote:password", + config=config, + username=username, + password=password, + ) + + return request + + +class SshAuthenticator(Authenticator): + """Authenticate using an SSH key.""" + + @classmethod + def priority(cls) -> int: + return 100 + + @classmethod + def config_file(cls) -> pathlib.Path: + return pathlib.Path('~/.ansible-core-ci.auth').expanduser() + + def prepare_auth_request(self, config: dict[str, object], context: AuthContext) -> dict[str, object]: + parts = self.config_file().read_text().strip().split(maxsplit=1) + username, key_file = parts + + request: dict[str, object] = dict( + type="remote:ssh", + config=config, + username=username, + ) + + auth_helper = AuthHelper(pathlib.Path(key_file).expanduser()) + auth_helper.sign_request(request, context) + + return request diff --git a/test/lib/ansible_test/_internal/core_ci.py b/test/lib/ansible_test/_internal/core_ci.py index f7591daf7a1..72315880536 100644 --- a/test/lib/ansible_test/_internal/core_ci.py +++ b/test/lib/ansible_test/_internal/core_ci.py @@ -42,6 +42,7 @@ from .config import ( ) from .ci import ( + AuthContext, get_ci_provider, ) @@ -68,6 +69,10 @@ class Resource(metaclass=abc.ABCMeta): def persist(self) -> bool: """True if the resource is persistent, otherwise false.""" + @abc.abstractmethod + def get_config(self, core_ci: AnsibleCoreCI) -> dict[str, object]: + """Return the configuration for this resource.""" + @dataclasses.dataclass(frozen=True) class VmResource(Resource): @@ -92,6 +97,16 @@ class VmResource(Resource): 
"""True if the resource is persistent, otherwise false.""" return True + def get_config(self, core_ci: AnsibleCoreCI) -> dict[str, object]: + """Return the configuration for this resource.""" + return dict( + type="vm", + platform=self.platform, + version=self.version, + architecture=self.architecture, + public_key=core_ci.ssh_key.pub_contents, + ) + @dataclasses.dataclass(frozen=True) class CloudResource(Resource): @@ -112,6 +127,12 @@ class CloudResource(Resource): """True if the resource is persistent, otherwise false.""" return False + def get_config(self, core_ci: AnsibleCoreCI) -> dict[str, object]: + """Return the configuration for this resource.""" + return dict( + type="cloud", + ) + class AnsibleCoreCI: """Client for Ansible Core CI services.""" @@ -189,7 +210,7 @@ class AnsibleCoreCI: display.info(f'Skipping started {self.label} instance.', verbosity=1) return None - return self._start(self.ci_provider.prepare_core_ci_auth()) + return self._start() def stop(self) -> None: """Stop instance.""" @@ -288,26 +309,25 @@ class AnsibleCoreCI: def _uri(self) -> str: return f'{self.endpoint}/{self.stage}/{self.provider}/{self.instance_id}' - def _start(self, auth) -> dict[str, t.Any]: + def _start(self) -> dict[str, t.Any]: """Start instance.""" display.info(f'Initializing new {self.label} instance using: {self._uri}', verbosity=1) - data = dict( - config=dict( - platform=self.platform, - version=self.version, - architecture=self.arch, - public_key=self.ssh_key.pub_contents, - ) + config = self.resource.get_config(self) + + context = AuthContext( + request_id=self.instance_id, + stage=self.stage, + provider=self.provider, ) - data.update(auth=auth) + request = self.ci_provider.prepare_core_ci_request(config, context) headers = { 'Content-Type': 'application/json', } - response = self._start_endpoint(data, headers) + response = self._start_endpoint(request, headers) self.started = True self._save() diff --git a/test/lib/ansible_test/_internal/coverage_util.py 
b/test/lib/ansible_test/_internal/coverage_util.py index 95ec08a483c..907854bc9bb 100644 --- a/test/lib/ansible_test/_internal/coverage_util.py +++ b/test/lib/ansible_test/_internal/coverage_util.py @@ -69,7 +69,7 @@ class CoverageVersion: COVERAGE_VERSIONS = ( # IMPORTANT: Keep this in sync with the ansible-test.txt requirements file. - CoverageVersion('7.10.0', 7, (3, 9), (3, 14)), + CoverageVersion('7.10.5', 7, (3, 9), (3, 14)), ) """ This tuple specifies the coverage version to use for Python version ranges. diff --git a/test/lib/ansible_test/_internal/delegation.py b/test/lib/ansible_test/_internal/delegation.py index 74c4adbfc6a..d69146b2678 100644 --- a/test/lib/ansible_test/_internal/delegation.py +++ b/test/lib/ansible_test/_internal/delegation.py @@ -124,6 +124,8 @@ def delegate(args: CommonConfig, host_state: HostState, exclude: list[str], requ @contextlib.contextmanager def metadata_context(args: EnvironmentConfig) -> t.Generator[None]: """A context manager which exports delegation metadata.""" + os.makedirs(ResultType.TMP.path, exist_ok=True) + with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=ResultType.TMP.path) as metadata_fd: args.metadata_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(metadata_fd.name)) args.metadata.to_file(args.metadata_path) diff --git a/test/lib/ansible_test/_internal/host_profiles.py b/test/lib/ansible_test/_internal/host_profiles.py index d926ec38454..c72733f814a 100644 --- a/test/lib/ansible_test/_internal/host_profiles.py +++ b/test/lib/ansible_test/_internal/host_profiles.py @@ -260,6 +260,9 @@ class HostProfile[THostConfig: HostConfig](metaclass=abc.ABCMeta): def name(self) -> str: """The name of the host profile.""" + def pre_provision(self) -> None: + """Pre-provision the host profile.""" + def provision(self) -> None: """Provision the host before delegation.""" @@ -517,8 +520,8 @@ class RemoteProfile[TRemoteConfig: RemoteConfig](SshTargetHostProfile[TRemoteCon """The saved 
Ansible Core CI state.""" self.state['core_ci'] = value - def provision(self) -> None: - """Provision the host before delegation.""" + def pre_provision(self) -> None: + """Pre-provision the host before delegation.""" self.core_ci = self.create_core_ci(load=True) self.core_ci.start() diff --git a/test/lib/ansible_test/_internal/provisioning.py b/test/lib/ansible_test/_internal/provisioning.py index 66679d263c5..cc3235d037e 100644 --- a/test/lib/ansible_test/_internal/provisioning.py +++ b/test/lib/ansible_test/_internal/provisioning.py @@ -129,6 +129,9 @@ def prepare_profiles[TEnvironmentConfig: EnvironmentConfig]( ExitHandler.register(functools.partial(cleanup_profiles, host_state)) + for pre_profile in host_state.profiles: + pre_profile.pre_provision() + def provision(profile: HostProfile) -> None: """Provision the given profile.""" profile.provision() diff --git a/test/lib/ansible_test/_internal/util.py b/test/lib/ansible_test/_internal/util.py index 1811f04def6..5ed60ea3b01 100644 --- a/test/lib/ansible_test/_internal/util.py +++ b/test/lib/ansible_test/_internal/util.py @@ -702,6 +702,7 @@ def common_environment() -> dict[str, str]: optional = ( 'LD_LIBRARY_PATH', 'SSH_AUTH_SOCK', + 'SSH_SK_PROVIDER', # MacOS High Sierra Compatibility # http://sealiesoftware.com/blog/archive/2017/6/5/Objective-C_and_fork_in_macOS_1013.html # Example configuration for macOS: diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py index 99c809918c3..90d99c06093 100644 --- a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py @@ -17,14 +17,13 @@ from voluptuous import Required, Schema, Invalid from voluptuous.humanize import humanize_error from ansible.module_utils.compat.version import StrictVersion, LooseVersion -from ansible.module_utils.six import 
string_types from ansible.utils.collection_loader import AnsibleCollectionRef from ansible.utils.version import SemanticVersion def fqcr(value): """Validate a FQCR.""" - if not isinstance(value, string_types): + if not isinstance(value, str): raise Invalid('Must be a string that is a FQCR') if not AnsibleCollectionRef.is_valid_fqcr(value): raise Invalid('Must be a FQCR') @@ -33,7 +32,7 @@ def fqcr(value): def fqcr_or_shortname(value): """Validate a FQCR or a shortname.""" - if not isinstance(value, string_types): + if not isinstance(value, str): raise Invalid('Must be a string that is a FQCR or a short name') if '.' in value and not AnsibleCollectionRef.is_valid_fqcr(value): raise Invalid('Must be a FQCR or a short name') @@ -48,7 +47,7 @@ def isodate(value, check_deprecation_date=False, is_tombstone=False): else: # make sure we have a string msg = 'Expected ISO 8601 date string (YYYY-MM-DD), or YAML date' - if not isinstance(value, string_types): + if not isinstance(value, str): raise Invalid(msg) # From Python 3.7 in, there is datetime.date.fromisoformat(). For older versions, # we have to do things manually. 
@@ -80,7 +79,7 @@ def removal_version(value, is_ansible, current_version=None, is_tombstone=False) 'Removal version must be a string' if is_ansible else 'Removal version must be a semantic version (https://semver.org/)' ) - if not isinstance(value, string_types): + if not isinstance(value, str): raise Invalid(msg) try: if is_ansible: @@ -191,7 +190,7 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False): 'removal_version': partial(removal_version, is_ansible=is_ansible, current_version=current_version), 'removal_date': partial(isodate, check_deprecation_date=check_deprecation_dates), - 'warning_text': Any(*string_types), + 'warning_text': str, } ), avoid_additional_data @@ -204,7 +203,7 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False): 'removal_version': partial(removal_version, is_ansible=is_ansible, current_version=current_version, is_tombstone=True), 'removal_date': partial(isodate, is_tombstone=True), - 'warning_text': Any(*string_types), + 'warning_text': str, } ), avoid_additional_data @@ -228,18 +227,15 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False): # Adjusted schema for module_utils plugin_routing_schema_mu = Any( plugins_routing_common_schema.extend({ - ('redirect'): Any(*string_types)} + ('redirect'): str} ), ) - list_dict_plugin_routing_schema = [{str_type: plugin_routing_schema} - for str_type in string_types] + list_dict_plugin_routing_schema = [{str: plugin_routing_schema}] - list_dict_plugin_routing_schema_mu = [{str_type: plugin_routing_schema_mu} - for str_type in string_types] + list_dict_plugin_routing_schema_mu = [{str: plugin_routing_schema_mu}] - list_dict_plugin_routing_schema_modules = [{str_type: plugin_routing_schema_modules} - for str_type in string_types] + list_dict_plugin_routing_schema_modules = [{str: plugin_routing_schema_modules}] plugin_schema = Schema({ ('action'): Any(None, *list_dict_plugin_routing_schema), @@ -267,13 +263,12 @@ def 
validate_metadata_file(path, is_ansible, check_deprecation_dates=False): import_redirection_schema = Any( Schema({ - ('redirect'): Any(*string_types), + ('redirect'): str, # import_redirect doesn't currently support deprecation }, extra=PREVENT_EXTRA) ) - list_dict_import_redirection_schema = [{str_type: import_redirection_schema} - for str_type in string_types] + list_dict_import_redirection_schema = [{str: import_redirection_schema}] # action_groups schema @@ -289,7 +284,7 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False): }, extra=PREVENT_EXTRA) }, extra=PREVENT_EXTRA) action_group_schema = All([metadata_dict, fqcr_or_shortname], at_most_one_dict) - list_dict_action_groups_schema = [{str_type: action_group_schema} for str_type in string_types] + list_dict_action_groups_schema = [{str: action_group_schema}] # top level schema @@ -298,7 +293,7 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False): ('plugin_routing'): Any(plugin_schema), ('import_redirection'): Any(None, *list_dict_import_redirection_schema), # requires_ansible: In the future we should validate this with SpecifierSet - ('requires_ansible'): Any(*string_types), + ('requires_ansible'): str, ('action_groups'): Any(*list_dict_action_groups_schema), }, extra=PREVENT_EXTRA) diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py index f9abec0c0f4..e0414fdefd0 100644 --- a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py @@ -80,36 +80,39 @@ class AnsibleUnwantedChecker(BaseChecker): 'Identifies imports which should not be used.'), ) - unwanted_imports = dict( - # Additional imports that we may want to start checking: - # boto=UnwantedEntry('boto3', modules_only=True), - # requests=UnwantedEntry('ansible.module_utils.urls', modules_only=True), - # 
urllib=UnwantedEntry('ansible.module_utils.urls', modules_only=True), - + unwanted_imports = { # see https://docs.python.org/2/library/urllib2.html - urllib2=UnwantedEntry('ansible.module_utils.urls', - ignore_paths=( - '/lib/ansible/module_utils/urls.py', - )), + 'urllib2': UnwantedEntry( + 'ansible.module_utils.urls', + ignore_paths=( + '/lib/ansible/module_utils/urls.py', + ) + ), # see https://docs.python.org/3/library/collections.abc.html - collections=UnwantedEntry('ansible.module_utils.six.moves.collections_abc', - names=( - 'MappingView', - 'ItemsView', - 'KeysView', - 'ValuesView', - 'Mapping', 'MutableMapping', - 'Sequence', 'MutableSequence', - 'Set', 'MutableSet', - 'Container', - 'Hashable', - 'Sized', - 'Callable', - 'Iterable', - 'Iterator', - )), - ) + 'collections': UnwantedEntry( + 'collections.abc', + names=( + 'MappingView', + 'ItemsView', + 'KeysView', + 'ValuesView', + 'Mapping', 'MutableMapping', + 'Sequence', 'MutableSequence', + 'Set', 'MutableSet', + 'Container', + 'Hashable', + 'Sized', + 'Callable', + 'Iterable', + 'Iterator', + ) + ), + + 'ansible.module_utils.six': UnwantedEntry( + 'the Python standard library equivalent' + ), + } unwanted_functions = { # see https://docs.python.org/3/library/tempfile.html#tempfile.mktemp diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py index cd2a301eaa5..341c37f3789 100644 --- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py @@ -72,7 +72,7 @@ from ansible.module_utils.compat.version import StrictVersion, LooseVersion from ansible.module_utils.basic import to_bytes from ansible.plugins.loader import fragment_loader from ansible.plugins.list import IGNORE as REJECTLIST -from ansible.utils.plugin_docs import 
add_collection_to_versions_and_dates, add_fragments, get_docstring +from ansible.utils.plugin_docs import AnsibleFragmentError, add_collection_to_versions_and_dates, add_fragments, get_docstring from ansible.utils.version import SemanticVersion from .module_args import AnsibleModuleImportError, AnsibleModuleNotInitialized, get_py_argument_spec, get_ps_argument_spec @@ -1003,21 +1003,15 @@ class ModuleValidator(Validator): add_collection_to_versions_and_dates(doc, self.collection_name, is_module=self.plugin_type == 'module') - missing_fragment = False with CaptureStd(): try: get_docstring(os.path.abspath(self.path), fragment_loader=fragment_loader, verbose=True, collection_name=self.collection_name, plugin_type=self.plugin_type) - except AssertionError: - fragment = doc['extends_documentation_fragment'] - self.reporter.error( - path=self.object_path, - code='missing-doc-fragment', - msg='DOCUMENTATION fragment missing: %s' % fragment - ) - missing_fragment = True + except AnsibleFragmentError: + # Will be re-triggered below when explicitly calling add_fragments() + pass except Exception as e: self.reporter.trace( path=self.object_path, @@ -1029,9 +1023,16 @@ class ModuleValidator(Validator): msg='Unknown DOCUMENTATION error, see TRACE: %s' % e ) - if not missing_fragment: + try: add_fragments(doc, os.path.abspath(self.object_path), fragment_loader=fragment_loader, - is_module=self.plugin_type == 'module') + is_module=self.plugin_type == 'module', section='DOCUMENTATION') + except AnsibleFragmentError as exc: + error = str(exc).replace(os.path.abspath(self.object_path), self.object_path) + self.reporter.error( + path=self.object_path, + code='doc-fragment-error', + msg=f'Error while adding fragments: {error}' + ) if 'options' in doc and doc['options'] is None: self.reporter.error( @@ -1130,6 +1131,16 @@ class ModuleValidator(Validator): self.collection_name, is_module=self.plugin_type == 'module', return_docs=True) + try: + add_fragments(returns, 
os.path.abspath(self.object_path), fragment_loader=fragment_loader, + is_module=self.plugin_type == 'module', section='RETURN') + except AnsibleFragmentError as exc: + error = str(exc).replace(os.path.abspath(self.object_path), self.object_path) + self.reporter.error( + path=self.object_path, + code='return-fragment-error', + msg=f'Error while adding fragments: {error}' + ) self._validate_docs_schema( returns, return_schema(for_collection=bool(self.collection), plugin_type=self.plugin_type), @@ -1268,16 +1279,18 @@ class ModuleValidator(Validator): if not isinstance(options, dict): return for key, value in options.items(): - self._validate_semantic_markup(value.get('description')) - self._validate_semantic_markup_options(value.get('suboptions')) + if isinstance(value, dict): + self._validate_semantic_markup(value.get('description')) + self._validate_semantic_markup_options(value.get('suboptions')) def _validate_semantic_markup_return_values(self, return_vars): if not isinstance(return_vars, dict): return for key, value in return_vars.items(): - self._validate_semantic_markup(value.get('description')) - self._validate_semantic_markup(value.get('returned')) - self._validate_semantic_markup_return_values(value.get('contains')) + if isinstance(value, dict): + self._validate_semantic_markup(value.get('description')) + self._validate_semantic_markup(value.get('returned')) + self._validate_semantic_markup_return_values(value.get('contains')) def _validate_all_semantic_markup(self, docs, return_docs): if not isinstance(docs, dict): @@ -1617,7 +1630,7 @@ class ModuleValidator(Validator): try: if not context: add_fragments(docs, os.path.abspath(self.object_path), fragment_loader=fragment_loader, - is_module=self.plugin_type == 'module') + is_module=self.plugin_type == 'module', section='DOCUMENTATION') except Exception: # Cannot merge fragments return diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py 
b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py index daeb57ac1f6..9c6b44f1d98 100644 --- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py @@ -28,7 +28,6 @@ from contextlib import contextmanager from ansible.executor.powershell.module_manifest import PSModuleDepFinder from ansible.module_utils.basic import FILE_COMMON_ARGUMENTS, AnsibleModule -from ansible.module_utils.six import reraise from ansible.module_utils.common.text.converters import to_bytes, to_text from .utils import CaptureStd, find_executable, get_module_name_from_filename @@ -153,7 +152,7 @@ def get_py_argument_spec(filename, collection): pass except BaseException as e: # we want to catch all exceptions here, including sys.exit - reraise(AnsibleModuleImportError, AnsibleModuleImportError('%s' % e), sys.exc_info()[2]) + raise AnsibleModuleImportError from e if not fake.called: raise AnsibleModuleNotInitialized() diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py index abbbd66c25c..6c2b3415430 100644 --- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py @@ -13,7 +13,6 @@ from urllib.parse import urlparse from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, MultipleInvalid, Required, Schema, Self, ValueInvalid, Exclusive from ansible.constants import DOCUMENTABLE_PLUGINS -from ansible.module_utils.six import string_types from ansible.module_utils.common.collections import is_iterable from ansible.module_utils.parsing.convert_bool import boolean from ansible.parsing.quoting import unquote @@ -25,9 +24,8 @@ 
from antsibull_docs_parser.parser import parse, Context from .utils import parse_isodate -list_string_types = list(string_types) -tuple_string_types = tuple(string_types) -any_string_types = Any(*string_types) +list_string_types = [str] +tuple_string_types = (str,) # Valid DOCUMENTATION.author lines # Based on Ansibulbot's extract_github_id() @@ -57,7 +55,7 @@ FULLY_QUALIFIED_COLLECTION_RESOURCE_RE = re.compile(r'^\w+(?:\.\w+){2,}$') def collection_name(v, error_code=None): - if not isinstance(v, string_types): + if not isinstance(v, str): raise _add_ansible_error_code( Invalid('Collection name must be a string'), error_code or 'collection-invalid-name') m = COLLECTION_NAME_RE.match(v) @@ -68,7 +66,7 @@ def collection_name(v, error_code=None): def fqcn(v, error_code=None): - if not isinstance(v, string_types): + if not isinstance(v, str): raise _add_ansible_error_code( Invalid('Module/plugin name must be a string'), error_code or 'invalid-documentation') m = FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(v) @@ -87,8 +85,8 @@ def deprecation_versions(): def version(for_collection=False): if for_collection: # We do not accept floats for versions in collections - return Any(*string_types) - return Any(float, *string_types) + return str + return Any(float, str) def date(error_code=None): @@ -128,7 +126,7 @@ def _check_url(directive, content): def doc_string(v): """Match a documentation string.""" - if not isinstance(v, string_types): + if not isinstance(v, str): raise _add_ansible_error_code( Invalid('Must be a string'), 'invalid-documentation') errors = [] @@ -216,12 +214,12 @@ seealso_schema = Schema( 'description': doc_string, }, { - Required('ref'): Any(*string_types), + Required('ref'): str, Required('description'): doc_string, }, { - Required('name'): Any(*string_types), - Required('link'): Any(*string_types), + Required('name'): str, + Required('link'): str, Required('description'): doc_string, }, ), @@ -238,7 +236,7 @@ argument_spec_modifiers = { 
'required_together': sequence_of_sequences(min=2), 'required_one_of': sequence_of_sequences(min=2), 'required_if': sequence_of_sequences(min=3, max=4), - 'required_by': Schema({str: Any(list_string_types, tuple_string_types, *string_types)}), + 'required_by': Schema({str: Any(list_string_types, tuple_string_types, str)}), } @@ -263,7 +261,7 @@ def options_with_apply_defaults(v): def check_removal_version(v, version_field, collection_name_field, error_code='invalid-removal-version'): version = v.get(version_field) collection_name = v.get(collection_name_field) - if not isinstance(version, string_types) or not isinstance(collection_name, string_types): + if not isinstance(version, str) or not isinstance(collection_name, str): # If they are not strings, schema validation will have already complained. return v if collection_name == 'ansible.builtin': @@ -313,9 +311,8 @@ def option_deprecation(v): def argument_spec_schema(for_collection): - any_string_types = Any(*string_types) schema = { - any_string_types: { + str: { 'type': Any(is_callable, *argument_spec_types), 'elements': Any(*argument_spec_types), 'default': object, @@ -336,12 +333,12 @@ def argument_spec_schema(for_collection): 'deprecated_aliases': Any([All( Any( { - Required('name'): Any(*string_types), + Required('name'): str, Required('date'): date(), Required('collection_name'): collection_name, }, { - Required('name'): Any(*string_types), + Required('name'): str, Required('version'): version(for_collection), Required('collection_name'): collection_name, }, @@ -353,13 +350,13 @@ def argument_spec_schema(for_collection): )]), } } - schema[any_string_types].update(argument_spec_modifiers) + schema[str].update(argument_spec_modifiers) schemas = All( schema, - Schema({any_string_types: no_required_with_default}), - Schema({any_string_types: elements_with_list}), - Schema({any_string_types: options_with_apply_defaults}), - Schema({any_string_types: option_deprecation}), + Schema({str: no_required_with_default}), 
+ Schema({str: elements_with_list}), + Schema({str: options_with_apply_defaults}), + Schema({str: option_deprecation}), ) return Schema(schemas) @@ -385,14 +382,15 @@ json_value = Schema(Any( int, float, [Self], - *(list({str_type: Self} for str_type in string_types) + list(string_types)) + {str: Self}, + str, )) def version_added(v, error_code='version-added-invalid', accept_historical=False): if 'version_added' in v: version_added = v.get('version_added') - if isinstance(version_added, string_types): + if isinstance(version_added, str): # If it is not a string, schema validation will have already complained # - or we have a float and we are in ansible/ansible, in which case we're # also happy. @@ -451,7 +449,7 @@ def get_type_checker(v): elt_checker, elt_name = get_type_checker({'type': v.get('elements')}) def list_checker(value): - if isinstance(value, string_types): + if isinstance(value, str): value = [unquote(x.strip()) for x in value.split(',')] if not isinstance(value, list): raise ValueError('Value must be a list') @@ -482,14 +480,14 @@ def get_type_checker(v): if v_type in ('str', 'string', 'path', 'tmp', 'temppath', 'tmppath'): def str_checker(value): - if not isinstance(value, string_types): + if not isinstance(value, str): raise ValueError('Value must be string') return str_checker, v_type if v_type in ('pathspec', 'pathlist'): def path_list_checker(value): - if not isinstance(value, string_types) and not is_iterable(value): + if not isinstance(value, str) and not is_iterable(value): raise ValueError('Value must be string or list of strings') return path_list_checker, v_type @@ -588,7 +586,7 @@ def list_dict_option_schema(for_collection, plugin_type): 'elements': element_types, } if plugin_type != 'module': - basic_option_schema['name'] = Any(*string_types) + basic_option_schema['name'] = str deprecated_schema = All( Schema( All( @@ -605,10 +603,10 @@ def list_dict_option_schema(for_collection, plugin_type): }, { # This definition makes sure that 
everything we require is there - Required('why'): Any(*string_types), - Required(Any('alternatives', 'alternative')): Any(*string_types), - Required(Any('removed_at_date', 'version')): Any(*string_types), - Required('collection_name'): Any(*string_types), + Required('why'): str, + Required(Any('alternatives', 'alternative')): str, + Required(Any('removed_at_date', 'version')): str, + Required('collection_name'): str, }, ), extra=PREVENT_EXTRA @@ -620,7 +618,7 @@ def list_dict_option_schema(for_collection, plugin_type): ) env_schema = All( Schema({ - Required('name'): Any(*string_types), + Required('name'): str, 'deprecated': deprecated_schema, 'version_added': version(for_collection), 'version_added_collection': collection_name, @@ -629,8 +627,8 @@ def list_dict_option_schema(for_collection, plugin_type): ) ini_schema = All( Schema({ - Required('key'): Any(*string_types), - Required('section'): Any(*string_types), + Required('key'): str, + Required('section'): str, 'deprecated': deprecated_schema, 'version_added': version(for_collection), 'version_added_collection': collection_name, @@ -639,7 +637,7 @@ def list_dict_option_schema(for_collection, plugin_type): ) vars_schema = All( Schema({ - Required('name'): Any(*string_types), + Required('name'): str, 'deprecated': deprecated_schema, 'version_added': version(for_collection), 'version_added_collection': collection_name, @@ -648,8 +646,8 @@ def list_dict_option_schema(for_collection, plugin_type): ) cli_schema = All( Schema({ - Required('name'): Any(*string_types), - 'option': Any(*string_types), + Required('name'): str, + 'option': str, 'deprecated': deprecated_schema, 'version_added': version(for_collection), 'version_added_collection': collection_name, @@ -658,7 +656,7 @@ def list_dict_option_schema(for_collection, plugin_type): ) keyword_schema = All( Schema({ - Required('name'): Any(*string_types), + Required('name'): str, 'deprecated': deprecated_schema, 'version_added': version(for_collection), 
'version_added_collection': collection_name, @@ -677,7 +675,7 @@ def list_dict_option_schema(for_collection, plugin_type): suboption_schema = dict(basic_option_schema) suboption_schema.update({ # Recursive suboptions - 'suboptions': Any(None, *list({str_type: Self} for str_type in string_types)), + 'suboptions': Any(None, {str: Self}), }) suboption_schema = Schema(All( suboption_schema, @@ -686,13 +684,9 @@ def list_dict_option_schema(for_collection, plugin_type): check_option_default, ), extra=PREVENT_EXTRA) - # This generates list of dicts with keys from string_types and suboption_schema value - # for example in Python 3: {str: suboption_schema} - list_dict_suboption_schema = [{str_type: suboption_schema} for str_type in string_types] - option_schema = dict(basic_option_schema) option_schema.update({ - 'suboptions': Any(None, *list_dict_suboption_schema), + 'suboptions': Any(None, {str: suboption_schema}), }) option_schema = Schema(All( option_schema, @@ -703,20 +697,18 @@ def list_dict_option_schema(for_collection, plugin_type): option_version_added = Schema( All({ - 'suboptions': Any(None, *[{str_type: Self} for str_type in string_types]), + 'suboptions': Any(None, {str: Self}), }, partial(version_added, error_code='option-invalid-version-added')), extra=ALLOW_EXTRA ) - # This generates list of dicts with keys from string_types and option_schema value - # for example in Python 3: {str: option_schema} - return [{str_type: All(option_schema, option_version_added)} for str_type in string_types] + return [{str: All(option_schema, option_version_added)}] def return_contains(v): schema = Schema( { - Required('contains'): Any(dict, list, *string_types) + Required('contains'): Any(dict, list, str) }, extra=ALLOW_EXTRA ) @@ -752,7 +744,7 @@ def return_schema(for_collection, plugin_type='module'): inner_return_option_schema = dict(basic_return_option_schema) inner_return_option_schema.update({ - 'contains': Any(None, *list({str_type: Self} for str_type in string_types)), 
+ 'contains': Any(None, {str: Self}), }) return_contains_schema = Any( All( @@ -763,27 +755,23 @@ def return_schema(for_collection, plugin_type='module'): Schema(type(None)), ) - # This generates list of dicts with keys from string_types and return_contains_schema value - # for example in Python 3: {str: return_contains_schema} - list_dict_return_contains_schema = [{str_type: return_contains_schema} for str_type in string_types] - return_option_schema = dict(basic_return_option_schema) return_option_schema.update({ - 'contains': Any(None, *list_dict_return_contains_schema), + 'contains': Any(None, {str: return_contains_schema}), }) if plugin_type == 'module': # 'returned' is required on top-level del return_option_schema['returned'] - return_option_schema[Required('returned')] = Any(*string_types) + return_option_schema[Required('returned')] = str return Any( All( Schema( { - any_string_types: return_option_schema + str: return_option_schema } ), - Schema({any_string_types: return_contains}), - Schema({any_string_types: partial(version_added, error_code='option-invalid-version-added')}), + Schema({str: return_contains}), + Schema({str: partial(version_added, error_code='option-invalid-version-added')}), ), Schema(type(None)), ) @@ -840,7 +828,7 @@ def author(value): value = [value] for line in value: - if not isinstance(line, string_types): + if not isinstance(line, str): continue # let schema checks handle m = author_line.search(line) if not m: @@ -868,14 +856,14 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi 'requirements': [doc_string], 'todo': Any(None, doc_string_or_strings), 'options': Any(None, *list_dict_option_schema(for_collection, plugin_type)), - 'extends_documentation_fragment': Any(list_string_types, *string_types), + 'extends_documentation_fragment': Any(list_string_types, str), 'version_added_collection': collection_name, } if plugin_type == 'module': - doc_schema_dict[Required('author')] = All(Any(None, 
list_string_types, *string_types), author) + doc_schema_dict[Required('author')] = All(Any(None, list_string_types, str), author) else: # author is optional for plugins (for now) - doc_schema_dict['author'] = All(Any(None, list_string_types, *string_types), author) + doc_schema_dict['author'] = All(Any(None, list_string_types, str), author) if plugin_type == 'callback': doc_schema_dict[Required('type')] = Any('aggregate', 'notification', 'stdout') @@ -896,9 +884,9 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi schema = { 'description': doc_string_or_strings, 'details': doc_string_or_strings, - 'support': any_string_types, - 'version_added_collection': any_string_types, - 'version_added': any_string_types, + 'support': str, + 'version_added_collection': str, + 'version_added': str, } if more: schema.update(more) @@ -907,7 +895,7 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi doc_schema_dict['attributes'] = Schema( All( Schema({ - any_string_types: { + str: { Required('description'): doc_string_or_strings, Required('support'): Any('full', 'partial', 'none', 'N/A'), 'details': doc_string_or_strings, @@ -917,12 +905,12 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi }, extra=ALLOW_EXTRA), partial(version_added, error_code='attribute-invalid-version-added', accept_historical=False), Schema({ - any_string_types: add_default_attributes(), + str: add_default_attributes(), 'action_group': add_default_attributes({ Required('membership'): list_string_types, }), 'platform': add_default_attributes({ - Required('platforms'): Any(list_string_types, *string_types) + Required('platforms'): Any(list_string_types, str) }), }, extra=PREVENT_EXTRA), ) diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py index 065c39cbc94..4481fa4285e 100644 
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py @@ -31,7 +31,6 @@ import yaml.reader from ansible.module_utils.common.text.converters import to_text from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.common.yaml import SafeLoader -from ansible.module_utils.six import string_types from ansible.parsing.yaml.loader import AnsibleLoader @@ -211,7 +210,7 @@ def parse_isodate(v, allow_date): msg = 'Expected ISO 8601 date string (YYYY-MM-DD) or YAML date' else: msg = 'Expected ISO 8601 date string (YYYY-MM-DD)' - if not isinstance(v, string_types): + if not isinstance(v, str): raise ValueError(msg) # From Python 3.7 in, there is datetime.date.fromisoformat(). For older versions, # we have to do things manually. diff --git a/test/sanity/code-smell/black.requirements.txt b/test/sanity/code-smell/black.requirements.txt index ffb59dd6b3a..d18c072d543 100644 --- a/test/sanity/code-smell/black.requirements.txt +++ b/test/sanity/code-smell/black.requirements.txt @@ -4,4 +4,4 @@ click==8.2.1 mypy_extensions==1.1.0 packaging==25.0 pathspec==0.12.1 -platformdirs==4.3.8 +platformdirs==4.4.0 diff --git a/test/sanity/code-smell/mypy.requirements.txt b/test/sanity/code-smell/mypy.requirements.txt index fd91a7b5352..d35fdc6da51 100644 --- a/test/sanity/code-smell/mypy.requirements.txt +++ b/test/sanity/code-smell/mypy.requirements.txt @@ -1,10 +1,10 @@ # edit "mypy.requirements.in" and generate with: hacking/update-sanity-requirements.py --test mypy cffi==1.17.1 -cryptography==45.0.5 +cryptography==45.0.6 iniconfig==2.1.0 Jinja2==3.1.6 MarkupSafe==3.0.2 -mypy==1.17.0 +mypy==1.17.1 mypy_extensions==1.1.0 packaging==25.0 pathspec==0.12.1 @@ -15,10 +15,10 @@ pytest==8.4.1 pytest-mock==3.14.1 tomli==2.2.1 types-backports==0.1.3 -types-paramiko==3.5.0.20250708 -types-PyYAML==6.0.12.20250516 -types-requests==2.32.4.20250611 
-types-setuptools==80.9.0.20250529 +types-paramiko==4.0.0.20250822 +types-PyYAML==6.0.12.20250822 +types-requests==2.32.4.20250809 +types-setuptools==80.9.0.20250822 types-toml==0.10.8.20240310 -typing_extensions==4.14.1 +typing_extensions==4.15.0 urllib3==2.5.0 diff --git a/test/sanity/code-smell/package-data.requirements.txt b/test/sanity/code-smell/package-data.requirements.txt index b54decb6fe4..d0d2324f9a4 100644 --- a/test/sanity/code-smell/package-data.requirements.txt +++ b/test/sanity/code-smell/package-data.requirements.txt @@ -1,4 +1,4 @@ # edit "package-data.requirements.in" and generate with: hacking/update-sanity-requirements.py --test package-data -build==1.2.2.post1 +build==1.3.0 packaging==25.0 pyproject_hooks==1.2.0 diff --git a/test/sanity/code-smell/pymarkdown.requirements.txt b/test/sanity/code-smell/pymarkdown.requirements.txt index 3062b71ad06..065677faba9 100644 --- a/test/sanity/code-smell/pymarkdown.requirements.txt +++ b/test/sanity/code-smell/pymarkdown.requirements.txt @@ -2,9 +2,9 @@ application_properties==0.9.0 Columnar==1.4.1 pyjson5==1.6.9 -pymarkdownlnt==0.9.31 +pymarkdownlnt==0.9.32 PyYAML==6.0.2 tomli==2.2.1 toolz==1.0.0 -typing_extensions==4.14.1 +typing_extensions==4.15.0 wcwidth==0.2.13 diff --git a/test/sanity/ignore.txt b/test/sanity/ignore.txt index 07b4474dc64..e64cffe8e49 100644 --- a/test/sanity/ignore.txt +++ b/test/sanity/ignore.txt @@ -229,11 +229,6 @@ test/integration/targets/ansible-test-sanity-pylint/deprecated_thing.py pylint:a test/integration/targets/ansible-test-sanity-pylint/deprecated_thing.py pylint:ansible-deprecated-date-not-permitted # required to verify plugin against core test/integration/targets/ansible-test-sanity-pylint/deprecated_thing.py pylint:ansible-deprecated-unnecessary-collection-name # required to verify plugin against core test/integration/targets/ansible-test-sanity-pylint/deprecated_thing.py pylint:ansible-deprecated-collection-name-not-permitted # required to verify plugin against core 
-lib/ansible/cli/doc.py pylint:ansible-deprecated-version # TODO: 2.20 lib/ansible/galaxy/api.py pylint:ansible-deprecated-version # TODO: 2.20 -lib/ansible/plugins/filter/encryption.py pylint:ansible-deprecated-version # TODO: 2.20 -lib/ansible/utils/encrypt.py pylint:ansible-deprecated-version # TODO: 2.20 -lib/ansible/utils/ssh_functions.py pylint:ansible-deprecated-version # TODO: 2.20 lib/ansible/vars/manager.py pylint:ansible-deprecated-version-comment # TODO: 2.20 -lib/ansible/vars/plugins.py pylint:ansible-deprecated-version # TODO: 2.20 lib/ansible/galaxy/role.py pylint:ansible-deprecated-python-version-comment # TODO: 2.20 diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/plugin_utils/_quote.py b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/plugin_utils/_quote.py index d01386efc75..aa895f932b4 100644 --- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/plugin_utils/_quote.py +++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/plugin_utils/_quote.py @@ -18,8 +18,6 @@ from __future__ import annotations import re -from ansible.module_utils.six import text_type - _UNSAFE_C = re.compile(u'[\\s\t"]') _UNSAFE_CMD = re.compile(u'[\\s\\(\\)\\^\\|%!"<>&]') @@ -30,7 +28,7 @@ _UNSAFE_CMD = re.compile(u'[\\s\\(\\)\\^\\|%!"<>&]') _UNSAFE_PWSH = re.compile(u"(['\u2018\u2019\u201a\u201b])") -def quote_c(s): # type: (text_type) -> text_type +def quote_c(s): # type: (str) -> str """Quotes a value for the raw Win32 process command line. Quotes a value to be safely used by anything that calls the Win32 @@ -40,7 +38,7 @@ def quote_c(s): # type: (text_type) -> text_type s: The string to quote. Returns: - (text_type): The quoted string value. + (str): The quoted string value. 
""" # https://docs.microsoft.com/en-us/archive/blogs/twistylittlepassagesallalike/everyone-quotes-command-line-arguments-the-wrong-way if not s: @@ -62,7 +60,7 @@ def quote_c(s): # type: (text_type) -> text_type return u'"{0}"'.format(s) -def quote_cmd(s): # type: (text_type) -> text_type +def quote_cmd(s): # type: (str) -> str """Quotes a value for cmd. Quotes a value to be safely used by a command prompt call. @@ -71,7 +69,7 @@ def quote_cmd(s): # type: (text_type) -> text_type s: The string to quote. Returns: - (text_type): The quoted string value. + (str): The quoted string value. """ # https://docs.microsoft.com/en-us/archive/blogs/twistylittlepassagesallalike/everyone-quotes-command-line-arguments-the-wrong-way#a-better-method-of-quoting if not s: @@ -92,7 +90,7 @@ def quote_cmd(s): # type: (text_type) -> text_type return u'^"{0}^"'.format(s) -def quote_pwsh(s): # type: (text_type) -> text_type +def quote_pwsh(s): # type: (str) -> str """Quotes a value for PowerShell. Quotes a value to be safely used by a PowerShell expression. The input @@ -102,7 +100,7 @@ def quote_pwsh(s): # type: (text_type) -> text_type s: The string to quote. Returns: - (text_type): The quoted string value. + (str): The quoted string value. 
""" # https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_quoting_rules?view=powershell-5.1 if not s: diff --git a/test/units/_internal/templating/test_jinja_bits.py b/test/units/_internal/templating/test_jinja_bits.py index ad32166da70..97fe7b21cae 100644 --- a/test/units/_internal/templating/test_jinja_bits.py +++ b/test/units/_internal/templating/test_jinja_bits.py @@ -9,6 +9,7 @@ from contextlib import nullcontext import pytest import pytest_mock +from ansible._internal._templating._access import NotifiableAccessContextBase from ansible.errors import AnsibleUndefinedVariable, AnsibleTemplateError from ansible._internal._templating._errors import AnsibleTemplatePluginRuntimeError from ansible.module_utils._internal._datatag import AnsibleTaggedObject @@ -443,3 +444,27 @@ def test_mutation_methods(template: str, result: object) -> None: This feature may be deprecated and removed in a future release by using Jinja's ImmutableSandboxedEnvironment. """ assert TemplateEngine().template(TRUST.tag(template)) == result + + +class ExampleMarkerAccessTracker(NotifiableAccessContextBase): + def __init__(self) -> None: + self._type_interest = frozenset(Marker._concrete_subclasses) + self._markers: list[Marker] = [] + + def _notify(self, o: Marker) -> None: + self._markers.append(o) + + +@pytest.mark.parametrize("template", ( + '{{ adict["bogus"] | default("ok") }}', + '{{ adict.bogus | default("ok") }}', +)) +def test_marker_access_getattr_and_getitem(template: str) -> None: + """Ensure that getattr and getitem always access markers.""" + # the absence of a JinjaCallContext should cause the access done by getattr and getitem not to trip when a marker is encountered + assert TemplateEngine(variables=dict(adict={})).template(TRUST.tag(template)) == "ok" + + with ExampleMarkerAccessTracker() as tracker: # the access done by getattr and getitem should immediately trip when a marker is encountered + 
TemplateEngine(variables=dict(adict={})).template(TRUST.tag(template)) + + assert type(tracker._markers[0]) is UndefinedMarker # pylint: disable=unidiomatic-typecheck diff --git a/test/units/_internal/templating/test_lazy_containers.py b/test/units/_internal/templating/test_lazy_containers.py index 87d4f11e9ff..ef0004b7279 100644 --- a/test/units/_internal/templating/test_lazy_containers.py +++ b/test/units/_internal/templating/test_lazy_containers.py @@ -16,7 +16,8 @@ from ansible._internal._templating._jinja_common import CapturedExceptionMarker, from ansible._internal._datatag._tags import Origin, TrustedAsTemplate from ansible._internal._templating._utils import TemplateContext, LazyOptions from ansible._internal._templating._engine import TemplateEngine, TemplateOptions -from ansible._internal._templating._lazy_containers import _AnsibleLazyTemplateMixin, _AnsibleLazyTemplateList, _AnsibleLazyTemplateDict, _LazyValue +from ansible._internal._templating._lazy_containers import _AnsibleLazyTemplateMixin, _AnsibleLazyTemplateList, _AnsibleLazyTemplateDict, _LazyValue, \ + _AnsibleLazyAccessTuple, UnsupportedConstructionMethodError from ansible.module_utils._internal._datatag import AnsibleTaggedObject from ...module_utils.datatag.test_datatag import ExampleSingletonTag @@ -299,6 +300,7 @@ def test_lazy_list_adapter_operators(template, variables, expected) -> None: ('type(d1)(d1)', dict(a=_LazyValue(1), c=_LazyValue(1)), _AnsibleLazyTemplateDict), # _AnsibleLazyTemplateDict.__init__ copy ('l1.copy()', [_LazyValue(1)], _AnsibleLazyTemplateList), # _AnsibleLazyTemplateList.copy ('type(l1)(l1)', [_LazyValue(1)], _AnsibleLazyTemplateList), # _AnsibleLazyTemplateList.__init__ copy + ('type(t1)(t1)', (1,), _AnsibleLazyAccessTuple), ('copy.copy(l1)', [_LazyValue(1)], _AnsibleLazyTemplateList), ('copy.copy(d1)', dict(a=_LazyValue(1), c=_LazyValue(1)), _AnsibleLazyTemplateDict), ('copy.deepcopy(l1)', [_LazyValue(1)], _AnsibleLazyTemplateList), # 
__AnsibleLazyTemplateList.__deepcopy__ @@ -308,6 +310,7 @@ def test_lazy_list_adapter_operators(template, variables, expected) -> None: ('list(reversed(l1))', [1], list), # _AnsibleLazyTemplateList.__reversed__ ('list(reversed(d1))', ['c', 'a'], list), # dict.__reversed__ - keys only ('l1[:]', [_LazyValue(1)], _AnsibleLazyTemplateList), # __getitem__ (slice) + ('t1[:]', (1,), _AnsibleLazyAccessTuple), # __getitem__ (slice) ('d1["a"]', 1, int), # __getitem__ ('d1.get("a")', 1, int), # get ('l1[0]', 1, int), # __getitem__ @@ -366,6 +369,9 @@ def test_lazy_list_adapter_operators(template, variables, expected) -> None: ('tuple() + l1', 'can only concatenate tuple (not "_AnsibleLazyTemplateList") to tuple', TypeError), # __radd__ (relies on tuple.__add__) ('tuple() + d1', 'can only concatenate tuple (not "_AnsibleLazyTemplateDict") to tuple', TypeError), # relies on tuple.__add__ ('l1.pop(42)', "pop index out of range", IndexError), + ('type(l1)([])', 'Direct construction of lazy containers is not supported.', UnsupportedConstructionMethodError), + ('type(t1)([])', 'Direct construction of lazy containers is not supported.', UnsupportedConstructionMethodError), + ('type(d1)({})', 'Direct construction of lazy containers is not supported.', UnsupportedConstructionMethodError), ], ids=str) def test_lazy_container_operators(expression: str, expected_value: t.Any, expected_type: type) -> None: """ @@ -387,6 +393,7 @@ def test_lazy_container_operators(expression: str, expected_value: t.Any, expect l1x=[TRUST.tag('{{ one }}')], l2=[TRUST.tag('{{ two }}')], l2f=l2f, + t1=(TRUST.tag('{{ one }}'),), d1=dict(a=TRUST.tag('{{ one }}'), c=TRUST.tag('{{ one }}')), d1x=dict(a=TRUST.tag('{{ one }}'), c=TRUST.tag('{{ one }}')), d2=dict(b=TRUST.tag('{{ two }}'), c=TRUST.tag('{{ two }}')), @@ -436,6 +443,15 @@ def test_lazy_container_operators(expression: str, expected_value: t.Any, expect actual_list_types: list[type] = [type(value) for value in list.__iter__(result)] assert 
actual_list_types == expected_list_types + elif issubclass(expected_type, tuple): + assert isinstance(result, tuple) # redundant, but assists mypy in understanding the type + + expected_tuple_types = [type(value) for value in expected_value] + expected_result = expected_value + + actual_tuple_types: list[type] = [type(value) for value in tuple.__iter__(result)] + + assert actual_tuple_types == expected_tuple_types elif issubclass(expected_type, dict): assert isinstance(result, dict) # redundant, but assists mypy in understanding the type @@ -867,3 +883,12 @@ def test_lazy_copies(value: list | dict, deep: bool, template_context: TemplateC assert all((base_type.__getitem__(copied, key) is base_type.__getitem__(original, key)) != deep for key in keys) assert (copied._templar is original._templar) != deep assert (copied._lazy_options is original._lazy_options) != deep + + +def test_lazy_template_mixin_init() -> None: + """ + Verify `_AnsibleLazyTemplateMixin` checks the __init__ arg type. + This code path is not normally reachable, since types which use it perform the same check before invoking the mixin. 
+ """ + with pytest.raises(UnsupportedConstructionMethodError): + _AnsibleLazyTemplateMixin(t.cast(t.Any, None)) diff --git a/test/units/_internal/templating/test_templar.py b/test/units/_internal/templating/test_templar.py index ed1c5eb40bb..8565a9dbf05 100644 --- a/test/units/_internal/templating/test_templar.py +++ b/test/units/_internal/templating/test_templar.py @@ -27,6 +27,7 @@ import typing as t import pytest_mock from jinja2.runtime import Context +from jinja2.loaders import DictLoader import unittest @@ -1078,3 +1079,47 @@ def test_marker_from_test_plugin() -> None: """Verify test plugins can raise MarkerError to return a Marker, and that no warnings or deprecations are emitted.""" with emits_warnings(deprecation_pattern=[], warning_pattern=[]): assert TemplateEngine(variables=dict(something=TRUST.tag("{{ nope }}"))).template(TRUST.tag("{{ (something is eq {}) is undefined }}")) + + +@pytest.mark.parametrize("template,expected", ( + ("{{ none }}", None), # concat sees one node, NoneType result is preserved + ("{% if False %}{% endif %}", None), # concat sees one node, NoneType result is preserved + ("{{''}}{% if False %}{% endif %}", ""), # multiple blocks with an embedded None result, concat is in play, the result is an empty string + ("hey {{ none }}", "hey "), # composite template, the result is an empty string + ("{% import 'importme' as imported %}{{ imported }}", "imported template result"), +)) +def test_none_concat(template: str, expected: object) -> None: + """Validate that None values are omitted from composite template concat.""" + te = TemplateEngine() + + # set up an importable template to exercise TemplateModule code paths + te.environment.loader = DictLoader(dict(importme=TRUST.tag("{{ none }}{{ 'imported template result' }}{{ none }}"))) + + assert te.template(TRUST.tag(template)) == expected + + +def test_filter_generator() -> None: + """Verify that filters which return a generator are converted to a list while under the filter's 
JinjaCallContext.""" + variables = dict( + foo=[ + dict(x=1, optional_var=0), + dict(x=2), + ], + bar=TRUST.tag("{{ foo | selectattr('optional_var', 'defined') }}"), + ) + + te = TemplateEngine(variables=variables) + te.template(TRUST.tag("{{ bar }}")) + te.template(TRUST.tag("{{ lookup('vars', 'bar') }}")) + + +def test_call_context_reset() -> None: + """Ensure that new template invocations do not inherit trip behavior from running Jinja plugins.""" + templar = TemplateEngine(variables=dict( + somevar=TRUST.tag("{{ somedict.somekey | default('ok') }}"), + somedict=dict( + somekey=TRUST.tag("{{ not_here }}"), + ) + )) + + assert templar.template(TRUST.tag("{{ lookup('vars', 'somevar') }}")) == 'ok' diff --git a/test/units/ansible_test/ci/__init__.py b/test/units/ansible_test/ci/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/test/units/ansible_test/ci/test_azp.py b/test/units/ansible_test/ci/test_azp.py deleted file mode 100644 index 66cef83b35e..00000000000 --- a/test/units/ansible_test/ci/test_azp.py +++ /dev/null @@ -1,30 +0,0 @@ -from __future__ import annotations - -from .util import common_auth_test - - -def test_auth(): - # noinspection PyProtectedMember - from ansible_test._internal.ci.azp import ( - AzurePipelinesAuthHelper, - ) - - class TestAzurePipelinesAuthHelper(AzurePipelinesAuthHelper): - def __init__(self): - self.public_key_pem = None - self.private_key_pem = None - - def publish_public_key(self, public_key_pem): - # avoid publishing key - self.public_key_pem = public_key_pem - - def initialize_private_key(self): - # cache in memory instead of on disk - if not self.private_key_pem: - self.private_key_pem = self.generate_private_key() - - return self.private_key_pem - - auth = TestAzurePipelinesAuthHelper() - - common_auth_test(auth) diff --git a/test/units/ansible_test/ci/util.py b/test/units/ansible_test/ci/util.py deleted file mode 100644 index 75ba27cc86b..00000000000 --- a/test/units/ansible_test/ci/util.py +++ 
/dev/null @@ -1,50 +0,0 @@ -from __future__ import annotations - -import base64 -import json -import re - - -def common_auth_test(auth): - private_key_pem = auth.initialize_private_key() - public_key_pem = auth.public_key_pem - - extract_pem_key(private_key_pem, private=True) - extract_pem_key(public_key_pem, private=False) - - request = dict(hello='World') - auth.sign_request(request) - - verify_signature(request, public_key_pem) - - -def extract_pem_key(value, private): - assert isinstance(value, type(u'')) - - key_type = '(EC )?PRIVATE' if private else 'PUBLIC' - pattern = r'^-----BEGIN ' + key_type + r' KEY-----\n(?P.*?)\n-----END ' + key_type + r' KEY-----\n$' - match = re.search(pattern, value, flags=re.DOTALL) - - assert match, 'key "%s" does not match pattern "%s"' % (value, pattern) - - base64.b64decode(match.group('key')) # make sure the key can be decoded - - -def verify_signature(request, public_key_pem): - signature = request.pop('signature') - payload_bytes = json.dumps(request, sort_keys=True).encode() - - assert isinstance(signature, type(u'')) - - from cryptography.hazmat.backends import default_backend - from cryptography.hazmat.primitives import hashes - from cryptography.hazmat.primitives.asymmetric import ec - from cryptography.hazmat.primitives.serialization import load_pem_public_key - - public_key = load_pem_public_key(public_key_pem.encode(), default_backend()) - - public_key.verify( - base64.b64decode(signature.encode()), - payload_bytes, - ec.ECDSA(hashes.SHA256()), - ) diff --git a/test/units/executor/module_common/test_recursive_finder.py b/test/units/executor/module_common/test_recursive_finder.py index 948e499bd82..799f5207b6c 100644 --- a/test/units/executor/module_common/test_recursive_finder.py +++ b/test/units/executor/module_common/test_recursive_finder.py @@ -26,7 +26,6 @@ from ansible.plugins.loader import init_plugin_loader MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py', 'ansible/module_utils/__init__.py', 
'ansible/module_utils/basic.py', - 'ansible/module_utils/six/__init__.py', 'ansible/module_utils/_internal/__init__.py', 'ansible/module_utils/_internal/_ansiballz/__init__.py', 'ansible/module_utils/_internal/_ansiballz/_loader.py', @@ -46,6 +45,7 @@ MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py', 'ansible/module_utils/_internal/_traceback.py', 'ansible/module_utils/_internal/_validation.py', 'ansible/module_utils/_internal/_messages.py', + 'ansible/module_utils/_internal/_no_six.py', 'ansible/module_utils/_internal/_patches/_dataclass_annotation_patch.py', 'ansible/module_utils/_internal/_patches/_socket_patch.py', 'ansible/module_utils/_internal/_patches/_sys_intern_patch.py', @@ -78,7 +78,6 @@ MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py', 'ansible/module_utils/errors.py', 'ansible/module_utils/parsing/__init__.py', 'ansible/module_utils/parsing/convert_bool.py', - 'ansible/module_utils/six/__init__.py', )) ONLY_BASIC_FILE = frozenset(('ansible/module_utils/basic.py',)) diff --git a/test/units/module_utils/common/text/converters/test_to_str.py b/test/units/module_utils/common/text/converters/test_to_str.py index 4c2f63ae5ee..a06a91b72ee 100644 --- a/test/units/module_utils/common/text/converters/test_to_str.py +++ b/test/units/module_utils/common/text/converters/test_to_str.py @@ -45,3 +45,62 @@ def test_to_bytes(in_string, encoding, expected): def test_to_native(in_string, encoding, expected): """test happy path of encoding to native strings""" assert to_native(in_string, encoding) == expected + + +def test_type_hints() -> None: + """This test isn't really here to test the functionality of to_text/to_bytes + but more to ensure the overloads are properly validated for type hinting + """ + d: dict[str, str] = {'k': 'v'} + s: str = 's' + b: bytes = b'b' + + to_bytes_bytes: bytes = to_bytes(b) + to_bytes_str: bytes = to_bytes(s) + to_bytes_dict: bytes = to_bytes(d) + assert to_bytes_dict == repr(d).encode('utf-8') + + 
to_bytes_bytes_repr: bytes = to_bytes(b, nonstring='simplerepr') + to_bytes_str_repr: bytes = to_bytes(s, nonstring='simplerepr') + to_bytes_dict_repr: bytes = to_bytes(d, nonstring='simplerepr') + assert to_bytes_dict_repr == repr(d).encode('utf-8') + + to_bytes_bytes_passthru: bytes = to_bytes(b, nonstring='passthru') + to_bytes_str_passthru: bytes = to_bytes(s, nonstring='passthru') + to_bytes_dict_passthru: dict[str, str] = to_bytes(d, nonstring='passthru') + assert to_bytes_dict_passthru == d + + to_bytes_bytes_empty: bytes = to_bytes(b, nonstring='empty') + to_bytes_str_empty: bytes = to_bytes(s, nonstring='empty') + to_bytes_dict_empty: bytes = to_bytes(d, nonstring='empty') + assert to_bytes_dict_empty == b'' + + to_bytes_bytes_strict: bytes = to_bytes(b, nonstring='strict') + to_bytes_str_strict: bytes = to_bytes(s, nonstring='strict') + with pytest.raises(TypeError): + to_bytes_dict_strict: bytes = to_bytes(d, nonstring='strict') + + to_text_bytes: str = to_text(b) + to_text_str: str = to_text(s) + to_text_dict: str = to_text(d) + assert to_text_dict == repr(d) + + to_text_bytes_repr: str = to_text(b, nonstring='simplerepr') + to_text_str_repr: str = to_text(s, nonstring='simplerepr') + to_text_dict_repr: str = to_text(d, nonstring='simplerepr') + assert to_text_dict_repr == repr(d) + + to_text_bytes_passthru: str = to_text(b, nonstring='passthru') + to_text_str_passthru: str = to_text(s, nonstring='passthru') + to_text_dict_passthru: dict[str, str] = to_text(d, nonstring='passthru') + assert to_text_dict_passthru == d + + to_text_bytes_empty: str = to_text(b, nonstring='empty') + to_text_str_empty: str = to_text(s, nonstring='empty') + to_text_dict_empty: str = to_text(d, nonstring='empty') + assert to_text_dict_empty == '' + + to_text_bytes_strict: str = to_text(b, nonstring='strict') + to_text_str_strict: str = to_text(s, nonstring='strict') + with pytest.raises(TypeError): + to_text_dict_strict: str = to_text(d, nonstring='strict') diff --git 
a/test/units/module_utils/common/validation/test_check_type_str.py b/test/units/module_utils/common/validation/test_check_type_str.py index 4381ad1fd04..8ea8b23a0e0 100644 --- a/test/units/module_utils/common/validation/test_check_type_str.py +++ b/test/units/module_utils/common/validation/test_check_type_str.py @@ -12,6 +12,7 @@ from ansible.module_utils.common.validation import check_type_str, _check_type_s TEST_CASES = ( ('string', 'string'), + (None, '',), # 2.19+ relaxed restriction on None<->empty for backward compatibility (100, '100'), (1.5, '1.5'), ({'k1': 'v1'}, "{'k1': 'v1'}"), @@ -25,7 +26,7 @@ def test_check_type_str(value, expected): assert expected == check_type_str(value) -@pytest.mark.parametrize('value, expected', TEST_CASES[1:]) +@pytest.mark.parametrize('value, expected', TEST_CASES[2:]) def test_check_type_str_no_conversion(value, expected): with pytest.raises(TypeError) as e: _check_type_str_no_conversion(value) diff --git a/test/units/plugins/lookup/test_template.py b/test/units/plugins/lookup/test_template.py new file mode 100644 index 00000000000..5f77b73847f --- /dev/null +++ b/test/units/plugins/lookup/test_template.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +import pathlib + +from ansible._internal._templating._utils import Omit +from ansible.parsing.dataloader import DataLoader +from ansible.template import Templar, trust_as_template + + +def test_no_finalize_marker_passthru(tmp_path: pathlib.Path) -> None: + """Return an Undefined marker from a template lookup to ensure that the internal templating operation does not finalize its result.""" + template_path = tmp_path / 'template.txt' + template_path.write_text("{{ bogusvar }}") + + templar = Templar(loader=DataLoader(), variables=dict(template_path=str(template_path))) + + assert templar.template(trust_as_template('{{ lookup("template", template_path) | default("pass") }}')) == "pass" + + +def test_no_finalize_omit_passthru(tmp_path: pathlib.Path) -> None: + """Return an 
Omit scalar from a template lookup to ensure that the internal templating operation does not finalize its result.""" + template_path = tmp_path / 'template.txt' + template_path.write_text("{{ omitted }}") + + data = dict(omitted=trust_as_template("{{ omit }}"), template_path=str(template_path)) + + # The result from the lookup should be an Omit value, since the result of the template lookup's internal templating call should not be finalized. + # If it were, finalize would trip the Omit and raise an error about a top-level template result resolving to an Omit scalar. + res = Templar(loader=DataLoader(), variables=data).template(trust_as_template("{{ lookup('template', template_path) | type_debug }}")) + + assert res == type(Omit).__name__ diff --git a/test/units/utils/test_encrypt.py b/test/units/utils/test_encrypt.py index 0de27f6e340..4abc543c05e 100644 --- a/test/units/utils/test_encrypt.py +++ b/test/units/utils/test_encrypt.py @@ -18,93 +18,175 @@ def assert_hash(expected, secret, algorithm, **settings): assert encrypt.PasslibHash(algorithm).hash(secret, **settings) == expected +@pytest.mark.parametrize( + ("algorithm", "ident", "salt", "rounds", "expected"), + [ + pytest.param( + "bcrypt", + None, + "1234567890123456789012", + None, + "$2b$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu", + id="bcrypt_default", + ), + pytest.param( + "bcrypt", + "2", + "1234567890123456789012", + None, + "$2$12$123456789012345678901ufd3hZRrev.WXCbemqGIV/gmWaTGLImm", + id="bcrypt_ident_2", + ), + pytest.param( + "bcrypt", + "2y", + "1234567890123456789012", + None, + "$2y$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu", + id="bcrypt_ident_2y", + ), + pytest.param( + "bcrypt", + "2a", + "1234567890123456789012", + None, + "$2a$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu", + id="bcrypt_ident_2a", + ), + pytest.param( + "bcrypt", + "2b", + "1234567890123456789012", + None, + "$2b$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu", + 
id="bcrypt_ident_2b", + ), + pytest.param( + "sha256_crypt", + "invalid_ident", + "12345678", + 5000, + "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7", + id="sha256_crypt_invalid_ident", + ), + pytest.param( + "crypt16", + None, + "12", + None, + "12pELHK2ME3McUFlHxel6uMM", + id="crypt16_no_ident", + ), + ], +) @pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test') -def test_passlib(): - expected = "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7" - assert encrypt.passlib_or_crypt("123", "sha256_crypt", salt="12345678", rounds=5000) == expected - - -@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test') -def test_encrypt_with_ident(): - assert_hash("$2$12$123456789012345678901ufd3hZRrev.WXCbemqGIV/gmWaTGLImm", - secret="123", algorithm="bcrypt", salt='1234567890123456789012', ident='2') - assert_hash("$2y$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu", - secret="123", algorithm="bcrypt", salt='1234567890123456789012', ident='2y') - assert_hash("$2a$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu", - secret="123", algorithm="bcrypt", salt='1234567890123456789012', ident='2a') - assert_hash("$2b$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu", - secret="123", algorithm="bcrypt", salt='1234567890123456789012', ident='2b') - assert_hash("$2b$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu", - secret="123", algorithm="bcrypt", salt='1234567890123456789012') - # negative test: sha256_crypt does not take ident as parameter so ignore it - assert_hash("$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7", - secret="123", algorithm="sha256_crypt", salt="12345678", rounds=5000, ident='invalid_ident') +def test_encrypt_with_ident(algorithm, ident, salt, rounds, expected): + assert_hash(expected, secret="123", algorithm=algorithm, salt=salt, rounds=rounds, ident=ident) # If passlib is not installed. 
this is identical to the test_encrypt_with_rounds_no_passlib() test +@pytest.mark.parametrize( + ("algorithm", "rounds", "expected"), + [ + pytest.param( + "sha256_crypt", + None, + "$5$rounds=535000$12345678$uy3TurUPaY71aioJi58HvUY8jkbhSQU8HepbyaNngv.", + id="sha256_crypt_default_rounds", + ), + pytest.param( + "sha256_crypt", + 5000, + "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7", + id="sha256_crypt_rounds_5000", + ), + pytest.param( + "sha256_crypt", + 10000, + "$5$rounds=10000$12345678$JBinliYMFEcBeAXKZnLjenhgEhTmJBvZn3aR8l70Oy/", + id="sha256_crypt_rounds_10000", + ), + pytest.param( + "sha512_crypt", + None, + "$6$rounds=656000$12345678$InMy49UwxyCh2pGJU1NpOhVSElDDzKeyuC6n6E9O34BCUGVNYADnI.rcA3m.Vro9BiZpYmjEoNhpREqQcbvQ80", + id="sha512_crypt_default_rounds", + ), + pytest.param( + "sha512_crypt", + 5000, + "$6$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.", + id="sha512_crypt_rounds_5000", + ), + pytest.param( + "md5_crypt", + None, + "$1$12345678$tRy4cXc3kmcfRZVj4iFXr/", + id="md5_crypt_default_rounds", + ), + ], +) @pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test') -def test_encrypt_with_rounds(): - assert_hash("$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7", - secret="123", algorithm="sha256_crypt", salt="12345678", rounds=5000) - assert_hash("$5$rounds=10000$12345678$JBinliYMFEcBeAXKZnLjenhgEhTmJBvZn3aR8l70Oy/", - secret="123", algorithm="sha256_crypt", salt="12345678", rounds=10000) - assert_hash("$6$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.", - secret="123", algorithm="sha512_crypt", salt="12345678", rounds=5000) - - -# If passlib is not installed. 
this is identical to the test_encrypt_default_rounds_no_passlib() test -@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test') -def test_encrypt_default_rounds(): - assert_hash("$1$12345678$tRy4cXc3kmcfRZVj4iFXr/", - secret="123", algorithm="md5_crypt", salt="12345678") - assert_hash("$5$rounds=535000$12345678$uy3TurUPaY71aioJi58HvUY8jkbhSQU8HepbyaNngv.", - secret="123", algorithm="sha256_crypt", salt="12345678") - assert_hash("$6$rounds=656000$12345678$InMy49UwxyCh2pGJU1NpOhVSElDDzKeyuC6n6E9O34BCUGVNYADnI.rcA3m.Vro9BiZpYmjEoNhpREqQcbvQ80", - secret="123", algorithm="sha512_crypt", salt="12345678") - - assert encrypt.PasslibHash("md5_crypt").hash("123") +def test_encrypt_with_rounds(algorithm, rounds, expected): + assert_hash(expected, secret="123", algorithm=algorithm, salt="12345678", rounds=rounds) @pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test') -def test_password_hash_filter_passlib(): +def test_password_hash_filter_passlib_with_exception(): with pytest.raises(AnsibleError): get_encrypted_password("123", "sha257", salt="12345678") - # Uses passlib default rounds value for sha256 matching crypt behaviour - assert get_encrypted_password("123", "sha256", salt="12345678") == "$5$rounds=535000$12345678$uy3TurUPaY71aioJi58HvUY8jkbhSQU8HepbyaNngv." 
- assert get_encrypted_password("123", "sha256", salt="12345678", rounds=5000) == "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7" - assert (get_encrypted_password("123", "sha256", salt="12345678", rounds=10000) == - "$5$rounds=10000$12345678$JBinliYMFEcBeAXKZnLjenhgEhTmJBvZn3aR8l70Oy/") - - assert (get_encrypted_password("123", "sha512", salt="12345678", rounds=6000) == - "$6$rounds=6000$12345678$l/fC67BdJwZrJ7qneKGP1b6PcatfBr0dI7W6JLBrsv8P1wnv/0pu4WJsWq5p6WiXgZ2gt9Aoir3MeORJxg4.Z/") - - assert (get_encrypted_password("123", "sha512", salt="12345678", rounds=5000) == - "$6$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.") +@pytest.mark.parametrize( + ("algorithm", "rounds", "expected_hash"), + [ + pytest.param( + "sha256", + None, + "$5$rounds=535000$12345678$uy3TurUPaY71aioJi58HvUY8jkbhSQU8HepbyaNngv.", + id="sha256_default_rounds", + ), + pytest.param( + "sha256", + 5000, + "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7", + id="sha256_rounds_5000", + ), + pytest.param( + "sha256", + 10000, + "$5$rounds=10000$12345678$JBinliYMFEcBeAXKZnLjenhgEhTmJBvZn3aR8l70Oy/", + id="sha256_rounds_10000", + ), + pytest.param( + "sha512", + 5000, + "$6$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.", + id="sha512_rounds_5000", + ), + pytest.param( + "sha512", + 6000, + "$6$rounds=6000$12345678$l/fC67BdJwZrJ7qneKGP1b6PcatfBr0dI7W6JLBrsv8P1wnv/0pu4WJsWq5p6WiXgZ2gt9Aoir3MeORJxg4.Z/", + id="sha512_rounds_6000", + ), + ], +) +@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test') +def test_password_hash_filter_passlib(algorithm, rounds, expected_hash): + assert get_encrypted_password("123", algorithm, salt="12345678", rounds=rounds) == expected_hash @pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test') -def test_do_encrypt_passlib(): +def 
test_do_encrypt_passlib_with_exception(): with pytest.raises(AnsibleError): encrypt.do_encrypt("123", "sha257_crypt", salt="12345678") - # Uses passlib default rounds value for sha256 matching crypt behaviour. - assert encrypt.do_encrypt("123", "sha256_crypt", salt="12345678") == "$5$rounds=535000$12345678$uy3TurUPaY71aioJi58HvUY8jkbhSQU8HepbyaNngv." - - assert encrypt.do_encrypt("123", "md5_crypt", salt="12345678") == "$1$12345678$tRy4cXc3kmcfRZVj4iFXr/" - - assert encrypt.do_encrypt("123", "crypt16", salt="12") == "12pELHK2ME3McUFlHxel6uMM" - - assert encrypt.do_encrypt("123", "bcrypt", - salt='1234567890123456789012', - ident='2a') == "$2a$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu" - def test_random_salt(): res = encrypt.random_salt() - expected_salt_candidate_chars = u'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789./' + expected_salt_candidate_chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789./' assert len(res) == 8 for res_char in res: assert res_char in expected_salt_candidate_chars @@ -123,14 +205,14 @@ def test_passlib_bcrypt_salt(recwarn): expected = '$2b$12$123456789012345678901uMv44x.2qmQeefEGb3bcIRc1mLuO7bqa' ident = '2b' - p = encrypt.PasslibHash('bcrypt') + passlib_obj = encrypt.PasslibHash('bcrypt') - result = p.hash(secret, salt=salt, ident=ident) + result = passlib_obj.hash(secret, salt=salt, ident=ident) passlib_warnings = [w.message for w in recwarn if isinstance(w.message, passlib_exc.PasslibHashWarning)] assert len(passlib_warnings) == 0 assert result == expected recwarn.clear() - result = p.hash(secret, salt=repaired_salt, ident=ident) + result = passlib_obj.hash(secret, salt=repaired_salt, ident=ident) assert result == expected