Typo fixes and other bits and bobs (#83672)

Co-authored-by: Lee Garrett <lgarrett@rocketjump.eu>
Lee Garrett authored 4 months ago, committed by GitHub
parent 8e74cdc7b2
commit 7e3916b767

@@ -54,7 +54,7 @@ body:
<em>Why?</em>
</summary>
We would do it by ourselves but unfortunatelly, the curent
We would do it by ourselves but unfortunately, the current
edition of GitHub Issue Forms Alpha does not support this yet 🤷

@@ -109,7 +109,7 @@ body:
<em>Why?</em>
</summary>
We would do it by ourselves but unfortunatelly, the curent
We would do it by ourselves but unfortunately, the current
edition of GitHub Issue Forms Alpha does not support this yet 🤷

@@ -1,2 +1,2 @@
bugfixes:
- disro package - update the distro package version from 1.8.0 to 1.9.0 (https://github.com/ansible/ansible/issues/82935)
- distro package - update the distro package version from 1.8.0 to 1.9.0 (https://github.com/ansible/ansible/issues/82935)

@@ -506,7 +506,7 @@ class ConfigCLI(CLI):
# prep loading
loader = getattr(plugin_loader, '%s_loader' % ptype)
# acumulators
# accumulators
output = []
config_entries = {}
@@ -523,7 +523,7 @@ class ConfigCLI(CLI):
plugin_cs = loader.all(class_only=True)
for plugin in plugin_cs:
# in case of deprecastion they diverge
# in case of deprecation they diverge
finalname = name = plugin._load_name
if name.startswith('_'):
if os.path.islink(plugin._original_path):

@@ -545,7 +545,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
if path:
module_loader.add_directory(path)
# dynamically add 'cannonical' modules as commands, aliases coudld be used and dynamically loaded
# dynamically add 'canonical' modules as commands, aliases could be used and dynamically loaded
self.modules = self.list_modules()
for module in self.modules:
setattr(self, 'do_' + module, lambda arg, module=module: self.default(module + ' ' + arg))

@@ -50,7 +50,7 @@ PB_OBJECTS = ['Play', 'Role', 'Block', 'Task', 'Handler']
PB_LOADED = {}
SNIPPETS = ['inventory', 'lookup', 'module']
# harcoded from ascii values
# hardcoded from ascii values
STYLE = {
'BLINK': '\033[5m',
'BOLD': '\033[1m',
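
These are standard ANSI SGR escape sequences; as a quick illustration in Python:

print(STYLE['BOLD'] + 'bold text' + '\033[0m')  # '\033[0m' resets all attributes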
@@ -1195,7 +1195,7 @@ class DocCLI(CLI, RoleMixin):
opt_leadin = "-"
key = "%s%s %s" % (base_indent, opt_leadin, _format(o, 'yellow'))
# description is specifically formated and can either be string or list of strings
# description is specifically formatted and can either be string or list of strings
if 'description' not in opt:
raise AnsibleError("All (sub-)options and return values must have a 'description' field")
text.append('')

@@ -1018,7 +1018,7 @@ DEFAULT_STDOUT_CALLBACK:
EDITOR:
name: editor application to use
default: vi
descrioption:
description:
- for the cases in which Ansible needs to return a file within an editor, this chooses the application to use.
ini:
- section: defaults
@@ -1781,7 +1781,7 @@ OLD_PLUGIN_CACHE_CLEARING:
PAGER:
name: pager application to use
default: less
descrioption:
description:
- for the cases in which Ansible needs to return output in a pageable fashion, this chooses the application to use.
ini:
- section: defaults

@@ -41,7 +41,7 @@ class InterpreterDiscoveryRequiredError(Exception):
def discover_interpreter(action, interpreter_name, discovery_mode, task_vars):
# interpreter discovery is a 2-step process with the target. First, we use a simple shell-agnostic bootstrap to
# get the system type from uname, and find any random Python that can get us the info we need. For supported
# target OS types, we'll dispatch a Python script that calls plaform.dist() (for older platforms, where available)
# target OS types, we'll dispatch a Python script that calls platform.dist() (for older platforms, where available)
# and brings back /etc/os-release (if present). The proper Python path is looked up in a table of known
# distros/versions with included Pythons; if nothing is found, depending on the discovery mode, either the
# default fallback of /usr/bin/python is used (if we know it's there), or discovery fails.
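
A minimal sketch of that two-step flow, with an assumed run(cmd) helper that executes a command on the target and returns its stdout (the table and names here are illustrative, not Ansible's actual internals):

KNOWN_INTERPRETERS = {('ubuntu', '22.04'): '/usr/bin/python3'}  # assumed lookup table

def discover_interpreter(run):
    # Step 1: shell-agnostic bootstrap: system type plus any usable Python.
    if run('uname -s').strip() != 'Linux':
        return '/usr/bin/python'  # default fallback
    pythons = run('command -v python3 python').split()
    if not pythons:
        return '/usr/bin/python'
    # Step 2: use the Python we found to bring back /etc/os-release, then
    # look the distro/version pair up in the table of known interpreters.
    release = run(pythons[0] + ' -c "print(open(\'/etc/os-release\').read())"')
    info = dict(line.split('=', 1) for line in release.splitlines() if '=' in line)
    key = (info.get('ID', '').strip('"'), info.get('VERSION_ID', '').strip('"'))
    return KNOWN_INTERPRETERS.get(key, '/usr/bin/python')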

@@ -16,7 +16,7 @@ begin {
.SYNOPSIS
Converts a JSON string to a Hashtable/Array in the fastest way
possible. Unfortunately ConvertFrom-Json is still faster but outputs
a PSCustomObject which is combersone for module consumption.
a PSCustomObject which is cumbersome for module consumption.
.PARAMETER InputObject
[String] The JSON string to deserialize.

@@ -223,7 +223,7 @@ class TaskQueueManager:
callback_type = getattr(callback_plugin, 'CALLBACK_TYPE', '')
callback_needs_enabled = getattr(callback_plugin, 'CALLBACK_NEEDS_ENABLED', getattr(callback_plugin, 'CALLBACK_NEEDS_WHITELIST', False))
# try to get colleciotn world name first
# try to get collection world name first
cnames = getattr(callback_plugin, '_redirected_names', [])
if cnames:
# store the name the plugin was loaded as, as that's what we'll need to compare to the configured callback list later

@@ -139,7 +139,7 @@ class TaskResult:
elif self._result:
result._result = module_response_deepcopy(self._result)
# actualy remove
# actually remove
for remove_key in ignore:
if remove_key in result._result:
del result._result[remove_key]

@@ -61,7 +61,7 @@ class ConcreteArtifactsManager:
"""
def __init__(self, b_working_directory, validate_certs=True, keyring=None, timeout=60, required_signature_count=None, ignore_signature_errors=None):
# type: (bytes, bool, str, int, str, list[str]) -> None
"""Initialize ConcreteArtifactsManager caches and costraints."""
"""Initialize ConcreteArtifactsManager caches and constraints."""
self._validate_certs = validate_certs # type: bool
self._artifact_cache = {} # type: dict[bytes, bytes]
self._galaxy_artifact_cache = {} # type: dict[Candidate | Requirement, bytes]

@@ -126,7 +126,7 @@ class CollectionDependencyProviderBase(AbstractProvider):
the current candidate list
* ``parent`` specifies the candidate that provides
(dependend on) the requirement, or `None`
(depended on) the requirement, or `None`
to indicate a root requirement.
resolvelib >=0.7.0, < 0.8.0
@@ -202,7 +202,7 @@ class CollectionDependencyProviderBase(AbstractProvider):
remote archives), the one-and-only match is returned
For a "named" requirement, Galaxy-compatible APIs are consulted
to find concrete candidates for this requirement. Of theres a
to find concrete candidates for this requirement. If there's a
pre-installed candidate, it's prepended in front of others.
resolvelib >=0.5.3, <0.6.0
@@ -437,7 +437,7 @@ class CollectionDependencyProviderBase(AbstractProvider):
# FIXME: differs. So how do we resolve this case? Priority?
# FIXME: Taking into account a pinned hash? Exploding on
# FIXME: any differences?
# NOTE: The underlying implmentation currently uses first found
# NOTE: The underlying implementation currently uses first found
req_map = self._api_proxy.get_collection_dependencies(candidate)
# NOTE: This guard expression MUST perform an early exit only

@@ -1431,7 +1431,7 @@ class AnsibleModule(object):
kwargs['deprecations'] = deprecations
# preserve bools/none from no_log
# TODO: once python version on target high enough, dict comprh
# TODO: once python version on target high enough, dict comprehensions
preserved = {}
for k, v in kwargs.items():
if v is None or isinstance(v, bool):
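
The dict comprehension that the TODO refers to would collapse the loop above into a single line:

# equivalent one-liner once the minimum Python version on targets allows it
preserved = {k: v for k, v in kwargs.items() if v is None or isinstance(v, bool)}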

@@ -65,7 +65,7 @@ class ImmutableDict(Hashable, Mapping):
def is_string(seq):
"""Identify whether the input has a string-like type (inclding bytes)."""
"""Identify whether the input has a string-like type (including bytes)."""
# AnsibleVaultEncryptedUnicode inherits from Sequence, but is expected to be a string like object
return isinstance(seq, (text_type, binary_type)) or getattr(seq, '__ENCRYPTED__', False)

@@ -54,7 +54,7 @@ class OpenBSDHardware(Hardware):
hardware_facts.update(self.get_dmi_facts())
hardware_facts.update(self.get_uptime_facts())
# storage devices notorioslly prone to hang/block so they are under a timeout
# storage devices notoriously prone to hang/block so they are under a timeout
try:
hardware_facts.update(self.get_mount_facts())
except timeout.TimeoutError:

@@ -20,7 +20,7 @@ from ansible.module_utils.facts.network.base import Network, NetworkCollector
class HPUXNetwork(Network):
"""
HP-UX-specifig subclass of Network. Defines networking facts:
HP-UX-specific subclass of Network. Defines networking facts:
- default_interface
- interfaces (a list of interface names)
- interface_<name> dictionary of ipv4 address information.

@@ -106,7 +106,7 @@ class ServiceMgrFactCollector(BaseFactCollector):
proc_1 = proc_1.strip()
if proc_1 is not None and (proc_1 == 'init' or proc_1.endswith('sh')):
# many systems return init, so this cannot be trusted, if it ends in 'sh' it probalby is a shell in a container
# many systems return init, so this cannot be trusted, if it ends in 'sh' it probably is a shell in a container
proc_1 = None
# if not init/None it should be an identifiable or custom init, so we are done!

@@ -37,7 +37,7 @@ Function Add-CSharpType {
.PARAMETER CompileSymbols
[String[]] A list of symbols to be defined during compile time. These are
added to the existing symbols, 'CORECLR', 'WINDOWS', 'UNIX' that are set
conditionalls in this cmdlet.
conditionally in this cmdlet.
.NOTES
The following features were added to control the compiling options from the

@@ -4,7 +4,7 @@
# used by Convert-DictToSnakeCase to convert a string in camelCase
# format to snake_case
Function Convert-StringToSnakeCase($string) {
# cope with pluralized abbreaviations such as TargetGroupARNs
# cope with pluralized abbreviations such as TargetGroupARNs
if ($string -cmatch "[A-Z]{3,}s") {
$replacement_string = $string -creplace $matches[0], "_$($matches[0].ToLower())"
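
For illustration, the same pluralized-abbreviation handling sketched in Python (not the module's code):

import re

def convert_string_to_snake_case(value):
    # Fold pluralized abbreviations such as TargetGroupARNs into one word
    # first: 'TargetGroupARNs' -> 'TargetGroup_arns'.
    value = re.sub(r'[A-Z]{3,}s', lambda m: '_' + m.group(0).lower(), value)
    # Then split the remaining camelCase on capitals and lowercase it all.
    value = re.sub(r'(?<!^)(?<!_)([A-Z])', r'_\1', value).lower()
    return value.lstrip('_')

print(convert_string_to_snake_case('TargetGroupARNs'))  # -> target_group_arns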

@@ -81,7 +81,7 @@ def split_args(args):
'''
# the list of params parsed out of the arg string
# this is going to be the result value when we are donei
# this is going to be the result value when we are done
params = []
# here we encode the args, so we have a uniform charset to

@@ -507,7 +507,7 @@ def package_best_match(pkgname, version_cmp, version, release, cache):
policy.create_pin('Release', pkgname, release, 990)
if version_cmp == "=":
# Installing a specific version from command line overrides all pinning
# We don't mimmic this exactly, but instead set a priority which is higher than all APT built-in pin priorities.
# We don't mimic this exactly, but instead set a priority which is higher than all APT built-in pin priorities.
policy.create_pin('Version', pkgname, version, 1001)
pkg = cache[pkgname]
pkgver = policy.get_candidate_ver(pkg)
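
For reference, these are the built-in pin priorities (per apt_preferences(5)) that the 1001 above is chosen to exceed; any priority over 1000 also permits a downgrade, which is what an exact '=' version match needs:

# documented APT defaults, for comparison with the 990 and 1001 pins above
APT_BUILTIN_PIN_PRIORITIES = {
    'installed version': 100,
    'default candidate from a configured source': 500,
    'explicit target release': 990,
}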

@@ -468,7 +468,7 @@ def write_ssh_wrapper(module):
def set_git_ssh_env(key_file, ssh_opts, git_version, module):
'''
use environment variables to configure git's ssh execution,
which varies by version but this functino should handle all.
which varies by version but this function should handle all.
'''
# initialise to existing ssh opts and/or append user provided

@@ -516,7 +516,7 @@ class DarwinStrategy(BaseStrategy):
However, macOS also has LocalHostName and ComputerName settings.
LocalHostName controls the Bonjour/ZeroConf name, used by services
like AirDrop. This class implements a method, _scrub_hostname(), that mimics
the transformations macOS makes on hostnames when enterened in the Sharing
the transformations macOS makes on hostnames when entered in the Sharing
preference pane. It replaces spaces with dashes and removes all special
characters.

@@ -203,7 +203,7 @@ def main():
worked = is_started = get_ps(module, pattern)
else:
if location.get('service'):
# standard tool that has been 'destandarized' by reimplementation in other OS/distros
# standard tool that has been 'destandardized' by reimplementation in other OS/distros
cmd = '%s %s status' % (location['service'], name)
elif script:
# maybe script implements status (not LSB)

@@ -2331,7 +2331,7 @@ class DarwinUser(User):
super(DarwinUser, self).__init__(module)
# make the user hidden if option is set or deffer to system option
# make the user hidden if option is set or defer to system option
if self.hidden is None:
if self.system:
self.hidden = 1

@@ -122,7 +122,7 @@ class AnsibleVaultEncryptedUnicode(Sequence, AnsibleBaseYAMLObject):
return True
def __reversed__(self):
# This gets inerhited from ``collections.Sequence`` which returns a generator
# This gets inherited from ``collections.Sequence`` which returns a generator
# make this act more like the string implementation
return to_text(self[::-1], errors='surrogate_or_strict')

@@ -90,7 +90,7 @@ class PlaybookInclude(Base, Conditional, Taggable):
# it is a collection playbook, setup default collections
AnsibleCollectionConfig.default_collection = playbook_collection
else:
# it is NOT a collection playbook, setup adjecent paths
# it is NOT a collection playbook, setup adjacent paths
AnsibleCollectionConfig.playbook_paths.append(os.path.dirname(os.path.abspath(to_bytes(playbook, errors='surrogate_or_strict'))))
pb._load_playbook_data(file_name=playbook, variable_manager=variable_manager, vars=self.vars.copy())
@@ -123,7 +123,7 @@ class PlaybookInclude(Base, Conditional, Taggable):
def preprocess_data(self, ds):
'''
Regorganizes the data for a PlaybookInclude datastructure to line
Reorganizes the data for a PlaybookInclude datastructure to line
up with what we expect the proper attributes to be
'''

@@ -107,7 +107,7 @@ class Role(Base, Conditional, Taggable, CollectionSearch, Delegatable):
self.static = static
# includes (static=false) default to private, while imports (static=true) default to public
# but both can be overriden by global config if set
# but both can be overridden by global config if set
if public is None:
global_private, origin = C.config.get_config_value_and_origin('DEFAULT_PRIVATE_ROLE_VARS')
if origin == 'default':
@@ -508,7 +508,7 @@ class Role(Base, Conditional, Taggable, CollectionSearch, Delegatable):
# get exported variables from meta/dependencies
seen = []
for dep in self.get_all_dependencies():
# Avoid reruning dupe deps since they can have vars from previous invocations and they accumulate in deps
# Avoid rerunning dupe deps since they can have vars from previous invocations and they accumulate in deps
# TODO: re-examine dep loading to see if we are somehow improperly adding the same dep too many times
if dep not in seen:
# only take 'exportable' vars from deps

@@ -146,7 +146,7 @@ class ActionBase(ABC):
Be cautious when directly passing ``new_module_args`` directly to a
module invocation, as it will contain the defaults, and not only
the args supplied from the task. If you do this, the module
should not define ``mututally_exclusive`` or similar.
should not define ``mutually_exclusive`` or similar.
This code is roughly copied from the ``validate_argument_spec``
action plugin for use by other action plugins.

@@ -263,7 +263,7 @@ class CliconfBase(AnsiblePlugin):
'supports_commit_comment': <bool>, # identify if adding comment to commit is supported of not
'supports_onbox_diff': <bool>, # identify if on box diff capability is supported or not
'supports_generate_diff': <bool>, # identify if diff capability is supported within plugin
'supports_multiline_delimiter': <bool>, # identify if multiline demiliter is supported within config
'supports_multiline_delimiter': <bool>, # identify if multiline delimiter is supported within config
'supports_diff_match': <bool>, # identify if match is supported
'supports_diff_ignore_lines': <bool>, # identify if ignore line in diff is supported
'supports_config_replace': <bool>, # identify if running config replace with candidate config is supported
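
Downstream code typically gates behavior on these flags. A hedged sketch, assuming a connected cliconf plugin instance and the documented shape where the flags live under 'device_operations':

import json

caps = json.loads(cliconf.get_capabilities())  # cliconf: assumed plugin instance
if caps.get('device_operations', {}).get('supports_generate_diff'):
    # candidate_config / running_config assumed to be loaded elsewhere
    diff = cliconf.get_diff(candidate=candidate_config, running=running_config)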

@@ -346,7 +346,7 @@ class Connection(ConnectionBase):
has_pipelining = True
allow_extras = True
# Satifies mypy as this connection only ever runs with this plugin
# Satisfies mypy as this connection only ever runs with this plugin
_shell: PowerShellPlugin
def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:

@@ -3,7 +3,7 @@ DOCUMENTATION:
version_added: "2.4"
short_description: date formating
description:
- Using Python's C(strftime) function, take a data formating string and a date/time to create a formated date.
- Using Python's C(strftime) function, take a data formating string and a date/time to create a formatted date.
notes:
- This is a passthrough to Python's C(stftime), for a complete set of formatting options go to https://strftime.org/.
positional: _input, second, utc

@@ -3,7 +3,7 @@ DOCUMENTATION:
version_added: "2.9"
short_description: namespaced UUID generator
description:
- Use to generate namespeced Universal Unique ID.
- Use to generate namespaced Universal Unique ID.
positional: _input, namespace
options:
_input:

@@ -130,7 +130,7 @@ EXAMPLES = r'''# fmt: code
mandatory_options = arg_parser.add_mutually_exclusive_group()
mandatory_options.add_argument('--list', action='store', nargs="*", help="Get inventory JSON from our API")
mandatory_options.add_argument('--host', action='store',
help="Get variables for specific host, not used but kept for compatability")
help="Get variables for specific host, not used but kept for compatibility")
try:
config = load_config()

@@ -84,7 +84,7 @@ def _list_plugins_from_paths(ptype, dirs, collection, depth=0):
to_native(b_ext) in C.REJECT_EXTS, # general extensions to ignore
b_ext in (b'.yml', b'.yaml', b'.json'), # ignore docs files TODO: constant!
plugin in IGNORE.get(bkey, ()), # plugin in reject list
os.path.islink(full_path), # skip aliases, author should document in 'aliaes' field
os.path.islink(full_path), # skip aliases, author should document in 'aliases' field
]):
continue

@@ -8,7 +8,7 @@ DOCUMENTATION = """
version_added: "2.5"
short_description: Display the 'resolved' Ansible option values.
description:
- Retrieves the value of an Ansible configuration setting, resolving all sources, from defaults, ansible.cfg, envirionmnet,
- Retrieves the value of an Ansible configuration setting, resolving all sources, from defaults, ansible.cfg, environment,
CLI, and variables, but not keywords.
- The values returned assume the context of the current host or C(inventory_hostname).
- You can use C(ansible-config list) to see the global available settings, add C(-t all) to also show plugin options.

@@ -129,7 +129,7 @@ class ShellBase(AnsiblePlugin):
# other users can read and access the tmp directory.
# This is because we use system to create tmp dirs for unprivileged users who are
# sudo'ing to a second unprivileged user.
# The 'system_tmpdirs' setting defines dirctories we can use for this purpose
# The 'system_tmpdirs' setting defines directories we can use for this purpose
# the default are, /tmp and /var/tmp.
# So we only allow one of those locations if system=True, using the
# passed in tmpdir if it is valid or the first one from the setting if not.
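
The selection rule described above, as a simplified hypothetical sketch:

SYSTEM_TMPDIRS = ['/tmp', '/var/tmp']  # the setting's documented defaults

def pick_tmpdir(tmpdir, system=False):
    if not system:
        return tmpdir
    # With system=True only the configured world-accessible locations are
    # allowed: keep the passed-in tmpdir if it is one of them, otherwise
    # fall back to the first entry from the setting.
    return tmpdir if tmpdir in SYSTEM_TMPDIRS else SYSTEM_TMPDIRS[0]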

@@ -646,7 +646,7 @@ class StrategyBase:
for result_item in result_items:
if '_ansible_notify' in result_item and task_result.is_changed():
# only ensure that notified handlers exist, if so save the notifications for when
# handlers are actually flushed so the last defined handlers are exexcuted,
# handlers are actually flushed so the last defined handlers are executed,
# otherwise depending on the setting either error or warn
host_state = iterator.get_state_for_host(original_host.name)
for notification in result_item['_ansible_notify']:

@@ -469,7 +469,7 @@ class JinjaPluginIntercept(MutableMapping):
if self._pluginloader.type == 'filter':
# filter need wrapping
if key in C.STRING_TYPE_FILTERS:
# avoid litera_eval when you WANT strings
# avoid literal_eval when you WANT strings
func = _wrap_native_text(func)
else:
# conditionally unroll iterators/generators to avoid having to use `|list` after every filter

@@ -7,7 +7,7 @@
dest: "{{ galaxy_dir }}/resolvelib/ns/coll"
state: directory
- name: create galaxy.yml with a dependecy on a galaxy-sourced collection
- name: create galaxy.yml with a dependency on a galaxy-sourced collection
copy:
dest: "{{ galaxy_dir }}/resolvelib/ns/coll/galaxy.yml"
content: |

@@ -153,7 +153,7 @@
that:
- "updated_file.stat.checksum != file.stat.checksum"
- name: test verifying checksumes of the modified collection
- name: test verifying checksums of the modified collection
command: ansible-galaxy collection verify ansible_test.verify:2.0.0 -s {{ test_name }} {{ galaxy_verbosity }}
register: verify
failed_when: verify.rc == 0

@@ -31,7 +31,7 @@
- name: Initialize git repository
command: 'git init {{ scm_path }}/test_prereleases'
- name: Configure commiter for the repo
- name: Configure committer for the repo
shell: git config user.email ansible-test@ansible.com && git config user.name ansible-test
args:
chdir: "{{ scm_path }}/test_prereleases"

@@ -13,7 +13,7 @@
- name: final task, has to be reached for the test to succeed
debug: msg="MAGICKEYWORD"
- name: check that extra vars are correclty passed
- name: check that extra vars are correctly passed
assert:
that:
- docker_registries_login is defined

@@ -1,4 +1,4 @@
- name: "Ensure import of a deliberately corrupted downloaded GnuPG binary key results in an 'inline data' occurence in the message"
- name: "Ensure import of a deliberately corrupted downloaded GnuPG binary key results in an 'inline data' occurrence in the message"
apt_key:
url: https://ci-files.testing.ansible.com/test/integration/targets/apt_key/apt-key-corrupt-zeros-2k.gpg
register: gpg_inline_result

@@ -123,12 +123,12 @@
# a literal 600 as the mode will fail currently, in the sense that it
# doesn't guess and consider 600 and 0600 to be the same, and will instead
# intepret literal 600 as the decimal 600 (and thereby octal 1130).
# interpret literal 600 as the decimal 600 (and thereby octal 1130).
# The literal 0600 can be interpreted as octal correctly. Note that
# a decimal 644 is octal 420. The default perm is 0644 so a mis intrpretation
# a decimal 644 is octal 420. The default perm is 0644 so a misinterpretation
# of 644 was previously resulting in a default file mode of 0420.
# 'mode: 600' is likely not what a user meant but there isnt enough info
# to determine that. Note that a string arg of '600' will be intrepeted as 0600.
# to determine that. Note that a string arg of '600' will be interpreted as 0600.
# See https://github.com/ansible/ansible/issues/16370
- name: Assert mode_given_yaml_literal_600 is correct
assert:
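
The arithmetic behind that comment, checked in Python:

assert oct(600) == '0o1130'           # decimal 600 rendered in octal
assert int('600', 8) == 0o600 == 384  # the string '600' parsed as octal
assert 0o644 == 420                   # the default 0644 is decimal 420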

@@ -25,7 +25,7 @@ run_test() {
sed -i -e 's/ *$//' "${OUTFILE}.${testname}.stdout"
sed -i -e 's/ *$//' "${OUTFILE}.${testname}.stderr"
# Scrub deprication warning that shows up in Python 2.6 on CentOS 6
# Scrub deprecation warning that shows up in Python 2.6 on CentOS 6
sed -i -e '/RandomPool_DeprecationWarning/d' "${OUTFILE}.${testname}.stderr"
diff -u "${ORIGFILE}.${testname}.stdout" "${OUTFILE}.${testname}.stdout" || diff_failure

@@ -12,7 +12,7 @@
backup: yes
register: blockinfile_test0
- name: ensure we have a bcackup file
- name: ensure we have a backup file
assert:
that:
- "'backup_file' in blockinfile_test0"
@@ -38,7 +38,7 @@
- 'blockinfile_test0.msg == "Block inserted"'
- 'blockinfile_test0_grep.stdout == "2"'
- name: check idemptotence
- name: check idempotence
blockinfile:
path: "{{ remote_tmp_dir_test }}/sshd_config"
block: |

@@ -13,4 +13,4 @@
run_once: true
rescue:
- debug:
msg: "Attemp 1 failed!"
msg: "Attempt 1 failed!"

@@ -16,7 +16,7 @@ grep -e "${EXPECTED_REGEX}" "${OUTFILE}"
# test connection tracking
EXPECTED_CONNECTION='{"testhost":{"ssh":4}}'
OUTPUT_TAIL=$(tail -n5 ${OUTFILE} | tr -d '[:space:]')
echo "Checking for connection strin ${OUTPUT_TAIL} in stdout."
echo "Checking for connection string ${OUTPUT_TAIL} in stdout."
[ "${EXPECTED_CONNECTION}" == "${OUTPUT_TAIL}" ]
echo $?

@@ -1,5 +1,5 @@
---
# Run withhout --check
# Run without --check
- hosts: testhost
gather_facts: False
tasks:

@@ -4,7 +4,7 @@
ansible_connection: ssh
ansible_ssh_timeout: 10
tasks:
- name: contain the maddness
- name: contain the madness
block:
- name: test all is good
ping:
@@ -16,7 +16,7 @@
ping:
ignore_unreachable: True
vars:
ansible_ssh_args: "-F {{playbook_dir}}/files/port_overrride_ssh.cfg"
ansible_ssh_args: "-F {{playbook_dir}}/files/port_override_ssh.cfg"
register: expected
- name: check all is as expected

@@ -36,7 +36,7 @@
- check_mode_subdir_real_stat.stat.exists
# Do some finagling here. First, use check_mode to ensure it never gets
# created. Then actualy create it, and use check_mode to ensure that doing
# created. Then actually create it, and use check_mode to ensure that doing
# the same copy gets marked as no change.
#
# This same pattern repeats for several other src/dest combinations.

@@ -1,4 +1,4 @@
- name: setup delegated hsot
- name: setup delegated host
hosts: localhost
gather_facts: false
tasks:

@@ -57,7 +57,7 @@ ansible-playbook delegate_facts_block.yml -i inventory -v "$@"
ansible-playbook test_delegate_to_loop_caching.yml -i inventory -v "$@"
# ensure we are using correct settings when delegating
ANSIBLE_TIMEOUT=3 ansible-playbook delegate_vars_hanldling.yml -i inventory -v "$@"
ANSIBLE_TIMEOUT=3 ansible-playbook delegate_vars_handling.yml -i inventory -v "$@"
ansible-playbook has_hostvars.yml -i inventory -v "$@"

@@ -11,7 +11,7 @@
ansible_python_interpreter: "{{ ansible_playbook_python }}"
loop: "{{ range(10)|list }}"
# We expect all of the next 3 runs to succeeed
# We expect all of the next 3 runs to succeed
# this is done multiple times to increase randomness
- assert:
that:

@@ -4,7 +4,7 @@ from __future__ import annotations
DOCUMENTATION = """
module: fakeslurp
short_desciptoin: fake slurp module
short_description: fake slurp module
description:
- this is a fake slurp module
options:

@@ -27,7 +27,7 @@ ansible-playbook -i ../../inventory injection/avoid_slurp_return.yml -e "output_
# Change the known_hosts file to avoid changing the test environment
export ANSIBLE_CACHE_PLUGIN=jsonfile
export ANSIBLE_CACHE_PLUGIN_CONNECTION="${OUTPUT_DIR}/cache"
# Create a non-root user account and configure SSH acccess for that account
# Create a non-root user account and configure SSH access for that account
ansible-playbook -i "${INVENTORY_PATH}" setup_unreadable_test.yml -e "output_dir=${OUTPUT_DIR}" "$@"
# Run the tests as the unprivileged user without become to test the use of the stat module from the fetch module

@@ -24,7 +24,7 @@
content: 'Hello World'
#
# Error condtion: specify a directory with state={link,file}, force=False
# Error condition: specify a directory with state={link,file}, force=False
#
# file raises an error

@@ -25,7 +25,7 @@
that:
- (response['content'] | b64decode | from_json).user == 'foo'
- name: Test Bearer authorization is successfull with use_netrc=False
- name: Test Bearer authorization is successful with use_netrc=False
get_url:
url: https://{{ httpbin_host }}/bearer
use_netrc: false
@@ -40,7 +40,7 @@
src: "{{ remote_tmp_dir }}/msg.txt"
register: response
- name: assert Test Bearer authorization is successfull with use_netrc=False
- name: assert Test Bearer authorization is successful with use_netrc=False
assert:
that:
- (response['content'] | b64decode | from_json).token == 'foobar'

@@ -30,7 +30,7 @@
command: 'ls -1a {{ checkout_dir }}/submodule1'
register: submodule1
- name: SUBMODULES | Ensure submodu1 is at the appropriate commit
- name: SUBMODULES | Ensure submodule1 is at the appropriate commit
assert:
that: '{{ submodule1.stdout_lines | length }} == 2'

@@ -2,7 +2,7 @@
set -eux
export ANSIBLE_CONNECTION_PLUGINS=./fake_connectors
# use fake connectors that raise srrors at different stages
# use fake connectors that raise errors at different stages
ansible-playbook test_with_bad_plugins.yml -i inventory -v "$@"
unset ANSIBLE_CONNECTION_PLUGINS

@@ -68,7 +68,7 @@ ANSIBLE_STRATEGY='free' ansible-playbook test_copious_include_tasks.yml -i inve
ANSIBLE_STRATEGY='free' ansible-playbook test_copious_include_tasks_fqcn.yml -i inventory "$@"
rm -f tasks/hello/*.yml
# Inlcuded tasks should inherit attrs from non-dynamic blocks in parent chain
# Included tasks should inherit attrs from non-dynamic blocks in parent chain
# https://github.com/ansible/ansible/pull/38827
ANSIBLE_STRATEGY='linear' ansible-playbook test_grandparent_inheritance.yml -i inventory "$@"
ANSIBLE_STRATEGY='linear' ansible-playbook test_grandparent_inheritance_fqcn.yml -i inventory "$@"

@@ -1,4 +1,4 @@
plugin: ansible.legacy.contructed_with_hostvars
plugin: ansible.legacy.constructed_with_hostvars
groups:
host_var1_defined: host_var1 is defined
keyed_groups:

@@ -875,7 +875,7 @@
path: "{{ remote_tmp_dir }}/testempty.txt"
register: oneline_insbefore_file
- name: Assert that insertebefore worked properly with a one line file
- name: Assert that insertbefore worked properly with a one line file
assert:
that:
- oneline_insbefore_test1 is changed

@@ -22,7 +22,7 @@
msg: "{{ lookup('ini', 'NAME', file='lookup_case_check.properties', type='properties', case_sensitive=True) }}"
register: duplicate_case_sensitive_properties_NAME
- name: Ensure the correct case-sensitive values were retieved
- name: Ensure the correct case-sensitive values were retrieved
assert:
that:
- duplicate_case_sensitive_name.msg == 'bob'

@@ -23,7 +23,7 @@ if sleep 0 | ansible localhost -m pause -a 'seconds=1' 2>&1 | grep '\[WARNING\]:
echo "Incorrectly issued warning when run in the background"
exit 1
else
echo "Succesfully ran in the background with no warning"
echo "Successfully ran in the background with no warning"
fi
# Test redirecting stdout

@@ -4,7 +4,7 @@
become: no
tasks:
- name: non-integer for duraction (EXPECTED FAILURE)
- name: non-integer for duration (EXPECTED FAILURE)
pause:
seconds: hello
register: result

@@ -219,13 +219,13 @@
state: absent
name: "{{ remote_tmp_dir }}/pipenv"
- name: install pip throught pip into fresh virtualenv
- name: install pip through pip into fresh virtualenv
pip:
name: pip
virtualenv: "{{ remote_tmp_dir }}/pipenv"
register: pip_install_venv
- name: make sure pip in fresh virtualenv report changed
- name: make sure pip in fresh virtualenv reports changed
assert:
that:
- "pip_install_venv is changed"
@@ -371,7 +371,7 @@
version: "<100,!=1.0,>0.0.0"
register: version2
- name: assert no changes ocurred
- name: assert no changes occurred
assert:
that: "not version2.changed"

@@ -1,4 +1,4 @@
- name: ensure configuration is loaded when we use FQCN and have already loaded using 'short namne' (which is case will all builtin connection plugins)
- name: ensure configuration is loaded when we use FQCN and have already loaded using 'short name' (which is case will all builtin connection plugins)
hosts: all
gather_facts: false
tasks:

@@ -41,7 +41,7 @@
paths:
- '{{ role_path }}/vars'
- name: Install Kerberos sytem packages
- name: Install Kerberos system packages
package:
name: '{{ krb5_packages }}'
state: present

@@ -5,7 +5,7 @@ set -eux
# does it work?
ansible-playbook can_register.yml -i ../../inventory -v "$@"
# ensure we do error when it its apprpos
# ensure we continue when ansible-playbook errors out
set +e
result="$(ansible-playbook invalid.yml -i ../../inventory -v "$@" 2>&1)"
set -e

@@ -105,7 +105,7 @@ class InventoryModule(BaseFileInventoryPlugin):
raise AnsibleParserError('Plugin configuration YAML file, not YAML inventory')
# We expect top level keys to correspond to groups, iterate over them
# to get host, vars and subgroups (which we iterate over recursivelly)
# to get host, vars and subgroups (which we iterate over recursively)
if isinstance(data, MutableMapping):
for group_name in data:
self._parse_group(group_name, data[group_name])
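
A rough, simplified sketch of that recursive walk (hypothetical; the hosts/vars/children keys follow the YAML inventory schema):

def parse_group(groups, name, data):
    # Record the group with its hosts and vars, then recurse into children.
    data = data or {}
    groups[name] = {'hosts': list(data.get('hosts') or {}),
                    'vars': dict(data.get('vars') or {})}
    for child_name, child_data in (data.get('children') or {}).items():
        parse_group(groups, child_name, child_data)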

@@ -1,4 +1,4 @@
- name: ensure data was correctly defind
- name: ensure data was correctly defined
assert:
that:
- defined_var is defined

@@ -51,7 +51,7 @@
key: /tmp/RPM-GPG-KEY-EPEL-7
register: idempotent_test
- name: check idempontence
- name: check idempotence
assert:
that: "not idempotent_test.changed"

@@ -205,7 +205,7 @@
var: _check_mode_test2
verbosity: 2
- name: Assert that task was skipped and mesage was returned
- name: Assert that task was skipped and message was returned
assert:
that:
- _check_mode_test2 is skipped

@@ -17,7 +17,7 @@ from ansible.plugins.connection import ConnectionBase
class Connection(ConnectionBase):
''' test connnection '''
''' test connection '''
transport = 'test_connection_default'

@@ -89,7 +89,7 @@
include_role:
name: include_parent_role_vars
- name: check that ansible_parent_role_names is normally unset when not included/imported (after both import and inlcude)
- name: check that ansible_parent_role_names is normally unset when not included/imported (after both import and include)
assert:
that:
- "ansible_parent_role_names is undefined"

@@ -6,7 +6,7 @@
debug:
msg: Hello
# ensure we properly test for an action name, not a task name when cheking for a meta task
# ensure we properly test for an action name, not a task name when checking for a meta task
- name: "meta"
debug:
msg: Hello

@@ -6,7 +6,7 @@
dont: I SHOULD NOT BE TEMPLATED
other: I WORK
tasks:
- name: 'ensure we are not interpolating data from outside of j2 delmiters'
- name: 'ensure we are not interpolating data from outside of j2 delimiters'
assert:
that:
- '"I SHOULD NOT BE TEMPLATED" not in adjacent'

@@ -54,7 +54,7 @@
throttle: 12
throttle: 15
- block:
- name: "Teat 5 (max throttle: 3)"
- name: "Test 5 (max throttle: 3)"
script: "test_throttle.py {{throttledir}} {{inventory_hostname}} 3"
vars:
test_id: 5

@@ -80,7 +80,7 @@
- zip
- tar
- name: Reinsntall zip binaries
- name: Reinstall zip binaries
package:
name:
- zip

@@ -4,7 +4,7 @@
ignore_errors: True
register: unarchive04
- name: fail if the proposed destination file exists for safey
- name: fail if the proposed destination file exists for safety
fail:
msg: /tmp/foo-unarchive.txt already exists, aborting
when: unarchive04.stat.exists

@@ -3,7 +3,7 @@
path: "{{ remote_tmp_dir }}/test-unarchive-tar-gz"
state: directory
- name: Create a symlink to the detination dir
- name: Create a symlink to the destination dir
file:
path: "{{ remote_tmp_dir }}/link-to-unarchive-dir"
src: "{{ remote_tmp_dir }}/test-unarchive-tar-gz"

@@ -82,7 +82,7 @@
that:
- "'Zażółć gęślą jaźń' == results.ping"
- name: Command that echos a non-ascii env var
- name: Command that echoes a non-ascii env var
command: "echo $option"
environment:
option: Zażółć

@@ -8,5 +8,5 @@ ansible-playbook basic.yml -i ../../inventory "$@"
# test enabled fallback env var
ANSIBLE_UNSAFE_WRITES=1 ansible-playbook basic.yml -i ../../inventory "$@"
# test disnabled fallback env var
# test disabled fallback env var
ANSIBLE_UNSAFE_WRITES=0 ansible-playbook basic.yml -i ../../inventory "$@"

@@ -232,7 +232,7 @@
register: result
ignore_errors: true
- name: Ensure bad SSL site reidrect fails
- name: Ensure bad SSL site redirect fails
assert:
that:
- result is failed

@@ -14,7 +14,7 @@
unix_socket: '{{ remote_tmp_dir }}/http.sock'
register: unix_socket_http
- name: Test https connection to unix socket with valdiate_certs=false
- name: Test https connection to unix socket with validate_certs=false
uri:
url: https://localhost/get
unix_socket: '{{ remote_tmp_dir }}/https.sock'

@@ -108,7 +108,7 @@
- name: unicode tests for psrp
when: ansible_connection == 'psrp'
block:
# Cannot test unicode passed into separate exec as PSRP doesn't run with a preset CP of 65001 which reuslts in ? for unicode chars
# Cannot test unicode passed into separate exec as PSRP doesn't run with a preset CP of 65001 which results in ? for unicode chars
- name: run a raw command with unicode chars
raw: Write-Output "! ЗАО. Руководство"
register: raw_result2

@@ -221,7 +221,7 @@ class AzurePipelinesChanges:
self.diff = []
def get_successful_merge_run_commits(self) -> set[str]:
"""Return a set of recent successsful merge commits from Azure Pipelines."""
"""Return a set of recent successful merge commits from Azure Pipelines."""
parameters = dict(
maxBuildsPerDefinition=100, # max 5000
queryOrder='queueTimeDescending', # assumes under normal circumstances that later queued jobs are for later commits

@@ -78,7 +78,7 @@ class HcloudCloudProvider(CloudProvider):
self._write_config(config)
def _create_ansible_core_ci(self) -> AnsibleCoreCI:
"""Return a Heztner instance of AnsibleCoreCI."""
"""Return a Hetzner instance of AnsibleCoreCI."""
return AnsibleCoreCI(self.args, CloudResource(platform='hetzner'))

@@ -159,7 +159,7 @@ def main():
loader = self._get_loader(fullname, path=path)
if loader is not None:
if has_py3_loader:
# loader is expected to be Optional[importlib.abc.Loader], but RestrictedModuleLoader does not inherit from importlib.abc.Loder
# loader is expected to be Optional[importlib.abc.Loader], but RestrictedModuleLoader does not inherit from importlib.abc.Loader
return spec_from_loader(fullname, loader) # type: ignore[arg-type]
raise ImportError("Failed to import '%s' due to a bug in ansible-test. Check importlib imports for typos." % fullname)
return None

@@ -78,7 +78,7 @@ class ActionModule(ActionBase):
except ValueError as exc:
return dict(failed=True, msg=to_text(exc))
# Now src has resolved file write to disk in current diectory for scp
# Now src has resolved file write to disk in current directory for scp
src = self._task.args.get("src")
filename = str(uuid.uuid4())
cwd = self._loader.get_basedir()
@@ -137,7 +137,7 @@ class ActionModule(ActionBase):
result["msg"] = "Exception received: %s" % exc
if mode == "text":
# Cleanup tmp file expanded wih ansible vars
# Cleanup tmp file expanded with ansible vars
os.remove(output_file)
result["changed"] = changed

@@ -262,7 +262,7 @@ Function Get-AnsibleWindowsWebRequest {
# proxy to work with, otherwise just ignore the credentials property.
if ($null -ne $proxy) {
if ($ProxyUseDefaultCredential) {
# Weird hack, $web_request.Proxy returns an IWebProxy object which only gurantees the Credentials
# Weird hack, $web_request.Proxy returns an IWebProxy object which only guarantees the Credentials
# property. We cannot set UseDefaultCredentials so we just set the Credentials to the
# DefaultCredentials in the CredentialCache which does the same thing.
$proxy.Credentials = [System.Net.CredentialCache]::DefaultCredentials

@@ -46,7 +46,7 @@ Function Get-CertFile($module, $path, $password, $key_exportable, $key_storage)
$store_flags = $store_flags -bor [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable
}
# TODO: If I'm feeling adventurours, write code to parse PKCS#12 PEM encoded
# TODO: If I'm feeling adventurous, write code to parse PKCS#12 PEM encoded
# file as .NET does not have an easy way to import this
$certs = New-Object -TypeName System.Security.Cryptography.X509Certificates.X509Certificate2Collection
@@ -140,7 +140,7 @@ Function Get-CertFileType($path, $password) {
} elseif ($certs[0].HasPrivateKey) {
return "pkcs12"
} elseif ($path.EndsWith(".pfx") -or $path.EndsWith(".p12")) {
# no way to differenciate a pfx with a der file so we must rely on the
# no way to differentiate a pfx with a der file so we must rely on the
# extension
return "pkcs12"
} else {

@@ -220,7 +220,7 @@ Function Extract-ZipLegacy($src, $dest) {
# - 4: do not display a progress dialog box
$dest_path.CopyHere($entry, 1044)
# once file is extraced, we need to rename it with non base64 name
# once file is extracted, we need to rename it with non base64 name
$combined_encoded_path = [System.IO.Path]::Combine($dest, $encoded_archive_entry)
Move-Item -LiteralPath $combined_encoded_path -Destination $entry_target_path -Force | Out-Null
}

@@ -61,7 +61,7 @@ options:
is different than destination.
- If set to C(no), the file will only be transferred if the
destination does not exist.
- If set to C(no), no checksuming of the content is performed which can
- If set to C(no), no checksumming of the content is performed which can
help improve performance on larger files.
type: bool
default: yes

@@ -1090,7 +1090,7 @@ def test_parse_requirements_file_that_isnt_yaml(requirements_cli, requirements_f
- galaxy.role
- anotherrole
''')], indirect=True)
def test_parse_requirements_in_older_format_illega(requirements_cli, requirements_file):
def test_parse_requirements_in_older_format_illegal(requirements_cli, requirements_file):
expected = "Expecting requirements file to be a dict with the key 'collections' that contains a list of " \
"collections to install"

Some files were not shown because too many files have changed in this diff.