Spelling fixes (#85345)

A few grammar fixes too.

(cherry picked from commit 83fcaeaa8b)
pull/85383/head
Matt Clay 6 months ago
parent 59cdb65926
commit 0aa8a878cf

@ -136,7 +136,7 @@ body:
attributes:
label: Steps to Reproduce
description: |
Describe exactly how to reproduce the problem, using a minimal test-case. It would *really* help us understand your problem if you could also pased any playbooks, configs and commands you used.
Describe exactly how to reproduce the problem, using a minimal test-case. It would *really* help us understand your problem if you could also provide any playbooks, configs and commands you used.
**HINT:** You can paste https://gist.github.com links for larger files.
value: |

@ -1,2 +1,2 @@
deprecated_features:
- Stategy Plugins - Use of strategy plugins not provided in ``ansible.builtin`` are deprecated and do not carry any backwards compatibility guarantees going forward. A future release will remove the ability to use external strategy plugins. No alternative for third party strategy plugins is currently planned.
- Strategy Plugins - Use of strategy plugins not provided in ``ansible.builtin`` are deprecated and do not carry any backwards compatibility guarantees going forward. A future release will remove the ability to use external strategy plugins. No alternative for third party strategy plugins is currently planned.

@ -1,7 +1,7 @@
minor_changes:
- ansible-test - Replace container Fedora 40 with 41.
- ansible-test - Replace container Alpine 3.20 with 3.21.
- ansible-test - Update distro containers to remove unnecessary pakages (apache2, subversion, ruby).
- ansible-test - Update distro containers to remove unnecessary packages (apache2, subversion, ruby).
- ansible-test - Update the HTTP test container.
- ansible-test - Update the PyPI test container.
- ansible-test - Update the utility container.

@ -2,4 +2,4 @@ minor_changes:
- pipelining logic has mostly moved to connection plugins so they can decide/override settings.
- ssh connection plugin now overrides pipelining when a tty is requested.
- become plugins get new property 'pipelining' to show support or lack there of for the feature.
- removed harcoding of su plugin as it now works with pipelining.
- removed hardcoding of su plugin as it now works with pipelining.
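
The changelog entries above describe behaviour rather than code. A minimal illustrative sketch of what "become plugins get a new 'pipelining' property" could look like (class and function names here are invented for illustration, not the actual plugin API):

    class BecomeBase:
        # plugins that do not declare support are treated as non-pipelining
        pipelining = False

    class SuBecome(BecomeBase):
        # per the entry above, su now works with pipelining, so it can opt in
        pipelining = True

    def effective_pipelining(connection_wants_it: bool, become: BecomeBase) -> bool:
        # the connection plugin decides, but only when the become plugin supports it
        return connection_wants_it and become.pipelining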

@ -3,9 +3,9 @@ bugfixes:
minor_changes:
- windows - add hard minimum limit for PowerShell to 5.1. Ansible dropped support for older versions of PowerShell
in the 2.16 release but this reqirement is now enforced at runtime.
in the 2.16 release but this requirement is now enforced at runtime.
- windows - refactor windows exec runner to improve efficiency and add better error reporting on failures.
removed_features:
- windows - removed common module functions ``ConvertFrom-AnsibleJson``, ``Format-AnsibleException`` from Windows
modules as they are not used and add uneeded complexity to the code.
modules as they are not used and add unneeded complexity to the code.

@ -799,7 +799,7 @@ class AnsibleEnvironment(ImmutableSandboxedEnvironment):
# Performing either before calling them will interfere with that processing.
return super().call(__context, __obj, *args, **kwargs)
# Jinja's generated macro code handles Markers, so pre-emptive raise on Marker args and lazy retrieval should be disabled for the macro invocation.
# Jinja's generated macro code handles Markers, so preemptive raise on Marker args and lazy retrieval should be disabled for the macro invocation.
is_macro = isinstance(__obj, Macro)
if not is_macro and (first_marker := get_first_marker_arg(args, kwargs)) is not None:
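
As a rough illustration of the comment above (a sketch only; Marker, get_first_marker_arg and the call wrapper are simplified stand-ins for the real templating internals):

    class Marker:
        """Stand-in for an undefined/tainted template value."""

    def get_first_marker_arg(args, kwargs):
        # return the first Marker among positional or keyword arguments, if any
        for value in (*args, *kwargs.values()):
            if isinstance(value, Marker):
                return value
        return None

    def call(obj, *args, is_macro=False, **kwargs):
        # macros handle Markers themselves, so only raise preemptively otherwise
        if not is_macro and (first_marker := get_first_marker_arg(args, kwargs)) is not None:
            raise ValueError(f"cannot call {obj!r} with undefined argument {first_marker!r}")
        return obj(*args, **kwargs)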

@ -96,7 +96,7 @@ class Marker(StrictUndefined, Tripwire):
return AnsibleUndefinedVariable(self._undefined_message, obj=self._marker_template_source)
def _as_message(self) -> str:
"""Return the error message to show when this marker must be represented as a string, such as for subsitutions or warnings."""
"""Return the error message to show when this marker must be represented as a string, such as for substitutions or warnings."""
return self._undefined_message
def _fail_with_undefined_error(self, *args: t.Any, **kwargs: t.Any) -> t.NoReturn:

@ -212,9 +212,9 @@ class CLI(ABC):
# used by --vault-id and --vault-password-file
vault_ids.append(id_slug)
# if an action needs an encrypt password (create_new_password=True) and we dont
# if an action needs an encrypt password (create_new_password=True) and we don't
# have other secrets setup, then automatically add a password prompt as well.
# prompts cant/shouldnt work without a tty, so dont add prompt secrets
# prompts can't/shouldn't work without a tty, so don't add prompt secrets
if ask_vault_pass or (not vault_ids and auto_prompt):
id_slug = u'%s@%s' % (C.DEFAULT_VAULT_IDENTITY, u'prompt_ask_vault_pass')

@ -1309,7 +1309,7 @@ class DocCLI(CLI, RoleMixin):
if ignore in item:
del item[ignore]
# reformat cli optoins
# reformat cli options
if 'cli' in opt and opt['cli']:
conf['cli'] = []
for cli in opt['cli']:

@ -364,7 +364,7 @@ def _get_shebang(interpreter, task_vars, templar: _template.Templar, args=tuple(
options=TemplateOptions(value_for_omit=None))
if not interpreter_out:
# nothing matched(None) or in case someone configures empty string or empty intepreter
# nothing matched(None) or in case someone configures empty string or empty interpreter
interpreter_out = interpreter
# set shebang

@ -102,7 +102,7 @@ begin {
Set-Property 'MaximumAllowedMemory' $null
}
catch {
# Satify pslint, we purposefully ignore this error as it is not critical it works.
# Satisfy pslint, we purposefully ignore this error as it is not critical it works.
$null = $null
}
}

@ -872,7 +872,7 @@ class TaskExecutor:
async_result = async_handler.run(task_vars=task_vars)
# We do not bail out of the loop in cases where the failure
# is associated with a parsing error. The async_runner can
# have issues which result in a half-written/unparseable result
# have issues which result in a half-written/unparsable result
# file on disk, which manifests to the user as a timeout happening
# before it's time to timeout.
if (async_result.get('finished', False) or
@ -910,7 +910,7 @@ class TaskExecutor:
if async_result.get('_ansible_parsed'):
return dict(failed=True, msg="async task did not complete within the requested time - %ss" % self._task.async_val, async_result=async_result)
else:
return dict(failed=True, msg="async task produced unparseable results", async_result=async_result)
return dict(failed=True, msg="async task produced unparsable results", async_result=async_result)
else:
# If the async task finished, automatically cleanup the temporary
# status file left behind.
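
A minimal sketch of the polling behaviour the comments above describe, assuming a hypothetical read_async_status() callable (the real TaskExecutor logic is more involved):

    import time

    def wait_for_async(read_async_status, timeout: int, poll: int = 5) -> dict:
        deadline = time.monotonic() + timeout
        async_result: dict = {}
        while time.monotonic() < deadline:
            async_result = read_async_status()
            # a half-written status file shows up as an unparsed result; keep polling
            if async_result.get('finished') or (async_result.get('failed') and async_result.get('_ansible_parsed')):
                return async_result
            time.sleep(poll)
        if async_result.get('_ansible_parsed'):
            return dict(failed=True, msg='async task did not complete within the requested time', async_result=async_result)
        return dict(failed=True, msg='async task produced unparsable results', async_result=async_result)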

@ -27,7 +27,7 @@ _SUB_PRESERVE = {'_ansible_delegated_vars': {'ansible_host', 'ansible_port', 'an
CLEAN_EXCEPTIONS = (
'_ansible_verbose_always', # for debug and other actions, to always expand data (pretty jsonification)
'_ansible_item_label', # to know actual 'item' variable
'_ansible_no_log', # jic we didnt clean up well enough, DON'T LOG
'_ansible_no_log', # jic we didn't clean up well enough, DON'T LOG
'_ansible_verbose_override', # controls display of ansible_facts, gathering would be very noise with -v otherwise
)

@ -92,7 +92,7 @@ def g_connect(versions):
try:
data = self._call_galaxy(n_url, method='GET', error_context_msg=error_context_msg, cache=True)
except (AnsibleError, GalaxyError, ValueError, KeyError) as err:
# Either the URL doesnt exist, or other error. Or the URL exists, but isn't a galaxy API
# Either the URL doesn't exist, or other error. Or the URL exists, but isn't a galaxy API
# root (not JSON, no 'available_versions') so try appending '/api/'
if n_url.endswith('/api') or n_url.endswith('/api/'):
raise
@ -877,7 +877,7 @@ class GalaxyAPI:
except GalaxyError as err:
if err.http_code != 404:
raise
# v3 doesn't raise a 404 so we need to mimick the empty response from APIs that do.
# v3 doesn't raise a 404 so we need to mimic the empty response from APIs that do.
return []
if 'data' in data:
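
A simplified sketch of the fallback described in the comment above (fetch_json is a hypothetical helper, not the GalaxyAPI method):

    def find_api_root(url: str, fetch_json) -> dict:
        try:
            data = fetch_json(url)
        except (ValueError, KeyError):
            # not JSON or no 'available_versions': maybe the URL isn't the API root
            if url.endswith('/api') or url.endswith('/api/'):
                raise
            return find_api_root(url.rstrip('/') + '/api/', fetch_json)
        return data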

@ -449,9 +449,9 @@ def _extract_collection_from_git(repo_url, coll_ver, b_path):
except subprocess.CalledProcessError as proc_err:
raise AnsibleError( # should probably be LookupError
'Failed to switch a cloned Git repo `{repo_url!s}` '
'to the requested revision `{commitish!s}`.'.
'to the requested revision `{revision!s}`.'.
format(
commitish=to_native(version),
revision=to_native(version),
repo_url=to_native(git_url),
),
) from proc_err

@ -148,7 +148,7 @@ class CollectionDependencyProviderBase(AbstractProvider):
:param resolutions: Mapping of identifier, candidate pairs.
:param candidates: Possible candidates for the identifer.
:param candidates: Possible candidates for the identifier.
Mapping of identifier, list of candidate pairs.
:param information: Requirement information of each package.
@ -158,7 +158,7 @@ class CollectionDependencyProviderBase(AbstractProvider):
:param backtrack_causes: Sequence of requirement information that were
the requirements that caused the resolver to most recently backtrack.
The preference could depend on a various of issues, including
The preference could depend on various of issues, including
(not necessarily in this order):
* Is this package pinned in the current resolution result?
@ -404,7 +404,7 @@ class CollectionDependencyProviderBase(AbstractProvider):
:param requirement: A requirement that produced the `candidate`.
:param candidate: A pinned candidate supposedly matchine the \
:param candidate: A pinned candidate supposedly matching the \
`requirement` specifier. It is guaranteed to \
have been generated from the `requirement`.

@ -26,7 +26,7 @@ else:
# DTFIX-FUTURE: refactor this to share the implementation with the controller version
# use an abstract base class, with __init_subclass__ for representer registration, and instance methods for overridable representers
# then tests can be consolidated intead of having two nearly identical copies
# then tests can be consolidated instead of having two nearly identical copies
if HAS_YAML:
try:

@ -1696,7 +1696,7 @@ namespace Ansible.Basic
if ((attr & FileAttributes.ReadOnly) != 0)
{
// Windows does not allow files set with ReadOnly to be
// deleted. Pre-emptively unset the attribute.
// deleted. Preemptively unset the attribute.
// FILE_DISPOSITION_IGNORE_READONLY_ATTRIBUTE is quite new,
// look at using that flag with POSIX delete once Server 2019
// is the baseline.
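
The C# comment above concerns Windows refusing to delete files with the ReadOnly attribute set. A Python analogue of the same idea (illustrative only, not the module's code) clears the attribute and retries:

    import os
    import stat

    def force_unlink(path: str) -> None:
        try:
            os.unlink(path)
        except PermissionError:
            # clear the ReadOnly attribute (maps to the write bit on Windows), then retry
            os.chmod(path, stat.S_IWRITE)
            os.unlink(path)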

@ -228,7 +228,7 @@ namespace Ansible.Privilege
}
/// <summary>
/// Get's the status of all the privileges on the token specified
/// Gets the status of all the privileges on the token specified
/// </summary>
/// <param name="token">The process token to get the privilege status on</param>
/// <returns>Dictionary where the key is the privilege constant and the value is the PrivilegeAttributes flags</returns>
@ -342,7 +342,7 @@ namespace Ansible.Privilege
// Need to manually marshal the bytes requires for newState as the constant size
// of LUID_AND_ATTRIBUTES is set to 1 and can't be overridden at runtime, TOKEN_PRIVILEGES
// always contains at least 1 entry so we need to calculate the extra size if there are
// nore than 1 LUID_AND_ATTRIBUTES entry
// more than 1 LUID_AND_ATTRIBUTES entry
int tokenPrivilegesSize = Marshal.SizeOf(typeof(NativeHelpers.TOKEN_PRIVILEGES));
int luidAttrSize = 0;
if (newState.Length > 1)

@ -49,7 +49,7 @@ class HardwareCollector(BaseFactCollector):
_fact_ids = set(['processor',
'processor_cores',
'processor_count',
# TODO: mounts isnt exactly hardware
# TODO: mounts isn't exactly hardware
'mounts',
'devices']) # type: t.Set[str]
_fact_class = Hardware

@ -61,7 +61,7 @@ class FacterFactCollector(BaseFactCollector):
return out
def collect(self, module=None, collected_facts=None):
# Note that this mirrors previous facter behavior, where there isnt
# Note that this mirrors previous facter behavior, where there isn't
# a 'ansible_facter' key in the main fact dict, but instead, 'facter_whatever'
# items are added to the main dict.
facter_dict = {}
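
In other words, facter output is flattened into the main fact dict under facter_* keys rather than nested under a single ansible_facter key, roughly like this sketch:

    def flatten_facter(facter_output: dict) -> dict:
        # {'kernel': 'Linux'} -> {'facter_kernel': 'Linux'}
        return {'facter_%s' % key: value for key, value in facter_output.items()}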

@ -100,7 +100,7 @@ class DistributionFiles:
return get_file_content(path)
def _get_dist_file_content(self, path, allow_empty=False):
# cant find that dist file or it is incorrectly empty
# can't find that dist file, or it is incorrectly empty
if not _file_exists(path, allow_empty=allow_empty):
return False, None
@ -585,7 +585,7 @@ class Distribution(object):
distribution_facts.update(dist_file_facts)
distro = distribution_facts['distribution']
# look for a os family alias for the 'distribution', if there isnt one, use 'distribution'
# look for an os family alias for the 'distribution', if there isn't one, use 'distribution'
distribution_facts['os_family'] = self.OS_FAMILY.get(distro, None) or distro
return distribution_facts

@ -5,7 +5,7 @@ Function Add-CSharpType {
<#
.SYNOPSIS
Compiles one or more C# scripts similar to Add-Type. This exposes
more configuration options that are useable within Ansible and it
more configuration options that are usable within Ansible and it
also allows multiple C# sources to be compiled together.
.PARAMETER References

@ -21,7 +21,7 @@ Function Convert-StringToSnakeCase($string) {
return $string
}
# used by Convert-DictToSnakeCase to covert list entries from camelCase
# used by Convert-DictToSnakeCase to convert list entries from camelCase
# to snake_case
Function Convert-ListToSnakeCase($list) {
$snake_list = [System.Collections.ArrayList]@()
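
For readers more at home in Python, the PowerShell helpers above amount to something like the following sketch (regex-based, not the actual implementation):

    import re

    def to_snake_case(name: str) -> str:
        # 'maximumAllowedMemory' -> 'maximum_allowed_memory'
        return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower()

    def list_to_snake_case(items: list) -> list:
        return [to_snake_case(item) if isinstance(item, str) else item for item in items]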

@ -6,7 +6,7 @@
Function Get-ExecutablePath {
<#
.SYNOPSIS
Get's the full path to an executable, will search the directory specified or ones in the PATH env var.
Gets the full path to an executable, will search the directory specified or ones in the PATH env var.
.PARAMETER executable
[String]The executable to search for.

@ -17,7 +17,7 @@ Function Get-AnsibleWebRequest {
The protocol method to use, if omitted, will use the default value for the URI protocol specified.
.PARAMETER FollowRedirects
Whether to follow redirect reponses. This is only valid when using a HTTP URI.
Whether to follow redirect responses. This is only valid when using a HTTP URI.
all - Will follow all redirects
none - Will follow no redirects
safe - Will only follow redirects when GET or HEAD is used as the Method

@ -1155,7 +1155,7 @@ def url_argument_spec():
def url_redirect_argument_spec():
"""
Creates an addition arugment spec to `url_argument_spec`
Creates an addition argument spec to `url_argument_spec`
for `follow_redirects` argument
"""
return dict(

@ -218,7 +218,7 @@ def main():
rc=0
)
startd = datetime.datetime.now()
start_date = datetime.datetime.now()
try:
try:
@ -246,8 +246,8 @@ def main():
except pexpect.ExceptionPexpect as e:
module.fail_json(msg='%s' % to_native(e))
endd = datetime.datetime.now()
delta = endd - startd
end_date = datetime.datetime.now()
delta = end_date - start_date
if b_out is None:
b_out = b''
@ -256,8 +256,8 @@ def main():
cmd=args,
stdout=to_native(b_out).rstrip('\r\n'),
rc=rc,
start=str(startd),
end=str(endd),
start=str(start_date),
end=str(end_date),
delta=str(delta),
changed=True,
)
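
The rename above (startd/endd to start_date/end_date) is purely cosmetic; the underlying timing pattern is simply:

    import datetime

    start_date = datetime.datetime.now()
    # ... run the command / pexpect session here ...
    end_date = datetime.datetime.now()
    delta = end_date - start_date  # reported back as str(delta) in the module result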

@ -608,8 +608,8 @@ class Hostname(object):
self.use = module.params['use']
if self.use is not None:
strat = globals()['%sStrategy' % STRATS[self.use]]
self.strategy = strat(module)
strategy = globals()['%sStrategy' % STRATS[self.use]]
self.strategy = strategy(module)
elif platform.system() == 'Linux' and ServiceMgrFactCollector.is_systemd_managed(module):
# This is Linux and systemd is active
self.strategy = SystemdStrategy(module)
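
The pattern above resolves a strategy class by name from the module's globals and then instantiates it; a self-contained sketch of the same idea, with placeholder classes:

    class SystemdStrategy:
        def __init__(self, module):
            self.module = module

    STRATS = {'systemd': 'Systemd'}

    def pick_strategy(use, module):
        # 'systemd' -> 'Systemd' -> SystemdStrategy, then instantiate it
        strategy = globals()['%sStrategy' % STRATS[use]]
        return strategy(module)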

@ -408,7 +408,7 @@ def format_output(module, path, st):
('st_blksize', 'block_size'),
('st_rdev', 'device_type'),
('st_flags', 'flags'),
# Some Berkley based
# Some Berkeley based
('st_gen', 'generation'),
('st_birthtime', 'birthtime'),
# RISCOS

@ -30,7 +30,7 @@ from ansible._internal._templating._engine import TemplateEngine
from ansible.utils.fqcn import add_internal_fqcns
# modules formated for user msg
# modules formatted for user msg
_BUILTIN_RAW_PARAM_MODULES_SIMPLE = set([
'include_vars',
'include_tasks',

@ -570,8 +570,8 @@ def match_encrypt_secret(secrets, encrypt_vault_id=None):
return match_encrypt_vault_id_secret(secrets,
encrypt_vault_id=encrypt_vault_id)
# Find the best/first secret from secrets since we didnt specify otherwise
# ie, consider all of the available secrets as matches
# Find the best/first secret from secrets since we didn't specify otherwise
# ie, consider all the available secrets as matches
_vault_id_matchers = [_vault_id for _vault_id, dummy in secrets]
best_secret = match_best_secret(secrets, _vault_id_matchers)
@ -1413,7 +1413,7 @@ class EncryptedString(AnsibleTaggedObject):
'ljust',
'lower',
'lstrip',
'maketrans', # static, but implemented for simplicty/consistency
'maketrans', # static, but implemented for simplicity/consistency
'partition',
'removeprefix',
'removesuffix',

@ -72,12 +72,12 @@ class ActionModule(ActionBase):
fail_msg = new_module_args['fail_msg']
success_msg = new_module_args['success_msg']
quiet = new_module_args['quiet']
thats = new_module_args['that']
that_list = new_module_args['that']
if not quiet:
result['_ansible_verbose_always'] = True
for that in thats:
for that in that_list:
test_result = self._templar.evaluate_conditional(conditional=that)
if not test_result:
result['failed'] = True
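
Conceptually, the loop above evaluates each entry in 'that' and fails on the first conditional that is false; stripped of the templating machinery it looks like this sketch (evaluate is a stand-in for the real templar call):

    def run_assertions(that_list, evaluate, fail_msg='Assertion failed', success_msg='All assertions passed'):
        for that in that_list:
            if not evaluate(that):
                return dict(failed=True, evaluated_to=False, assertion=that, msg=fail_msg)
        return dict(changed=False, msg=success_msg)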

@ -44,7 +44,7 @@ class ActionModule(ActionBase):
del tmp # tmp no longer has any effect
# Options type validation
# stings
# strings
for s_type in ('src', 'dest', 'state', 'newline_sequence', 'variable_start_string', 'variable_end_string', 'block_start_string',
'block_end_string', 'comment_start_string', 'comment_end_string'):
if s_type in self._task.args:

@ -1171,7 +1171,7 @@ class Connection(ConnectionBase):
# Are we requesting privilege escalation? Right now, we may be invoked
# to execute sftp/scp with sudoable=True, but we can request escalation
# only when using ssh. Otherwise we can send initial data straightaway.
# only when using ssh. Otherwise, we can send initial data straight away.
state = states.index('ready_to_send')
if to_bytes(self.get_option('ssh_executable')) in cmd and sudoable:

@ -3,7 +3,7 @@ DOCUMENTATION:
version_added: "1.9"
short_description: power of (math operation)
description:
- Math operation that returns the Nth power of inputed number, C(X ^ N).
- Math operation that returns the Nth power of inputted number, C(X ^ N).
notes:
- This is a passthrough to Python's C(math.pow).
positional: _input, _power

@ -3,7 +3,7 @@ DOCUMENTATION:
version_added: "1.9"
short_description: root of (math operation)
description:
- Math operation that returns the Nth root of inputed number C(X ^^ N).
- Math operation that returns the Nth root of inputted number C(X ^^ N).
positional: _input, base
options:
_input:

@ -1,16 +1,16 @@
DOCUMENTATION:
name: strftime
version_added: "2.4"
short_description: date formating
short_description: date formatting
description:
- Using Python's C(strftime) function, take a data formating string and a date/time to create a formatted date.
- Using Python's C(strftime) function, take a data formatting string and a date/time to create a formatted date.
notes:
- This is a passthrough to Python's C(stftime), for a complete set of formatting options go to https://strftime.org/.
positional: _input, second, utc
options:
_input:
description:
- A formating string following C(stftime) conventions.
- A formatting string following C(stftime) conventions.
- See L(the Python documentation, https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior) for a reference.
type: str
required: true
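
Since the documentation above describes the filter as a passthrough to Python's strftime, its core behaviour is roughly this (second/utc handling simplified):

    import time

    def strftime_filter(fmt: str, second=None, utc: bool = False) -> str:
        convert = time.gmtime if utc else time.localtime
        return time.strftime(fmt, convert(second))

    # strftime_filter('%Y-%m-%d', second=0, utc=True) -> '1970-01-01'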

@ -7,7 +7,7 @@ DOCUMENTATION:
positional: _input, namespace
options:
_input:
description: String to use as base fo the UUID.
description: String to use as base of the UUID.
type: str
required: true
namespace:

@ -98,7 +98,7 @@ EXAMPLES = r'''# fmt: code
def get_api_data(namespace: str, pretty=False) -> str:
"""
:param namespace: parameter for our custom api
:param pretty: Human redable JSON vs machine readable
:param pretty: Human readable JSON vs machine readable
:return: JSON string
"""
found_data = list(MyInventoryAPI(namespace))
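
In the docstring above, 'pretty' just toggles between indented and compact JSON output, e.g.:

    import json

    def to_json(data, pretty: bool = False) -> str:
        # human-readable JSON vs machine-readable JSON
        return json.dumps(data, indent=4 if pretty else None)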

@ -102,7 +102,7 @@ def basedir(source):
dname = os.path.dirname(source)
if dname:
# don't follow symlinks for basedir, enables source re-use
# don't follow symlinks for basedir, enables source reuse
dname = os.path.abspath(dname)
return to_text(dname, errors='surrogate_or_strict')

@ -71,7 +71,7 @@ class HttptesterProvider(CloudProvider):
return
# Read the password from the container environment.
# This allows the tests to work when re-using an existing container.
# This allows the tests to work when reusing an existing container.
# The password is marked as sensitive, since it may differ from the one we generated.
krb5_password = descriptor.details.container.env_dict()[KRB5_PASSWORD_ENV]
display.sensitive.add(krb5_password)

@ -1087,7 +1087,7 @@ class SanityScript(SanityTest, metaclass=abc.ABCMeta):
class SanityVersionNeutral(SanityTest, metaclass=abc.ABCMeta):
"""Base class for sanity test plugins which are idependent of the python version being used."""
"""Base class for sanity test plugins which are independent of the python version being used."""
@abc.abstractmethod
def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult:

@ -810,7 +810,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
# The host namespace must be used to permit the container to access the cgroup v1 systemd hierarchy created by Podman.
'--cgroupns', 'host',
# Mask the host cgroup tmpfs mount to avoid exposing the host cgroup v1 hierarchies (or cgroup v2 hybrid) to the container.
# Podman will provide a cgroup v1 systemd hiearchy on top of this.
# Podman will provide a cgroup v1 systemd hierarchy on top of this.
'--tmpfs', '/sys/fs/cgroup',
))
