Update triple single quotes to triple double quotes (#84099)

* Update triple single quotes to triple double quotes

This change was fully automated.

The updated Python files have been verified to tokenize the same as the originals, except for the expected change in quoting of strings, which were verified through literal_eval.

* Manual conversion of docstring quotes
pull/84100/head
Matt Clay 1 month ago committed by GitHub
parent 62ce21b6e4
commit a0495fc314
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

@ -32,14 +32,14 @@ TICKET_NUMBER = re.compile(r'(?:^|\s)#(\d+)')
def normalize_pr_url(pr, allow_non_ansible_ansible=False, only_number=False):
'''
"""
Given a PullRequest, or a string containing a PR number, PR URL,
or internal PR URL (e.g. ansible-collections/community.general#1234),
return either a full github URL to the PR (if only_number is False),
or an int containing the PR number (if only_number is True).
Throws if it can't parse the input.
'''
"""
if isinstance(pr, PullRequest):
return pr.html_url
@ -71,10 +71,10 @@ def normalize_pr_url(pr, allow_non_ansible_ansible=False, only_number=False):
def url_to_org_repo(url):
'''
"""
Given a full Github PR URL, extract the user/org and repo name.
Return them in the form: "user/repo"
'''
"""
match = PULL_HTTP_URL_RE.match(url)
if not match:
return ''
@ -82,7 +82,7 @@ def url_to_org_repo(url):
def generate_new_body(pr, source_pr):
'''
"""
Given the new PR (the backport) and the originating (source) PR,
construct the new body for the backport PR.
@ -93,7 +93,7 @@ def generate_new_body(pr, source_pr):
This function does not side-effect, it simply returns the new body as a
string.
'''
"""
backport_text = '\nBackport of {0}\n'.format(source_pr)
body_lines = pr.body.split('\n')
new_body_lines = []
@ -115,10 +115,10 @@ def generate_new_body(pr, source_pr):
def get_prs_for_commit(g, commit):
'''
"""
Given a commit hash, attempt to find the hash in any repo in the
ansible orgs, and then use it to determine what, if any, PR it appeared in.
'''
"""
commits = g.search_commits(
'hash:{0} org:ansible org:ansible-collections is:public'.format(commit)
@ -132,7 +132,7 @@ def get_prs_for_commit(g, commit):
def search_backport(pr, g, ansible_ansible):
'''
"""
Do magic. This is basically the "brain" of 'auto'.
It will search the PR (the newest PR - the backport) and try to find where
it originated.
@ -148,7 +148,7 @@ def search_backport(pr, g, ansible_ansible):
It will take all of the above, and return a list of "possibilities",
which is a list of PullRequest objects.
'''
"""
possibilities = []
@ -198,20 +198,20 @@ def search_backport(pr, g, ansible_ansible):
def prompt_add():
'''
"""
Prompt the user and return whether or not they agree.
'''
"""
res = input('Shall I add the reference? [Y/n]: ')
return res.lower() in ('', 'y', 'yes')
def commit_edit(new_pr, pr):
'''
"""
Given the new PR (the backport), and the "possibility" that we have decided
on, prompt the user and then add the reference to the body of the new PR.
This method does the actual "destructive" work of editing the PR body.
'''
"""
print('I think this PR might have come from:')
print(pr.title)
print('-' * 50)

@ -116,7 +116,7 @@ except ImportError:
class CLI(ABC):
''' code behind bin/ansible* programs '''
""" code behind bin/ansible* programs """
PAGER = C.config.get_config_value('PAGER')
@ -317,7 +317,7 @@ class CLI(ABC):
@staticmethod
def ask_passwords():
''' prompt for connection and become passwords if needed '''
""" prompt for connection and become passwords if needed """
op = context.CLIARGS
sshpass = None
@ -347,7 +347,7 @@ class CLI(ABC):
return (sshpass, becomepass)
def validate_conflicts(self, op, runas_opts=False, fork_opts=False):
''' check for conflicting options '''
""" check for conflicting options """
if fork_opts:
if op.forks < 1:
@ -459,7 +459,7 @@ class CLI(ABC):
@staticmethod
def version_info(gitinfo=False):
''' return full ansible version info '''
""" return full ansible version info """
if gitinfo:
# expensive call, use with care
ansible_version_string = opt_help.version()
@ -485,7 +485,7 @@ class CLI(ABC):
@staticmethod
def pager(text):
''' find reasonable way to display text '''
""" find reasonable way to display text """
# this is a much simpler form of what is in pydoc.py
if not sys.stdout.isatty():
display.display(text, screen_only=True)
@ -504,7 +504,7 @@ class CLI(ABC):
@staticmethod
def pager_pipe(text):
''' pipe text through a pager '''
""" pipe text through a pager """
if 'less' in CLI.PAGER:
os.environ['LESS'] = CLI.LESS_OPTS
try:

@ -24,14 +24,14 @@ display = Display()
class AdHocCLI(CLI):
''' is an extra-simple tool/framework/API for doing 'remote things'.
""" is an extra-simple tool/framework/API for doing 'remote things'.
this command allows you to define and run a single task 'playbook' against a set of hosts
'''
"""
name = 'ansible'
def init_parser(self):
''' create an options parser for bin/ansible '''
""" create an options parser for bin/ansible """
super(AdHocCLI, self).init_parser(usage='%prog <host-pattern> [options]',
desc="Define and run a single task 'playbook' against a set of hosts",
epilog="Some actions do not make sense in Ad-Hoc (include, meta, etc)")
@ -60,7 +60,7 @@ class AdHocCLI(CLI):
self.parser.add_argument('args', metavar='pattern', help='host pattern')
def post_process_args(self, options):
'''Post process and validate options for bin/ansible '''
"""Post process and validate options for bin/ansible """
options = super(AdHocCLI, self).post_process_args(options)
@ -98,7 +98,7 @@ class AdHocCLI(CLI):
tasks=[mytask])
def run(self):
''' create and execute the single task playbook '''
""" create and execute the single task playbook """
super(AdHocCLI, self).run()

@ -47,14 +47,14 @@ def yaml_short(data):
def get_constants():
''' helper method to ensure we can template based on existing constants '''
""" helper method to ensure we can template based on existing constants """
if not hasattr(get_constants, 'cvars'):
get_constants.cvars = {k: getattr(C, k) for k in dir(C) if not k.startswith('__')}
return get_constants.cvars
def _ansible_env_vars(varname):
''' return true or false depending if variable name is possibly a 'configurable' ansible env variable '''
""" return true or false depending if variable name is possibly a 'configurable' ansible env variable """
return all(
[
varname.startswith("ANSIBLE_"),
@ -188,9 +188,9 @@ class ConfigCLI(CLI):
context.CLIARGS['func']()
def execute_update(self):
'''
"""
Updates a single setting in the specified ansible.cfg
'''
"""
raise AnsibleError("Option not implemented yet")
# pylint: disable=unreachable
@ -212,9 +212,9 @@ class ConfigCLI(CLI):
])
def execute_view(self):
'''
"""
Displays the current config file
'''
"""
try:
with open(self.config_file, 'rb') as f:
self.pager(to_text(f.read(), errors='surrogate_or_strict'))
@ -222,9 +222,9 @@ class ConfigCLI(CLI):
raise AnsibleError("Failed to open config file: %s" % to_native(e))
def execute_edit(self):
'''
"""
Opens ansible.cfg in the default EDITOR
'''
"""
raise AnsibleError("Option not implemented yet")
# pylint: disable=unreachable
@ -266,9 +266,9 @@ class ConfigCLI(CLI):
return entries
def _list_entries_from_args(self):
'''
"""
build a dict with the list requested configs
'''
"""
config_entries = {}
if context.CLIARGS['type'] in ('base', 'all'):
@ -294,9 +294,9 @@ class ConfigCLI(CLI):
return config_entries
def execute_list(self):
'''
"""
list and output available configs
'''
"""
config_entries = self._list_entries_from_args()
if context.CLIARGS['format'] == 'yaml':
@ -599,9 +599,9 @@ class ConfigCLI(CLI):
return output
def execute_dump(self):
'''
"""
Shows the current settings, merges ansible.cfg if specified
'''
"""
output = []
if context.CLIARGS['type'] in ('base', 'all'):
# deal with base

@ -35,7 +35,7 @@ display = Display()
class ConsoleCLI(CLI, cmd.Cmd):
'''
"""
A REPL that allows for running ad-hoc tasks against a chosen inventory
from a nice shell with built-in tab completion (based on dominis'
``ansible-shell``).
@ -62,7 +62,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
- ``help [command/module]``: display documentation for
the command or module
- ``exit``: exit ``ansible-console``
'''
"""
name = 'ansible-console'
modules = [] # type: list[str] | None
@ -579,7 +579,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
self.cmdloop()
def __getattr__(self, name):
''' handle not found to populate dynamically a module function if module matching name exists '''
""" handle not found to populate dynamically a module function if module matching name exists """
attr = None
if name.startswith('do_'):

@ -409,7 +409,7 @@ def _doclink(url):
def _format(string, *args):
''' add ascii formatting or delimiters '''
""" add ascii formatting or delimiters """
for style in args:
@ -433,10 +433,10 @@ def _format(string, *args):
class DocCLI(CLI, RoleMixin):
''' displays information on modules installed in Ansible libraries.
""" displays information on modules installed in Ansible libraries.
It displays a terse listing of plugins and their short descriptions,
provides a printout of their DOCUMENTATION strings,
and it can create a short "snippet" which can be pasted into a playbook. '''
and it can create a short "snippet" which can be pasted into a playbook. """
name = 'ansible-doc'
@ -850,14 +850,14 @@ class DocCLI(CLI, RoleMixin):
return plugin_docs
def _get_roles_path(self):
'''
"""
Add any 'roles' subdir in playbook dir to the roles search path.
And as a last resort, add the playbook dir itself. Order being:
- 'roles' subdir of playbook dir
- DEFAULT_ROLES_PATH (default in cliargs)
- playbook dir (basedir)
NOTE: This matches logic in RoleDefinition._load_role_path() method.
'''
"""
roles_path = context.CLIARGS['roles_path']
if context.CLIARGS['basedir'] is not None:
subdir = os.path.join(context.CLIARGS['basedir'], "roles")
@ -868,7 +868,7 @@ class DocCLI(CLI, RoleMixin):
@staticmethod
def _prep_loader(plugin_type):
''' return a plugin type specific loader '''
""" return a plugin type specific loader """
loader = getattr(plugin_loader, '%s_loader' % plugin_type)
# add to plugin paths from command line
@ -1058,7 +1058,7 @@ class DocCLI(CLI, RoleMixin):
@staticmethod
def format_snippet(plugin, plugin_type, doc):
''' return heavily commented plugin use to insert into play '''
""" return heavily commented plugin use to insert into play """
if plugin_type == 'inventory' and doc.get('options', {}).get('plugin'):
# these do not take a yaml config that we can write a snippet for
raise ValueError('The {0} inventory plugin does not take YAML type config source'
@ -1140,7 +1140,7 @@ class DocCLI(CLI, RoleMixin):
@staticmethod
def print_paths(finder):
''' Returns a string suitable for printing of the search path '''
""" Returns a string suitable for printing of the search path """
# Uses a list to get the order right
ret = []
@ -1280,7 +1280,7 @@ class DocCLI(CLI, RoleMixin):
DocCLI.add_fields(text, subdata, limit, opt_indent + ' ', return_values, opt_indent)
def get_role_man_text(self, role, role_json):
'''Generate text for the supplied role suitable for display.
"""Generate text for the supplied role suitable for display.
This is similar to get_man_text(), but roles are different enough that we have
a separate method for formatting their display.
@ -1289,7 +1289,7 @@ class DocCLI(CLI, RoleMixin):
:param role_json: The JSON for the given role as returned from _create_role_doc().
:returns: An array of text suitable for displaying to screen.
'''
"""
text = []
opt_indent = " "
pad = display.columns * 0.20

@ -177,11 +177,11 @@ class RoleDistributionServer:
class GalaxyCLI(CLI):
'''Command to manage Ansible roles and collections.
"""Command to manage Ansible roles and collections.
None of the CLI tools are designed to run concurrently with themselves.
Use an external scheduler and/or locking to ensure there are no clashing operations.
'''
"""
name = 'ansible-galaxy'
@ -212,7 +212,7 @@ class GalaxyCLI(CLI):
super(GalaxyCLI, self).__init__(args)
def init_parser(self):
''' create an options parser for bin/ansible '''
""" create an options parser for bin/ansible """
super(GalaxyCLI, self).init_parser(
desc="Perform various Role and Collection related operations.",
@ -1721,7 +1721,7 @@ class GalaxyCLI(CLI):
publish_collection(collection_path, self.api, wait, timeout)
def execute_search(self):
''' searches for roles on the Ansible Galaxy server'''
""" searches for roles on the Ansible Galaxy server"""
page_size = 1000
search = None

@ -26,7 +26,7 @@ display = Display()
class InventoryCLI(CLI):
''' used to display or dump the configured inventory as Ansible sees it '''
""" used to display or dump the configured inventory as Ansible sees it """
name = 'ansible-inventory'

@ -29,8 +29,8 @@ display = Display()
class PlaybookCLI(CLI):
''' the tool to run *Ansible playbooks*, which are a configuration and multinode deployment system.
See the project home page (https://docs.ansible.com) for more information. '''
""" the tool to run *Ansible playbooks*, which are a configuration and multinode deployment system.
See the project home page (https://docs.ansible.com) for more information. """
name = 'ansible-playbook'

@ -33,7 +33,7 @@ display = Display()
class PullCLI(CLI):
''' Used to pull a remote copy of ansible on each managed node,
""" Used to pull a remote copy of ansible on each managed node,
each set to run via cron and update playbook source via a source repository.
This inverts the default *push* architecture of ansible into a *pull* architecture,
which has near-limitless scaling potential.
@ -45,7 +45,7 @@ class PullCLI(CLI):
This is useful both for extreme scale-out as well as periodic remediation.
Usage of the 'fetch' module to retrieve logs from ansible-pull runs would be an
excellent way to gather and analyze remote logs from ansible-pull.
'''
"""
name = 'ansible-pull'
@ -76,7 +76,7 @@ class PullCLI(CLI):
return inv_opts
def init_parser(self):
''' create an options parser for bin/ansible '''
""" create an options parser for bin/ansible """
super(PullCLI, self).init_parser(
usage='%prog -U <repository> [options] [<playbook.yml>]',
@ -157,7 +157,7 @@ class PullCLI(CLI):
return options
def run(self):
''' use Runner lib to do SSH things '''
""" use Runner lib to do SSH things """
super(PullCLI, self).run()

@ -57,10 +57,10 @@ def file_lock(lock_path):
class ConnectionProcess(object):
'''
"""
The connection process wraps around a Connection object that manages
the connection to a remote device that persists over the playbook
'''
"""
def __init__(self, fd, play_context, socket_path, original_path, task_uuid=None, ansible_playbook_pid=None):
self.play_context = play_context
self.socket_path = socket_path

@ -25,7 +25,7 @@ display = Display()
class VaultCLI(CLI):
''' can encrypt any structured data file used by Ansible.
""" can encrypt any structured data file used by Ansible.
This can include *group_vars/* or *host_vars/* inventory variables,
variables loaded by *include_vars* or *vars_files*, or variable files
passed on the ansible-playbook command line with *-e @file.yml* or *-e @file.json*.
@ -33,7 +33,7 @@ class VaultCLI(CLI):
Because Ansible tasks, handlers, and other objects are data, these can also be encrypted with vault.
If you'd like to not expose what variables you are using, you can keep an individual task file entirely encrypted.
'''
"""
name = 'ansible-vault'
@ -252,7 +252,7 @@ class VaultCLI(CLI):
os.umask(old_umask)
def execute_encrypt(self):
''' encrypt the supplied file using the provided vault secret '''
""" encrypt the supplied file using the provided vault secret """
if not context.CLIARGS['args'] and sys.stdin.isatty():
display.display("Reading plaintext input from stdin", stderr=True)
@ -286,7 +286,7 @@ class VaultCLI(CLI):
return yaml_ciphertext
def execute_encrypt_string(self):
''' encrypt the supplied string using the provided vault secret '''
""" encrypt the supplied string using the provided vault secret """
b_plaintext = None
# Holds tuples (the_text, the_source_of_the_string, the variable name if its provided).
@ -431,7 +431,7 @@ class VaultCLI(CLI):
return output
def execute_decrypt(self):
''' decrypt the supplied file using the provided vault secret '''
""" decrypt the supplied file using the provided vault secret """
if not context.CLIARGS['args'] and sys.stdin.isatty():
display.display("Reading ciphertext input from stdin", stderr=True)
@ -443,7 +443,7 @@ class VaultCLI(CLI):
display.display("Decryption successful", stderr=True)
def execute_create(self):
''' create and open a file in an editor that will be encrypted with the provided vault secret when closed'''
""" create and open a file in an editor that will be encrypted with the provided vault secret when closed"""
if len(context.CLIARGS['args']) != 1:
raise AnsibleOptionsError("ansible-vault create can take only one filename argument")
@ -455,12 +455,12 @@ class VaultCLI(CLI):
raise AnsibleOptionsError("not a tty, editor cannot be opened")
def execute_edit(self):
''' open and decrypt an existing vaulted file in an editor, that will be encrypted again when closed'''
""" open and decrypt an existing vaulted file in an editor, that will be encrypted again when closed"""
for f in context.CLIARGS['args']:
self.editor.edit_file(f)
def execute_view(self):
''' open, decrypt and view an existing vaulted file using a pager using the supplied vault secret '''
""" open, decrypt and view an existing vaulted file using a pager using the supplied vault secret """
for f in context.CLIARGS['args']:
# Note: vault should return byte strings because it could encrypt
@ -472,7 +472,7 @@ class VaultCLI(CLI):
self.pager(to_text(plaintext))
def execute_rekey(self):
''' re-encrypt a vaulted file with a new secret, the previous secret is required '''
""" re-encrypt a vaulted file with a new secret, the previous secret is required """
for f in context.CLIARGS['args']:
# FIXME: plumb in vault_id, use the default new_vault_secret for now
self.editor.rekey_file(f, self.new_encrypt_secret,

@ -15,9 +15,9 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
'''
"""
Compat library for ansible. This contains compatibility definitions for older python
When we need to import a module differently depending on python version, do it
here. Then in the code we can simply import from compat in order to get what we want.
'''
"""
from __future__ import annotations

@ -53,7 +53,7 @@ GALAXY_SERVER_ADDITIONAL = {
def _get_entry(plugin_type, plugin_name, config):
''' construct entry for requested config '''
""" construct entry for requested config """
entry = ''
if plugin_type:
entry += 'plugin_type: %s ' % plugin_type
@ -65,7 +65,7 @@ def _get_entry(plugin_type, plugin_name, config):
# FIXME: see if we can unify in module_utils with similar function used by argspec
def ensure_type(value, value_type, origin=None, origin_ftype=None):
''' return a configuration variable with casting
""" return a configuration variable with casting
:arg value: The value to ensure correct typing of
:kwarg value_type: The type of the value. This can be any of the following strings:
:boolean: sets the value to a True or False value
@ -88,7 +88,7 @@ def ensure_type(value, value_type, origin=None, origin_ftype=None):
tildes's in the value.
:str: Sets the value to string types.
:string: Same as 'str'
'''
"""
errmsg = ''
basedir = None
@ -190,7 +190,7 @@ def ensure_type(value, value_type, origin=None, origin_ftype=None):
# FIXME: see if this can live in utils/path
def resolve_path(path, basedir=None):
''' resolve relative or 'variable' paths '''
""" resolve relative or 'variable' paths """
if '{{CWD}}' in path: # allow users to force CWD using 'magic' {{CWD}}
path = path.replace('{{CWD}}', os.getcwd())
@ -215,7 +215,7 @@ def get_config_type(cfile):
# FIXME: can move to module_utils for use for ini plugins also?
def get_ini_config_value(p, entry):
''' returns the value of last ini entry found '''
""" returns the value of last ini entry found """
value = None
if p is not None:
try:
@ -226,7 +226,7 @@ def get_ini_config_value(p, entry):
def find_ini_config_file(warnings=None):
''' Load INI Config File order(first found is used): ENV, CWD, HOME, /etc/ansible '''
""" Load INI Config File order(first found is used): ENV, CWD, HOME, /etc/ansible """
# FIXME: eventually deprecate ini configs
if warnings is None:
@ -289,7 +289,7 @@ def find_ini_config_file(warnings=None):
def _add_base_defs_deprecations(base_defs):
'''Add deprecation source 'ansible.builtin' to deprecations in base.yml'''
"""Add deprecation source 'ansible.builtin' to deprecations in base.yml"""
def process(entry):
if 'deprecated' in entry:
entry['deprecated']['collection_name'] = 'ansible.builtin'
@ -388,7 +388,7 @@ class ConfigManager(object):
"Missing base YAML definition file (bad install?): %s" % to_native(yml_file))
def _parse_config_file(self, cfile=None):
''' return flat configuration settings from file(s) '''
""" return flat configuration settings from file(s) """
# TODO: take list of files with merge/nomerge
if cfile is None:
@ -415,7 +415,7 @@ class ConfigManager(object):
raise AnsibleOptionsError("Unsupported configuration file type: %s" % to_native(ftype))
def _find_yaml_config_files(self):
''' Load YAML Config Files in order, check merge flags, keep origin of settings'''
""" Load YAML Config Files in order, check merge flags, keep origin of settings"""
pass
def get_plugin_options(self, plugin_type, name, keys=None, variables=None, direct=None):
@ -467,7 +467,7 @@ class ConfigManager(object):
return has
def get_configuration_definitions(self, plugin_type=None, name=None, ignore_private=False):
''' just list the possible settings, either base or for specific plugins or plugin '''
""" just list the possible settings, either base or for specific plugins or plugin """
ret = {}
if plugin_type is None:
@ -484,7 +484,7 @@ class ConfigManager(object):
return ret
def _loop_entries(self, container, entry_list):
''' repeat code for value entry assignment '''
""" repeat code for value entry assignment """
value = None
origin = None
@ -510,7 +510,7 @@ class ConfigManager(object):
return value, origin
def get_config_value(self, config, cfile=None, plugin_type=None, plugin_name=None, keys=None, variables=None, direct=None):
''' wrapper '''
""" wrapper """
try:
value, _drop = self.get_config_value_and_origin(config, cfile=cfile, plugin_type=plugin_type, plugin_name=plugin_name,
@ -522,7 +522,7 @@ class ConfigManager(object):
return value
def get_config_value_and_origin(self, config, cfile=None, plugin_type=None, plugin_name=None, keys=None, variables=None, direct=None):
''' Given a config key figure out the actual value and report on the origin of the settings '''
""" Given a config key figure out the actual value and report on the origin of the settings """
if cfile is None:
# use default config
cfile = self._config_file

@ -21,7 +21,7 @@ config = ConfigManager()
def _warning(msg):
''' display is not guaranteed here, nor it being the full class, but try anyways, fallback to sys.stderr.write '''
""" display is not guaranteed here, nor it being the full class, but try anyways, fallback to sys.stderr.write """
try:
from ansible.utils.display import Display
Display().warning(msg)
@ -31,7 +31,7 @@ def _warning(msg):
def _deprecated(msg, version):
''' display is not guaranteed here, nor it being the full class, but try anyways, fallback to sys.stderr.write '''
""" display is not guaranteed here, nor it being the full class, but try anyways, fallback to sys.stderr.write """
try:
from ansible.utils.display import Display
Display().deprecated(msg, version=version)
@ -63,7 +63,7 @@ def handle_config_noise(display=None):
def set_constant(name, value, export=vars()):
''' sets constants and returns resolved options dict '''
""" sets constants and returns resolved options dict """
export[name] = value

@ -36,7 +36,7 @@ from ansible.module_utils.common.text.converters import to_native, to_text
class AnsibleError(Exception):
'''
"""
This is the base class for all errors raised from Ansible code,
and can be instantiated with two optional parameters beyond the
error message to control whether detailed information is displayed
@ -48,7 +48,7 @@ class AnsibleError(Exception):
Where "obj" is some subclass of ansible.parsing.yaml.objects.AnsibleBaseYAMLObject,
which should be returned by the DataLoader() class.
'''
"""
def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None):
super(AnsibleError, self).__init__(message)
@ -92,11 +92,11 @@ class AnsibleError(Exception):
return self.message
def _get_error_lines_from_file(self, file_name, line_number):
'''
"""
Returns the line in the file which corresponds to the reported error
location, as well as the line preceding it (if the error did not
occur on the first line), to provide context to the error.
'''
"""
target_line = ''
prev_line = ''
@ -125,7 +125,7 @@ class AnsibleError(Exception):
return (target_line, prev_line)
def _get_extended_error(self):
'''
"""
Given an object reporting the location of the exception in a file, return
detailed information regarding it including:
@ -134,7 +134,7 @@ class AnsibleError(Exception):
If this error was created with show_content=False, the reporting of content
is suppressed, as the file contents may be sensitive (ie. vault data).
'''
"""
error_message = ''
@ -214,85 +214,85 @@ class AnsibleError(Exception):
class AnsiblePromptInterrupt(AnsibleError):
'''User interrupt'''
"""User interrupt"""
class AnsiblePromptNoninteractive(AnsibleError):
'''Unable to get user input'''
"""Unable to get user input"""
class AnsibleAssertionError(AnsibleError, AssertionError):
'''Invalid assertion'''
"""Invalid assertion"""
pass
class AnsibleOptionsError(AnsibleError):
''' bad or incomplete options passed '''
""" bad or incomplete options passed """
pass
class AnsibleRequiredOptionError(AnsibleOptionsError):
''' bad or incomplete options passed '''
""" bad or incomplete options passed """
pass
class AnsibleParserError(AnsibleError):
''' something was detected early that is wrong about a playbook or data file '''
""" something was detected early that is wrong about a playbook or data file """
pass
class AnsibleInternalError(AnsibleError):
''' internal safeguards tripped, something happened in the code that should never happen '''
""" internal safeguards tripped, something happened in the code that should never happen """
pass
class AnsibleRuntimeError(AnsibleError):
''' ansible had a problem while running a playbook '''
""" ansible had a problem while running a playbook """
pass
class AnsibleModuleError(AnsibleRuntimeError):
''' a module failed somehow '''
""" a module failed somehow """
pass
class AnsibleConnectionFailure(AnsibleRuntimeError):
''' the transport / connection_plugin had a fatal error '''
""" the transport / connection_plugin had a fatal error """
pass
class AnsibleAuthenticationFailure(AnsibleConnectionFailure):
'''invalid username/password/key'''
"""invalid username/password/key"""
pass
class AnsibleCallbackError(AnsibleRuntimeError):
''' a callback failure '''
""" a callback failure """
pass
class AnsibleTemplateError(AnsibleRuntimeError):
'''A template related error'''
"""A template related error"""
pass
class AnsibleFilterError(AnsibleTemplateError):
''' a templating failure '''
""" a templating failure """
pass
class AnsibleLookupError(AnsibleTemplateError):
''' a lookup failure '''
""" a lookup failure """
pass
class AnsibleUndefinedVariable(AnsibleTemplateError):
''' a templating failure '''
""" a templating failure """
pass
class AnsibleFileNotFound(AnsibleRuntimeError):
''' a file missing failure '''
""" a file missing failure """
def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, paths=None, file_name=None):
@ -322,7 +322,7 @@ class AnsibleFileNotFound(AnsibleRuntimeError):
# DO NOT USE as they will probably be removed soon.
# We will port the action modules in our tree to use a context manager instead.
class AnsibleAction(AnsibleRuntimeError):
''' Base Exception for Action plugin flow control '''
""" Base Exception for Action plugin flow control """
def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
@ -335,7 +335,7 @@ class AnsibleAction(AnsibleRuntimeError):
class AnsibleActionSkip(AnsibleAction):
''' an action runtime skip'''
""" an action runtime skip"""
def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
super(AnsibleActionSkip, self).__init__(message=message, obj=obj, show_content=show_content,
@ -344,7 +344,7 @@ class AnsibleActionSkip(AnsibleAction):
class AnsibleActionFail(AnsibleAction):
''' an action runtime failure'''
""" an action runtime failure"""
def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
super(AnsibleActionFail, self).__init__(message=message, obj=obj, show_content=show_content,
suppress_extended_error=suppress_extended_error, orig_exc=orig_exc, result=result)
@ -352,37 +352,37 @@ class AnsibleActionFail(AnsibleAction):
class _AnsibleActionDone(AnsibleAction):
''' an action runtime early exit'''
""" an action runtime early exit"""
pass
class AnsiblePluginError(AnsibleError):
''' base class for Ansible plugin-related errors that do not need AnsibleError contextual data '''
""" base class for Ansible plugin-related errors that do not need AnsibleError contextual data """
def __init__(self, message=None, plugin_load_context=None):
super(AnsiblePluginError, self).__init__(message)
self.plugin_load_context = plugin_load_context
class AnsiblePluginRemovedError(AnsiblePluginError):
''' a requested plugin has been removed '''
""" a requested plugin has been removed """
pass
class AnsiblePluginCircularRedirect(AnsiblePluginError):
'''a cycle was detected in plugin redirection'''
"""a cycle was detected in plugin redirection"""
pass
class AnsibleCollectionUnsupportedVersionError(AnsiblePluginError):
'''a collection is not supported by this version of Ansible'''
"""a collection is not supported by this version of Ansible"""
pass
class AnsibleFilterTypeError(AnsibleTemplateError, TypeError):
''' a Jinja filter templating failure due to bad type'''
""" a Jinja filter templating failure due to bad type"""
pass
class AnsiblePluginNotFound(AnsiblePluginError):
''' Indicates we did not find an Ansible plugin '''
""" Indicates we did not find an Ansible plugin """
pass

@ -74,7 +74,7 @@ _MODULE_UTILS_PATH = os.path.join(os.path.dirname(__file__), '..', 'module_utils
# ******************************************************************************
ANSIBALLZ_TEMPLATE = u'''%(shebang)s
ANSIBALLZ_TEMPLATE = u"""%(shebang)s
%(coding)s
_ANSIBALLZ_WRAPPER = True # For test-module.py script to tell this is a ANSIBALLZ_WRAPPER
# This code is part of Ansible, but is an independent component.
@ -333,9 +333,9 @@ def _ansiballz_main():
if __name__ == '__main__':
_ansiballz_main()
'''
"""
ANSIBALLZ_COVERAGE_TEMPLATE = '''
ANSIBALLZ_COVERAGE_TEMPLATE = """
os.environ['COVERAGE_FILE'] = %(coverage_output)r + '=python-%%s=coverage' %% '.'.join(str(v) for v in sys.version_info[:2])
import atexit
@ -355,9 +355,9 @@ ANSIBALLZ_COVERAGE_TEMPLATE = '''
atexit.register(atexit_coverage)
cov.start()
'''
"""
ANSIBALLZ_COVERAGE_CHECK_TEMPLATE = '''
ANSIBALLZ_COVERAGE_CHECK_TEMPLATE = """
try:
if PY3:
import importlib.util
@ -369,9 +369,9 @@ ANSIBALLZ_COVERAGE_CHECK_TEMPLATE = '''
except ImportError:
print('{"msg": "Could not find `coverage` module.", "failed": true}')
sys.exit(1)
'''
"""
ANSIBALLZ_RLIMIT_TEMPLATE = '''
ANSIBALLZ_RLIMIT_TEMPLATE = """
import resource
existing_soft, existing_hard = resource.getrlimit(resource.RLIMIT_NOFILE)
@ -385,7 +385,7 @@ ANSIBALLZ_RLIMIT_TEMPLATE = '''
except ValueError:
# some platforms (eg macOS) lie about their hard limit
pass
'''
"""
def _strip_comments(source):

@ -563,9 +563,9 @@ class PlayIterator:
self._clear_state_errors(state.always_child_state)
def get_active_state(self, state):
'''
"""
Finds the active state, recursively if necessary when there are child states.
'''
"""
if state.run_state == IteratingStates.TASKS and state.tasks_child_state is not None:
return self.get_active_state(state.tasks_child_state)
elif state.run_state == IteratingStates.RESCUE and state.rescue_child_state is not None:
@ -575,10 +575,10 @@ class PlayIterator:
return state
def is_any_block_rescuing(self, state):
'''
"""
Given the current HostState state, determines if the current block, or any child blocks,
are in rescue mode.
'''
"""
if state.run_state == IteratingStates.TASKS and state.get_current_block().rescue:
return True
if state.tasks_child_state is not None:

@ -40,10 +40,10 @@ display = Display()
class PlaybookExecutor:
'''
"""
This is the primary class for executing playbooks, and thus the
basis for bin/ansible-playbook operation.
'''
"""
def __init__(self, playbooks, inventory, variable_manager, loader, passwords):
self._playbooks = playbooks
@ -74,10 +74,10 @@ class PlaybookExecutor:
set_default_transport()
def run(self):
'''
"""
Run the given playbook, based on the settings in the play which
may limit the runs to serialized groups, etc.
'''
"""
result = 0
entrylist = []
@ -267,10 +267,10 @@ class PlaybookExecutor:
return result
def _get_serialized_batches(self, play):
'''
"""
Returns a list of hosts, subdivided into batches based on
the serial size specified in the play.
'''
"""
# make sure we have a unique list of hosts
all_hosts = self._inventory.get_hosts(play.hosts, order=play.order)
@ -313,11 +313,11 @@ class PlaybookExecutor:
return serialized_batches
def _generate_retry_inventory(self, retry_path, replay_hosts):
'''
"""
Called when a playbook run fails. It generates an inventory which allows
re-running on ONLY the failed hosts. This may duplicate some variable
information in group_vars/host_vars but that is ok, and expected.
'''
"""
try:
makedirs_safe(os.path.dirname(retry_path))
with open(retry_path, 'w') as fd:

@ -47,11 +47,11 @@ class WorkerQueue(Queue):
class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defined]
'''
"""
The worker thread class, which uses TaskExecutor to run tasks
read from a job queue and pushes results into a results queue
for reading later.
'''
"""
def __init__(self, final_q, task_vars, host, task, play_context, loader, variable_manager, shared_loader_obj, worker_id):
@ -91,13 +91,13 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin
self._new_stdin = open(os.devnull)
def start(self):
'''
"""
multiprocessing.Process replaces the worker's stdin with a new file
but we wish to preserve it if it is connected to a terminal.
Therefore dup a copy prior to calling the real start(),
ensuring the descriptor is preserved somewhere in the new child, and
make sure it is closed in the parent when start() completes.
'''
"""
self._save_stdin()
# FUTURE: this lock can be removed once a more generalized pre-fork thread pause is in place
@ -108,12 +108,12 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin
self._new_stdin.close()
def _hard_exit(self, e):
'''
"""
There is no safe exception to return to higher level code that does not
risk an innocent try/except finding itself executing in the wrong
process. All code executing above WorkerProcess.run() on the stack
conceptually belongs to another program.
'''
"""
try:
display.debug(u"WORKER HARD EXIT: %s" % to_text(e))
@ -126,7 +126,7 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin
os._exit(1)
def run(self):
'''
"""
Wrap _run() to ensure no possibility an errant exception can cause
control to return to the StrategyBase task loop, or any other code
higher in the stack.
@ -134,7 +134,7 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin
As multiprocessing in Python 2.x provides no protection, it is possible
a try/except added in far-away code can cause a crashed child process
to suddenly assume the role and prior state of its parent.
'''
"""
try:
return self._run()
except BaseException as e:
@ -155,11 +155,11 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin
sys.stdout = sys.stderr = open(os.devnull, 'w')
def _run(self):
'''
"""
Called when the process is started. Pushes the result onto the
results queue. We also remove the host from the blocked hosts list, to
signify that they are ready for their next task.
'''
"""
# import cProfile, pstats, StringIO
# pr = cProfile.Profile()

@ -23,7 +23,7 @@ from ansible.utils.vars import merge_hash
class AggregateStats:
''' holds stats about per-host activity during playbook runs '''
""" holds stats about per-host activity during playbook runs """
def __init__(self):
@ -40,7 +40,7 @@ class AggregateStats:
self.custom = {}
def increment(self, what, host):
''' helper function to bump a statistic '''
""" helper function to bump a statistic """
self.processed[host] = 1
prev = (getattr(self, what)).get(host, 0)
@ -57,7 +57,7 @@ class AggregateStats:
_what[host] = 0
def summarize(self, host):
''' return information about a particular host '''
""" return information about a particular host """
return dict(
ok=self.ok.get(host, 0),
@ -70,7 +70,7 @@ class AggregateStats:
)
def set_custom_stats(self, which, what, host=None):
''' allow setting of a custom stat'''
""" allow setting of a custom stat"""
if host is None:
host = '_run'
@ -80,7 +80,7 @@ class AggregateStats:
self.custom[host][which] = what
def update_custom_stats(self, which, what, host=None):
''' allow aggregation of a custom stat'''
""" allow aggregation of a custom stat"""
if host is None:
host = '_run'

@ -61,10 +61,10 @@ def task_timeout(signum, frame):
def remove_omit(task_args, omit_token):
'''
"""
Remove args with a value equal to the ``omit_token`` recursively
to align with now having suboptions in the argument_spec
'''
"""
if not isinstance(task_args, dict):
return task_args
@ -85,12 +85,12 @@ def remove_omit(task_args, omit_token):
class TaskExecutor:
'''
"""
This is the main worker class for the executor pipeline, which
handles loading an action plugin to actually dispatch the task to
a given host. This class roughly corresponds to the old Runner()
class.
'''
"""
def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj, final_q, variable_manager):
self._host = host
@ -108,12 +108,12 @@ class TaskExecutor:
self._task.squash()
def run(self):
'''
"""
The main executor entrypoint, where we determine if the specified
task requires looping and either runs the task with self._run_loop()
or self._execute(). After that, the returned results are parsed and
returned as a dict.
'''
"""
display.debug("in run() - task %s" % self._task._uuid)
@ -218,10 +218,10 @@ class TaskExecutor:
display.debug(u"error closing connection: %s" % to_text(e))
def _get_loop_items(self):
'''
"""
Loads a lookup plugin to handle the with_* portion of a task (if specified),
and returns the items result.
'''
"""
# get search path for this task to pass to lookup plugins
self._job_vars['ansible_search_path'] = self._task.get_search_path()
@ -266,11 +266,11 @@ class TaskExecutor:
return items
def _run_loop(self, items):
'''
"""
Runs the task with the loop items specified and collates the result
into an array named 'results' which is inserted into the final result
along with the item for which the loop ran.
'''
"""
task_vars = self._job_vars
templar = Templar(loader=self._loader, variables=task_vars)
@ -452,11 +452,11 @@ class TaskExecutor:
variables.update(delegated_vars)
def _execute(self, variables=None):
'''
"""
The primary workhorse of the executor system, this runs the task
on the specified host (which may be the delegated_to host) and handles
the retry/until and block rescue/always execution
'''
"""
if variables is None:
variables = self._job_vars
@ -858,9 +858,9 @@ class TaskExecutor:
return result
def _poll_async_result(self, result, templar, task_vars=None):
'''
"""
Polls for the specified JID to be complete
'''
"""
if task_vars is None:
task_vars = self._job_vars
@ -976,10 +976,10 @@ class TaskExecutor:
return become
def _get_connection(self, cvars, templar, current_connection):
'''
"""
Reads the connection property for the host, and returns the
correct connection object from the list of connection plugins
'''
"""
self._play_context.connection = current_connection
@ -1134,15 +1134,15 @@ class TaskExecutor:
return varnames
def _get_action_handler(self, templar):
'''
"""
Returns the correct action plugin to handle the requestion task action
'''
"""
return self._get_action_handler_with_module_context(templar)[0]
def _get_action_handler_with_module_context(self, templar):
'''
"""
Returns the correct action plugin to handle the requestion task action and the module context
'''
"""
module_collection, separator, module_name = self._task.action.rpartition(".")
module_prefix = module_name.split('_')[0]
if module_collection:
@ -1216,9 +1216,9 @@ CLI_STUB_NAME = 'ansible_connection_cli_stub.py'
def start_connection(play_context, options, task_uuid):
'''
"""
Starts the persistent connection
'''
"""
env = os.environ.copy()
env.update({

@ -112,7 +112,7 @@ class AnsibleEndPlay(Exception):
class TaskQueueManager:
'''
"""
This class handles the multiprocessing requirements of Ansible by
creating a pool of worker forks, a result handler fork, and a
manager object with shared datastructures/queues for coordinating
@ -120,7 +120,7 @@ class TaskQueueManager:
The queue manager is responsible for loading the play strategy plugin,
which dispatches the Play's tasks to hosts.
'''
"""
RUN_OK = 0
RUN_ERROR = 1
@ -176,11 +176,11 @@ class TaskQueueManager:
self._workers.append(None)
def load_callbacks(self):
'''
"""
Loads all available callbacks, with the exception of those which
utilize the CALLBACK_TYPE option. When CALLBACK_TYPE is set to 'stdout',
only one such callback plugin will be loaded.
'''
"""
if self._callbacks_loaded:
return
@ -269,13 +269,13 @@ class TaskQueueManager:
self._callbacks_loaded = True
def run(self, play):
'''
"""
Iterates over the roles/tasks in a play, using the given (or default)
strategy for queueing tasks. The default is the linear strategy, which
operates like classic Ansible by keeping all hosts in lock-step with
a given task (meaning no hosts move on to the next task until all hosts
are done with the current task).
'''
"""
if not self._callbacks_loaded:
self.load_callbacks()

@ -22,11 +22,11 @@ CLEAN_EXCEPTIONS = (
class TaskResult:
'''
"""
This class is responsible for interpreting the resulting data
from an executed task, and provides helper methods for determining
the result of a given task.
'''
"""
def __init__(self, host, task, return_data, task_fields=None):
self._host = host
@ -93,7 +93,7 @@ class TaskResult:
return ret
def _check_key(self, key):
'''get a specific key from the result or its items'''
"""get a specific key from the result or its items"""
if isinstance(self._result, dict) and key in self._result:
return self._result.get(key, False)
@ -106,7 +106,7 @@ class TaskResult:
def clean_copy(self):
''' returns 'clean' taskresult object '''
""" returns 'clean' taskresult object """
# FIXME: clean task_fields, _task and _host copies
result = TaskResult(self._host, self._task, {}, self._task_fields)

@ -18,7 +18,7 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
########################################################################
''' This manages remote shared Ansible objects, mainly roles'''
""" This manages remote shared Ansible objects, mainly roles"""
from __future__ import annotations
@ -40,7 +40,7 @@ def get_collections_galaxy_meta_info():
class Galaxy(object):
''' Keeps global galaxy info '''
""" Keeps global galaxy info """
def __init__(self):
# TODO: eventually remove this as it contains a mismash of properties that aren't really global

@ -40,10 +40,10 @@ display = Display()
class KeycloakToken(object):
'''A token granted by a Keycloak server.
"""A token granted by a Keycloak server.
Like sso.redhat.com as used by cloud.redhat.com
ie Automation Hub'''
ie Automation Hub"""
token_type = 'Bearer'
@ -105,7 +105,7 @@ class KeycloakToken(object):
class GalaxyToken(object):
''' Class to storing and retrieving local galaxy token '''
""" Class to storing and retrieving local galaxy token """
token_type = 'Token'

@ -101,7 +101,7 @@ class InventoryData(object):
return new_host
def reconcile_inventory(self):
''' Ensure inventory basic rules, run after updates '''
""" Ensure inventory basic rules, run after updates """
display.debug('Reconcile groups and hosts in inventory.')
self.current_source = None
@ -145,7 +145,7 @@ class InventoryData(object):
self._groups_dict_cache = {}
def get_host(self, hostname):
''' fetch host object using name deal with implicit localhost '''
""" fetch host object using name deal with implicit localhost """
matching_host = self.hosts.get(hostname, None)
@ -157,7 +157,7 @@ class InventoryData(object):
return matching_host
def add_group(self, group):
''' adds a group to inventory if not there already, returns named actually used '''
""" adds a group to inventory if not there already, returns named actually used """
if group:
if not isinstance(group, string_types):
@ -188,7 +188,7 @@ class InventoryData(object):
h.remove_group(group)
def add_host(self, host, group=None, port=None):
''' adds a host to inventory and possibly a group if not there already '''
""" adds a host to inventory and possibly a group if not there already """
if host:
if not isinstance(host, string_types):
@ -242,7 +242,7 @@ class InventoryData(object):
g.remove_host(host)
def set_variable(self, entity, varname, value):
''' sets a variable for an inventory object '''
""" sets a variable for an inventory object """
if entity in self.groups:
inv_object = self.groups[entity]
@ -255,7 +255,7 @@ class InventoryData(object):
display.debug('set %s for %s' % (varname, entity))
def add_child(self, group, child):
''' Add host or group to group '''
""" Add host or group to group """
added = False
if group in self.groups:
g = self.groups[group]

@ -59,7 +59,7 @@ class InventoryObjectType(Enum):
class Group:
''' a group of ansible hosts '''
""" a group of ansible hosts """
base_type = InventoryObjectType.GROUP
# __slots__ = [ 'name', 'hosts', 'vars', 'child_groups', 'parent_groups', 'depth', '_hosts_cache' ]
@ -120,7 +120,7 @@ class Group:
self.parent_groups.append(g)
def _walk_relationship(self, rel, include_self=False, preserve_ordering=False):
'''
"""
Given `rel` that is an iterable property of Group,
consitituting a directed acyclic graph among all groups,
Returns a set of all groups in full tree
@ -132,7 +132,7 @@ class Group:
| / are directed upward
F
Called on F, returns set of (A, B, C, D, E)
'''
"""
seen = set([])
unprocessed = set(getattr(self, rel))
if include_self:

@ -28,7 +28,7 @@ __all__ = ['Host']
class Host:
''' a single ansible host '''
""" a single ansible host """
base_type = InventoryObjectType.HOST
# __slots__ = [ 'name', 'vars', 'groups' ]

@ -50,7 +50,7 @@ IGNORED_EXTS = [b'%s$' % to_bytes(re.escape(x)) for x in C.INVENTORY_IGNORE_EXTS
IGNORED = re.compile(b'|'.join(IGNORED_ALWAYS + IGNORED_PATTERNS + IGNORED_EXTS))
PATTERN_WITH_SUBSCRIPT = re.compile(
r'''^
r"""^
(.+) # A pattern expression ending with...
\[(?: # A [subscript] expression comprising:
(-?[0-9]+)| # A single positive or negative number
@ -58,12 +58,12 @@ PATTERN_WITH_SUBSCRIPT = re.compile(
([0-9]*)
)\]
$
''', re.X
""", re.X
)
def order_patterns(patterns):
''' takes a list of patterns and reorders them by modifier to apply them consistently '''
""" takes a list of patterns and reorders them by modifier to apply them consistently """
# FIXME: this goes away if we apply patterns incrementally or by groups
pattern_regular = []
@ -125,19 +125,19 @@ def split_host_pattern(pattern):
# This mishandles IPv6 addresses, and is retained only for backwards
# compatibility.
patterns = re.findall(
to_text(r'''(?: # We want to match something comprising:
to_text(r"""(?: # We want to match something comprising:
[^\s:\[\]] # (anything other than whitespace or ':[]'
| # ...or...
\[[^\]]*\] # a single complete bracketed expression)
)+ # occurring once or more
'''), pattern, re.X
"""), pattern, re.X
)
return [p.strip() for p in patterns if p.strip()]
class InventoryManager(object):
''' Creates and manages inventory '''
""" Creates and manages inventory """
def __init__(self, loader, sources=None, parse=True, cache=True):
@ -197,7 +197,7 @@ class InventoryManager(object):
return self._inventory.get_host(hostname)
def _fetch_inventory_plugins(self):
''' sets up loaded inventory plugins for usage '''
""" sets up loaded inventory plugins for usage """
display.vvvv('setting up inventory plugins')
@ -215,7 +215,7 @@ class InventoryManager(object):
return plugins
def parse_sources(self, cache=False):
''' iterate over inventory sources and parse each one to populate it'''
""" iterate over inventory sources and parse each one to populate it"""
parsed = False
# allow for multiple inventory parsing
@ -243,7 +243,7 @@ class InventoryManager(object):
host.vars = combine_vars(host.vars, get_vars_from_inventory_sources(self._loader, self._sources, [host], 'inventory'))
def parse_source(self, source, cache=False):
''' Generate or update inventory for the source provided '''
""" Generate or update inventory for the source provided """
parsed = False
failures = []
@ -335,12 +335,12 @@ class InventoryManager(object):
return parsed
def clear_caches(self):
''' clear all caches '''
""" clear all caches """
self._hosts_patterns_cache = {}
self._pattern_cache = {}
def refresh_inventory(self):
''' recalculate inventory '''
""" recalculate inventory """
self.clear_caches()
self._inventory = InventoryData()
@ -657,9 +657,9 @@ class InventoryManager(object):
self._pattern_cache = {}
def add_dynamic_host(self, host_info, result_item):
'''
"""
Helper function to add a new host to inventory based on a task result.
'''
"""
changed = False
if not result_item.get('refresh'):
@ -697,10 +697,10 @@ class InventoryManager(object):
result_item['changed'] = changed
def add_dynamic_group(self, host, result_item):
'''
"""
Helper function to add a group (if it does not exist), and to assign the
specified host to that group.
'''
"""
changed = False

@ -199,14 +199,14 @@ PERMS_RE = re.compile(r'^[rwxXstugo]*$')
#
def get_platform():
'''
"""
**Deprecated** Use :py:func:`platform.system` directly.
:returns: Name of the platform the module is running on in a native string
Returns a native string that labels the platform ("Linux", "Solaris", etc). Currently, this is
the result of calling :py:func:`platform.system`.
'''
"""
return platform.system()
# End deprecated functions
@ -231,7 +231,7 @@ def get_all_subclasses(cls):
def heuristic_log_sanitize(data, no_log_values=None):
''' Remove strings that look like passwords from log messages '''
""" Remove strings that look like passwords from log messages """
# Currently filters:
# user:pass@foo/whatever and http://username:pass@wherever/foo
# This code has false positives and consumes parts of logs that are
@ -296,7 +296,7 @@ def heuristic_log_sanitize(data, no_log_values=None):
def _load_params():
''' read the modules parameters and store them globally.
""" read the modules parameters and store them globally.
This function may be needed for certain very dynamic custom modules which
want to process the parameters that are being handed the module. Since
@ -305,7 +305,7 @@ def _load_params():
will try not to break it gratuitously. It is certainly more future-proof
to call this function and consume its outputs than to implement the logic
inside it as a copy in your own code.
'''
"""
global _ANSIBLE_ARGS
if _ANSIBLE_ARGS is not None:
buffer = _ANSIBLE_ARGS
@ -363,13 +363,13 @@ class AnsibleModule(object):
required_one_of=None, add_file_common_args=False,
supports_check_mode=False, required_if=None, required_by=None):
'''
"""
Common code for quickly building an ansible module in Python
(although you can write modules with anything that can return JSON).
See :ref:`developing_modules_general` for a general introduction
and :ref:`developing_program_flow_modules` for more detailed explanation.
'''
"""
self._name = os.path.basename(__file__) # initialize name until we can parse from options
self.argument_spec = argument_spec
@ -516,13 +516,13 @@ class AnsibleModule(object):
self.log('[DEPRECATION WARNING] %s %s' % (msg, version))
def load_file_common_arguments(self, params, path=None):
'''
"""
many modules deal with files, this encapsulates common
options that the file module accepts such that it is directly
available to all modules and they can share code.
Allows to overwrite the path/dest module argument by providing path.
'''
"""
if path is None:
path = params.get('path', params.get('dest', None))
@ -635,12 +635,12 @@ class AnsibleModule(object):
return (uid, gid)
def find_mount_point(self, path):
'''
"""
Takes a path and returns its mount point
:param path: a string type with a filesystem path
:returns: the path to the mount point as a text type
'''
"""
b_path = os.path.realpath(to_bytes(os.path.expanduser(os.path.expandvars(path)), errors='surrogate_or_strict'))
while not os.path.ismount(b_path):
@ -1115,10 +1115,10 @@ class AnsibleModule(object):
return self.set_fs_attributes_if_different(file_args, changed, diff, expand)
def add_path_info(self, kwargs):
'''
"""
for results that are files, supplement the info about the file
in the return path with stats about the file path.
'''
"""
path = kwargs.get('path', kwargs.get('dest', None))
if path is None:
@ -1155,10 +1155,10 @@ class AnsibleModule(object):
return kwargs
def _check_locale(self):
'''
"""
Uses the locale module to test the currently set locale
(per the LANG and LC_CTYPE environment settings)
'''
"""
try:
# setting the locale to '' uses the default locale
# as it would be returned by locale.getdefaultlocale()
@ -1206,11 +1206,11 @@ class AnsibleModule(object):
return safe_eval(value, locals, include_exceptions)
def _load_params(self):
''' read the input and set the params attribute.
""" read the input and set the params attribute.
This method is for backwards compatibility. The guts of the function
were moved out in 2.1 so that custom modules could read the parameters.
'''
"""
# debug overrides to read args from file or cmdline
self.params = _load_params()
@ -1297,7 +1297,7 @@ class AnsibleModule(object):
self._log_to_syslog(journal_msg)
def _log_invocation(self):
''' log that ansible ran the module '''
""" log that ansible ran the module """
# TODO: generalize a separate log function and make log_invocation use it
# Sanitize possible password argument when logging.
log_args = dict()
@ -1350,7 +1350,7 @@ class AnsibleModule(object):
return None
def get_bin_path(self, arg, required=False, opt_dirs=None):
'''
"""
Find system executable in PATH.
:param arg: The executable to find.
@ -1358,7 +1358,7 @@ class AnsibleModule(object):
:param opt_dirs: optional list of directories to search in addition to ``PATH``
:returns: if found return full path; otherwise return original arg, unless 'warning' then return None
:raises: Sysexit: if arg is not found and required=True (via fail_json)
'''
"""
bin_path = None
try:
@ -1370,7 +1370,7 @@ class AnsibleModule(object):
return bin_path
def boolean(self, arg):
'''Convert the argument to a boolean'''
"""Convert the argument to a boolean"""
if arg is None:
return arg
@ -1447,14 +1447,14 @@ class AnsibleModule(object):
print('\n%s' % self.jsonify(kwargs))
def exit_json(self, **kwargs):
''' return from the module, without error '''
""" return from the module, without error """
self.do_cleanup_files()
self._return_formatted(kwargs)
sys.exit(0)
def fail_json(self, msg, **kwargs):
''' return from the module, with an error message '''
""" return from the module, with an error message """
kwargs['failed'] = True
kwargs['msg'] = msg
@ -1477,7 +1477,7 @@ class AnsibleModule(object):
self.fail_json(msg=to_native(e))
def digest_from_file(self, filename, algorithm):
''' Return hex digest of local file for a digest_method specified by name, or None if file is not present. '''
""" Return hex digest of local file for a digest_method specified by name, or None if file is not present. """
b_filename = to_bytes(filename, errors='surrogate_or_strict')
if not os.path.exists(b_filename):
@ -1505,7 +1505,7 @@ class AnsibleModule(object):
return digest_method.hexdigest()
def md5(self, filename):
''' Return MD5 hex digest of local file using digest_from_file().
""" Return MD5 hex digest of local file using digest_from_file().
Do not use this function unless you have no other choice for:
1) Optional backwards compatibility
@ -1514,21 +1514,21 @@ class AnsibleModule(object):
This function will not work on systems complying with FIPS-140-2.
Most uses of this function can use the module.sha1 function instead.
'''
"""
if 'md5' not in AVAILABLE_HASH_ALGORITHMS:
raise ValueError('MD5 not available. Possibly running in FIPS mode')
return self.digest_from_file(filename, 'md5')
def sha1(self, filename):
''' Return SHA1 hex digest of local file using digest_from_file(). '''
""" Return SHA1 hex digest of local file using digest_from_file(). """
return self.digest_from_file(filename, 'sha1')
def sha256(self, filename):
''' Return SHA-256 hex digest of local file using digest_from_file(). '''
""" Return SHA-256 hex digest of local file using digest_from_file(). """
return self.digest_from_file(filename, 'sha256')
def backup_local(self, fn):
'''make a date-marked backup of the specified file, return True or False on success or failure'''
"""make a date-marked backup of the specified file, return True or False on success or failure"""
backupdest = ''
if os.path.exists(fn):
@ -1586,9 +1586,9 @@ class AnsibleModule(object):
self.set_attributes_if_different(dest, current_attribs, True)
def atomic_move(self, src, dest, unsafe_writes=False, keep_dest_attrs=True):
'''atomically move src to dest, copying attributes from dest, returns true on success
"""atomically move src to dest, copying attributes from dest, returns true on success
it uses os.rename to ensure this as it is an atomic operation, rest of the function is
to work around limitations, corner cases and ensure selinux context is saved if possible'''
to work around limitations, corner cases and ensure selinux context is saved if possible"""
context = None
dest_stat = None
b_src = to_bytes(src, errors='surrogate_or_strict')
@ -1756,7 +1756,7 @@ class AnsibleModule(object):
def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None,
use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None, encoding='utf-8', errors='surrogate_or_strict',
expand_user_and_vars=True, pass_fds=None, before_communicate_callback=None, ignore_invalid_cwd=True, handle_exceptions=True):
'''
"""
Execute a command, returns rc, stdout, and stderr.
The mechanism of this method for reading stdout and stderr differs from
@ -1825,7 +1825,7 @@ class AnsibleModule(object):
byte strings. On python3, stdout and stderr are text strings converted
according to the encoding and errors parameters. If you want byte
strings on python3, use encoding=None to turn decoding to text off.
'''
"""
# used by clean args later on
self._clean = None

@ -10,7 +10,7 @@ from __future__ import annotations
def get_all_subclasses(cls):
'''
"""
Recursively search and find all subclasses of a given class
:arg cls: A python class
@ -21,7 +21,7 @@ def get_all_subclasses(cls):
of a class exist. However, `__subclasses__` only goes one level deep. This function searches
each child class's `__subclasses__` method to find all of the descendent classes. It then
returns an iterable of the descendent classes.
'''
"""
# Retrieve direct subclasses
subclasses = set(cls.__subclasses__())
to_visit = list(subclasses)

@ -109,9 +109,9 @@ def _camel_to_snake(name, reversible=False):
def dict_merge(a, b):
'''recursively merges dicts. not just simple a['key'] = b['key'], if
"""recursively merges dicts. not just simple a['key'] = b['key'], if
both a and b have a key whose value is a dict then dict_merge is called
on both values and the result stored in the returned dictionary.'''
on both values and the result stored in the returned dictionary."""
if not isinstance(b, dict):
return b
result = deepcopy(a)

@ -55,7 +55,7 @@ def is_executable(path):
# This method is reused by the basic module,
# the repetition helps the basic module's html documentation come out right.
# http://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_docstring_signature
'''is_executable(path)
"""is_executable(path)
is the given path executable?
@ -66,7 +66,7 @@ def is_executable(path):
* Does not account for FSACLs.
* Most times we really want to know "Can the current user execute this
file". This function does not tell us that, only if any execute bit is set.
'''
"""
# These are all bitfields so first bitwise-or all the permissions we're
# looking for, then bitwise-and with the file's mode to determine if any
# execute bits are set.

@ -42,9 +42,9 @@ def json_dump(structure):
class AnsibleJSONEncoder(json.JSONEncoder):
'''
"""
Simple encoder class to deal with JSON encoding of Ansible internal types
'''
"""
def __init__(self, preprocess_unsafe=False, vault_to_text=False, **kwargs):
self._preprocess_unsafe = preprocess_unsafe

@ -7,7 +7,7 @@ from ansible.module_utils.common.text.converters import to_native
def get_best_parsable_locale(module, preferences=None, raise_on_locale=False):
'''
"""
Attempts to return the best possible locale for parsing output in English
useful for scraping output with i18n tools. When this raises an exception
and the caller wants to continue, it should use the 'C' locale.
@ -17,7 +17,7 @@ def get_best_parsable_locale(module, preferences=None, raise_on_locale=False):
:param raise_on_locale: boolean that determines if we raise exception or not
due to locale CLI issues
:returns: The first matched preferred locale or 'C' which is the default
'''
"""
found = 'C' # default posix, its ascii but always there
try:

@ -10,7 +10,7 @@ from ansible.module_utils.common.warnings import deprecate
def get_bin_path(arg, opt_dirs=None, required=None):
'''
"""
Find system executable in PATH. Raises ValueError if the executable is not found.
:param arg: the executable to find
@ -24,7 +24,7 @@ def get_bin_path(arg, opt_dirs=None, required=None):
In addition to PATH and opt_dirs, this function also looks through /sbin, /usr/sbin and /usr/local/sbin. A lot of
modules, especially for gathering facts, depend on this behaviour.
'''
"""
if required is not None:
deprecate(
msg="The `required` parameter in `get_bin_path` API is deprecated.",

@ -74,7 +74,7 @@ def _create_payload():
raise Exception('unable to access ansible.module_utils.basic._ANSIBLE_ARGS (not launched by AnsiballZ?)')
module_fqn = sys.modules['__main__']._module_fqn
modlib_path = sys.modules['__main__']._modlib_path
respawn_code_template = '''
respawn_code_template = """
import runpy
import sys
@ -89,7 +89,7 @@ if __name__ == '__main__':
basic._ANSIBLE_ARGS = smuggled_args
runpy.run_module(module_fqn, init_globals=dict(_respawned=True), run_name='__main__', alter_sys=True)
'''
"""
respawn_code = respawn_code_template.format(module_fqn=module_fqn, modlib_path=modlib_path, smuggled_args=smuggled_args.strip())

@ -14,7 +14,7 @@ __all__ = ('get_distribution', 'get_distribution_version', 'get_platform_subclas
def get_distribution():
'''
"""
Return the name of the distribution the module is running on.
:rtype: NativeString or None
@ -23,7 +23,7 @@ def get_distribution():
This function attempts to determine what distribution the code is running
on and return a string representing that value. If the platform is Linux
and the distribution cannot be determined, it returns ``OtherLinux``.
'''
"""
distribution = distro.id().capitalize()
if platform.system() == 'Linux':
@ -38,14 +38,14 @@ def get_distribution():
def get_distribution_version():
'''
"""
Get the version of the distribution the code is running on
:rtype: NativeString or None
:returns: A string representation of the version of the distribution. If it
cannot determine the version, it returns an empty string. If this is not run on
a Linux machine it returns None.
'''
"""
version = None
needs_best_version = frozenset((
@ -79,12 +79,12 @@ def get_distribution_version():
def get_distribution_codename():
'''
"""
Return the code name for this Linux Distribution
:rtype: NativeString or None
:returns: A string representation of the distribution's codename or None if not a Linux distro
'''
"""
codename = None
if platform.system() == 'Linux':
# Until this gets merged and we update our bundled copy of distro:
@ -109,7 +109,7 @@ def get_distribution_codename():
def get_platform_subclass(cls):
'''
"""
Finds a subclass implementing desired functionality on the platform the code is running on
:arg cls: Class to find an appropriate subclass for
@ -135,7 +135,7 @@ def get_platform_subclass(cls):
def __new__(cls, *args, **kwargs):
new_cls = get_platform_subclass(User)
return super(cls, new_cls).__new__(new_cls)
'''
"""
this_platform = platform.system()
distribution = get_distribution()

@ -278,11 +278,11 @@ def jsonify(data, **kwargs):
def container_to_bytes(d, encoding='utf-8', errors='surrogate_or_strict'):
''' Recursively convert dict keys and values to byte str
""" Recursively convert dict keys and values to byte str
Specialized for json return because this only handles, lists, tuples,
and dict container types (the containers that the json module returns)
'''
"""
if isinstance(d, text_type):
return to_bytes(d, encoding=encoding, errors=errors)

@ -16,9 +16,9 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
'''
"""
Compat distro library.
'''
"""
from __future__ import annotations
# The following makes it easier for us to script updates of the bundled code

@ -39,13 +39,13 @@ from ansible.module_utils.common.collections import is_string
class AnsibleFactCollector(collector.BaseFactCollector):
'''A FactCollector that returns results under 'ansible_facts' top level key.
"""A FactCollector that returns results under 'ansible_facts' top level key.
If a namespace if provided, facts will be collected under that namespace.
For ex, a ansible.module_utils.facts.namespace.PrefixFactNamespace(prefix='ansible_')
Has a 'from_gather_subset() constructor that populates collectors based on a
gather_subset specifier.'''
gather_subset specifier."""
def __init__(self, collectors=None, namespace=None, filter_spec=None):
@ -102,7 +102,7 @@ class AnsibleFactCollector(collector.BaseFactCollector):
class CollectorMetaDataCollector(collector.BaseFactCollector):
'''Collector that provides a facts with the gather_subset metadata.'''
"""Collector that provides a facts with the gather_subset metadata."""
name = 'gather_subset'
_fact_ids = set() # type: t.Set[str]

@ -38,13 +38,13 @@ from ansible.module_utils.facts import timeout
class CycleFoundInFactDeps(Exception):
'''Indicates there is a cycle in fact collector deps
"""Indicates there is a cycle in fact collector deps
If collector-B requires collector-A, and collector-A requires
collector-B, that is a cycle. In that case, there is no ordering
that will satisfy B before A and A and before B. That will cause this
error to be raised.
'''
"""
pass
@ -64,9 +64,9 @@ class BaseFactCollector:
required_facts = set() # type: t.Set[str]
def __init__(self, collectors=None, namespace=None):
'''Base class for things that collect facts.
"""Base class for things that collect facts.
'collectors' is an optional list of other FactCollectors for composing.'''
'collectors' is an optional list of other FactCollectors for composing."""
self.collectors = collectors or []
# self.namespace is a object with a 'transform' method that transforms
@ -88,7 +88,7 @@ class BaseFactCollector:
return key_name
def _transform_dict_keys(self, fact_dict):
'''update a dicts keys to use new names as transformed by self._transform_name'''
"""update a dicts keys to use new names as transformed by self._transform_name"""
if fact_dict is None:
return {}
@ -107,7 +107,7 @@ class BaseFactCollector:
return facts_dict
def collect(self, module=None, collected_facts=None):
'''do the fact collection
"""do the fact collection
'collected_facts' is a object (a dict, likely) that holds all previously
facts. This is intended to be used if a FactCollector needs to reference
@ -115,7 +115,7 @@ class BaseFactCollector:
Returns a dict of facts.
'''
"""
facts_dict = {}
return facts_dict
@ -125,12 +125,12 @@ def get_collector_names(valid_subsets=None,
gather_subset=None,
aliases_map=None,
platform_info=None):
'''return a set of FactCollector names based on gather_subset spec.
"""return a set of FactCollector names based on gather_subset spec.
gather_subset is a spec describing which facts to gather.
valid_subsets is a frozenset of potential matches for gather_subset ('all', 'network') etc
minimal_gather_subsets is a frozenset of matches to always use, even for gather_subset='!all'
'''
"""
# Retrieve module parameters
gather_subset = gather_subset or ['all']
@ -267,11 +267,11 @@ def _get_requires_by_collector_name(collector_name, all_fact_subsets):
def find_unresolved_requires(collector_names, all_fact_subsets):
'''Find any collector names that have unresolved requires
"""Find any collector names that have unresolved requires
Returns a list of collector names that correspond to collector
classes whose .requires_facts() are not in collector_names.
'''
"""
unresolved = set()
for collector_name in collector_names:
@ -351,7 +351,7 @@ def collector_classes_from_gather_subset(all_collector_classes=None,
gather_subset=None,
gather_timeout=None,
platform_info=None):
'''return a list of collector classes that match the args'''
"""return a list of collector classes that match the args"""
# use gather_name etc to get the list of collectors

@ -34,19 +34,19 @@ from ansible.module_utils.facts import ansible_collector
def get_all_facts(module):
'''compat api for ansible 2.2/2.3 module_utils.facts.get_all_facts method
"""compat api for ansible 2.2/2.3 module_utils.facts.get_all_facts method
Expects module to be an instance of AnsibleModule, with a 'gather_subset' param.
returns a dict mapping the bare fact name ('default_ipv4' with no 'ansible_' namespace) to
the fact value.'''
the fact value."""
gather_subset = module.params['gather_subset']
return ansible_facts(module, gather_subset=gather_subset)
def ansible_facts(module, gather_subset=None):
'''Compat api for ansible 2.0/2.2/2.3 module_utils.facts.ansible_facts method
"""Compat api for ansible 2.0/2.2/2.3 module_utils.facts.ansible_facts method
2.3/2.3 expects a gather_subset arg.
2.0/2.1 does not except a gather_subset arg
@ -57,7 +57,7 @@ def ansible_facts(module, gather_subset=None):
returns a dict mapping the bare fact name ('default_ipv4' with no 'ansible_' namespace) to
the fact value.
'''
"""
gather_subset = gather_subset or module.params.get('gather_subset', ['all'])
gather_timeout = module.params.get('gather_timeout', 10)

@ -224,9 +224,9 @@ class FreeBSDHardware(Hardware):
return device_facts
def get_dmi_facts(self):
''' learn dmi facts from system
""" learn dmi facts from system
Use dmidecode executable if available'''
Use dmidecode executable if available"""
dmi_facts = {}

@ -311,10 +311,10 @@ class LinuxHardware(Hardware):
return cpu_facts
def get_dmi_facts(self):
''' learn dmi facts from system
""" learn dmi facts from system
Try /sys first for dmi related facts.
If that is not available, fall back to dmidecode executable '''
If that is not available, fall back to dmidecode executable """
dmi_facts = {}
@ -423,13 +423,13 @@ class LinuxHardware(Hardware):
'NA'
)
sysinfo_re = re.compile(
r'''
r"""
^
(?:Manufacturer:\s+(?P<system_vendor>.+))|
(?:Type:\s+(?P<product_name>.+))|
(?:Sequence\ Code:\s+0+(?P<product_serial>.+))
$
''',
""",
re.VERBOSE | re.MULTILINE
)
data = get_file_content('/proc/sysinfo')

@ -33,7 +33,7 @@ class FactNamespace:
self.namespace_name = namespace_name
def transform(self, name):
'''Take a text name, and transforms it as needed (add a namespace prefix, etc)'''
"""Take a text name, and transforms it as needed (add a namespace prefix, etc)"""
return name
def _underscore(self, name):

@ -25,7 +25,7 @@ from ansible.module_utils.facts.collector import BaseFactCollector
class OhaiFactCollector(BaseFactCollector):
'''This is a subclass of Facts for including information gathered from Ohai.'''
"""This is a subclass of Facts for including information gathered from Ohai."""
name = 'ohai'
_fact_ids = set() # type: t.Set[str]

@ -46,7 +46,7 @@ def _file_exists(path, allow_empty=False):
class DistributionFiles:
'''has-a various distro file parsers (os-release, etc) and logic for finding the right one.'''
"""has-a various distro file parsers (os-release, etc) and logic for finding the right one."""
# every distribution name mentioned here, must have one of
# - allowempty == True
# - be listed in SEARCH_STRING

@ -20,7 +20,7 @@ import os
def get_file_content(path, default=None, strip=True):
'''
"""
Return the contents of a given file path
:args path: path to file to return contents from
@ -28,7 +28,7 @@ def get_file_content(path, default=None, strip=True):
:args strip: controls if we strip whitespace from the result or not
:returns: String with file contents (optionally stripped) or 'default' value
'''
"""
data = default
if os.path.exists(path) and os.access(path, os.R_OK):
datafile = None
@ -62,7 +62,7 @@ def get_file_content(path, default=None, strip=True):
def get_file_lines(path, strip=True, line_sep=None):
'''get list of lines from file'''
"""get list of lines from file"""
data = get_file_content(path, strip=strip)
if data:
if line_sep is None:

@ -32,13 +32,13 @@ import json # pylint: disable=unused-import
# NB: a copy of this function exists in ../../modules/core/async_wrapper.py. Ensure any
# changes are propagated there.
def _filter_non_json_lines(data, objects_only=False):
'''
"""
Used to filter unrelated output around module JSON output, like messages from
tcagetattr, or where dropbear spews MOTD on every single command (which is nuts).
Filters leading lines before first line-starting occurrence of '{' or '[', and filter all
trailing lines after matching close character (working from the bottom of output).
'''
"""
warnings = []
# Filter initial junk

@ -42,13 +42,13 @@ from ansible.module_utils.common.text.converters import to_bytes, to_text
def sysv_is_enabled(name, runlevel=None):
'''
"""
This function will check if the service name supplied
is enabled in any of the sysv runlevels
:arg name: name of the service to test for
:kw runlevel: runlevel to check (default: None)
'''
"""
if runlevel:
if not os.path.isdir('/etc/rc0.d/'):
return bool(glob.glob('/etc/init.d/rc%s.d/S??%s' % (runlevel, name)))
@ -60,12 +60,12 @@ def sysv_is_enabled(name, runlevel=None):
def get_sysv_script(name):
'''
"""
This function will return the expected path for an init script
corresponding to the service name supplied.
:arg name: name or path of the service to test for
'''
"""
if name.startswith('/'):
result = name
else:
@ -75,19 +75,19 @@ def get_sysv_script(name):
def sysv_exists(name):
'''
"""
This function will return True or False depending on
the existence of an init script corresponding to the service name supplied.
:arg name: name of the service to test for
'''
"""
return os.path.exists(get_sysv_script(name))
def get_ps(module, pattern):
'''
"""
Last resort to find a service by trying to match pattern to programs in memory
'''
"""
found = False
if platform.system() == 'SunOS':
flags = '-ef'
@ -106,7 +106,7 @@ def get_ps(module, pattern):
def fail_if_missing(module, found, service, msg=''):
'''
"""
This function will return an error or exit gracefully depending on check mode status
and if the service is missing or not.
@ -114,16 +114,16 @@ def fail_if_missing(module, found, service, msg=''):
:arg found: boolean indicating if services were found or not
:arg service: name of service
:kw msg: extra info to append to error/success msg when missing
'''
"""
if not found:
module.fail_json(msg='Could not find the requested service %s: %s' % (service, msg))
def fork_process():
'''
"""
This function performs the double fork process to detach from the
parent process and execute.
'''
"""
pid = os.fork()
if pid == 0:
@ -162,7 +162,7 @@ def fork_process():
def daemonize(module, cmd):
'''
"""
Execute a command while detaching as a daemon, returns rc, stdout, and stderr.
:arg module: is an AnsibleModule object, used for it's utility methods
@ -171,7 +171,7 @@ def daemonize(module, cmd):
This is complex because daemonization is hard for people.
What we do is daemonize a part of this module, the daemon runs the command,
picks up the return code and output, and returns it to the main process.
'''
"""
# init some vars
chunk = 4096 # FIXME: pass in as arg?

@ -30,10 +30,10 @@ from __future__ import annotations
def _get_quote_state(token, quote_char):
'''
"""
the goal of this block is to determine if the quoted string
is unterminated in which case it needs to be put back together
'''
"""
# the char before the current one, used to see if
# the current character is escaped
prev_char = None
@ -50,11 +50,11 @@ def _get_quote_state(token, quote_char):
def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
'''
"""
this function counts the number of opening/closing blocks for a
given opening/closing type and adjusts the current depth for that
block based on the difference
'''
"""
num_open = token.count(open_token)
num_close = token.count(close_token)
if num_open != num_close:
@ -65,7 +65,7 @@ def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
def split_args(args):
'''
"""
Splits args on whitespace, but intelligently reassembles
those that may have been split over a jinja2 block or quotes.
@ -78,7 +78,7 @@ def split_args(args):
Basically this is a variation shlex that has some more intelligence for
how Ansible needs to use it.
'''
"""
# the list of params parsed out of the arg string
# this is going to be the result value when we are done
@ -212,7 +212,7 @@ def is_quoted(data):
def unquote(data):
''' removes first and last quotes from a string, if the string starts and ends with the same quotes '''
""" removes first and last quotes from a string, if the string starts and ends with the same quotes """
if is_quoted(data):
return data[1:-1]
return data

@ -12,7 +12,7 @@
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
'''
"""
The **urls** utils module offers a replacement for the urllib python library.
urllib is the python stdlib way to retrieve files from the Internet but it
@ -25,7 +25,7 @@ to replace urllib with a more secure library. However, all third party libraries
require that the library be installed on the managed machine. That is an extra step
for users making use of a module. If possible, avoid third party libraries by using
this code instead.
'''
"""
from __future__ import annotations
@ -223,10 +223,10 @@ UnixHTTPSConnection = None
if HAS_SSL:
@contextmanager
def unix_socket_patch_httpconnection_connect():
'''Monkey patch ``http.client.HTTPConnection.connect`` to be ``UnixHTTPConnection.connect``
"""Monkey patch ``http.client.HTTPConnection.connect`` to be ``UnixHTTPConnection.connect``
so that when calling ``super(UnixHTTPSConnection, self).connect()`` we get the
correct behavior of creating self.sock for the unix socket
'''
"""
_connect = http.client.HTTPConnection.connect
http.client.HTTPConnection.connect = UnixHTTPConnection.connect
yield
@ -270,7 +270,7 @@ if HAS_SSL:
class UnixHTTPConnection(http.client.HTTPConnection):
'''Handles http requests to a unix socket file'''
"""Handles http requests to a unix socket file"""
def __init__(self, unix_socket):
self._unix_socket = unix_socket
@ -290,7 +290,7 @@ class UnixHTTPConnection(http.client.HTTPConnection):
class UnixHTTPHandler(urllib.request.HTTPHandler):
'''Handler for Unix urls'''
"""Handler for Unix urls"""
def __init__(self, unix_socket, **kwargs):
super().__init__(**kwargs)
@ -301,29 +301,29 @@ class UnixHTTPHandler(urllib.request.HTTPHandler):
class ParseResultDottedDict(dict):
'''
"""
A dict that acts similarly to the ParseResult named tuple from urllib
'''
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.__dict__ = self
def as_list(self):
'''
"""
Generate a list from this dict, that looks like the ParseResult named tuple
'''
"""
return [self.get(k, None) for k in ('scheme', 'netloc', 'path', 'params', 'query', 'fragment')]
def generic_urlparse(parts):
'''
"""
Returns a dictionary of url parts as parsed by urlparse,
but accounts for the fact that older versions of that
library do not support named attributes (ie. .netloc)
This method isn't of much use any longer, but is kept
in a minimal state for backwards compat.
'''
"""
result = ParseResultDottedDict(parts._asdict())
result.update({
'username': parts.username,
@ -989,11 +989,11 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True,
client_cert=None, client_key=None, cookies=None,
use_gssapi=False, unix_socket=None, ca_path=None,
unredirected_headers=None, decompress=True, ciphers=None, use_netrc=True):
'''
"""
Sends a request via HTTP(S) or FTP using urllib (Python3)
Does not require the module environment
'''
"""
method = method or ('POST' if data else 'GET')
return Request().open(method, url, data=data, headers=headers, use_proxy=use_proxy,
force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs,
@ -1117,10 +1117,10 @@ def basic_auth_header(username, password):
def url_argument_spec():
'''
"""
Creates an argument spec that can be used with any module
that will be requesting content via urllib/urllib2
'''
"""
return dict(
url=dict(type='str'),
force=dict(type='bool', default=False),
@ -1333,7 +1333,7 @@ def _split_multiext(name, min=3, max=4, count=2):
def fetch_file(module, url, data=None, headers=None, method=None,
use_proxy=True, force=False, last_mod_time=None, timeout=10,
unredirected_headers=None, decompress=True, ciphers=None):
'''Download and save a file via HTTP(S) or FTP (needs the module as parameter).
"""Download and save a file via HTTP(S) or FTP (needs the module as parameter).
This is basically a wrapper around fetch_url().
:arg module: The AnsibleModule (used to get username, password etc. (s.b.).
@ -1351,7 +1351,7 @@ def fetch_file(module, url, data=None, headers=None, method=None,
:kwarg ciphers: (optional) List of ciphers to use
:returns: A string, the path to the downloaded file.
'''
"""
# download file
bufsize = 65536
parts = urlparse(url)

@ -7,7 +7,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: add_host
short_description: Add a host (and alternatively a group) to the ansible-playbook in-memory inventory
@ -69,9 +69,9 @@ seealso:
author:
- Ansible Core Team
- Seth Vidal (@skvidal)
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Add host to group 'just_created' with variable foo=42
ansible.builtin.add_host:
name: '{{ ip_from_ec2 }}'
@ -111,4 +111,4 @@ EXAMPLES = r'''
name: '{{ item }}'
groups: done
loop: "{{ ansible_play_hosts }}"
'''
"""

@ -9,7 +9,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: apt
short_description: Manages apt-packages
@ -217,9 +217,9 @@ notes:
- If the interpreter can't import C(python-apt)/C(python3-apt) the module will check for it in system-owned interpreters as well.
If the dependency can't be found, the module will attempt to install it.
If the dependency is found or installed, the module will be respawned under the correct interpreter.
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: Install apache httpd (state=present is optional)
ansible.builtin.apt:
name: apache2
@ -327,9 +327,9 @@ EXAMPLES = '''
- name: Run the equivalent of "apt-get clean" as a separate step
ansible.builtin.apt:
clean: yes
'''
"""
RETURN = '''
RETURN = """
cache_updated:
description: if the cache was updated or not
returned: success, in some cases
@ -355,7 +355,7 @@ stderr:
returned: success, when needed
type: str
sample: "AH00558: apache2: Could not reliably determine the server's fully qualified domain name, using 127.0.1.1. Set the 'ServerName' directive globally to ..."
''' # NOQA
""" # NOQA
# added to stave off future warnings about apt api
import warnings
@ -1184,7 +1184,7 @@ def get_updated_cache_time():
# https://github.com/ansible/ansible-modules-core/issues/2951
def get_cache(module):
'''Attempt to get the cache object and update till it works'''
"""Attempt to get the cache object and update till it works"""
cache = None
try:
cache = apt.Cache()

@ -8,7 +8,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: apt_key
author:
@ -79,9 +79,9 @@ options:
on personally controlled sites using self-signed certificates.
type: bool
default: 'yes'
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: One way to avoid apt_key once it is removed from your distro, armored keys should use .asc extension, binary should use .gpg
block:
- name: somerepo | no apt key
@ -133,9 +133,9 @@ EXAMPLES = '''
id: 9FED2BCBDCD29CDF762678CBAED4B06F473041FA
file: /tmp/apt.gpg
state: present
'''
"""
RETURN = '''
RETURN = """
after:
description: List of apt key ids or fingerprints after any modification
returned: on change
@ -166,7 +166,7 @@ short_id:
returned: always
type: str
sample: "A88D21E9"
'''
"""
import os

@ -9,7 +9,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: apt_repository
short_description: Add and remove APT repositories
@ -101,9 +101,9 @@ requirements:
- python-apt (python 2)
- python3-apt (python 3)
- apt-key or gpg
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: Add specified repository into sources list
ansible.builtin.apt_repository:
repo: deb http://archive.canonical.com/ubuntu hardy partner
@ -145,9 +145,9 @@ EXAMPLES = '''
ansible.builtin.apt_repository:
repo: "deb [arch=amd64 signed-by=/etc/apt/keyrings/myrepo.asc] https://download.example.com/linux/ubuntu {{ ansible_distribution_release }} stable"
state: present
'''
"""
RETURN = '''
RETURN = """
repo:
description: A source string for the repository
returned: always
@ -167,7 +167,7 @@ sources_removed:
type: list
sample: ["/etc/apt/sources.list.d/artifacts_elastic_co_packages_6_x_apt.list"]
version_added: "2.15"
'''
"""
import copy
import glob
@ -245,7 +245,7 @@ class SourcesList(object):
self.load(file)
def __iter__(self):
'''Simple iterator to go over all sources. Empty, non-source, and other not valid lines will be skipped.'''
"""Simple iterator to go over all sources. Empty, non-source, and other not valid lines will be skipped."""
for file, sources in self.files.items():
for n, valid, enabled, source, comment in sources:
if valid:
@ -315,9 +315,9 @@ class SourcesList(object):
@staticmethod
def _apt_cfg_file(filespec):
'''
"""
Wrapper for `apt_pkg` module for running with Python 2.5
'''
"""
try:
result = apt_pkg.config.find_file(filespec)
except AttributeError:
@ -326,9 +326,9 @@ class SourcesList(object):
@staticmethod
def _apt_cfg_dir(dirspec):
'''
"""
Wrapper for `apt_pkg` module for running with Python 2.5
'''
"""
try:
result = apt_pkg.config.find_dir(dirspec)
except AttributeError:
@ -413,10 +413,10 @@ class SourcesList(object):
return new
def modify(self, file, n, enabled=None, source=None, comment=None):
'''
"""
This function to be used with iterator, so we don't care of invalid sources.
If source, enabled, or comment is None, original value from line ``n`` will be preserved.
'''
"""
valid, enabled_old, source_old, comment_old = self.files[file][n][1:]
self.files[file][n] = (n, valid, self._choice(enabled, enabled_old), self._choice(source, source_old), self._choice(comment, comment_old))
@ -616,7 +616,7 @@ class UbuntuSourcesList(SourcesList):
def revert_sources_list(sources_before, sources_after, sourceslist_before):
'''Revert the sourcelist files to their previous state.'''
"""Revert the sourcelist files to their previous state."""
# First remove any new files that were created:
for filename in set(sources_after.keys()).difference(sources_before.keys()):

@ -8,7 +8,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: assemble
short_description: Assemble configuration files from fragments
@ -102,9 +102,9 @@ extends_documentation_fragment:
- action_common_attributes.files
- decrypt
- files
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Assemble from fragments from a directory
ansible.builtin.assemble:
src: /etc/someapp/fragments
@ -121,9 +121,9 @@ EXAMPLES = r'''
src: /etc/ssh/conf.d/
dest: /etc/ssh/sshd_config
validate: /usr/sbin/sshd -t -f %s
'''
"""
RETURN = r'''#'''
RETURN = r"""#"""
import codecs
import os
@ -136,7 +136,7 @@ from ansible.module_utils.common.text.converters import to_native
def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, ignore_hidden=False, tmpdir=None):
''' assemble a file from a directory of fragments '''
""" assemble a file from a directory of fragments """
tmpfd, temp_path = tempfile.mkstemp(dir=tmpdir)
tmp = os.fdopen(tmpfd, 'wb')
delimit_me = False

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: assert
short_description: Asserts given expressions are true
@ -70,9 +70,9 @@ seealso:
author:
- Ansible Core Team
- Michael DeHaan
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: A single condition can be supplied as string instead of list
ansible.builtin.assert:
that: "ansible_os_family != 'RedHat'"
@ -106,4 +106,4 @@ EXAMPLES = r'''
- my_param <= 100
- my_param >= 0
quiet: true
'''
"""

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: async_status
short_description: Obtain status of asynchronous task
@ -51,9 +51,9 @@ seealso:
author:
- Ansible Core Team
- Michael DeHaan
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
---
- name: Asynchronous dnf task
ansible.builtin.dnf:
@ -75,9 +75,9 @@ EXAMPLES = r'''
ansible.builtin.async_status:
jid: '{{ dnf_sleeper.ansible_job_id }}'
mode: cleanup
'''
"""
RETURN = r'''
RETURN = r"""
ansible_job_id:
description: The asynchronous job id
returned: success
@ -105,7 +105,7 @@ erased:
description: Path to erased job file
returned: when file is erased
type: str
'''
"""
import json
import os

@ -75,13 +75,13 @@ def daemonize_self():
# NB: this function copied from module_utils/json_utils.py. Ensure any changes are propagated there.
# FUTURE: AnsibleModule-ify this module so it's Ansiballz-compatible and can use the module_utils copy of this function.
def _filter_non_json_lines(data):
'''
"""
Used to filter unrelated output around module JSON output, like messages from
tcagetattr, or where dropbear spews MOTD on every single command (which is nuts).
Filters leading lines before first line-starting occurrence of '{', and filter all
trailing lines after matching close character (working from the bottom of output).
'''
"""
warnings = []
# Filter initial junk

@ -7,7 +7,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: blockinfile
short_description: Insert/update/remove a text block surrounded by marker lines
@ -125,9 +125,9 @@ attributes:
platforms: posix
vault:
support: none
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
# Before Ansible 2.3, option 'dest' or 'name' was used instead of 'path'
- name: Insert/Update "Match User" configuration block in /etc/ssh/sshd_config prepending and appending a new line
ansible.builtin.blockinfile:
@ -187,7 +187,7 @@ EXAMPLES = r'''
insertafter: '(?m)SID_LIST_LISTENER_DG =\n.*\(SID_LIST ='
marker: " <!-- {mark} ANSIBLE MANAGED BLOCK -->"
'''
"""
import re
import os

@ -7,7 +7,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: command
short_description: Execute commands on targets
@ -118,9 +118,9 @@ seealso:
author:
- Ansible Core Team
- Michael DeHaan
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Return motd to registered var
ansible.builtin.command: cat /etc/motd
register: mymotd
@ -174,9 +174,9 @@ EXAMPLES = r'''
- name: Safely use templated variable to run command. Always use the quote filter to avoid injection issues
ansible.builtin.command: cat {{ myfile|quote }}
register: myoutput
'''
"""
RETURN = r'''
RETURN = r"""
msg:
description: changed
returned: always
@ -229,7 +229,7 @@ stderr_lines:
returned: always
type: list
sample: [u'ls cannot access foo: No such file or directory', u'ls …']
'''
"""
import datetime
import glob

@ -7,7 +7,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: copy
version_added: historical
@ -154,9 +154,9 @@ attributes:
vault:
support: full
version_added: '2.2'
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Copy file with owner and permissions
ansible.builtin.copy:
src: /srv/myfiles/foo.conf
@ -219,9 +219,9 @@ EXAMPLES = r'''
src: /etc/foo.conf
dest: /path/to/link # link to /path/to/file
follow: no
'''
"""
RETURN = r'''
RETURN = r"""
dest:
description: Destination file/path.
returned: success
@ -282,7 +282,7 @@ state:
returned: success
type: str
sample: file
'''
"""
import errno
import filecmp
@ -305,9 +305,9 @@ class AnsibleModuleError(Exception):
def split_pre_existing_dir(dirname):
'''
"""
Return the first pre-existing directory and a list of the new directories that will be created.
'''
"""
head, tail = os.path.split(dirname)
b_head = to_bytes(head, errors='surrogate_or_strict')
if head == '':
@ -323,9 +323,9 @@ def split_pre_existing_dir(dirname):
def adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed):
'''
"""
Walk the new directories list and make sure that permissions are as we would expect
'''
"""
if new_directory_list:
working_dir = os.path.join(pre_existing_dir, new_directory_list.pop(0))

@ -10,7 +10,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: cron
short_description: Manage cron.d and crontab entries
@ -150,9 +150,9 @@ attributes:
platform:
support: full
platforms: posix
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Ensure a job that runs at 2 and 5 exists. Creates an entry like "0 5,2 * * ls -alh > /dev/null"
ansible.builtin.cron:
name: "check dirs"
@ -205,9 +205,9 @@ EXAMPLES = r'''
name: APP_HOME
env: yes
state: absent
'''
"""
RETURN = r'''#'''
RETURN = r"""#"""
import os
import platform

@ -4,7 +4,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
author: 'Ansible Core Team (@ansible)'
short_description: 'Add and remove deb822 formatted repositories'
description:
@ -145,9 +145,9 @@ options:
requirements:
- python3-debian / python-debian
version_added: '2.15'
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: Add debian repo
deb822_repository:
name: debian
@ -189,9 +189,9 @@ EXAMPLES = '''
components: stable
architectures: amd64
signed_by: https://download.example.com/linux/ubuntu/gpg
'''
"""
RETURN = '''
RETURN = """
repo:
description: A source string for the repository
returned: always
@ -224,7 +224,7 @@ key_filename:
returned: always
type: str
sample: /etc/apt/keyrings/debian.gpg
'''
"""
import os
import re

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: debconf
short_description: Configure a .deb package
@ -86,9 +86,9 @@ options:
default: false
author:
- Brian Coca (@bcoca)
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Set default locale to fr_FR.UTF-8
ansible.builtin.debconf:
name: locales
@ -121,9 +121,9 @@ EXAMPLES = r'''
value: "{{ site_passphrase }}"
vtype: password
no_log: True
'''
"""
RETURN = r'''#'''
RETURN = r"""#"""
from ansible.module_utils.common.text.converters import to_text, to_native
from ansible.module_utils.basic import AnsibleModule

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: debug
short_description: Print statements during execution
@ -68,9 +68,9 @@ seealso:
author:
- Dag Wieers (@dagwieers)
- Michael DeHaan
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Print the gateway for each host when defined
ansible.builtin.debug:
msg: System {{ inventory_hostname }} has gateway {{ ansible_default_ipv4.gateway }}
@ -95,4 +95,4 @@ EXAMPLES = r'''
msg:
- "Provisioning based on YOUR_KEY which is: {{ lookup('ansible.builtin.env', 'YOUR_KEY') }}"
- "These servers were built using the password of '{{ password_used }}'. Please retain this for later use."
'''
"""

@ -9,7 +9,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: dnf
version_added: 1.9
@ -306,9 +306,9 @@ author:
- Cristian van Ee (@DJMuggs) <cristian at cvee.org>
- Berend De Schouwer (@berenddeschouwer)
- Adam Miller (@maxamillion) <admiller@redhat.com>
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: Install the latest version of Apache
ansible.builtin.dnf:
name: httpd
@ -394,7 +394,7 @@ EXAMPLES = '''
ansible.builtin.dnf:
name: '@postgresql/client'
state: present
'''
"""
import os
import sys

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: dpkg_selections
short_description: Dpkg package selection selections
@ -39,8 +39,8 @@ attributes:
platforms: debian
notes:
- This module will not cause any packages to be installed/removed/purged, use the M(ansible.builtin.apt) module for that.
'''
EXAMPLES = '''
"""
EXAMPLES = """
- name: Prevent python from being upgraded
ansible.builtin.dpkg_selections:
name: python
@ -50,7 +50,7 @@ EXAMPLES = '''
ansible.builtin.dpkg_selections:
name: python
selection: install
'''
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.locale import get_best_parsable_locale

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: expect
version_added: '2.0'
@ -83,9 +83,9 @@ seealso:
- module: ansible.builtin.script
- module: ansible.builtin.shell
author: "Matt Martz (@sivel)"
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Case insensitive password string match
ansible.builtin.expect:
command: passwd username
@ -116,7 +116,7 @@ EXAMPLES = r'''
- "{{ db_username }}"
"Database password":
- "{{ db_password }}"
'''
"""
import datetime
import os

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: fail
short_description: Fail with custom message
@ -52,11 +52,11 @@ seealso:
- module: ansible.builtin.meta
author:
- Dag Wieers (@dagwieers)
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Example using fail and when together
ansible.builtin.fail:
msg: The system may not be provisioned according to the CMDB status.
when: cmdb_status != "to-be-staged"
'''
"""

@ -8,7 +8,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: fetch
short_description: Fetch files from remote nodes
@ -95,9 +95,9 @@ seealso:
author:
- Ansible Core Team
- Michael DeHaan
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Store file into /tmp/fetched/host.example.com/tmp/somefile
ansible.builtin.fetch:
src: /tmp/somefile
@ -120,4 +120,4 @@ EXAMPLES = r'''
src: /tmp/uniquefile
dest: special/prefix-{{ inventory_hostname }}
flat: yes
'''
"""

@ -7,7 +7,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: file
version_added: historical
@ -123,9 +123,9 @@ attributes:
author:
- Ansible Core Team
- Michael DeHaan
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Change file ownership, group and permissions
ansible.builtin.file:
path: /etc/foo.conf
@ -214,8 +214,8 @@ EXAMPLES = r'''
path: /etc/foo
state: absent
'''
RETURN = r'''
"""
RETURN = r"""
dest:
description: Destination file/path, equal to the value passed to O(path).
returned: O(state=touch), O(state=hard), O(state=link)
@ -226,7 +226,7 @@ path:
returned: O(state=absent), O(state=directory), O(state=file)
type: str
sample: /path/to/file.txt
'''
"""
import errno
import os
@ -296,7 +296,7 @@ def additional_parameter_handling(module):
def get_state(path):
''' Find out current state '''
""" Find out current state """
b_path = to_bytes(path, errors='surrogate_or_strict')
try:

@ -9,7 +9,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: find
author: Brian Coca (@bcoca)
@ -174,10 +174,10 @@ attributes:
platforms: posix
seealso:
- module: ansible.windows.win_find
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Recursively find /tmp files older than 2 days
ansible.builtin.find:
paths: /tmp
@ -246,9 +246,9 @@ EXAMPLES = r'''
use_regex: true
recurse: true
limit: 1
'''
"""
RETURN = r'''
RETURN = r"""
files:
description: All matches found with the specified criteria (see stat module for full output of each dictionary)
returned: success
@ -279,7 +279,7 @@ skipped_paths:
type: dict
sample: {"/laskdfj": "'/laskdfj' is not a directory"}
version_added: '2.12'
'''
"""
import errno
import fnmatch
@ -302,7 +302,7 @@ class _Object:
def pfilter(f, patterns=None, excludes=None, use_regex=False):
'''filter using glob patterns'''
"""filter using glob patterns"""
if not patterns and not excludes:
return True
@ -341,7 +341,7 @@ def pfilter(f, patterns=None, excludes=None, use_regex=False):
def agefilter(st, now, age, timestamp):
'''filter files older than age'''
"""filter files older than age"""
if age is None:
return True
elif age >= 0 and now - getattr(st, "st_%s" % timestamp) >= abs(age):
@ -352,7 +352,7 @@ def agefilter(st, now, age, timestamp):
def sizefilter(st, size):
'''filter files greater than size'''
"""filter files greater than size"""
if size is None:
return True
elif size >= 0 and st.st_size >= abs(size):

@ -5,7 +5,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: gather_facts
version_added: 2.8
@ -57,7 +57,7 @@ notes:
Order is not guaranteed, when doing parallel gathering on multiple modules.
author:
- "Ansible Core Team"
'''
"""
RETURN = """
# depends on the fact module called

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: get_url
short_description: Downloads files from HTTP, HTTPS, or FTP to node
@ -219,9 +219,9 @@ seealso:
- module: ansible.windows.win_get_url
author:
- Jan-Piet Mens (@jpmens)
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Download foo.conf
ansible.builtin.get_url:
url: http://example.com/path/file.conf
@ -272,9 +272,9 @@ EXAMPLES = r'''
dest: /etc/foo.conf
username: bar
password: '{{ mysecret }}'
'''
"""
RETURN = r'''
RETURN = r"""
backup_file:
description: name of backup file created after download
returned: changed and if backup=yes
@ -365,7 +365,7 @@ url:
returned: always
type: str
sample: https://www.ansible.com/
'''
"""
import email.message
import os

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: getent
short_description: A wrapper to the unix getent utility
@ -58,9 +58,9 @@ notes:
- Not all databases support enumeration, check system documentation for details.
author:
- Brian Coca (@bcoca)
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: Get root user info
ansible.builtin.getent:
database: passwd
@ -97,9 +97,9 @@ EXAMPLES = '''
- ansible.builtin.debug:
var: ansible_facts.getent_shadow
'''
"""
RETURN = '''
RETURN = """
ansible_facts:
description: Facts to add to ansible_facts.
returned: always
@ -112,7 +112,7 @@ ansible_facts:
- Starting at 2.11 it now returns multiple duplicate entries, previously it only returned the last one
returned: always
type: list
'''
"""
import traceback

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: git
author:
@ -236,9 +236,9 @@ notes:
one solution is to use the option accept_hostkey. Another solution is to
add the remote host public key in C(/etc/ssh/ssh_known_hosts) before calling
the git module, with the following command: C(ssh-keyscan -H remote_host.com >> /etc/ssh/ssh_known_hosts)."
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: Git checkout
ansible.builtin.git:
repo: 'https://github.com/ansible/ansible.git'
@ -295,9 +295,9 @@ EXAMPLES = '''
environment:
GIT_TERMINAL_PROMPT: 0 # reports "terminal prompts disabled" on missing password
# or GIT_ASKPASS: /bin/true # for git before version 2.3.0, reports "Authentication failed" on missing password
'''
"""
RETURN = '''
RETURN = """
after:
description: Last commit revision of the repository retrieved during the update.
returned: success
@ -328,7 +328,7 @@ git_dir_before:
returned: success
type: str
sample: /path/to/old/git/dir
'''
"""
import filecmp
import os
@ -366,7 +366,7 @@ def relocate_repo(module, result, repo_dir, old_repo_dir, worktree_dir):
def head_splitter(headfile, remote, module=None, fail_on_error=False):
'''Extract the head reference'''
"""Extract the head reference"""
# https://github.com/ansible/ansible-modules-core/pull/907
res = None
@ -429,11 +429,11 @@ def get_submodule_update_params(module, git_path, cwd):
def write_ssh_wrapper(module):
'''
"""
    This writes a shell wrapper for ssh options to be used with git
    this is only relevant for older versions of git that cannot
handle the options themselves. Returns path to the script
'''
"""
try:
# make sure we have full permission to the module_dir, which
# may not be the case if we're sudo'ing to a non-root user
@ -466,10 +466,10 @@ def write_ssh_wrapper(module):
def set_git_ssh_env(key_file, ssh_opts, git_version, module):
'''
"""
use environment variables to configure git's ssh execution,
which varies by version but this function should handle all.
'''
"""
# initialise to existing ssh opts and/or append user provided
if ssh_opts is None:
@ -519,7 +519,7 @@ def set_git_ssh_env(key_file, ssh_opts, git_version, module):
def get_version(module, git_path, dest, ref="HEAD"):
''' samples the version of the git repo '''
""" samples the version of the git repo """
cmd = "%s rev-parse %s" % (git_path, ref)
rc, stdout, stderr = module.run_command(cmd, cwd=dest)
@ -571,7 +571,7 @@ def get_submodule_versions(git_path, module, dest, version='HEAD'):
def clone(git_path, module, repo, dest, remote, depth, version, bare,
reference, refspec, git_version_used, verify_commit, separate_git_dir, result, gpg_allowlist, single_branch):
''' makes a new git repo if it does not already exist '''
""" makes a new git repo if it does not already exist """
dest_dirname = os.path.dirname(dest)
try:
os.makedirs(dest_dirname)
@ -653,17 +653,17 @@ def has_local_mods(module, git_path, dest, bare):
def reset(git_path, module, dest):
'''
"""
Resets the index and working tree to HEAD.
Discards any changes to tracked files in working
tree since that commit.
'''
"""
cmd = "%s reset --hard HEAD" % (git_path,)
return module.run_command(cmd, check_rc=True, cwd=dest)
def get_diff(module, git_path, dest, repo, remote, depth, bare, before, after):
''' Return the difference between 2 versions '''
""" Return the difference between 2 versions """
if before is None:
return {'prepared': '>> Newly checked out %s' % after}
elif before != after:
@ -817,13 +817,13 @@ def get_repo_path(dest, bare):
def get_head_branch(git_path, module, dest, remote, bare=False):
'''
"""
Determine what branch HEAD is associated with. This is partly
taken from lib/ansible/utils/__init__.py. It finds the correct
path to .git/HEAD and reads from that file the branch that HEAD is
associated with. In the case of a detached HEAD, this will look
up the branch in .git/refs/remotes/<remote>/HEAD.
'''
"""
try:
repo_path = get_repo_path(dest, bare)
except (IOError, ValueError) as err:
@ -845,7 +845,7 @@ def get_head_branch(git_path, module, dest, remote, bare=False):
def get_remote_url(git_path, module, dest, remote):
'''Return URL of remote source for repo.'''
"""Return URL of remote source for repo."""
command = [git_path, 'ls-remote', '--get-url', remote]
(rc, out, err) = module.run_command(command, cwd=dest)
if rc != 0:
@ -856,7 +856,7 @@ def get_remote_url(git_path, module, dest, remote):
def set_remote_url(git_path, module, repo, dest, remote):
''' updates repo from remote sources '''
""" updates repo from remote sources """
# Return if remote URL isn't changing.
remote_url = get_remote_url(git_path, module, dest, remote)
if remote_url == repo or unfrackgitpath(remote_url) == unfrackgitpath(repo):
@ -874,7 +874,7 @@ def set_remote_url(git_path, module, repo, dest, remote):
def fetch(git_path, module, repo, dest, version, remote, depth, bare, refspec, git_version_used, force=False):
''' updates repo from remote sources '''
""" updates repo from remote sources """
set_remote_url(git_path, module, repo, dest, remote)
commands = []
@ -981,7 +981,7 @@ def submodules_fetch(git_path, module, remote, track_submodules, dest):
def submodule_update(git_path, module, dest, track_submodules, force=False):
''' init and update any submodules '''
""" init and update any submodules """
# get the valid submodule params
params = get_submodule_update_params(module, git_path, dest)

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: group
version_added: "0.0.2"
@ -91,9 +91,9 @@ seealso:
- module: ansible.windows.win_group
author:
- Stephen Fromm (@sfromm)
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: Ensure group "somegroup" exists
ansible.builtin.group:
name: somegroup
@ -104,9 +104,9 @@ EXAMPLES = '''
name: docker
state: present
gid: 1750
'''
"""
RETURN = r'''
RETURN = r"""
gid:
description: Group ID of the group.
returned: When O(state) is C(present)
@ -127,7 +127,7 @@ system:
returned: When O(state) is C(present)
type: bool
sample: False
'''
"""
import grp
import os

@ -7,7 +7,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: group_by
short_description: Create Ansible groups based on facts
@ -65,9 +65,9 @@ seealso:
- module: ansible.builtin.add_host
author:
- Jeroen Hoekx (@jhoekx)
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Create groups based on the machine architecture
ansible.builtin.group_by:
key: machine_{{ ansible_machine }}
@ -85,4 +85,4 @@ EXAMPLES = r'''
- name: Add all active hosts to a static group
ansible.builtin.group_by:
key: done
'''
"""

@ -7,7 +7,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: hostname
author:
@ -52,9 +52,9 @@ attributes:
support: full
platform:
platforms: posix
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: Set a hostname
ansible.builtin.hostname:
name: web01
@ -63,7 +63,7 @@ EXAMPLES = '''
ansible.builtin.hostname:
name: web01
use: systemd
'''
"""
import os
import platform

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
author: Ansible Core Team (@ansible)
module: import_playbook
@ -42,9 +42,9 @@ seealso:
- module: ansible.builtin.include_tasks
- ref: playbooks_reuse
description: More information related to including and importing playbooks, roles and tasks.
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- hosts: localhost
tasks:
- ansible.builtin.debug:
@ -69,8 +69,8 @@ EXAMPLES = r'''
- name: This fails because I'm inside a play already
ansible.builtin.import_playbook: stuff.yaml
'''
"""
RETURN = r'''
RETURN = r"""
# This module does not return anything except plays to execute.
'''
"""

@ -5,7 +5,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
author: Ansible Core Team (@ansible)
module: import_role
@ -87,9 +87,9 @@ seealso:
- module: ansible.builtin.include_tasks
- ref: playbooks_reuse
description: More information related to including and importing playbooks, roles and tasks.
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- hosts: all
tasks:
- ansible.builtin.import_role:
@ -110,8 +110,8 @@ EXAMPLES = r'''
ansible.builtin.import_role:
name: myrole
when: not idontwanttorun
'''
"""
RETURN = r'''
RETURN = r"""
# This module does not return anything except tasks to execute.
'''
"""

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
author: Ansible Core Team (@ansible)
module: import_tasks
@ -46,9 +46,9 @@ seealso:
- module: ansible.builtin.include_tasks
- ref: playbooks_reuse
description: More information related to including and importing playbooks, roles and tasks.
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- hosts: all
tasks:
- ansible.builtin.debug:
@ -69,8 +69,8 @@ EXAMPLES = r'''
- name: Apply conditional to all imported tasks
ansible.builtin.import_tasks: stuff.yaml
when: hostvar is defined
'''
"""
RETURN = r'''
RETURN = r"""
# This module does not return anything except tasks to execute.
'''
"""

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
author: Ansible Core Team (@ansible)
module: include_role
@ -92,9 +92,9 @@ seealso:
- module: ansible.builtin.include_tasks
- ref: playbooks_reuse
description: More information related to including and importing playbooks, roles and tasks.
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- ansible.builtin.include_role:
name: myrole
@ -131,8 +131,8 @@ EXAMPLES = r'''
- install
tags:
- always
'''
"""
RETURN = r'''
RETURN = r"""
# This module does not return anything except tasks to execute.
'''
"""

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
author: Ansible Core Team (@ansible)
module: include_tasks
@ -50,9 +50,9 @@ seealso:
- module: ansible.builtin.include_role
- ref: playbooks_reuse
description: More information related to including and importing playbooks, roles and tasks.
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- hosts: all
tasks:
- ansible.builtin.debug:
@ -91,8 +91,8 @@ EXAMPLES = r'''
- install
tags:
- always
'''
"""
RETURN = r'''
RETURN = r"""
# This module does not return anything except tasks to execute.
'''
"""

@ -5,7 +5,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
author: Allen Sanabria (@linuxdynasty)
module: include_vars
@ -112,9 +112,9 @@ seealso:
- module: ansible.builtin.set_fact
- ref: playbooks_delegation
description: More information related to task delegation.
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Include vars of stuff.yaml into the 'stuff' variable (2.2).
ansible.builtin.include_vars:
file: stuff.yaml
@ -179,9 +179,9 @@ EXAMPLES = r'''
- 'yaml'
- 'yml'
- 'json'
'''
"""
RETURN = r'''
RETURN = r"""
ansible_facts:
description: Variables that were included and their values
returned: success
@ -193,4 +193,4 @@ ansible_included_var_files:
type: list
sample: [ /path/to/file.json, /path/to/file.yaml ]
version_added: '2.4'
'''
"""

@ -7,7 +7,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: iptables
short_description: Modify iptables rules
@ -394,9 +394,9 @@ options:
type: bool
default: false
version_added: "2.15"
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Block specific IP
ansible.builtin.iptables:
chain: INPUT
@ -543,7 +543,7 @@ EXAMPLES = r'''
- "443"
- "8081:8083"
jump: ACCEPT
'''
"""
import re

@ -5,7 +5,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: known_hosts
short_description: Add or remove a host from the C(known_hosts) file
@ -65,9 +65,9 @@ extends_documentation_fragment:
- action_common_attributes
author:
- Matthew Vernon (@mcv21)
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Tell the host about our servers it might want to ssh to
ansible.builtin.known_hosts:
path: /etc/ssh/ssh_known_hosts
@ -87,7 +87,7 @@ EXAMPLES = r'''
key: '[host1.example.com]:2222 ssh-rsa ASDeararAIUHI324324' # some key gibberish
path: /etc/ssh/ssh_known_hosts
state: present
'''
"""
# Makes sure public host keys are present or absent in the given known_hosts
# file.
@ -195,13 +195,13 @@ def enforce_state(module, params):
def sanity_check(module, host, key, sshkeygen):
'''Check supplied key is sensible
"""Check supplied key is sensible
host and key are parameters provided by the user; If the host
provided is inconsistent with the key supplied, then this function
quits, providing an error to the user.
sshkeygen is the path to ssh-keygen, found earlier with get_bin_path
'''
"""
# If no key supplied, we're doing a removal, and have nothing to check here.
if not key:
return
@ -232,7 +232,7 @@ def sanity_check(module, host, key, sshkeygen):
def search_for_host_key(module, host, key, path, sshkeygen):
'''search_for_host_key(module,host,key,path,sshkeygen) -> (found,replace_or_add,found_line)
"""search_for_host_key(module,host,key,path,sshkeygen) -> (found,replace_or_add,found_line)
Looks up host and keytype in the known_hosts file path; if it's there, looks to see
if one of those entries matches key. Returns:
@ -241,7 +241,7 @@ def search_for_host_key(module, host, key, path, sshkeygen):
found_line (int or None): the line where a key of the same type was found
if found=False, then replace is always False.
sshkeygen is the path to ssh-keygen, found earlier with get_bin_path
'''
"""
if os.path.exists(path) is False:
return False, False, None
@ -304,14 +304,14 @@ def hash_host_key(host, key):
def normalize_known_hosts_key(key):
'''
"""
Transform a key, either taken from a known_host file or provided by the
user, into a normalized form.
The host part (which might include multiple hostnames or be hashed) gets
replaced by the provided host. Also, any spurious information gets removed
from the end (like the username@host tag usually present in hostkeys, but
absent in known_hosts files)
'''
"""
key = key.strip() # trim trailing newline
k = key.split()
d = dict()

@ -8,7 +8,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: lineinfile
short_description: Manage lines in text files
@ -152,9 +152,9 @@ author:
- Daniel Hokka Zakrissoni (@dhozac)
- Ahti Kitsik (@ahtik)
- Jose Angel Munoz (@imjoseangel)
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
# NOTE: Before 2.3, option 'dest', 'destfile' or 'name' was used instead of 'path'
- name: Ensure SELinux is set to enforcing mode
ansible.builtin.lineinfile:
@ -237,9 +237,9 @@ EXAMPLES = r'''
regexp: ^(host=).*
line: \g<1>{{ hostname }}
backrefs: yes
'''
"""
RETURN = r'''#'''
RETURN = r"""#"""
import os
import re

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
module: meta
short_description: Execute Ansible 'actions'
version_added: '1.2'
@ -78,9 +78,9 @@ seealso:
- module: ansible.builtin.fail
author:
- Ansible Core Team
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
# Example showing flushing handlers on demand, not at end of play
- ansible.builtin.template:
src: new.j2
@ -126,4 +126,4 @@ EXAMPLES = r'''
when:
- ansible_distribution == 'CentOS'
- ansible_distribution_major_version == '6'
'''
"""

@ -7,7 +7,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: package
version_added: 2.0
@ -66,8 +66,8 @@ attributes:
notes:
- While M(ansible.builtin.package) abstracts package managers to ease dealing with multiple distributions, package name often differs for the same software.
'''
EXAMPLES = '''
"""
EXAMPLES = """
- name: Install ntpdate
ansible.builtin.package:
name: ntpdate
@ -85,4 +85,4 @@ EXAMPLES = '''
- httpd
- mariadb-server
state: latest
'''
"""

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
module: package_facts
short_description: Package information as facts
description:
@ -67,9 +67,9 @@ attributes:
support: full
platform:
platforms: posix
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: Gather the package facts
ansible.builtin.package_facts:
manager: auto
@ -83,9 +83,9 @@ EXAMPLES = '''
msg: "{{ ansible_facts.packages['foobar'] | length }} versions of foobar are installed!"
when: "'foobar' in ansible_facts.packages"
'''
"""
RETURN = '''
RETURN = """
ansible_facts:
description: Facts to add to ansible_facts.
returned: always
@ -248,7 +248,7 @@ ansible_facts:
],
}
}
'''
"""
import re

@ -4,7 +4,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: pause
short_description: Pause playbook execution
@ -65,9 +65,9 @@ attributes:
notes:
- Starting in 2.2, if you specify 0 or negative for minutes or seconds, it will wait for 1 second, previously it would wait indefinitely.
- User input is not captured or echoed, regardless of echo setting, when minutes or seconds is specified.
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: Pause for 5 minutes to build app cache
ansible.builtin.pause:
minutes: 5
@ -83,9 +83,9 @@ EXAMPLES = '''
ansible.builtin.pause:
prompt: "Enter a secret"
echo: no
'''
"""
RETURN = '''
RETURN = """
user_input:
description: User input from interactive console
returned: if no waiting time set
@ -116,4 +116,4 @@ echo:
returned: always
type: bool
sample: true
'''
"""

@ -7,7 +7,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: ping
version_added: historical
@ -41,9 +41,9 @@ seealso:
author:
- Ansible Core Team
- Michael DeHaan
'''
"""
EXAMPLES = '''
EXAMPLES = """
# Test we can logon to 'webservers' and execute python with json lib.
# ansible webservers -m ansible.builtin.ping
@ -53,15 +53,15 @@ EXAMPLES = '''
- name: Induce an exception to see what happens
ansible.builtin.ping:
data: crash
'''
"""
RETURN = '''
RETURN = """
ping:
description: Value provided with the O(data) parameter.
returned: success
type: str
sample: pong
'''
"""
from ansible.module_utils.basic import AnsibleModule

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: pip
short_description: Manages Python library dependencies
@ -145,9 +145,9 @@ requirements:
- setuptools or packaging
author:
- Matt Wright (@mattupstate)
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: Install bottle python package
ansible.builtin.pip:
name: bottle
@ -262,9 +262,9 @@ EXAMPLES = '''
vars:
venv_dir: /tmp/pick-a-better-venv-path
venv_python: "{{ venv_dir }}/bin/python"
'''
"""
RETURN = '''
RETURN = """
cmd:
description: pip command used by the module
returned: success
@ -290,7 +290,7 @@ virtualenv:
returned: success, if a virtualenv path was provided
type: str
sample: "/tmp/virtualenv"
'''
"""
import argparse
import os
@ -417,7 +417,7 @@ def _get_cmd_options(module, cmd):
def _get_packages(module, pip, chdir):
'''Return results of pip command to get packages.'''
"""Return results of pip command to get packages."""
# Try 'pip list' command first.
command = pip + ['list', '--format=freeze']
locale = get_best_parsable_locale(module)
@ -435,7 +435,7 @@ def _get_packages(module, pip, chdir):
def _is_present(module, req, installed_pkgs, pkg_command):
'''Return whether or not package is installed.'''
"""Return whether or not package is installed."""
for pkg in installed_pkgs:
if '==' in pkg:
pkg_name, pkg_version = pkg.split('==')

@ -6,7 +6,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
---
module: raw
short_description: Executes a low-down and dirty command
@ -70,9 +70,9 @@ seealso:
author:
- Ansible Core Team
- Michael DeHaan
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Bootstrap a host without python2 installed
ansible.builtin.raw: dnf install -y python2 python2-dnf libselinux-python
@ -86,4 +86,4 @@ EXAMPLES = r'''
- name: List user accounts on a Windows system
ansible.builtin.raw: Get-WmiObject -Class Win32_UserAccount
'''
"""

@ -5,7 +5,7 @@
from __future__ import annotations
DOCUMENTATION = r'''
DOCUMENTATION = r"""
module: reboot
short_description: Reboot a machine
notes:
@ -100,9 +100,9 @@ seealso:
author:
- Matt Davis (@nitzmahone)
- Sam Doran (@samdoran)
'''
"""
EXAMPLES = r'''
EXAMPLES = r"""
- name: Unconditionally reboot the machine with all defaults
ansible.builtin.reboot:
@ -124,9 +124,9 @@ EXAMPLES = r'''
ansible.builtin.reboot:
msg: "Rebooting machine in 5 seconds"
'''
"""
RETURN = r'''
RETURN = r"""
rebooted:
description: true if the machine was rebooted
returned: always
@ -137,4 +137,4 @@ elapsed:
returned: always
type: int
sample: 23
'''
"""

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save