ansible-test - Use more native type hints. (#78435)

* ansible-test - Use more native type hints.

Simple search and replace to switch from comments to native type hints for return types of functions with no arguments.
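For example, from the diff below, a no-argument method annotated with a type comment:

def method(self): # type: () -> str

becomes:

def method(self) -> str: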

* ansible-test - Use more native type hints.

Conversion of simple single-line function annotation type comments to native type hints.
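For example, from the diff below, a function whose type comment lists arguments without default values:

def generate_egg_info(path): # type: (str) -> None

becomes:

def generate_egg_info(path: str) -> None: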

* ansible-test - Use more native type hints.

Conversion of single-line function annotation type comments with default values to native type hints.
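For example, from the diff below, each default value is folded into its native annotation:

def ansible_environment(args, color=True, ansible_config=None): # type: (CommonConfig, bool, t.Optional[str]) -> t.Dict[str, str]

becomes:

def ansible_environment(args: CommonConfig, color: bool = True, ansible_config: t.Optional[str] = None) -> t.Dict[str, str]: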

* ansible-test - Use more native type hints.

Manual conversion of type annotation comments for functions which have pylint directives.
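These were converted by hand so the pylint directive stays as a trailing comment on the new signature. For example, from the diff below:

def no_completion_match(self, value): # type: (str) -> CompletionUnavailable # pylint: disable=unused-argument

becomes:

def no_completion_match(self, value: str) -> CompletionUnavailable: # pylint: disable=unused-argument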
Committed by Matt Clay 3 years ago via GitHub (commit 3eb0485dd9, parent f2abfc4b3d)

@ -48,7 +48,7 @@ from .provisioning import (
)
def main(cli_args=None): # type: (t.Optional[t.List[str]]) -> None
def main(cli_args: t.Optional[t.List[str]] = None) -> None:
"""Main program function."""
try:
os.chdir(data_context().content.root)

@ -52,7 +52,7 @@ from .host_configs import (
)
def parse_inventory(args, inventory_path): # type: (EnvironmentConfig, str) -> t.Dict[str, t.Any]
def parse_inventory(args: EnvironmentConfig, inventory_path: str) -> t.Dict[str, t.Any]:
"""Return a dict parsed from the given inventory file."""
cmd = ['ansible-inventory', '-i', inventory_path, '--list']
env = ansible_environment(args)
@ -69,7 +69,7 @@ def get_hosts(inventory, group_name): # type: (t.Dict[str, t.Any], str) -> t.Di
return hosts
def ansible_environment(args, color=True, ansible_config=None): # type: (CommonConfig, bool, t.Optional[str]) -> t.Dict[str, str]
def ansible_environment(args: CommonConfig, color: bool = True, ansible_config: t.Optional[str] = None) -> t.Dict[str, str]:
"""Return a dictionary of environment variables to use when running Ansible commands."""
env = common_environment()
path = env['PATH']
@ -138,7 +138,7 @@ def ansible_environment(args, color=True, ansible_config=None): # type: (Common
return env
def configure_plugin_paths(args): # type: (CommonConfig) -> t.Dict[str, str]
def configure_plugin_paths(args: CommonConfig) -> t.Dict[str, str]:
"""Return environment variables with paths to plugins relevant for the current command."""
if not isinstance(args, IntegrationConfig):
return {}
@ -192,7 +192,7 @@ def configure_plugin_paths(args): # type: (CommonConfig) -> t.Dict[str, str]
return env
def get_ansible_python_path(args): # type: (CommonConfig) -> str
def get_ansible_python_path(args: CommonConfig) -> str:
"""
Return a directory usable for PYTHONPATH, containing only the ansible package.
If a temporary directory is required, it will be cached for the lifetime of the process and cleaned up at exit.
@ -221,7 +221,7 @@ def get_ansible_python_path(args): # type: (CommonConfig) -> str
return python_path
def generate_egg_info(path): # type: (str) -> None
def generate_egg_info(path: str) -> None:
"""Generate an egg-info in the specified base directory."""
# minimal PKG-INFO stub following the format defined in PEP 241
# required for older setuptools versions to avoid a traceback when importing pkg_resources from packages like cryptography
@ -247,18 +247,18 @@ License: GPLv3+
class CollectionDetail:
"""Collection detail."""
def __init__(self): # type: () -> None
def __init__(self) -> None:
self.version = None # type: t.Optional[str]
class CollectionDetailError(ApplicationError):
"""An error occurred retrieving collection detail."""
def __init__(self, reason): # type: (str) -> None
def __init__(self, reason: str) -> None:
super().__init__('Error collecting collection detail: %s' % reason)
self.reason = reason
def get_collection_detail(python): # type: (PythonConfig) -> CollectionDetail
def get_collection_detail(python: PythonConfig) -> CollectionDetail:
"""Return collection detail."""
collection = data_context().content.collection
directory = os.path.join(collection.root, collection.directory)

@ -19,22 +19,22 @@ class Become(metaclass=abc.ABCMeta):
@property
@abc.abstractmethod
def method(self): # type: () -> str
def method(self) -> str:
"""The name of the Ansible become plugin that is equivalent to this."""
@abc.abstractmethod
def prepare_command(self, command): # type: (t.List[str]) -> t.List[str]
def prepare_command(self, command: t.List[str]) -> t.List[str]:
"""Return the given command, if any, with privilege escalation."""
class Doas(Become):
"""Become using 'doas'."""
@property
def method(self): # type: () -> str
def method(self) -> str:
"""The name of the Ansible become plugin that is equivalent to this."""
raise NotImplementedError('Ansible has no built-in doas become plugin.')
def prepare_command(self, command): # type: (t.List[str]) -> t.List[str]
def prepare_command(self, command: t.List[str]) -> t.List[str]:
"""Return the given command, if any, with privilege escalation."""
become = ['doas', '-n']
@ -54,7 +54,7 @@ class DoasSudo(Doas):
return 'doas_sudo'
@property
def method(self): # type: () -> str
def method(self) -> str:
"""The name of the Ansible become plugin that is equivalent to this."""
return 'sudo'
@ -62,11 +62,11 @@ class DoasSudo(Doas):
class Su(Become):
"""Become using 'su'."""
@property
def method(self): # type: () -> str
def method(self) -> str:
"""The name of the Ansible become plugin that is equivalent to this."""
return 'su'
def prepare_command(self, command): # type: (t.List[str]) -> t.List[str]
def prepare_command(self, command: t.List[str]) -> t.List[str]:
"""Return the given command, if any, with privilege escalation."""
become = ['su', '-l', 'root']
@ -84,7 +84,7 @@ class SuSudo(Su):
return 'su_sudo'
@property
def method(self): # type: () -> str
def method(self) -> str:
"""The name of the Ansible become plugin that is equivalent to this."""
return 'sudo'
@ -92,11 +92,11 @@ class SuSudo(Su):
class Sudo(Become):
"""Become using 'sudo'."""
@property
def method(self): # type: () -> str
def method(self) -> str:
"""The name of the Ansible become plugin that is equivalent to this."""
return 'sudo'
def prepare_command(self, command): # type: (t.List[str]) -> t.List[str]
def prepare_command(self, command: t.List[str]) -> t.List[str]:
"""Return the given command, if any, with privilege escalation."""
become = ['sudo', '-in']

@ -31,11 +31,11 @@ class Bootstrap:
ssh_key: SshKey
@property
def bootstrap_type(self): # type: () -> str
def bootstrap_type(self) -> str:
"""The bootstrap type to pass to the bootstrapping script."""
return self.__class__.__name__.replace('Bootstrap', '').lower()
def get_variables(self): # type: () -> t.Dict[str, t.Union[str, t.List[str]]]
def get_variables(self) -> t.Dict[str, t.Union[str, t.List[str]]]:
"""The variables to template in the bootstrapping script."""
return dict(
bootstrap_type=self.bootstrap_type,
@ -46,7 +46,7 @@ class Bootstrap:
ssh_public_key=self.ssh_key.pub_contents,
)
def get_script(self): # type: () -> str
def get_script(self) -> str:
"""Return a shell script to bootstrap the specified host."""
path = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'bootstrap.sh')
@ -65,7 +65,7 @@ class Bootstrap:
@dataclasses.dataclass
class BootstrapDocker(Bootstrap):
"""Bootstrap docker instances."""
def get_variables(self): # type: () -> t.Dict[str, t.Union[str, t.List[str]]]
def get_variables(self) -> t.Dict[str, t.Union[str, t.List[str]]]:
"""The variables to template in the bootstrapping script."""
variables = super().get_variables()
@ -83,7 +83,7 @@ class BootstrapRemote(Bootstrap):
platform: str
platform_version: str
def get_variables(self): # type: () -> t.Dict[str, t.Union[str, t.List[str]]]
def get_variables(self) -> t.Dict[str, t.Union[str, t.List[str]]]:
"""The variables to template in the bootstrapping script."""
variables = super().get_variables()

@ -12,7 +12,7 @@ TValue = t.TypeVar('TValue')
class CommonCache:
"""Common cache."""
def __init__(self, args): # type: (CommonConfig) -> None
def __init__(self, args: CommonConfig) -> None:
self.args = args
def get(self, key, factory): # type: (str, t.Callable[[], TValue]) -> TValue

@ -43,46 +43,46 @@ class CIProvider(metaclass=abc.ABCMeta):
@staticmethod
@abc.abstractmethod
def is_supported(): # type: () -> bool
def is_supported() -> bool:
"""Return True if this provider is supported in the current running environment."""
@property
@abc.abstractmethod
def code(self): # type: () -> str
def code(self) -> str:
"""Return a unique code representing this provider."""
@property
@abc.abstractmethod
def name(self): # type: () -> str
def name(self) -> str:
"""Return descriptive name for this provider."""
@abc.abstractmethod
def generate_resource_prefix(self): # type: () -> str
def generate_resource_prefix(self) -> str:
"""Return a resource prefix specific to this CI provider."""
@abc.abstractmethod
def get_base_branch(self): # type: () -> str
def get_base_branch(self) -> str:
"""Return the base branch or an empty string."""
@abc.abstractmethod
def detect_changes(self, args): # type: (TestConfig) -> t.Optional[t.List[str]]
def detect_changes(self, args: TestConfig) -> t.Optional[t.List[str]]:
"""Initialize change detection."""
@abc.abstractmethod
def supports_core_ci_auth(self): # type: () -> bool
def supports_core_ci_auth(self) -> bool:
"""Return True if Ansible Core CI is supported."""
@abc.abstractmethod
def prepare_core_ci_auth(self): # type: () -> t.Dict[str, t.Any]
def prepare_core_ci_auth(self) -> t.Dict[str, t.Any]:
"""Return authentication details for Ansible Core CI."""
@abc.abstractmethod
def get_git_details(self, args): # type: (CommonConfig) -> t.Optional[t.Dict[str, t.Any]]
def get_git_details(self, args: CommonConfig) -> t.Optional[t.Dict[str, t.Any]]:
"""Return details about git in the current environment."""
@cache
def get_ci_provider(): # type: () -> CIProvider
def get_ci_provider() -> CIProvider:
"""Return a CI provider instance for the current environment."""
provider = None
@ -111,7 +111,7 @@ class AuthHelper(metaclass=abc.ABCMeta):
request.update(signature=signature)
def initialize_private_key(self): # type: () -> str
def initialize_private_key(self) -> str:
"""
Initialize and publish a new key pair (if needed) and return the private key.
The private key is cached across ansible-test invocations, so it is only generated and published once per CI job.
@ -127,21 +127,21 @@ class AuthHelper(metaclass=abc.ABCMeta):
return private_key_pem
@abc.abstractmethod
def sign_bytes(self, payload_bytes): # type: (bytes) -> bytes
def sign_bytes(self, payload_bytes: bytes) -> bytes:
"""Sign the given payload and return the signature, initializing a new key pair if required."""
@abc.abstractmethod
def publish_public_key(self, public_key_pem): # type: (str) -> None
def publish_public_key(self, public_key_pem: str) -> None:
"""Publish the given public key."""
@abc.abstractmethod
def generate_private_key(self): # type: () -> str
def generate_private_key(self) -> str:
"""Generate a new key pair, publishing the public key and returning the private key."""
class CryptographyAuthHelper(AuthHelper, metaclass=abc.ABCMeta):
"""Cryptography based public key based authentication helper for Ansible Core CI."""
def sign_bytes(self, payload_bytes): # type: (bytes) -> bytes
def sign_bytes(self, payload_bytes: bytes) -> bytes:
"""Sign the given payload and return the signature, initializing a new key pair if required."""
# import cryptography here to avoid overhead and failures in environments which do not use/provide it
from cryptography.hazmat.backends import default_backend
@ -156,7 +156,7 @@ class CryptographyAuthHelper(AuthHelper, metaclass=abc.ABCMeta):
return signature_raw_bytes
def generate_private_key(self): # type: () -> str
def generate_private_key(self) -> str:
"""Generate a new key pair, publishing the public key and returning the private key."""
# import cryptography here to avoid overhead and failures in environments which do not use/provide it
from cryptography.hazmat.backends import default_backend
@ -184,7 +184,7 @@ class CryptographyAuthHelper(AuthHelper, metaclass=abc.ABCMeta):
class OpenSSLAuthHelper(AuthHelper, metaclass=abc.ABCMeta):
"""OpenSSL based public key based authentication helper for Ansible Core CI."""
def sign_bytes(self, payload_bytes): # type: (bytes) -> bytes
def sign_bytes(self, payload_bytes: bytes) -> bytes:
"""Sign the given payload and return the signature, initializing a new key pair if required."""
private_key_pem = self.initialize_private_key()
@ -202,7 +202,7 @@ class OpenSSLAuthHelper(AuthHelper, metaclass=abc.ABCMeta):
return signature_raw_bytes
def generate_private_key(self): # type: () -> str
def generate_private_key(self) -> str:
"""Generate a new key pair, publishing the public key and returning the private key."""
private_key_pem = raw_command(['openssl', 'ecparam', '-genkey', '-name', 'secp384r1', '-noout'], capture=True)[0]
public_key_pem = raw_command(['openssl', 'ec', '-pubout'], data=private_key_pem, capture=True)[0]

@ -44,21 +44,21 @@ class AzurePipelines(CIProvider):
self.auth = AzurePipelinesAuthHelper()
@staticmethod
def is_supported(): # type: () -> bool
def is_supported() -> bool:
"""Return True if this provider is supported in the current running environment."""
return os.environ.get('SYSTEM_COLLECTIONURI', '').startswith('https://dev.azure.com/')
@property
def code(self): # type: () -> str
def code(self) -> str:
"""Return a unique code representing this provider."""
return CODE
@property
def name(self): # type: () -> str
def name(self) -> str:
"""Return descriptive name for this provider."""
return 'Azure Pipelines'
def generate_resource_prefix(self): # type: () -> str
def generate_resource_prefix(self) -> str:
"""Return a resource prefix specific to this CI provider."""
try:
prefix = 'azp-%s-%s-%s' % (
@ -71,7 +71,7 @@ class AzurePipelines(CIProvider):
return prefix
def get_base_branch(self): # type: () -> str
def get_base_branch(self) -> str:
"""Return the base branch or an empty string."""
base_branch = os.environ.get('SYSTEM_PULLREQUEST_TARGETBRANCH') or os.environ.get('BUILD_SOURCEBRANCHNAME')
@ -80,7 +80,7 @@ class AzurePipelines(CIProvider):
return base_branch or ''
def detect_changes(self, args): # type: (TestConfig) -> t.Optional[t.List[str]]
def detect_changes(self, args: TestConfig) -> t.Optional[t.List[str]]:
"""Initialize change detection."""
result = AzurePipelinesChanges(args)
@ -102,11 +102,11 @@ class AzurePipelines(CIProvider):
return result.paths
def supports_core_ci_auth(self): # type: () -> bool
def supports_core_ci_auth(self) -> bool:
"""Return True if Ansible Core CI is supported."""
return True
def prepare_core_ci_auth(self): # type: () -> t.Dict[str, t.Any]
def prepare_core_ci_auth(self) -> t.Dict[str, t.Any]:
"""Return authentication details for Ansible Core CI."""
try:
request = dict(
@ -126,7 +126,7 @@ class AzurePipelines(CIProvider):
return auth
def get_git_details(self, args): # type: (CommonConfig) -> t.Optional[t.Dict[str, t.Any]]
def get_git_details(self, args: CommonConfig) -> t.Optional[t.Dict[str, t.Any]]:
"""Return details about git in the current environment."""
changes = AzurePipelinesChanges(args)
@ -143,7 +143,7 @@ class AzurePipelinesAuthHelper(CryptographyAuthHelper):
Authentication helper for Azure Pipelines.
Based on cryptography since it is provided by the default Azure Pipelines environment.
"""
def publish_public_key(self, public_key_pem): # type: (str) -> None
def publish_public_key(self, public_key_pem: str) -> None:
"""Publish the given public key."""
try:
agent_temp_directory = os.environ['AGENT_TEMPDIRECTORY']
@ -162,7 +162,7 @@ class AzurePipelinesAuthHelper(CryptographyAuthHelper):
class AzurePipelinesChanges:
"""Change information for an Azure Pipelines build."""
def __init__(self, args): # type: (CommonConfig) -> None
def __init__(self, args: CommonConfig) -> None:
self.args = args
self.git = Git()
@ -213,7 +213,7 @@ class AzurePipelinesChanges:
self.paths = None # act as though change detection not enabled, do not filter targets
self.diff = []
def get_successful_merge_run_commits(self): # type: () -> t.Set[str]
def get_successful_merge_run_commits(self) -> t.Set[str]:
"""Return a set of recent successsful merge commits from Azure Pipelines."""
parameters = dict(
maxBuildsPerDefinition=100, # max 5000
@ -241,7 +241,7 @@ class AzurePipelinesChanges:
return commits
def get_last_successful_commit(self, commits): # type: (t.Set[str]) -> t.Optional[str]
def get_last_successful_commit(self, commits: t.Set[str]) -> t.Optional[str]:
"""Return the last successful commit from git history that is found in the given commit list, or None."""
commit_history = self.git.get_rev_list(max_count=100)
ordered_successful_commits = [commit for commit in commit_history if commit in commits]
@ -249,7 +249,7 @@ class AzurePipelinesChanges:
return last_successful_commit
def vso_add_attachment(file_type, file_name, path): # type: (str, str, str) -> None
def vso_add_attachment(file_type: str, file_name: str, path: str) -> None:
"""Upload and attach a file to the current timeline record."""
vso('task.addattachment', dict(type=file_type, name=file_name), path)

@ -39,21 +39,21 @@ class Local(CIProvider):
priority = 1000
@staticmethod
def is_supported(): # type: () -> bool
def is_supported() -> bool:
"""Return True if this provider is supported in the current running environment."""
return True
@property
def code(self): # type: () -> str
def code(self) -> str:
"""Return a unique code representing this provider."""
return CODE
@property
def name(self): # type: () -> str
def name(self) -> str:
"""Return descriptive name for this provider."""
return 'Local'
def generate_resource_prefix(self): # type: () -> str
def generate_resource_prefix(self) -> str:
"""Return a resource prefix specific to this CI provider."""
prefix = 'ansible-test-%d-%s' % (
random.randint(10000000, 99999999),
@ -62,11 +62,11 @@ class Local(CIProvider):
return prefix
def get_base_branch(self): # type: () -> str
def get_base_branch(self) -> str:
"""Return the base branch or an empty string."""
return ''
def detect_changes(self, args): # type: (TestConfig) -> t.Optional[t.List[str]]
def detect_changes(self, args: TestConfig) -> t.Optional[t.List[str]]:
"""Initialize change detection."""
result = LocalChanges(args)
@ -116,12 +116,12 @@ class Local(CIProvider):
return sorted(names)
def supports_core_ci_auth(self): # type: () -> bool
def supports_core_ci_auth(self) -> bool:
"""Return True if Ansible Core CI is supported."""
path = self._get_aci_key_path()
return os.path.exists(path)
def prepare_core_ci_auth(self): # type: () -> t.Dict[str, t.Any]
def prepare_core_ci_auth(self) -> t.Dict[str, t.Any]:
"""Return authentication details for Ansible Core CI."""
path = self._get_aci_key_path()
auth_key = read_text_file(path).strip()
@ -137,19 +137,19 @@ class Local(CIProvider):
return auth
def get_git_details(self, args): # type: (CommonConfig) -> t.Optional[t.Dict[str, t.Any]]
def get_git_details(self, args: CommonConfig) -> t.Optional[t.Dict[str, t.Any]]:
"""Return details about git in the current environment."""
return None # not yet implemented for local
@staticmethod
def _get_aci_key_path(): # type: () -> str
def _get_aci_key_path() -> str:
path = os.path.expanduser('~/.ansible-core-ci.key')
return path
class InvalidBranch(ApplicationError):
"""Exception for invalid branch specification."""
def __init__(self, branch, reason): # type: (str, str) -> None
def __init__(self, branch: str, reason: str) -> None:
message = 'Invalid branch: %s\n%s' % (branch, reason)
super().__init__(message)
@ -159,7 +159,7 @@ class InvalidBranch(ApplicationError):
class LocalChanges:
"""Change information for local work."""
def __init__(self, args): # type: (TestConfig) -> None
def __init__(self, args: TestConfig) -> None:
self.args = args
self.git = Git()
@ -198,7 +198,7 @@ class LocalChanges:
# diff of all tracked files from fork point to working copy
self.diff = self.git.get_diff([self.fork_point])
def is_official_branch(self, name): # type: (str) -> bool
def is_official_branch(self, name: str) -> bool:
"""Return True if the given branch name an official branch for development or releases."""
if self.args.base_branch:
return name == self.args.base_branch

@ -54,7 +54,7 @@ from ..data import (
FOCUSED_TARGET = '__focused__'
def categorize_changes(args, paths, verbose_command=None): # type: (TestConfig, t.List[str], t.Optional[str]) -> ChangeDescription
def categorize_changes(args: TestConfig, paths: t.List[str], verbose_command: t.Optional[str] = None) -> ChangeDescription:
"""Categorize the given list of changed paths and return a description of the changes."""
mapper = PathMapper(args)
@ -176,7 +176,7 @@ def categorize_changes(args, paths, verbose_command=None): # type: (TestConfig,
class PathMapper:
"""Map file paths to test commands and targets."""
def __init__(self, args): # type: (TestConfig) -> None
def __init__(self, args: TestConfig) -> None:
self.args = args
self.integration_all_target = get_integration_all_target(self.args)
@ -219,7 +219,7 @@ class PathMapper:
self.paths_to_dependent_targets[path].add(target)
def get_dependent_paths(self, path): # type: (str) -> t.List[str]
def get_dependent_paths(self, path: str) -> t.List[str]:
"""Return a list of paths which depend on the given path, recursively expanding dependent paths as well."""
unprocessed_paths = set(self.get_dependent_paths_non_recursive(path))
paths = set()
@ -238,7 +238,7 @@ class PathMapper:
return sorted(paths)
def get_dependent_paths_non_recursive(self, path): # type: (str) -> t.List[str]
def get_dependent_paths_non_recursive(self, path: str) -> t.List[str]:
"""Return a list of paths which depend on the given path, including dependent integration test target paths."""
paths = self.get_dependent_paths_internal(path)
paths += [target.path + '/' for target in self.paths_to_dependent_targets.get(path, set())]
@ -246,7 +246,7 @@ class PathMapper:
return paths
def get_dependent_paths_internal(self, path): # type: (str) -> t.List[str]
def get_dependent_paths_internal(self, path: str) -> t.List[str]:
"""Return a list of paths which depend on the given path."""
ext = os.path.splitext(os.path.split(path)[1])[1]
@ -265,7 +265,7 @@ class PathMapper:
return []
def get_python_module_utils_usage(self, path): # type: (str) -> t.List[str]
def get_python_module_utils_usage(self, path: str) -> t.List[str]:
"""Return a list of paths which depend on the given path which is a Python module_utils file."""
if not self.python_module_utils_imports:
display.info('Analyzing python module_utils imports...')
@ -278,7 +278,7 @@ class PathMapper:
return sorted(self.python_module_utils_imports[name])
def get_powershell_module_utils_usage(self, path): # type: (str) -> t.List[str]
def get_powershell_module_utils_usage(self, path: str) -> t.List[str]:
"""Return a list of paths which depend on the given path which is a PowerShell module_utils file."""
if not self.powershell_module_utils_imports:
display.info('Analyzing powershell module_utils imports...')
@ -291,7 +291,7 @@ class PathMapper:
return sorted(self.powershell_module_utils_imports[name])
def get_csharp_module_utils_usage(self, path): # type: (str) -> t.List[str]
def get_csharp_module_utils_usage(self, path: str) -> t.List[str]:
"""Return a list of paths which depend on the given path which is a C# module_utils file."""
if not self.csharp_module_utils_imports:
display.info('Analyzing C# module_utils imports...')
@ -304,7 +304,7 @@ class PathMapper:
return sorted(self.csharp_module_utils_imports[name])
def get_integration_target_usage(self, path): # type: (str) -> t.List[str]
def get_integration_target_usage(self, path: str) -> t.List[str]:
"""Return a list of paths which depend on the given path which is an integration target file."""
target_name = path.split('/')[3]
dependents = [os.path.join(data_context().content.integration_targets_path, target) + os.path.sep
@ -312,7 +312,7 @@ class PathMapper:
return dependents
def classify(self, path): # type: (str) -> t.Optional[t.Dict[str, str]]
def classify(self, path: str) -> t.Optional[t.Dict[str, str]]:
"""Classify the given path and return an optional dictionary of the results."""
result = self._classify(path)
@ -326,7 +326,7 @@ class PathMapper:
return result
def _classify(self, path): # type: (str) -> t.Optional[t.Dict[str, str]]
def _classify(self, path: str) -> t.Optional[t.Dict[str, str]]:
"""Return the classification for the given path."""
if data_context().content.is_ansible:
return self._classify_ansible(path)
@ -336,7 +336,7 @@ class PathMapper:
return None
def _classify_common(self, path): # type: (str) -> t.Optional[t.Dict[str, str]]
def _classify_common(self, path: str) -> t.Optional[t.Dict[str, str]]:
"""Return the classification for the given path using rules common to all layouts."""
dirname = os.path.dirname(path)
filename = os.path.basename(path)
@ -621,7 +621,7 @@ class PathMapper:
return None
def _classify_collection(self, path): # type: (str) -> t.Optional[t.Dict[str, str]]
def _classify_collection(self, path: str) -> t.Optional[t.Dict[str, str]]:
"""Return the classification for the given path using rules specific to collections."""
result = self._classify_common(path)
@ -659,7 +659,7 @@ class PathMapper:
return None
def _classify_ansible(self, path): # type: (str) -> t.Optional[t.Dict[str, str]]
def _classify_ansible(self, path: str) -> t.Optional[t.Dict[str, str]]:
"""Return the classification for the given path using rules specific to Ansible."""
if path.startswith('test/units/compat/'):
return {
@ -850,7 +850,7 @@ class PathMapper:
return None # unknown, will result in fall-back to run all tests
def _simple_plugin_tests(self, plugin_type, plugin_name): # type: (str, str) -> t.Dict[str, t.Optional[str]]
def _simple_plugin_tests(self, plugin_type: str, plugin_name: str) -> t.Dict[str, t.Optional[str]]:
"""
Return tests for the given plugin type and plugin name.
This function is useful for plugin types which do not require special processing.
@ -876,7 +876,7 @@ class PathMapper:
)
def all_tests(args, force=False): # type: (TestConfig, bool) -> t.Dict[str, str]
def all_tests(args: TestConfig, force: bool = False) -> t.Dict[str, str]:
"""Return the targets for each test command when all tests should be run."""
if force:
integration_all_target = 'all'
@ -892,7 +892,7 @@ def all_tests(args, force=False): # type: (TestConfig, bool) -> t.Dict[str, str
}
def get_integration_all_target(args): # type: (TestConfig) -> str
def get_integration_all_target(args: TestConfig) -> str:
"""Return the target to use when all tests should be run."""
if isinstance(args, IntegrationConfig):
return args.changed_all_target

@ -8,7 +8,7 @@ from ..data import (
)
def resolve_csharp_ps_util(import_name, path): # type: (str, str) -> str
def resolve_csharp_ps_util(import_name: str, path: str) -> str:
"""Return the fully qualified name of the given import if possible, otherwise return the original import name."""
if data_context().content.is_ansible or not import_name.startswith('.'):
# We don't support relative paths for builtin utils, there's no point.

@ -26,7 +26,7 @@ from ..target import (
)
def get_csharp_module_utils_imports(powershell_targets, csharp_targets): # type: (t.List[TestTarget], t.List[TestTarget]) -> t.Dict[str, t.Set[str]]
def get_csharp_module_utils_imports(powershell_targets: t.List[TestTarget], csharp_targets: t.List[TestTarget]) -> t.Dict[str, t.Set[str]]:
"""Return a dictionary of module_utils names mapped to sets of powershell file paths."""
module_utils = enumerate_module_utils()
@ -51,7 +51,7 @@ def get_csharp_module_utils_imports(powershell_targets, csharp_targets): # type
return imports
def get_csharp_module_utils_name(path): # type: (str) -> str
def get_csharp_module_utils_name(path: str) -> str:
"""Return a namespace and name from the given module_utils path."""
base_path = data_context().content.module_utils_csharp_path
@ -65,14 +65,14 @@ def get_csharp_module_utils_name(path): # type: (str) -> str
return name
def enumerate_module_utils(): # type: () -> t.Set[str]
def enumerate_module_utils() -> t.Set[str]:
"""Return a set of available module_utils imports."""
return set(get_csharp_module_utils_name(p)
for p in data_context().content.walk_files(data_context().content.module_utils_csharp_path)
if os.path.splitext(p)[1] == '.cs')
def extract_csharp_module_utils_imports(path, module_utils, is_pure_csharp): # type: (str, t.Set[str], bool) -> t.Set[str]
def extract_csharp_module_utils_imports(path: str, module_utils: t.Set[str], is_pure_csharp: bool) -> t.Set[str]:
"""Return a set of module_utils imports found in the specified source file."""
imports = set()
if is_pure_csharp:

@ -26,7 +26,7 @@ from ..target import (
)
def get_powershell_module_utils_imports(powershell_targets): # type: (t.List[TestTarget]) -> t.Dict[str, t.Set[str]]
def get_powershell_module_utils_imports(powershell_targets: t.List[TestTarget]) -> t.Dict[str, t.Set[str]]:
"""Return a dictionary of module_utils names mapped to sets of powershell file paths."""
module_utils = enumerate_module_utils()
@ -48,7 +48,7 @@ def get_powershell_module_utils_imports(powershell_targets): # type: (t.List[Te
return imports
def get_powershell_module_utils_name(path): # type: (str) -> str
def get_powershell_module_utils_name(path: str) -> str:
"""Return a namespace and name from the given module_utils path."""
base_path = data_context().content.module_utils_powershell_path
@ -62,14 +62,14 @@ def get_powershell_module_utils_name(path): # type: (str) -> str
return name
def enumerate_module_utils(): # type: () -> t.Set[str]
def enumerate_module_utils() -> t.Set[str]:
"""Return a set of available module_utils imports."""
return set(get_powershell_module_utils_name(p)
for p in data_context().content.walk_files(data_context().content.module_utils_powershell_path)
if os.path.splitext(p)[1] == '.psm1')
def extract_powershell_module_utils_imports(path, module_utils): # type: (str, t.Set[str]) -> t.Set[str]
def extract_powershell_module_utils_imports(path: str, module_utils: t.Set[str]) -> t.Set[str]:
"""Return a set of module_utils imports found in the specified source file."""
imports = set()

@ -29,7 +29,7 @@ VIRTUAL_PACKAGES = {
}
def get_python_module_utils_imports(compile_targets): # type: (t.List[TestTarget]) -> t.Dict[str, t.Set[str]]
def get_python_module_utils_imports(compile_targets: t.List[TestTarget]) -> t.Dict[str, t.Set[str]]:
"""Return a dictionary of module_utils names mapped to sets of python file paths."""
module_utils = enumerate_module_utils()
@ -41,7 +41,7 @@ def get_python_module_utils_imports(compile_targets): # type: (t.List[TestTarge
for target in compile_targets:
imports_by_target_path[target.path] = extract_python_module_utils_imports(target.path, module_utils)
def recurse_import(import_name, depth=0, seen=None): # type: (str, int, t.Optional[t.Set[str]]) -> t.Set[str]
def recurse_import(import_name: str, depth: int = 0, seen: t.Optional[t.Set[str]] = None) -> t.Set[str]:
"""Recursively expand module_utils imports from module_utils files."""
display.info('module_utils import: %s%s' % (' ' * depth, import_name), verbosity=4)
@ -126,7 +126,7 @@ def get_python_module_utils_imports(compile_targets): # type: (t.List[TestTarge
return imports
def get_python_module_utils_name(path): # type: (str) -> str
def get_python_module_utils_name(path: str) -> str:
"""Return a namespace and name from the given module_utils path."""
base_path = data_context().content.module_utils_path
@ -163,7 +163,7 @@ def enumerate_module_utils():
return set(module_utils)
def extract_python_module_utils_imports(path, module_utils): # type: (str, t.Set[str]) -> t.Set[str]
def extract_python_module_utils_imports(path: str, module_utils: t.Set[str]) -> t.Set[str]:
"""Return a list of module_utils imports found in the specified source file."""
# Python code must be read as bytes to avoid a SyntaxError when the source uses comments to declare the file encoding.
# See: https://www.python.org/dev/peps/pep-0263
@ -183,7 +183,7 @@ def extract_python_module_utils_imports(path, module_utils): # type: (str, t.Se
return finder.imports
def get_import_path(name, package=False): # type: (str, bool) -> str
def get_import_path(name: str, package: bool = False) -> str:
"""Return a path from an import name."""
if package:
filename = os.path.join(name.replace('.', '/'), '__init__.py')
@ -202,7 +202,7 @@ def get_import_path(name, package=False): # type: (str, bool) -> str
return path
def path_to_module(path): # type: (str) -> str
def path_to_module(path: str) -> str:
"""Convert the given path to a module name."""
module = os.path.splitext(path)[0].replace(os.path.sep, '.')
@ -212,7 +212,7 @@ def path_to_module(path): # type: (str) -> str
return module
def relative_to_absolute(name, level, module, path, lineno): # type: (str, int, str, str, int) -> str
def relative_to_absolute(name: str, level: int, module: str, path: str, lineno: int) -> str:
"""Convert a relative import to an absolute import."""
if level <= 0:
absolute_name = name
@ -233,7 +233,7 @@ def relative_to_absolute(name, level, module, path, lineno): # type: (str, int,
class ModuleUtilFinder(ast.NodeVisitor):
"""AST visitor to find valid module_utils imports."""
def __init__(self, path, module_utils): # type: (str, t.Set[str]) -> None
def __init__(self, path: str, module_utils: t.Set[str]) -> None:
self.path = path
self.module_utils = module_utils
self.imports = set() # type: t.Set[str]
@ -277,7 +277,7 @@ class ModuleUtilFinder(ast.NodeVisitor):
self.module = path_to_module(os.path.join(data_context().content.collection.directory, self.path))
# pylint: disable=locally-disabled, invalid-name
def visit_Import(self, node): # type: (ast.Import) -> None
def visit_Import(self, node: ast.Import) -> None:
"""Visit an import node."""
self.generic_visit(node)
@ -286,7 +286,7 @@ class ModuleUtilFinder(ast.NodeVisitor):
self.add_imports([alias.name for alias in node.names], node.lineno)
# pylint: disable=locally-disabled, invalid-name
def visit_ImportFrom(self, node): # type: (ast.ImportFrom) -> None
def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
"""Visit an import from node."""
self.generic_visit(node)
@ -304,7 +304,7 @@ class ModuleUtilFinder(ast.NodeVisitor):
# from ansible_collections.{ns}.{col}.plugins.module_utils.MODULE[.MODULE] import MODULE[, MODULE]
self.add_imports(['%s.%s' % (module, alias.name) for alias in node.names], node.lineno)
def add_import(self, name, line_number): # type: (str, int) -> None
def add_import(self, name: str, line_number: int) -> None:
"""Record the specified import."""
import_name = name
@ -325,14 +325,14 @@ class ModuleUtilFinder(ast.NodeVisitor):
# This error should be detected by unit or integration tests.
display.warning('%s:%d Invalid module_utils import: %s' % (self.path, line_number, import_name))
def add_imports(self, names, line_no): # type: (t.List[str], int) -> None
def add_imports(self, names: t.List[str], line_no: int) -> None:
"""Add the given import names if they are module_utils imports."""
for name in names:
if self.is_module_util_name(name):
self.add_import(name, line_no)
@staticmethod
def is_module_util_name(name): # type: (str) -> bool
def is_module_util_name(name: str) -> bool:
"""Return True if the given name is a module_util name for the content under test. External module_utils are ignored."""
if data_context().content.is_ansible and name.startswith('ansible.module_utils.'):
return True

@ -28,7 +28,7 @@ from ..util import (
)
def parse_args(argv=None): # type: (t.Optional[t.List[str]]) -> argparse.Namespace
def parse_args(argv: t.Optional[t.List[str]] = None) -> argparse.Namespace:
"""Parse command line arguments."""
completer = CompositeActionCompletionFinder()

@ -22,69 +22,69 @@ from .parsers import (
class OriginControllerAction(CompositeAction):
"""Composite action parser for the controller when the only option is `origin`."""
def create_parser(self): # type: () -> NamespaceParser
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return OriginControllerParser()
class DelegatedControllerAction(CompositeAction):
"""Composite action parser for the controller when delegation is supported."""
def create_parser(self): # type: () -> NamespaceParser
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return DelegatedControllerParser()
class PosixTargetAction(CompositeAction):
"""Composite action parser for a POSIX target."""
def create_parser(self): # type: () -> NamespaceParser
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return PosixTargetParser()
class WindowsTargetAction(CompositeAction):
"""Composite action parser for a Windows target."""
def create_parser(self): # type: () -> NamespaceParser
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return WindowsTargetParser()
class NetworkTargetAction(CompositeAction):
"""Composite action parser for a network target."""
def create_parser(self): # type: () -> NamespaceParser
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return NetworkTargetParser()
class SanityPythonTargetAction(CompositeAction):
"""Composite action parser for a sanity target."""
def create_parser(self): # type: () -> NamespaceParser
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return SanityPythonTargetParser()
class UnitsPythonTargetAction(CompositeAction):
"""Composite action parser for a units target."""
def create_parser(self): # type: () -> NamespaceParser
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return UnitsPythonTargetParser()
class PosixSshTargetAction(CompositeAction):
"""Composite action parser for a POSIX SSH target."""
def create_parser(self): # type: () -> NamespaceParser
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return PosixSshTargetParser()
class WindowsSshTargetAction(CompositeAction):
"""Composite action parser for a Windows SSH target."""
def create_parser(self): # type: () -> NamespaceParser
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return WindowsSshTargetParser()
class NetworkSshTargetAction(CompositeAction):
"""Composite action parser for a network SSH target."""
def create_parser(self): # type: () -> NamespaceParser
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return NetworkSshTargetParser()

@ -104,7 +104,7 @@ class CompositeAction(argparse.Action, metaclass=abc.ABCMeta):
register_safe_action(type(self))
@abc.abstractmethod
def create_parser(self): # type: () -> NamespaceParser
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
def __call__(
@ -163,7 +163,7 @@ class CompositeActionCompletionFinder(RegisteredCompletionFinder):
return completions
def detect_file_listing(value, mode): # type: (str, ParserMode) -> bool
def detect_file_listing(value: str, mode: ParserMode) -> bool:
"""
Return True if Bash will show a file listing and redraw the prompt, otherwise return False.
@ -198,7 +198,7 @@ def detect_file_listing(value, mode): # type: (str, ParserMode) -> bool
return listing
def detect_false_file_completion(value, mode): # type: (str, ParserMode) -> bool
def detect_false_file_completion(value: str, mode: ParserMode) -> bool:
"""
Return True if Bash will provide an incorrect file completion, otherwise return False.

@ -62,18 +62,18 @@ class CompType(enum.Enum):
"""
@property
def list_mode(self): # type: () -> bool
def list_mode(self) -> bool:
"""True if completion is running in list mode, otherwise False."""
return self in (CompType.LIST, CompType.LIST_AMBIGUOUS, CompType.LIST_UNMODIFIED)
def register_safe_action(action_type): # type: (t.Type[argparse.Action]) -> None
def register_safe_action(action_type: t.Type[argparse.Action]) -> None:
"""Register the given action as a safe action for argcomplete to use during completion if it is not already registered."""
if argcomplete and action_type not in argcomplete.safe_actions:
argcomplete.safe_actions += (action_type,)
def get_comp_type(): # type: () -> t.Optional[CompType]
def get_comp_type() -> t.Optional[CompType]:
"""Parse the COMP_TYPE environment variable (if present) and return the associated CompType enum value."""
value = os.environ.get('COMP_TYPE')
comp_type = CompType(chr(int(value))) if value else None

@ -47,7 +47,7 @@ class CompletionSuccess(Completion):
matches: t.List[str] = dataclasses.field(default_factory=list)
@property
def preserve(self): # type: () -> bool
def preserve(self) -> bool:
"""
True if argcomplete should not mangle completion values, otherwise False.
Only used when more than one completion exists to avoid overwriting the word undergoing completion.
@ -55,7 +55,7 @@ class CompletionSuccess(Completion):
return len(self.matches) > 1 and self.list_mode
@property
def completions(self): # type: () -> t.List[str]
def completions(self) -> t.List[str]:
"""List of completion values to return to argcomplete."""
completions = self.matches
continuation = '' if self.list_mode else self.continuation
@ -98,11 +98,11 @@ class ParserState:
parts: t.List[str] = dataclasses.field(default_factory=list)
@property
def incomplete(self): # type: () -> bool
def incomplete(self) -> bool:
"""True if parsing is incomplete (unparsed input remains), otherwise False."""
return self.remainder is not None
def match(self, value, choices): # type: (str, t.List[str]) -> bool
def match(self, value: str, choices: t.List[str]) -> bool:
"""Return True if the given value matches the provided choices, taking into account parsing boundaries, otherwise return False."""
if self.current_boundary:
delimiters, delimiter = self.current_boundary.delimiters, self.current_boundary.match
@ -121,7 +121,7 @@ class ParserState:
return False
def read(self): # type: () -> str
def read(self) -> str:
"""Read and return the next input segment, taking into account parsing boundaries."""
delimiters = "".join(boundary.delimiters for boundary in self.boundaries)
@ -154,26 +154,26 @@ class ParserState:
return value
@property
def root_namespace(self): # type: () -> t.Any
def root_namespace(self) -> t.Any:
"""THe root namespace."""
return self.namespaces[0]
@property
def current_namespace(self): # type: () -> t.Any
def current_namespace(self) -> t.Any:
"""The current namespace."""
return self.namespaces[-1]
@property
def current_boundary(self): # type: () -> t.Optional[ParserBoundary]
def current_boundary(self) -> t.Optional[ParserBoundary]:
"""The current parser boundary, if any, otherwise None."""
return self.boundaries[-1] if self.boundaries else None
def set_namespace(self, namespace): # type: (t.Any) -> None
def set_namespace(self, namespace: t.Any) -> None:
"""Set the current namespace."""
self.namespaces.append(namespace)
@contextlib.contextmanager
def delimit(self, delimiters, required=True): # type: (str, bool) -> t.Iterator[ParserBoundary]
def delimit(self, delimiters: str, required: bool = True) -> t.Iterator[ParserBoundary]:
"""Context manager for delimiting parsing of input."""
boundary = ParserBoundary(delimiters=delimiters, required=required)
@ -197,10 +197,10 @@ class DocumentationState:
class Parser(metaclass=abc.ABCMeta):
"""Base class for all composite argument parsers."""
@abc.abstractmethod
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
raise Exception(f'Undocumented parser: {type(self)}')
@ -217,22 +217,22 @@ class MatchConditions(enum.Flag):
class DynamicChoicesParser(Parser, metaclass=abc.ABCMeta):
"""Base class for composite argument parsers which use a list of choices that can be generated during completion."""
def __init__(self, conditions=MatchConditions.CHOICE): # type: (MatchConditions) -> None
def __init__(self, conditions: MatchConditions = MatchConditions.CHOICE) -> None:
self.conditions = conditions
@abc.abstractmethod
def get_choices(self, value): # type: (str) -> t.List[str]
def get_choices(self, value: str) -> t.List[str]:
"""Return a list of valid choices based on the given input value."""
def no_completion_match(self, value): # type: (str) -> CompletionUnavailable # pylint: disable=unused-argument
def no_completion_match(self, value: str) -> CompletionUnavailable: # pylint: disable=unused-argument
"""Return an instance of CompletionUnavailable when no match was found for the given value."""
return CompletionUnavailable()
def no_choices_available(self, value): # type: (str) -> ParserError # pylint: disable=unused-argument
def no_choices_available(self, value: str) -> ParserError: # pylint: disable=unused-argument
"""Return an instance of ParserError when parsing fails and no choices are available."""
return ParserError('No choices available.')
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
value = state.read()
choices = self.get_choices(value)
@ -272,16 +272,16 @@ class DynamicChoicesParser(Parser, metaclass=abc.ABCMeta):
class ChoicesParser(DynamicChoicesParser):
"""Composite argument parser which relies on a static list of choices."""
def __init__(self, choices, conditions=MatchConditions.CHOICE): # type: (t.List[str], MatchConditions) -> None
def __init__(self, choices: t.List[str], conditions: MatchConditions = MatchConditions.CHOICE) -> None:
self.choices = choices
super().__init__(conditions=conditions)
def get_choices(self, value): # type: (str) -> t.List[str]
def get_choices(self, value: str) -> t.List[str]:
"""Return a list of valid choices based on the given input value."""
return self.choices
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
return '|'.join(self.choices)
@ -290,12 +290,12 @@ class IntegerParser(DynamicChoicesParser):
"""Composite argument parser for integers."""
PATTERN = re.compile('^[1-9][0-9]*$')
def __init__(self, maximum=None): # type: (t.Optional[int]) -> None
def __init__(self, maximum: t.Optional[int] = None) -> None:
self.maximum = maximum
super().__init__()
def get_choices(self, value): # type: (str) -> t.List[str]
def get_choices(self, value: str) -> t.List[str]:
"""Return a list of valid choices based on the given input value."""
if not value:
numbers = list(range(1, 10))
@ -313,12 +313,12 @@ class IntegerParser(DynamicChoicesParser):
return [str(n) for n in numbers]
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
value = super().parse(state)
return int(value)
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
return '{integer}'
@ -328,7 +328,7 @@ class BooleanParser(ChoicesParser):
def __init__(self):
super().__init__(['yes', 'no'])
def parse(self, state): # type: (ParserState) -> bool
def parse(self, state: ParserState) -> bool:
"""Parse the input from the given state and return the result."""
value = super().parse(state)
return value == 'yes'
@ -336,7 +336,7 @@ class BooleanParser(ChoicesParser):
class AnyParser(ChoicesParser):
"""Composite argument parser which accepts any input value."""
def __init__(self, nothing=False, no_match_message=None): # type: (bool, t.Optional[str]) -> None
def __init__(self, nothing: bool = False, no_match_message: t.Optional[str] = None) -> None:
self.no_match_message = no_match_message
conditions = MatchConditions.ANY
@ -346,14 +346,14 @@ class AnyParser(ChoicesParser):
super().__init__([], conditions=conditions)
def no_completion_match(self, value): # type: (str) -> CompletionUnavailable
def no_completion_match(self, value: str) -> CompletionUnavailable:
"""Return an instance of CompletionUnavailable when no match was found for the given value."""
if self.no_match_message:
return CompletionUnavailable(message=self.no_match_message)
return super().no_completion_match(value)
def no_choices_available(self, value): # type: (str) -> ParserError
def no_choices_available(self, value: str) -> ParserError:
"""Return an instance of ParserError when parsing fails and no choices are available."""
if self.no_match_message:
return ParserError(self.no_match_message)
@ -365,12 +365,12 @@ class RelativePathNameParser(DynamicChoicesParser):
"""Composite argument parser for relative path names."""
RELATIVE_NAMES = ['.', '..']
def __init__(self, choices): # type: (t.List[str]) -> None
def __init__(self, choices: t.List[str]) -> None:
self.choices = choices
super().__init__()
def get_choices(self, value): # type: (str) -> t.List[str]
def get_choices(self, value: str) -> t.List[str]:
"""Return a list of valid choices based on the given input value."""
choices = list(self.choices)
@ -384,7 +384,7 @@ class RelativePathNameParser(DynamicChoicesParser):
class FileParser(Parser):
"""Composite argument parser for absolute or relative file paths."""
def parse(self, state): # type: (ParserState) -> str
def parse(self, state: ParserState) -> str:
"""Parse the input from the given state and return the result."""
if state.mode == ParserMode.PARSE:
path = AnyParser().parse(state)
@ -416,7 +416,7 @@ class FileParser(Parser):
class AbsolutePathParser(Parser):
"""Composite argument parser for absolute file paths. Paths are only verified for proper syntax, not for existence."""
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
path = ''
@ -434,7 +434,7 @@ class AbsolutePathParser(Parser):
class NamespaceParser(Parser, metaclass=abc.ABCMeta):
"""Base class for composite argument parsers that store their results in a namespace."""
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
namespace = state.current_namespace
current = getattr(namespace, self.dest)
@ -458,38 +458,38 @@ class NamespaceParser(Parser, metaclass=abc.ABCMeta):
return value
def get_value(self, state): # type: (ParserState) -> t.Any
def get_value(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result, without storing the result in the namespace."""
return super().parse(state)
@property
def use_list(self): # type: () -> bool
def use_list(self) -> bool:
"""True if the destination is a list, otherwise False."""
return False
@property
def limit_one(self): # type: () -> bool
def limit_one(self) -> bool:
"""True if only one target is allowed, otherwise False."""
return not self.use_list
@property
@abc.abstractmethod
def dest(self): # type: () -> str
def dest(self) -> str:
"""The name of the attribute where the value should be stored."""
class NamespaceWrappedParser(NamespaceParser):
"""Composite argument parser that wraps a non-namespace parser and stores the result in a namespace."""
def __init__(self, dest, parser): # type: (str, Parser) -> None
def __init__(self, dest: str, parser: Parser) -> None:
self._dest = dest
self.parser = parser
def get_value(self, state): # type: (ParserState) -> t.Any
def get_value(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result, without storing the result in the namespace."""
return self.parser.parse(state)
@property
def dest(self): # type: () -> str
def dest(self) -> str:
"""The name of the attribute where the value should be stored."""
return self._dest
@ -497,10 +497,10 @@ class NamespaceWrappedParser(NamespaceParser):
class KeyValueParser(Parser, metaclass=abc.ABCMeta):
"""Base class for key/value composite argument parsers."""
@abc.abstractmethod
def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
namespace = state.current_namespace
parsers = self.get_parsers(state)
@ -522,7 +522,7 @@ class KeyValueParser(Parser, metaclass=abc.ABCMeta):
class PairParser(Parser, metaclass=abc.ABCMeta):
"""Base class for composite argument parsers consisting of a left and right argument parser, with input separated by a delimiter."""
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
namespace = self.create_namespace()
@ -537,39 +537,39 @@ class PairParser(Parser, metaclass=abc.ABCMeta):
return namespace
@property
def required(self): # type: () -> bool
def required(self) -> bool:
"""True if the delimiter (and thus right parser) is required, otherwise False."""
return False
@property
def delimiter(self): # type: () -> str
def delimiter(self) -> str:
"""The delimiter to use between the left and right parser."""
return PAIR_DELIMITER
@abc.abstractmethod
def create_namespace(self): # type: () -> t.Any
def create_namespace(self) -> t.Any:
"""Create and return a namespace."""
@abc.abstractmethod
def get_left_parser(self, state): # type: (ParserState) -> Parser
def get_left_parser(self, state: ParserState) -> Parser:
"""Return the parser for the left side."""
@abc.abstractmethod
def get_right_parser(self, choice): # type: (t.Any) -> Parser
def get_right_parser(self, choice: t.Any) -> Parser:
"""Return the parser for the right side."""
class TypeParser(Parser, metaclass=abc.ABCMeta):
"""Base class for composite argument parsers which parse a type name, a colon and then parse results based on the type given by the type name."""
def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] # pylint: disable=unused-argument
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: # pylint: disable=unused-argument
"""Return a dictionary of type names and type parsers."""
return self.get_stateless_parsers()
@abc.abstractmethod
def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
def get_stateless_parsers(self) -> t.Dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
parsers = self.get_parsers(state)

@ -227,7 +227,7 @@ def do_commands(
do_units(subparsers, test, completer)
def color(value): # type: (str) -> bool
def color(value: str) -> bool:
"""Strict converter for color option."""
if value == 'yes':
return True

@ -55,17 +55,17 @@ from ..data import (
)
def filter_python(version, versions): # type: (t.Optional[str], t.Optional[t.Sequence[str]]) -> t.Optional[str]
def filter_python(version: t.Optional[str], versions: t.Optional[t.Sequence[str]]) -> t.Optional[str]:
"""If a Python version is given and is in the given version list, return that Python version, otherwise return None."""
return version if version in versions else None
def controller_python(version): # type: (t.Optional[str]) -> t.Optional[str]
def controller_python(version: t.Optional[str]) -> t.Optional[str]:
"""If a Python version is given and is supported by the controller, return that Python version, otherwise return None."""
return filter_python(version, CONTROLLER_PYTHON_VERSIONS)
def get_fallback_remote_controller(): # type: () -> str
def get_fallback_remote_controller() -> str:
"""Return the remote fallback platform for the controller."""
platform = 'freebsd' # lower cost than RHEL and macOS
candidates = [item for item in filter_completion(remote_completion()).values() if item.controller_supported and item.platform == platform]
@ -73,7 +73,7 @@ def get_fallback_remote_controller(): # type: () -> str
return fallback.name
def get_option_name(name): # type: (str) -> str
def get_option_name(name: str) -> str:
"""Return a command-line option name from the given option name."""
if name == 'targets':
name = 'target'
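
The tail of get_option_name is not shown in this hunk. A plausible sketch of the rest (an assumption, not the verbatim source) maps underscores to dashes and prefixes the option:

def get_option_name(name: str) -> str:
    """Sketch: map a LegacyHostOptions field name to its command-line option name."""
    if name == 'targets':
        name = 'target'
    return f'--{name.replace("_", "-")}'


# get_option_name('python_interpreter') == '--python-interpreter'
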
@ -144,14 +144,14 @@ class LegacyHostOptions:
delattr(namespace, field.name)
@staticmethod
def purge_args(args): # type: (t.List[str]) -> t.List[str]
def purge_args(args: t.List[str]) -> t.List[str]:
"""Purge legacy host options from the given command line arguments."""
fields = dataclasses.fields(LegacyHostOptions) # type: t.Tuple[dataclasses.Field, ...]
filters = {get_option_name(field.name): 0 if field.type is t.Optional[bool] else 1 for field in fields} # type: t.Dict[str, int]
return filter_args(args, filters)
def get_options_used(self): # type: () -> t.Tuple[str, ...]
def get_options_used(self) -> t.Tuple[str, ...]:
"""Return a tuple of the command line options used."""
fields = dataclasses.fields(self) # type: t.Tuple[dataclasses.Field, ...]
options = tuple(sorted(get_option_name(field.name) for field in fields if getattr(self, field.name)))
@ -278,7 +278,7 @@ def controller_targets(
return targets
def native_python(options): # type: (LegacyHostOptions) -> t.Optional[NativePythonConfig]
def native_python(options: LegacyHostOptions) -> t.Optional[NativePythonConfig]:
"""Return a NativePythonConfig for the given version if it is not None, otherwise return None."""
if not options.python and not options.python_interpreter:
return None

@ -5,12 +5,12 @@ import argparse
import typing as t
def key_value_type(value): # type: (str) -> t.Tuple[str, str]
def key_value_type(value: str) -> t.Tuple[str, str]:
"""Wrapper around key_value."""
return key_value(value)
def key_value(value): # type: (str) -> t.Tuple[str, str]
def key_value(value: str) -> t.Tuple[str, str]:
"""Type parsing and validation for argparse key/value pairs separated by an '=' character."""
parts = value.split('=')
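
key_value is likewise truncated. A minimal complete version of an argparse type callback with this shape, assuming exactly one '=' is required, could be:

import argparse
import typing as t


def key_value(value: str) -> t.Tuple[str, str]:
    """Sketch of the full callback: accept exactly one '=' or raise an argparse error."""
    parts = value.split('=')
    if len(parts) != 2:
        raise argparse.ArgumentTypeError(f'{value!r} must be in the format "key=value"')
    return parts[0], parts[1]
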

@ -172,7 +172,7 @@ def add_composite_environment_options(
action_types = [] # type: t.List[t.Type[CompositeAction]]
def register_action_type(action_type): # type: (t.Type[CompositeAction]) -> t.Type[CompositeAction]
def register_action_type(action_type: t.Type[CompositeAction]) -> t.Type[CompositeAction]:
"""Register the provided composite action type and return it."""
action_types.append(action_type)
return action_type
@ -577,16 +577,16 @@ def complete_network_platform_connection(prefix: str, parsed_args: argparse.Name
return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_connection or i not in [x[0] for x in parsed_args.platform_connection])]
def get_remote_platform_choices(controller=False): # type: (bool) -> t.List[str]
def get_remote_platform_choices(controller: bool = False) -> t.List[str]:
"""Return a list of supported remote platforms matching the given prefix."""
return sorted(filter_completion(remote_completion(), controller_only=controller))
def get_windows_platform_choices(): # type: () -> t.List[str]
def get_windows_platform_choices() -> t.List[str]:
"""Return a list of supported Windows versions matching the given prefix."""
return sorted(f'windows/{windows.version}' for windows in filter_completion(windows_completion()).values())
def get_windows_version_choices(): # type: () -> t.List[str]
def get_windows_version_choices() -> t.List[str]:
"""Return a list of supported Windows versions."""
return sorted(windows.version for windows in filter_completion(windows_completion()).values())

@ -53,13 +53,13 @@ from .base_argument_parsers import (
class OriginControllerParser(ControllerNamespaceParser, TypeParser):
"""Composite argument parser for the controller when delegation is not supported."""
def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
def get_stateless_parsers(self) -> t.Dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
return dict(
origin=OriginParser(),
)
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
section = '--controller options:'
@ -71,7 +71,7 @@ class OriginControllerParser(ControllerNamespaceParser, TypeParser):
class DelegatedControllerParser(ControllerNamespaceParser, TypeParser):
"""Composite argument parser for the controller when delegation is supported."""
def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
def get_stateless_parsers(self) -> t.Dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
parsers: t.Dict[str, Parser] = dict(
origin=OriginParser(),
@ -85,7 +85,7 @@ class DelegatedControllerParser(ControllerNamespaceParser, TypeParser):
return parsers
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
section = '--controller options:'
@ -97,7 +97,7 @@ class DelegatedControllerParser(ControllerNamespaceParser, TypeParser):
class PosixTargetParser(TargetNamespaceParser, TypeParser):
"""Composite argument parser for a POSIX target."""
def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
def get_stateless_parsers(self) -> t.Dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
parsers: t.Dict[str, Parser] = dict(
controller=ControllerParser(),
@ -115,7 +115,7 @@ class PosixTargetParser(TargetNamespaceParser, TypeParser):
return parsers
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
section = f'{self.option_name} options (choose one):'
@ -128,19 +128,19 @@ class PosixTargetParser(TargetNamespaceParser, TypeParser):
class WindowsTargetParser(TargetsNamespaceParser, TypeParser):
"""Composite argument parser for a Windows target."""
@property
def allow_inventory(self): # type: () -> bool
def allow_inventory(self) -> bool:
"""True if inventory is allowed, otherwise False."""
return True
def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
return self.get_internal_parsers(state.root_namespace.targets)
def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
def get_stateless_parsers(self) -> t.Dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
return self.get_internal_parsers([])
def get_internal_parsers(self, targets): # type: (t.List[WindowsConfig]) -> t.Dict[str, Parser]
def get_internal_parsers(self, targets: t.List[WindowsConfig]) -> t.Dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
parsers = {} # type: t.Dict[str, Parser]
@ -157,7 +157,7 @@ class WindowsTargetParser(TargetsNamespaceParser, TypeParser):
return parsers
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
section = f'{self.option_name} options (choose one):'
@ -170,19 +170,19 @@ class WindowsTargetParser(TargetsNamespaceParser, TypeParser):
class NetworkTargetParser(TargetsNamespaceParser, TypeParser):
"""Composite argument parser for a network target."""
@property
def allow_inventory(self): # type: () -> bool
def allow_inventory(self) -> bool:
"""True if inventory is allowed, otherwise False."""
return True
def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
return self.get_internal_parsers(state.root_namespace.targets)
def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
def get_stateless_parsers(self) -> t.Dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
return self.get_internal_parsers([])
def get_internal_parsers(self, targets): # type: (t.List[NetworkConfig]) -> t.Dict[str, Parser]
def get_internal_parsers(self, targets: t.List[NetworkConfig]) -> t.Dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
parsers = {} # type: t.Dict[str, Parser]
@ -199,7 +199,7 @@ class NetworkTargetParser(TargetsNamespaceParser, TypeParser):
return parsers
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
section = f'{self.option_name} options (choose one):'
@ -211,17 +211,17 @@ class NetworkTargetParser(TargetsNamespaceParser, TypeParser):
class PythonTargetParser(TargetsNamespaceParser, Parser):
"""Composite argument parser for a Python target."""
def __init__(self, allow_venv): # type: (bool) -> None
def __init__(self, allow_venv: bool) -> None:
super().__init__()
self.allow_venv = allow_venv
@property
def option_name(self): # type: () -> str
def option_name(self) -> str:
"""The option name used for this parser."""
return '--target-python'
def get_value(self, state): # type: (ParserState) -> t.Any
def get_value(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result, without storing the result in the namespace."""
versions = list(SUPPORTED_PYTHON_VERSIONS)
@ -235,7 +235,7 @@ class PythonTargetParser(TargetsNamespaceParser, Parser):
return value
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
section = f'{self.option_name} options (choose one):'
@ -249,20 +249,20 @@ class PythonTargetParser(TargetsNamespaceParser, Parser):
class SanityPythonTargetParser(PythonTargetParser):
"""Composite argument parser for a sanity Python target."""
def __init__(self): # type: () -> None
def __init__(self) -> None:
super().__init__(allow_venv=False)
class UnitsPythonTargetParser(PythonTargetParser):
"""Composite argument parser for a units Python target."""
def __init__(self): # type: () -> None
def __init__(self) -> None:
super().__init__(allow_venv=True)
class PosixSshTargetParser(PosixTargetParser):
"""Composite argument parser for a POSIX SSH target."""
@property
def option_name(self): # type: () -> str
def option_name(self) -> str:
"""The option name used for this parser."""
return '--target-posix'
@ -270,17 +270,17 @@ class PosixSshTargetParser(PosixTargetParser):
class WindowsSshTargetParser(WindowsTargetParser):
"""Composite argument parser for a Windows SSH target."""
@property
def option_name(self): # type: () -> str
def option_name(self) -> str:
"""The option name used for this parser."""
return '--target-windows'
@property
def allow_inventory(self): # type: () -> bool
def allow_inventory(self) -> bool:
"""True if inventory is allowed, otherwise False."""
return False
@property
def limit_one(self): # type: () -> bool
def limit_one(self) -> bool:
"""True if only one target is allowed, otherwise False."""
return True
@ -288,16 +288,16 @@ class WindowsSshTargetParser(WindowsTargetParser):
class NetworkSshTargetParser(NetworkTargetParser):
"""Composite argument parser for a network SSH target."""
@property
def option_name(self): # type: () -> str
def option_name(self) -> str:
"""The option name used for this parser."""
return '--target-network'
@property
def allow_inventory(self): # type: () -> bool
def allow_inventory(self) -> bool:
"""True if inventory is allowed, otherwise False."""
return False
@property
def limit_one(self): # type: () -> bool
def limit_one(self) -> bool:
"""True if only one target is allowed, otherwise False."""
return True

@ -14,11 +14,11 @@ from ..argparsing.parsers import (
class ControllerNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
"""Base class for controller namespace parsers."""
@property
def dest(self): # type: () -> str
def dest(self) -> str:
"""The name of the attribute where the value should be stored."""
return 'controller'
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
if state.root_namespace.targets:
raise ControllerRequiredFirstError()
@ -29,22 +29,22 @@ class ControllerNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
class TargetNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
"""Base class for target namespace parsers involving a single target."""
@property
def option_name(self): # type: () -> str
def option_name(self) -> str:
"""The option name used for this parser."""
return '--target'
@property
def dest(self): # type: () -> str
def dest(self) -> str:
"""The name of the attribute where the value should be stored."""
return 'targets'
@property
def use_list(self): # type: () -> bool
def use_list(self) -> bool:
"""True if the destination is a list, otherwise False."""
return True
@property
def limit_one(self): # type: () -> bool
def limit_one(self) -> bool:
"""True if only one target is allowed, otherwise False."""
return True
@ -52,17 +52,17 @@ class TargetNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
class TargetsNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
"""Base class for controller namespace parsers involving multiple targets."""
@property
def option_name(self): # type: () -> str
def option_name(self) -> str:
"""The option name used for this parser."""
return '--target'
@property
def dest(self): # type: () -> str
def dest(self) -> str:
"""The name of the attribute where the value should be stored."""
return 'targets'
@property
def use_list(self): # type: () -> bool
def use_list(self) -> bool:
"""True if the destination is a list, otherwise False."""
return True
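
The dest, use_list and limit_one properties above describe where and how a parsed value lands in the namespace. A toy illustration of that contract (not the real NamespaceParser machinery):

import typing as t


def store(namespace: t.Any, dest: str, value: t.Any, use_list: bool, limit_one: bool) -> None:
    """Toy illustration only: store value on namespace.dest, appending when use_list is True."""
    if not use_list:
        setattr(namespace, dest, value)
        return
    values = getattr(namespace, dest, None)
    if values is None:
        values = []
        setattr(namespace, dest, values)
    if limit_one and values:
        raise ValueError(f'only one value is allowed for {dest!r}')
    values.append(value)
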

@ -21,7 +21,7 @@ from ...host_configs import (
)
def get_docker_pythons(name, controller, strict): # type: (str, bool, bool) -> t.List[str]
def get_docker_pythons(name: str, controller: bool, strict: bool) -> t.List[str]:
"""Return a list of docker instance Python versions supported by the specified host config."""
image_config = filter_completion(docker_completion()).get(name)
available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS
@ -34,7 +34,7 @@ def get_docker_pythons(name, controller, strict): # type: (str, bool, bool) ->
return supported_pythons
def get_remote_pythons(name, controller, strict): # type: (str, bool, bool) -> t.List[str]
def get_remote_pythons(name: str, controller: bool, strict: bool) -> t.List[str]:
"""Return a list of remote instance Python versions supported by the specified host config."""
platform_config = filter_completion(remote_completion()).get(name)
available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS
@ -47,7 +47,7 @@ def get_remote_pythons(name, controller, strict): # type: (str, bool, bool) ->
return supported_pythons
def get_controller_pythons(controller_config, strict): # type: (HostConfig, bool) -> t.List[str]
def get_controller_pythons(controller_config: HostConfig, strict: bool) -> t.List[str]:
"""Return a list of controller Python versions supported by the specified host config."""
if isinstance(controller_config, DockerConfig):
pythons = get_docker_pythons(controller_config.name, False, strict)
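
get_docker_pythons, get_remote_pythons and get_controller_pythons all share the same filtering idea, which these hunks only partially show. A hedged sketch of that logic, with the fallback behaviour assumed rather than quoted:

import typing as t


def pick_pythons(host_pythons: t.Optional[t.List[str]], available: t.Sequence[str], strict: bool) -> t.List[str]:
    """Sketch: unknown host -> all or none depending on strict; known host -> intersect with available."""
    if host_pythons is None:  # the host was not found in the completion data
        return [] if strict else list(available)
    return [python for python in host_pythons if python in available]
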

@ -63,7 +63,7 @@ from .helpers import (
class OriginParser(Parser):
"""Composite argument parser for the origin."""
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
namespace = OriginConfig()
@ -74,14 +74,14 @@ class OriginParser(Parser):
return namespace
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
return OriginKeyValueParser().document(state)
class ControllerParser(Parser):
"""Composite argument parser for the controller."""
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
namespace = ControllerConfig()
@ -92,30 +92,30 @@ class ControllerParser(Parser):
return namespace
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
return ControllerKeyValueParser().document(state)
class DockerParser(PairParser):
"""Composite argument parser for a docker host."""
def __init__(self, controller): # type: (bool) -> None
def __init__(self, controller: bool) -> None:
self.controller = controller
def create_namespace(self): # type: () -> t.Any
def create_namespace(self) -> t.Any:
"""Create and return a namespace."""
return DockerConfig()
def get_left_parser(self, state): # type: (ParserState) -> Parser
def get_left_parser(self, state: ParserState) -> Parser:
"""Return the parser for the left side."""
return NamespaceWrappedParser('name', ChoicesParser(list(filter_completion(docker_completion(), controller_only=self.controller)),
conditions=MatchConditions.CHOICE | MatchConditions.ANY))
def get_right_parser(self, choice): # type: (t.Any) -> Parser
def get_right_parser(self, choice: t.Any) -> Parser:
"""Return the parser for the right side."""
return DockerKeyValueParser(choice, self.controller)
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
value = super().parse(state) # type: DockerConfig
@ -124,7 +124,7 @@ class DockerParser(PairParser):
return value
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
default = 'default'
content = '\n'.join([f' {image} ({", ".join(get_docker_pythons(image, self.controller, False))})'
@ -142,22 +142,22 @@ class DockerParser(PairParser):
class PosixRemoteParser(PairParser):
"""Composite argument parser for a POSIX remote host."""
def __init__(self, controller): # type: (bool) -> None
def __init__(self, controller: bool) -> None:
self.controller = controller
def create_namespace(self): # type: () -> t.Any
def create_namespace(self) -> t.Any:
"""Create and return a namespace."""
return PosixRemoteConfig()
def get_left_parser(self, state): # type: (ParserState) -> Parser
def get_left_parser(self, state: ParserState) -> Parser:
"""Return the parser for the left side."""
return NamespaceWrappedParser('name', PlatformParser(list(filter_completion(remote_completion(), controller_only=self.controller))))
def get_right_parser(self, choice): # type: (t.Any) -> Parser
def get_right_parser(self, choice: t.Any) -> Parser:
"""Return the parser for the right side."""
return PosixRemoteKeyValueParser(choice, self.controller)
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
value = super().parse(state) # type: PosixRemoteConfig
@ -166,7 +166,7 @@ class PosixRemoteParser(PairParser):
return value
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
default = get_fallback_remote_controller()
content = '\n'.join([f' {name} ({", ".join(get_remote_pythons(name, self.controller, False))})'
@ -184,11 +184,11 @@ class PosixRemoteParser(PairParser):
class WindowsRemoteParser(PairParser):
"""Composite argument parser for a Windows remote host."""
def create_namespace(self): # type: () -> t.Any
def create_namespace(self) -> t.Any:
"""Create and return a namespace."""
return WindowsRemoteConfig()
def get_left_parser(self, state): # type: (ParserState) -> Parser
def get_left_parser(self, state: ParserState) -> Parser:
"""Return the parser for the left side."""
names = list(filter_completion(windows_completion()))
@ -197,11 +197,11 @@ class WindowsRemoteParser(PairParser):
return NamespaceWrappedParser('name', PlatformParser(names))
def get_right_parser(self, choice): # type: (t.Any) -> Parser
def get_right_parser(self, choice: t.Any) -> Parser:
"""Return the parser for the right side."""
return WindowsRemoteKeyValueParser()
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
content = '\n'.join([f' {name}' for name, item in filter_completion(windows_completion()).items()])
@ -217,11 +217,11 @@ class WindowsRemoteParser(PairParser):
class NetworkRemoteParser(PairParser):
"""Composite argument parser for a network remote host."""
def create_namespace(self): # type: () -> t.Any
def create_namespace(self) -> t.Any:
"""Create and return a namespace."""
return NetworkRemoteConfig()
def get_left_parser(self, state): # type: (ParserState) -> Parser
def get_left_parser(self, state: ParserState) -> Parser:
"""Return the parser for the left side."""
names = list(filter_completion(network_completion()))
@ -230,11 +230,11 @@ class NetworkRemoteParser(PairParser):
return NamespaceWrappedParser('name', PlatformParser(names))
def get_right_parser(self, choice): # type: (t.Any) -> Parser
def get_right_parser(self, choice: t.Any) -> Parser:
"""Return the parser for the right side."""
return NetworkRemoteKeyValueParser()
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
content = '\n'.join([f' {name}' for name, item in filter_completion(network_completion()).items()])
@ -250,61 +250,61 @@ class NetworkRemoteParser(PairParser):
class WindowsInventoryParser(PairParser):
"""Composite argument parser for a Windows inventory."""
def create_namespace(self): # type: () -> t.Any
def create_namespace(self) -> t.Any:
"""Create and return a namespace."""
return WindowsInventoryConfig()
def get_left_parser(self, state): # type: (ParserState) -> Parser
def get_left_parser(self, state: ParserState) -> Parser:
"""Return the parser for the left side."""
return NamespaceWrappedParser('path', FileParser())
def get_right_parser(self, choice): # type: (t.Any) -> Parser
def get_right_parser(self, choice: t.Any) -> Parser:
"""Return the parser for the right side."""
return EmptyKeyValueParser()
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
return '{path} # INI format inventory file'
class NetworkInventoryParser(PairParser):
"""Composite argument parser for a network inventory."""
def create_namespace(self): # type: () -> t.Any
def create_namespace(self) -> t.Any:
"""Create and return a namespace."""
return NetworkInventoryConfig()
def get_left_parser(self, state): # type: (ParserState) -> Parser
def get_left_parser(self, state: ParserState) -> Parser:
"""Return the parser for the left side."""
return NamespaceWrappedParser('path', FileParser())
def get_right_parser(self, choice): # type: (t.Any) -> Parser
def get_right_parser(self, choice: t.Any) -> Parser:
"""Return the parser for the right side."""
return EmptyKeyValueParser()
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
return '{path} # INI format inventory file'
class PosixSshParser(PairParser):
"""Composite argument parser for a POSIX SSH host."""
def create_namespace(self): # type: () -> t.Any
def create_namespace(self) -> t.Any:
"""Create and return a namespace."""
return PosixSshConfig()
def get_left_parser(self, state): # type: (ParserState) -> Parser
def get_left_parser(self, state: ParserState) -> Parser:
"""Return the parser for the left side."""
return SshConnectionParser()
def get_right_parser(self, choice): # type: (t.Any) -> Parser
def get_right_parser(self, choice: t.Any) -> Parser:
"""Return the parser for the right side."""
return PosixSshKeyValueParser()
@property
def required(self): # type: () -> bool
def required(self) -> bool:
"""True if the delimiter (and thus right parser) is required, otherwise False."""
return True
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
return f'{SshConnectionParser().document(state)}[,{PosixSshKeyValueParser().document(state)}]'
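
PosixSshParser is the one PairParser here whose right-hand side is mandatory (required returns True). A self-contained sketch of the left/delimiter/right flow, assuming PAIR_DELIMITER is ',':

import typing as t

PAIR_DELIMITER = ','  # assumed value for this sketch


def parse_pair(value: str,
               parse_left: t.Callable[[str], t.Any],
               parse_right: t.Callable[[str], t.Any],
               required: bool = False) -> t.Tuple[t.Any, t.Optional[t.Any]]:
    """Parse the left value, then an optional (or required) right value after the delimiter."""
    left_text, delimiter, right_text = value.partition(PAIR_DELIMITER)
    if not delimiter:
        if required:
            raise ValueError(f'expected {PAIR_DELIMITER!r} followed by more input')
        return parse_left(left_text), None
    return parse_left(left_text), parse_right(right_text)
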

@ -46,7 +46,7 @@ from .helpers import (
class OriginKeyValueParser(KeyValueParser):
"""Composite argument parser for origin key/value pairs."""
def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
versions = CONTROLLER_PYTHON_VERSIONS
@ -54,7 +54,7 @@ class OriginKeyValueParser(KeyValueParser):
python=PythonParser(versions=versions, allow_venv=True, allow_default=True),
)
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
python_parser = PythonParser(versions=CONTROLLER_PYTHON_VERSIONS, allow_venv=True, allow_default=True)
@ -69,7 +69,7 @@ class OriginKeyValueParser(KeyValueParser):
class ControllerKeyValueParser(KeyValueParser):
"""Composite argument parser for controller key/value pairs."""
def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
versions = get_controller_pythons(state.root_namespace.controller, False)
allow_default = bool(get_controller_pythons(state.root_namespace.controller, True))
@ -79,7 +79,7 @@ class ControllerKeyValueParser(KeyValueParser):
python=PythonParser(versions=versions, allow_venv=allow_venv, allow_default=allow_default),
)
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
section_name = 'controller options'
@ -98,7 +98,7 @@ class DockerKeyValueParser(KeyValueParser):
self.versions = get_docker_pythons(image, controller, False)
self.allow_default = bool(get_docker_pythons(image, controller, True))
def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return dict(
python=PythonParser(versions=self.versions, allow_venv=False, allow_default=self.allow_default),
@ -107,7 +107,7 @@ class DockerKeyValueParser(KeyValueParser):
memory=IntegerParser(),
)
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
python_parser = PythonParser(versions=[], allow_venv=False, allow_default=self.allow_default)
@ -130,7 +130,7 @@ class PosixRemoteKeyValueParser(KeyValueParser):
self.versions = get_remote_pythons(name, controller, False)
self.allow_default = bool(get_remote_pythons(name, controller, True))
def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return dict(
become=ChoicesParser(list(SUPPORTED_BECOME_METHODS)),
@ -139,7 +139,7 @@ class PosixRemoteKeyValueParser(KeyValueParser):
python=PythonParser(versions=self.versions, allow_venv=False, allow_default=self.allow_default),
)
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
python_parser = PythonParser(versions=[], allow_venv=False, allow_default=self.allow_default)
@ -157,14 +157,14 @@ class PosixRemoteKeyValueParser(KeyValueParser):
class WindowsRemoteKeyValueParser(KeyValueParser):
"""Composite argument parser for Windows remote key/value pairs."""
def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return dict(
provider=ChoicesParser(REMOTE_PROVIDERS),
arch=ChoicesParser(REMOTE_ARCHITECTURES),
)
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
section_name = 'remote options'
@ -178,7 +178,7 @@ class WindowsRemoteKeyValueParser(KeyValueParser):
class NetworkRemoteKeyValueParser(KeyValueParser):
"""Composite argument parser for network remote key/value pairs."""
def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return dict(
provider=ChoicesParser(REMOTE_PROVIDERS),
@ -187,7 +187,7 @@ class NetworkRemoteKeyValueParser(KeyValueParser):
connection=AnyParser(),
)
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
section_name = 'remote options'
@ -203,13 +203,13 @@ class NetworkRemoteKeyValueParser(KeyValueParser):
class PosixSshKeyValueParser(KeyValueParser):
"""Composite argument parser for POSIX SSH host key/value pairs."""
def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return dict(
python=PythonParser(versions=list(SUPPORTED_PYTHON_VERSIONS), allow_venv=False, allow_default=False),
)
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
python_parser = PythonParser(versions=SUPPORTED_PYTHON_VERSIONS, allow_venv=False, allow_default=False)
@ -224,6 +224,6 @@ class PosixSshKeyValueParser(KeyValueParser):
class EmptyKeyValueParser(KeyValueParser):
"""Composite argument parser when a key/value parser is required but there are no keys available."""
def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return {}
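
The KeyValueParser subclasses above differ only in which keys they accept and which value parser each key maps to. A toy version of the shared parsing loop, working on a plain string instead of ParserState:

import typing as t


def parse_key_values(text: str, parsers: t.Dict[str, t.Callable[[str], t.Any]]) -> t.Dict[str, t.Any]:
    """Toy version of the shared loop: 'key=value,key=value' with a per-key value parser."""
    result: t.Dict[str, t.Any] = {}
    for pair in filter(None, text.split(',')):
        key, _, raw = pair.partition('=')
        if key not in parsers:
            raise ValueError(f'unknown key: {key!r}')
        result[key] = parsers[key](raw)
    return result


# parse_key_values('python=3.10,memory=2048', {'python': str, 'memory': int}) == {'python': '3.10', 'memory': 2048}
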

@ -85,7 +85,7 @@ class PythonParser(Parser):
self.venv_choices = venv_choices
self.venv_choices = venv_choices
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
boundary: ParserBoundary
@ -116,7 +116,7 @@ class PythonParser(Parser):
return python
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
docs = '[venv/[system-site-packages/]]' if self.allow_venv else ''
@ -133,10 +133,10 @@ class PythonParser(Parser):
class PlatformParser(ChoicesParser):
"""Composite argument parser for "{platform}/{version}" formatted choices."""
def __init__(self, choices): # type: (t.List[str]) -> None
def __init__(self, choices: t.List[str]) -> None:
super().__init__(choices, conditions=MatchConditions.CHOICE | MatchConditions.ANY)
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
value = super().parse(state)
@ -153,7 +153,7 @@ class SshConnectionParser(Parser):
"""
EXPECTED_FORMAT = '{user}@{host}[:{port}]'
def parse(self, state): # type: (ParserState) -> t.Any
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
namespace = state.current_namespace
@ -173,6 +173,6 @@ class SshConnectionParser(Parser):
return namespace
def document(self, state): # type: (DocumentationState) -> t.Optional[str]
def document(self, state: DocumentationState) -> t.Optional[str]:
"""Generate and return documentation for this parser."""
return self.EXPECTED_FORMAT
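
SshConnectionParser documents itself with the '{user}@{host}[:{port}]' format. A standalone sketch of parsing that format directly from a string (the real parser consumes ParserState):

import typing as t


def parse_ssh_connection(value: str) -> t.Tuple[str, str, t.Optional[int]]:
    """Standalone sketch: split '{user}@{host}[:{port}]' into user, host and optional port."""
    user, _, host_port = value.partition('@')
    if not user or not host_port:
        raise ValueError(f'expected {{user}}@{{host}}[:{{port}}], got {value!r}')
    host, _, port = host_port.partition(':')
    return user, host, int(port) if port else None


# parse_ssh_connection('root@192.0.2.10:2222') == ('root', '192.0.2.10', 2222)
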

@ -68,11 +68,11 @@ COVERAGE_OUTPUT_FILE_NAME = 'coverage'
class CoverageConfig(EnvironmentConfig):
"""Configuration for the coverage command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'coverage')
def initialize_coverage(args, host_state): # type: (CoverageConfig, HostState) -> coverage_module
def initialize_coverage(args: CoverageConfig, host_state: HostState) -> coverage_module:
"""Delegate execution if requested, install requirements, then import and return the coverage module. Raises an exception if coverage is not available."""
configure_pypi_proxy(args, host_state.controller_profile) # coverage
install_requirements(args, host_state.controller_profile.python, coverage=True) # coverage
@ -93,7 +93,7 @@ def initialize_coverage(args, host_state): # type: (CoverageConfig, HostState)
return coverage
def run_coverage(args, host_state, output_file, command, cmd): # type: (CoverageConfig, HostState, str, str, t.List[str]) -> None
def run_coverage(args: CoverageConfig, host_state: HostState, output_file: str, command: str, cmd: t.List[str]) -> None:
"""Run the coverage cli tool with the specified options."""
env = common_environment()
env.update(dict(COVERAGE_FILE=output_file))
@ -112,22 +112,22 @@ def run_coverage(args, host_state, output_file, command, cmd): # type: (Coverag
display.warning(stderr)
def get_all_coverage_files(): # type: () -> t.List[str]
def get_all_coverage_files() -> t.List[str]:
"""Return a list of all coverage file paths."""
return get_python_coverage_files() + get_powershell_coverage_files()
def get_python_coverage_files(path=None): # type: (t.Optional[str]) -> t.List[str]
def get_python_coverage_files(path: t.Optional[str] = None) -> t.List[str]:
"""Return the list of Python coverage file paths."""
return get_coverage_files('python', path)
def get_powershell_coverage_files(path=None): # type: (t.Optional[str]) -> t.List[str]
def get_powershell_coverage_files(path: t.Optional[str] = None) -> t.List[str]:
"""Return the list of PowerShell coverage file paths."""
return get_coverage_files('powershell', path)
def get_coverage_files(language, path=None): # type: (str, t.Optional[str]) -> t.List[str]
def get_coverage_files(language: str, path: t.Optional[str] = None) -> t.List[str]:
"""Return the list of coverage file paths for the given language."""
coverage_dir = path or ResultType.COVERAGE.path
@ -143,7 +143,7 @@ def get_coverage_files(language, path=None): # type: (str, t.Optional[str]) ->
return coverage_files
def get_collection_path_regexes(): # type: () -> t.Tuple[t.Optional[t.Pattern], t.Optional[t.Pattern]]
def get_collection_path_regexes() -> t.Tuple[t.Optional[t.Pattern], t.Optional[t.Pattern]]:
"""Return a pair of regexes used for identifying and manipulating collection paths."""
if data_context().content.collection:
collection_search_re = re.compile(r'/%s/' % data_context().content.collection.directory)
@ -155,7 +155,7 @@ def get_collection_path_regexes(): # type: () -> t.Tuple[t.Optional[t.Pattern],
return collection_search_re, collection_sub_re
def get_python_modules(): # type: () -> t.Dict[str, str]
def get_python_modules() -> t.Dict[str, str]:
"""Return a dictionary of Ansible module names and their paths."""
return dict((target.module, target.path) for target in list(walk_module_targets()) if target.path.endswith('.py'))
@ -343,13 +343,13 @@ def sanitize_filename(
class PathChecker:
"""Checks code coverage paths to verify they are valid and reports on the findings."""
def __init__(self, args, collection_search_re=None): # type: (CoverageConfig, t.Optional[t.Pattern]) -> None
def __init__(self, args: CoverageConfig, collection_search_re: t.Optional[t.Pattern] = None) -> None:
self.args = args
self.collection_search_re = collection_search_re
self.invalid_paths = [] # type: t.List[str]
self.invalid_path_chars = 0
def check_path(self, path): # type: (str) -> bool
def check_path(self, path: str) -> bool:
"""Return True if the given coverage path is valid, otherwise display a warning and return False."""
if os.path.isfile(to_bytes(path)):
return True
@ -367,7 +367,7 @@ class PathChecker:
return False
def report(self): # type: () -> None
def report(self) -> None:
"""Display a warning regarding invalid paths if any were found."""
if self.invalid_paths:
display.warning('Ignored %d characters from %d invalid coverage path(s).' % (self.invalid_path_chars, len(self.invalid_paths)))

@ -9,7 +9,7 @@ from .. import (
class CoverageAnalyzeConfig(CoverageConfig):
"""Configuration for the `coverage analyze` command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args)
# avoid mixing log messages with file output when using `/dev/stdout` for the output file on commands

@ -31,7 +31,7 @@ class CoverageAnalyzeTargetsConfig(CoverageAnalyzeConfig):
"""Configuration for the `coverage analyze targets` command."""
def make_report(target_indexes, arcs, lines): # type: (TargetIndexes, Arcs, Lines) -> t.Dict[str, t.Any]
def make_report(target_indexes: TargetIndexes, arcs: Arcs, lines: Lines) -> t.Dict[str, t.Any]:
"""Condense target indexes, arcs and lines into a compact report."""
set_indexes = {} # type: TargetSetIndexes
arc_refs = dict((path, dict((format_arc(arc), get_target_set_index(indexes, set_indexes)) for arc, indexes in data.items())) for path, data in arcs.items())
@ -65,7 +65,7 @@ def load_report(report): # type: (t.Dict[str, t.Any]) -> t.Tuple[t.List[str], A
return target_indexes, arcs, lines
def read_report(path): # type: (str) -> t.Tuple[t.List[str], Arcs, Lines]
def read_report(path: str) -> t.Tuple[t.List[str], Arcs, Lines]:
"""Read a JSON report from disk."""
try:
report = read_json_file(path)
@ -90,7 +90,7 @@ def write_report(args, report, path): # type: (CoverageAnalyzeTargetsConfig, t.
), verbosity=1)
def format_line(value): # type: (int) -> str
def format_line(value: int) -> str:
"""Format line as a string."""
return str(value) # putting this in a function keeps both pylint and mypy happy
@ -100,18 +100,18 @@ def format_arc(value): # type: (t.Tuple[int, int]) -> str
return '%d:%d' % value
def parse_arc(value): # type: (str) -> t.Tuple[int, int]
def parse_arc(value: str) -> t.Tuple[int, int]:
"""Parse an arc string into a tuple."""
first, last = tuple(map(int, value.split(':')))
return first, last
def get_target_set_index(data, target_set_indexes): # type: (t.Set[int], TargetSetIndexes) -> int
def get_target_set_index(data: t.Set[int], target_set_indexes: TargetSetIndexes) -> int:
"""Find or add the target set in the result set and return the target set index."""
return target_set_indexes.setdefault(frozenset(data), len(target_set_indexes))
def get_target_index(name, target_indexes): # type: (str, TargetIndexes) -> int
def get_target_index(name: str, target_indexes: TargetIndexes) -> int:
"""Find or add the target in the result set and return the target index."""
return target_indexes.setdefault(name, len(target_indexes))
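
format_arc, parse_arc and the index helpers above are small enough to exercise directly. The snippet below mirrors the two arc helpers and demonstrates the round trip plus the setdefault-based index assignment used by get_target_index:

import typing as t


def format_arc(value: t.Tuple[int, int]) -> str:
    """Format an arc as 'first:last' (mirrors the helper above)."""
    return '%d:%d' % value


def parse_arc(value: str) -> t.Tuple[int, int]:
    """Parse an arc string back into a tuple (mirrors the helper above)."""
    first, last = tuple(map(int, value.split(':')))
    return first, last


target_indexes: t.Dict[str, int] = {}
assert parse_arc(format_arc((3, -7))) == (3, -7)
assert [target_indexes.setdefault(name, len(target_indexes)) for name in ('a', 'b', 'a')] == [0, 1, 0]
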
@ -136,7 +136,7 @@ def expand_indexes(
return combined_data
def generate_indexes(target_indexes, data): # type: (TargetIndexes, NamedPoints) -> IndexedPoints
def generate_indexes(target_indexes: TargetIndexes, data: NamedPoints) -> IndexedPoints:
"""Return an indexed version of the given data (arcs or points)."""
results = {} # type: IndexedPoints

@ -28,14 +28,14 @@ from . import (
class CoverageAnalyzeTargetsCombineConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets combine` command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args)
self.input_files = args.input_file # type: t.List[str]
self.output_file = args.output_file # type: str
def command_coverage_analyze_targets_combine(args): # type: (CoverageAnalyzeTargetsCombineConfig) -> None
def command_coverage_analyze_targets_combine(args: CoverageAnalyzeTargetsCombineConfig) -> None:
"""Combine integration test target code coverage reports."""
host_state = prepare_profiles(args) # coverage analyze targets combine

@ -26,14 +26,14 @@ from . import (
class CoverageAnalyzeTargetsExpandConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets expand` command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args)
self.input_file = args.input_file # type: str
self.output_file = args.output_file # type: str
def command_coverage_analyze_targets_expand(args): # type: (CoverageAnalyzeTargetsExpandConfig) -> None
def command_coverage_analyze_targets_expand(args: CoverageAnalyzeTargetsExpandConfig) -> None:
"""Expand target names in an aggregated coverage file."""
host_state = prepare_profiles(args) # coverage analyze targets expand

@ -29,7 +29,7 @@ from . import (
class CoverageAnalyzeTargetsFilterConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets filter` command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args)
self.input_file = args.input_file # type: str
@ -40,7 +40,7 @@ class CoverageAnalyzeTargetsFilterConfig(CoverageAnalyzeTargetsConfig):
self.exclude_path = args.exclude_path # type: t.Optional[str]
def command_coverage_analyze_targets_filter(args): # type: (CoverageAnalyzeTargetsFilterConfig) -> None
def command_coverage_analyze_targets_filter(args: CoverageAnalyzeTargetsFilterConfig) -> None:
"""Filter target names in an aggregated coverage file."""
host_state = prepare_profiles(args) # coverage analyze targets filter

@ -52,14 +52,14 @@ from . import (
class CoverageAnalyzeTargetsGenerateConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets generate` command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args)
self.input_dir = args.input_dir or ResultType.COVERAGE.path # type: str
self.output_file = args.output_file # type: str
def command_coverage_analyze_targets_generate(args): # type: (CoverageAnalyzeTargetsGenerateConfig) -> None
def command_coverage_analyze_targets_generate(args: CoverageAnalyzeTargetsGenerateConfig) -> None:
"""Analyze code coverage data to determine which integration test targets provide coverage for each arc or line."""
host_state = prepare_profiles(args) # coverage analyze targets generate
@ -148,11 +148,11 @@ def prune_invalid_filenames(
del results[path]
def get_target_name(path): # type: (str) -> str
def get_target_name(path: str) -> str:
"""Extract the test target name from the given coverage path."""
return to_text(os.path.basename(path).split('=')[1])
def is_integration_coverage_file(path): # type: (str) -> bool
def is_integration_coverage_file(path: str) -> bool:
"""Returns True if the coverage file came from integration tests, otherwise False."""
return os.path.basename(path).split('=')[0] in ('integration', 'windows-integration', 'network-integration')
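
get_target_name and is_integration_coverage_file both key off the '='-separated coverage file name layout. A quick illustration using a hypothetical file name, since the exact naming scheme is not shown in this hunk:

import os


def get_target_name(path: str) -> str:
    """Extract the test target name from an '='-separated coverage file name (as above, minus to_text)."""
    return os.path.basename(path).split('=')[1]


sample = '/results/coverage/integration=apt=python-3.10=coverage'  # hypothetical file name
assert get_target_name(sample) == 'apt'
assert os.path.basename(sample).split('=')[0] == 'integration'
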

@ -32,7 +32,7 @@ from . import (
class CoverageAnalyzeTargetsMissingConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets missing` command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args)
self.from_file = args.from_file # type: str
@ -43,7 +43,7 @@ class CoverageAnalyzeTargetsMissingConfig(CoverageAnalyzeTargetsConfig):
self.only_exists = args.only_exists # type: bool
def command_coverage_analyze_targets_missing(args): # type: (CoverageAnalyzeTargetsMissingConfig) -> None
def command_coverage_analyze_targets_missing(args: CoverageAnalyzeTargetsMissingConfig) -> None:
"""Identify aggregated coverage in one file missing from another."""
host_state = prepare_profiles(args) # coverage analyze targets missing

@ -63,13 +63,13 @@ from . import (
TValue = t.TypeVar('TValue')
def command_coverage_combine(args): # type: (CoverageCombineConfig) -> None
def command_coverage_combine(args: CoverageCombineConfig) -> None:
"""Patch paths in coverage files and merge into a single file."""
host_state = prepare_profiles(args) # coverage combine
combine_coverage_files(args, host_state)
def combine_coverage_files(args, host_state): # type: (CoverageCombineConfig, HostState) -> t.List[str]
def combine_coverage_files(args: CoverageCombineConfig, host_state: HostState) -> t.List[str]:
"""Combine coverage and return a list of the resulting files."""
if args.delegate:
if isinstance(args.controller, (DockerConfig, RemoteConfig)):
@ -107,7 +107,7 @@ class ExportedCoverageDataNotFound(ApplicationError):
'The exported files must be in the directory: %s/' % ResultType.COVERAGE.relative_path)
def _command_coverage_combine_python(args, host_state): # type: (CoverageCombineConfig, HostState) -> t.List[str]
def _command_coverage_combine_python(args: CoverageCombineConfig, host_state: HostState) -> t.List[str]:
"""Combine Python coverage files and return a list of the output files."""
coverage = initialize_coverage(args, host_state)
@ -188,7 +188,7 @@ def _command_coverage_combine_python(args, host_state): # type: (CoverageCombin
return sorted(output_files)
def _command_coverage_combine_powershell(args): # type: (CoverageCombineConfig) -> t.List[str]
def _command_coverage_combine_powershell(args: CoverageCombineConfig) -> t.List[str]:
"""Combine PowerShell coverage files and return a list of the output files."""
coverage_files = get_powershell_coverage_files()
@ -262,7 +262,7 @@ def _command_coverage_combine_powershell(args): # type: (CoverageCombineConfig)
return sorted(output_files)
def _get_coverage_targets(args, walk_func): # type: (CoverageCombineConfig, t.Callable) -> t.List[t.Tuple[str, int]]
def _get_coverage_targets(args: CoverageCombineConfig, walk_func: t.Callable) -> t.List[t.Tuple[str, int]]:
"""Return a list of files to cover and the number of lines in each file, using the given function as the source of the files."""
sources = []
@ -316,7 +316,7 @@ def _build_stub_groups(
return groups
def get_coverage_group(args, coverage_file): # type: (CoverageCombineConfig, str) -> t.Optional[str]
def get_coverage_group(args: CoverageCombineConfig, coverage_file: str) -> t.Optional[str]:
"""Return the name of the coverage group for the specified coverage file, or None if no group was found."""
parts = os.path.basename(coverage_file).split('=', 4)
@ -350,7 +350,7 @@ def get_coverage_group(args, coverage_file): # type: (CoverageCombineConfig, st
class CoverageCombineConfig(CoverageConfig):
"""Configuration for the coverage combine command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args)
self.group_by = frozenset(args.group_by) if args.group_by else frozenset() # type: t.FrozenSet[str]

@ -20,7 +20,7 @@ from . import (
)
def command_coverage_erase(args): # type: (CoverageEraseConfig) -> None
def command_coverage_erase(args: CoverageEraseConfig) -> None:
"""Erase code coverage data files collected during test runs."""
host_state = prepare_profiles(args) # coverage erase

@ -29,7 +29,7 @@ from . import (
)
def command_coverage_html(args): # type: (CoverageHtmlConfig) -> None
def command_coverage_html(args: CoverageHtmlConfig) -> None:
"""Generate an HTML coverage report."""
host_state = prepare_profiles(args) # coverage html
output_files = combine_coverage_files(args, host_state)

@ -30,7 +30,7 @@ from . import (
)
def command_coverage_report(args): # type: (CoverageReportConfig) -> None
def command_coverage_report(args: CoverageReportConfig) -> None:
"""Generate a console coverage report."""
host_state = prepare_profiles(args) # coverage report
output_files = combine_coverage_files(args, host_state)
@ -56,7 +56,7 @@ def command_coverage_report(args): # type: (CoverageReportConfig) -> None
run_coverage(args, host_state, output_file, 'report', options)
def _generate_powershell_output_report(args, coverage_file): # type: (CoverageReportConfig, str) -> str
def _generate_powershell_output_report(args: CoverageReportConfig, coverage_file: str) -> str:
"""Generate and return a PowerShell coverage report for the given coverage file."""
coverage_info = read_json_file(coverage_file)
@ -144,7 +144,7 @@ def _generate_powershell_output_report(args, coverage_file): # type: (CoverageR
class CoverageReportConfig(CoverageCombineConfig):
"""Configuration for the coverage report command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args)
self.show_missing = args.show_missing # type: bool

@ -48,7 +48,7 @@ from . import (
)
def command_coverage_xml(args): # type: (CoverageXmlConfig) -> None
def command_coverage_xml(args: CoverageXmlConfig) -> None:
"""Generate an XML coverage report."""
host_state = prepare_profiles(args) # coverage xml
output_files = combine_coverage_files(args, host_state)
@ -69,7 +69,7 @@ def command_coverage_xml(args): # type: (CoverageXmlConfig) -> None
run_coverage(args, host_state, output_file, 'xml', ['-i', '-o', xml_path])
def _generate_powershell_xml(coverage_file): # type: (str) -> Element
def _generate_powershell_xml(coverage_file: str) -> Element:
"""Generate a PowerShell coverage report XML element from the specified coverage file and return it."""
coverage_info = read_json_file(coverage_file)

@ -45,7 +45,7 @@ from ...ci import (
class EnvConfig(CommonConfig):
"""Configuration for the `env` command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'env')
self.show = args.show
@ -58,14 +58,14 @@ class EnvConfig(CommonConfig):
self.show = True
def command_env(args): # type: (EnvConfig) -> None
def command_env(args: EnvConfig) -> None:
"""Entry point for the `env` command."""
show_dump_env(args)
list_files_env(args)
set_timeout(args)
def show_dump_env(args): # type: (EnvConfig) -> None
def show_dump_env(args: EnvConfig) -> None:
"""Show information about the current environment and/or write the information to disk."""
if not args.show and not args.dump:
return
@ -107,7 +107,7 @@ def show_dump_env(args): # type: (EnvConfig) -> None
write_json_test_results(ResultType.BOT, 'data-environment.json', data)
def list_files_env(args): # type: (EnvConfig) -> None
def list_files_env(args: EnvConfig) -> None:
"""List files on stdout."""
if not args.list_files:
return
@ -116,7 +116,7 @@ def list_files_env(args): # type: (EnvConfig) -> None
display.info(path)
def set_timeout(args): # type: (EnvConfig) -> None
def set_timeout(args: EnvConfig) -> None:
"""Set an execution timeout for subsequent ansible-test invocations."""
if args.timeout is None:
return
@ -166,7 +166,7 @@ def show_dict(data, verbose, root_verbosity=0, path=None): # type: (t.Dict[str,
display.info(indent + '%s: %s' % (key, value), verbosity=verbosity)
def get_docker_details(args): # type: (EnvConfig) -> t.Dict[str, t.Any]
def get_docker_details(args: EnvConfig) -> t.Dict[str, t.Any]:
"""Return details about docker."""
docker = get_docker_command()

@ -130,7 +130,7 @@ from .coverage import (
THostProfile = t.TypeVar('THostProfile', bound=HostProfile)
def generate_dependency_map(integration_targets): # type: (t.List[IntegrationTarget]) -> t.Dict[str, t.Set[IntegrationTarget]]
def generate_dependency_map(integration_targets: t.List[IntegrationTarget]) -> t.Dict[str, t.Set[IntegrationTarget]]:
"""Analyze the given list of integration test targets and return a dictionary expressing target names and the targets on which they depend."""
targets_dict = dict((target.name, target) for target in integration_targets)
target_dependencies = analyze_integration_target_dependencies(integration_targets)
@ -157,7 +157,7 @@ def generate_dependency_map(integration_targets): # type: (t.List[IntegrationTa
return dependency_map
def get_files_needed(target_dependencies): # type: (t.List[IntegrationTarget]) -> t.List[str]
def get_files_needed(target_dependencies: t.List[IntegrationTarget]) -> t.List[str]:
"""Return a list of files needed by the given list of target dependencies."""
files_needed = [] # type: t.List[str]
@ -174,7 +174,7 @@ def get_files_needed(target_dependencies): # type: (t.List[IntegrationTarget])
return files_needed
def check_inventory(args, inventory_path): # type: (IntegrationConfig, str) -> None
def check_inventory(args: IntegrationConfig, inventory_path: str) -> None:
"""Check the given inventory for issues."""
if not isinstance(args.controller, OriginConfig):
if os.path.exists(inventory_path):
@ -196,7 +196,7 @@ def get_inventory_absolute_path(args: IntegrationConfig, target: InventoryConfig
return path
def get_inventory_relative_path(args): # type: (IntegrationConfig) -> str
def get_inventory_relative_path(args: IntegrationConfig) -> str:
"""Return the inventory path used for the given integration configuration relative to the content root."""
inventory_names = {
PosixIntegrationConfig: 'inventory',
@ -207,7 +207,7 @@ def get_inventory_relative_path(args): # type: (IntegrationConfig) -> str
return os.path.join(data_context().content.integration_path, inventory_names[type(args)])
def delegate_inventory(args, inventory_path_src): # type: (IntegrationConfig, str) -> None
def delegate_inventory(args: IntegrationConfig, inventory_path_src: str) -> None:
"""Make the given inventory available during delegation."""
if isinstance(args, PosixIntegrationConfig):
return
@ -839,7 +839,7 @@ class IntegrationCache(CommonCache):
return self.get('dependency_map', lambda: generate_dependency_map(self.integration_targets))
def filter_profiles_for_target(args, profiles, target): # type: (IntegrationConfig, t.List[THostProfile], IntegrationTarget) -> t.List[THostProfile]
def filter_profiles_for_target(args: IntegrationConfig, profiles: t.List[THostProfile], target: IntegrationTarget) -> t.List[THostProfile]:
"""Return a list of profiles after applying target filters."""
if target.target_type == IntegrationTargetType.CONTROLLER:
profile_filter = get_target_filter(args, [args.controller], True)
@ -853,7 +853,7 @@ def filter_profiles_for_target(args, profiles, target): # type: (IntegrationCon
return profiles
def get_integration_filter(args, targets): # type: (IntegrationConfig, t.List[IntegrationTarget]) -> t.Set[str]
def get_integration_filter(args: IntegrationConfig, targets: t.List[IntegrationTarget]) -> t.Set[str]:
"""Return a list of test targets to skip based on the host(s) that will be used to run the specified test targets."""
invalid_targets = sorted(target.name for target in targets if target.target_type not in (IntegrationTargetType.CONTROLLER, IntegrationTargetType.TARGET))
@ -956,7 +956,7 @@ def command_integration_filter(args, # type: TIntegrationConfig
return host_state, internal_targets
def requirements(args, host_state): # type: (IntegrationConfig, HostState) -> None
def requirements(args: IntegrationConfig, host_state: HostState) -> None:
"""Install requirements."""
target_profile = host_state.target_profiles[0]

@ -55,7 +55,7 @@ from ....docker_util import (
@cache
def get_cloud_plugins(): # type: () -> t.Tuple[t.Dict[str, t.Type[CloudProvider]], t.Dict[str, t.Type[CloudEnvironment]]]
def get_cloud_plugins() -> t.Tuple[t.Dict[str, t.Type[CloudProvider]], t.Dict[str, t.Type[CloudEnvironment]]]:
"""Import cloud plugins and load them into the plugin dictionaries."""
import_plugins('commands/integration/cloud')
@ -69,13 +69,13 @@ def get_cloud_plugins(): # type: () -> t.Tuple[t.Dict[str, t.Type[CloudProvider
@cache
def get_provider_plugins(): # type: () -> t.Dict[str, t.Type[CloudProvider]]
def get_provider_plugins() -> t.Dict[str, t.Type[CloudProvider]]:
"""Return a dictionary of the available cloud provider plugins."""
return get_cloud_plugins()[0]
@cache
def get_environment_plugins(): # type: () -> t.Dict[str, t.Type[CloudEnvironment]]
def get_environment_plugins() -> t.Dict[str, t.Type[CloudEnvironment]]:
"""Return a dictionary of the available cloud environment plugins."""
return get_cloud_plugins()[1]
@ -96,7 +96,7 @@ def get_cloud_platforms(args, targets=None): # type: (TestConfig, t.Optional[t.
return sorted(cloud_platforms)
def get_cloud_platform(target): # type: (IntegrationTarget) -> t.Optional[str]
def get_cloud_platform(target: IntegrationTarget) -> t.Optional[str]:
"""Return the name of the cloud platform used for the given target, or None if no cloud platform is used."""
cloud_platforms = set(a.split('/')[1] for a in target.aliases if a.startswith('cloud/') and a.endswith('/') and a != 'cloud/')
@ -119,7 +119,7 @@ def get_cloud_providers(args, targets=None): # type: (IntegrationConfig, t.Opti
return [get_provider_plugins()[p](args) for p in get_cloud_platforms(args, targets)]
def get_cloud_environment(args, target): # type: (IntegrationConfig, IntegrationTarget) -> t.Optional[CloudEnvironment]
def get_cloud_environment(args: IntegrationConfig, target: IntegrationTarget) -> t.Optional[CloudEnvironment]:
"""Return the cloud environment for the given target, or None if no cloud environment is used for the target."""
cloud_platform = get_cloud_platform(target)
@ -185,7 +185,7 @@ class CloudBase(metaclass=abc.ABCMeta):
_MANAGED = 'managed'
_SETUP_EXECUTED = 'setup_executed'
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
self.args = args
self.platform = self.__module__.rsplit('.', 1)[-1]
@ -204,42 +204,42 @@ class CloudBase(metaclass=abc.ABCMeta):
data_context().register_payload_callback(config_callback)
@property
def setup_executed(self): # type: () -> bool
def setup_executed(self) -> bool:
"""True if setup has been executed, otherwise False."""
return t.cast(bool, self._get_cloud_config(self._SETUP_EXECUTED, False))
@setup_executed.setter
def setup_executed(self, value): # type: (bool) -> None
def setup_executed(self, value: bool) -> None:
"""True if setup has been executed, otherwise False."""
self._set_cloud_config(self._SETUP_EXECUTED, value)
@property
def config_path(self): # type: () -> str
def config_path(self) -> str:
"""Path to the configuration file."""
return os.path.join(data_context().content.root, str(self._get_cloud_config(self._CONFIG_PATH)))
@config_path.setter
def config_path(self, value): # type: (str) -> None
def config_path(self, value: str) -> None:
"""Path to the configuration file."""
self._set_cloud_config(self._CONFIG_PATH, value)
@property
def resource_prefix(self): # type: () -> str
def resource_prefix(self) -> str:
"""Resource prefix."""
return str(self._get_cloud_config(self._RESOURCE_PREFIX))
@resource_prefix.setter
def resource_prefix(self, value): # type: (str) -> None
def resource_prefix(self, value: str) -> None:
"""Resource prefix."""
self._set_cloud_config(self._RESOURCE_PREFIX, value)
@property
def managed(self): # type: () -> bool
def managed(self) -> bool:
"""True if resources are managed by ansible-test, otherwise False."""
return t.cast(bool, self._get_cloud_config(self._MANAGED))
@managed.setter
def managed(self, value): # type: (bool) -> None
def managed(self, value: bool) -> None:
"""True if resources are managed by ansible-test, otherwise False."""
self._set_cloud_config(self._MANAGED, value)
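The property conversions above all follow one shape: a typed getter paired with a typed setter that reads and writes a shared config mapping. A self-contained sketch of that shape (ExampleState is an illustrative stand-in, not a class from this module):

import typing as t

class ExampleState:
    """Illustrative stand-in for the typed property pattern used by CloudBase."""
    _SETUP_EXECUTED = 'setup_executed'

    def __init__(self) -> None:
        self._config: t.Dict[str, t.Any] = {}

    @property
    def setup_executed(self) -> bool:
        """True if setup has been executed, otherwise False."""
        return bool(self._config.get(self._SETUP_EXECUTED, False))

    @setup_executed.setter
    def setup_executed(self, value: bool) -> None:
        self._config[self._SETUP_EXECUTED] = value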
@ -257,7 +257,7 @@ class CloudBase(metaclass=abc.ABCMeta):
class CloudProvider(CloudBase):
"""Base class for cloud provider plugins. Sets up cloud resources before delegation."""
def __init__(self, args, config_extension='.ini'): # type: (IntegrationConfig, str) -> None
def __init__(self, args: IntegrationConfig, config_extension: str = '.ini') -> None:
super().__init__(args)
self.ci_provider = get_ci_provider()
@ -297,19 +297,19 @@ class CloudProvider(CloudBase):
display.warning('Excluding tests marked "%s" which requires container support or config (see "%s"): %s'
% (skip.rstrip('/'), self.config_template_path, ', '.join(skipped)))
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
self.resource_prefix = self.ci_provider.generate_resource_prefix()
self.resource_prefix = re.sub(r'[^a-zA-Z0-9]+', '-', self.resource_prefix)[:63].lower().rstrip('-')
atexit.register(self.cleanup)
def cleanup(self): # type: () -> None
def cleanup(self) -> None:
"""Clean up the cloud resource and any temporary configuration files after tests complete."""
if self.remove_config:
os.remove(self.config_path)
def _use_static_config(self): # type: () -> bool
def _use_static_config(self) -> bool:
"""Use a static config file if available. Returns True if static config is used, otherwise returns False."""
if os.path.isfile(self.config_static_path):
display.info('Using existing %s cloud config: %s' % (self.platform, self.config_static_path), verbosity=1)
@ -322,7 +322,7 @@ class CloudProvider(CloudBase):
return static
def _write_config(self, content): # type: (str) -> None
def _write_config(self, content: str) -> None:
"""Write the given content to the config file."""
prefix = '%s-' % os.path.splitext(os.path.basename(self.config_static_path))[0]
@ -337,7 +337,7 @@ class CloudProvider(CloudBase):
config_fd.write(to_bytes(content))
config_fd.flush()
def _read_config_template(self): # type: () -> str
def _read_config_template(self) -> str:
"""Read and return the configuration template."""
lines = read_text_file(self.config_template_path).splitlines()
lines = [line for line in lines if not line.startswith('#')]
@ -356,7 +356,7 @@ class CloudProvider(CloudBase):
class CloudEnvironment(CloudBase):
"""Base class for cloud environment plugins. Updates integration test environment after delegation."""
def setup_once(self): # type: () -> None
def setup_once(self) -> None:
"""Run setup if it has not already been run."""
if self.setup_executed:
return
@ -364,14 +364,14 @@ class CloudEnvironment(CloudBase):
self.setup()
self.setup_executed = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup which should be done once per environment instead of once per test target."""
@abc.abstractmethod
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
def on_failure(self, target, tries): # type: (IntegrationTarget, int) -> None
def on_failure(self, target: IntegrationTarget, tries: int) -> None:
"""Callback to run when an integration target fails."""

@ -23,7 +23,7 @@ class ACMEProvider(CloudProvider):
"""ACME plugin. Sets up cloud resources for tests."""
DOCKER_SIMULATOR_NAME = 'acme-simulator'
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
# The simulator must be pinned to a specific version to guarantee CI passes with the version used.
@ -34,7 +34,7 @@ class ACMEProvider(CloudProvider):
self.uses_docker = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
@ -43,7 +43,7 @@ class ACMEProvider(CloudProvider):
else:
self._setup_dynamic()
def _setup_dynamic(self): # type: () -> None
def _setup_dynamic(self) -> None:
"""Create a ACME test container using docker."""
ports = [
5000, # control port for flask app in container
@ -62,13 +62,13 @@ class ACMEProvider(CloudProvider):
self._set_cloud_config('acme_host', self.DOCKER_SIMULATOR_NAME)
def _setup_static(self): # type: () -> None
def _setup_static(self) -> None:
raise NotImplementedError()
class ACMEEnvironment(CloudEnvironment):
"""ACME environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
ansible_vars = dict(
acme_host=self._get_cloud_config('acme_host'),

@ -37,7 +37,7 @@ from . import (
class AwsCloudProvider(CloudProvider):
"""AWS cloud provider plugin. Sets up cloud resources before delegation."""
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
self.uses_config = True
@ -51,7 +51,7 @@ class AwsCloudProvider(CloudProvider):
super().filter(targets, exclude)
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
@ -63,7 +63,7 @@ class AwsCloudProvider(CloudProvider):
if not self._use_static_config():
self._setup_dynamic()
def _setup_dynamic(self): # type: () -> None
def _setup_dynamic(self) -> None:
"""Request AWS credentials through the Ansible Core CI service."""
display.info('Provisioning %s cloud environment.' % self.platform, verbosity=1)
@ -90,14 +90,14 @@ class AwsCloudProvider(CloudProvider):
self._write_config(config)
def _create_ansible_core_ci(self): # type: () -> AnsibleCoreCI
def _create_ansible_core_ci(self) -> AnsibleCoreCI:
"""Return an AWS instance of AnsibleCoreCI."""
return AnsibleCoreCI(self.args, CloudResource(platform='aws'))
class AwsCloudEnvironment(CloudEnvironment):
"""AWS cloud environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
parser.read(self.config_path)
@ -123,7 +123,7 @@ class AwsCloudEnvironment(CloudEnvironment):
callback_plugins=['aws_resource_actions'],
)
def on_failure(self, target, tries): # type: (IntegrationTarget, int) -> None
def on_failure(self, target: IntegrationTarget, tries: int) -> None:
"""Callback to run when an integration target fails."""
if not tries and self.managed:
display.notice('If %s failed due to permissions, the IAM test policy may need to be updated. '

@ -31,7 +31,7 @@ from . import (
class AzureCloudProvider(CloudProvider):
"""Azure cloud provider plugin. Sets up cloud resources before delegation."""
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
self.aci = None # type: t.Optional[AnsibleCoreCI]
@ -47,7 +47,7 @@ class AzureCloudProvider(CloudProvider):
super().filter(targets, exclude)
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
@ -56,14 +56,14 @@ class AzureCloudProvider(CloudProvider):
get_config(self.config_path) # check required variables
def cleanup(self): # type: () -> None
def cleanup(self) -> None:
"""Clean up the cloud resource and any temporary configuration files after tests complete."""
if self.aci:
self.aci.stop()
super().cleanup()
def _setup_dynamic(self): # type: () -> None
def _setup_dynamic(self) -> None:
"""Request Azure credentials through ansible-core-ci."""
display.info('Provisioning %s cloud environment.' % self.platform, verbosity=1)
@ -96,14 +96,14 @@ class AzureCloudProvider(CloudProvider):
self._write_config(config)
def _create_ansible_core_ci(self): # type: () -> AnsibleCoreCI
def _create_ansible_core_ci(self) -> AnsibleCoreCI:
"""Return an Azure instance of AnsibleCoreCI."""
return AnsibleCoreCI(self.args, CloudResource(platform='azure'))
class AzureCloudEnvironment(CloudEnvironment):
"""Azure cloud environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
env_vars = get_config(self.config_path)
@ -121,7 +121,7 @@ class AzureCloudEnvironment(CloudEnvironment):
ansible_vars=ansible_vars,
)
def on_failure(self, target, tries): # type: (IntegrationTarget, int) -> None
def on_failure(self, target: IntegrationTarget, tries: int) -> None:
"""Callback to run when an integration target fails."""
if not tries and self.managed:
display.notice('If %s failed due to permissions, the test policy may need to be updated.' % target.name)

@ -25,12 +25,12 @@ from . import (
class CloudscaleCloudProvider(CloudProvider):
"""Cloudscale cloud provider plugin. Sets up cloud resources before delegation."""
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
self.uses_config = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
@ -39,7 +39,7 @@ class CloudscaleCloudProvider(CloudProvider):
class CloudscaleCloudEnvironment(CloudEnvironment):
"""Cloudscale cloud environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
parser.read(self.config_path)

@ -37,7 +37,7 @@ class CsCloudProvider(CloudProvider):
"""CloudStack cloud provider plugin. Sets up cloud resources before delegation."""
DOCKER_SIMULATOR_NAME = 'cloudstack-sim'
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
self.image = os.environ.get('ANSIBLE_CLOUDSTACK_CONTAINER', 'quay.io/ansible/cloudstack-test-container:1.4.0')
@ -47,7 +47,7 @@ class CsCloudProvider(CloudProvider):
self.uses_docker = True
self.uses_config = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
@ -56,7 +56,7 @@ class CsCloudProvider(CloudProvider):
else:
self._setup_dynamic()
def _setup_static(self): # type: () -> None
def _setup_static(self) -> None:
"""Configure CloudStack tests for use with static configuration."""
parser = configparser.ConfigParser()
parser.read(self.config_static_path)
@ -81,7 +81,7 @@ class CsCloudProvider(CloudProvider):
display.info('Read cs host "%s" and port %d from config: %s' % (self.host, self.port, self.config_static_path), verbosity=1)
def _setup_dynamic(self): # type: () -> None
def _setup_dynamic(self) -> None:
"""Create a CloudStack simulator using docker."""
config = self._read_config_template()
@ -129,7 +129,7 @@ class CsCloudProvider(CloudProvider):
self._write_config(config)
def _get_credentials(self, container_name): # type: (str) -> t.Dict[str, t.Any]
def _get_credentials(self, container_name: str) -> t.Dict[str, t.Any]:
"""Wait for the CloudStack simulator to return credentials."""
def check(value):
"""Return True if the given configuration is valid JSON, otherwise return False."""
@ -148,7 +148,7 @@ class CsCloudProvider(CloudProvider):
class CsCloudEnvironment(CloudEnvironment):
"""CloudStack cloud environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
parser.read(self.config_path)

@ -20,12 +20,12 @@ from . import (
class DigitalOceanCloudProvider(CloudProvider):
"""Checks if a configuration file has been passed or fixtures are going to be used for testing"""
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
self.uses_config = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
@ -34,7 +34,7 @@ class DigitalOceanCloudProvider(CloudProvider):
class DigitalOceanCloudEnvironment(CloudEnvironment):
"""Updates integration test environment after delegation. Will setup the config file as parameter."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
parser.read(self.config_path)

@ -32,7 +32,7 @@ class ForemanProvider(CloudProvider):
# https://github.com/ansible/foreman-test-container
DOCKER_IMAGE = 'quay.io/ansible/foreman-test-container:1.4.0'
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
self.__container_from_env = os.environ.get('ANSIBLE_FRMNSIM_CONTAINER')
@ -46,7 +46,7 @@ class ForemanProvider(CloudProvider):
self.uses_docker = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup cloud resource before delegation and reg cleanup callback."""
super().setup()
@ -55,7 +55,7 @@ class ForemanProvider(CloudProvider):
else:
self._setup_dynamic()
def _setup_dynamic(self): # type: () -> None
def _setup_dynamic(self) -> None:
"""Spawn a Foreman stub within docker container."""
foreman_port = 8080
@ -76,13 +76,13 @@ class ForemanProvider(CloudProvider):
self._set_cloud_config('FOREMAN_HOST', self.DOCKER_SIMULATOR_NAME)
self._set_cloud_config('FOREMAN_PORT', str(foreman_port))
def _setup_static(self): # type: () -> None
def _setup_static(self) -> None:
raise NotImplementedError()
class ForemanEnvironment(CloudEnvironment):
"""Foreman environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
env_vars = dict(
FOREMAN_HOST=str(self._get_cloud_config('FOREMAN_HOST')),

@ -77,7 +77,7 @@ class GalaxyProvider(CloudProvider):
Galaxy plugin. Sets up pulp (ansible-galaxy) servers for tests.
The pulp source itself resides at: https://github.com/pulp/pulp-oci-images
"""
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
# Cannot use the latest container image as either galaxy_ng 4.2.0rc2 or pulp 0.5.0 has sporadic issues with
@ -91,7 +91,7 @@ class GalaxyProvider(CloudProvider):
self.uses_docker = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup cloud resource before delegation and reg cleanup callback."""
super().setup()
@ -143,7 +143,7 @@ class GalaxyProvider(CloudProvider):
class GalaxyEnvironment(CloudEnvironment):
"""Galaxy environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
pulp_user = str(self._get_cloud_config('PULP_USER'))
pulp_password = str(self._get_cloud_config('PULP_PASSWORD'))

@ -22,12 +22,12 @@ from . import (
class GcpCloudProvider(CloudProvider):
"""GCP cloud provider plugin. Sets up cloud resources before delegation."""
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
self.uses_config = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
@ -39,7 +39,7 @@ class GcpCloudProvider(CloudProvider):
class GcpCloudEnvironment(CloudEnvironment):
"""GCP cloud environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
parser.read(self.config_path)

@ -30,7 +30,7 @@ from . import (
class HcloudCloudProvider(CloudProvider):
"""Hetzner Cloud provider plugin. Sets up cloud resources before delegation."""
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
self.uses_config = True
@ -44,14 +44,14 @@ class HcloudCloudProvider(CloudProvider):
super().filter(targets, exclude)
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
if not self._use_static_config():
self._setup_dynamic()
def _setup_dynamic(self): # type: () -> None
def _setup_dynamic(self) -> None:
"""Request Hetzner credentials through the Ansible Core CI service."""
display.info('Provisioning %s cloud environment.' % self.platform, verbosity=1)
@ -77,14 +77,14 @@ class HcloudCloudProvider(CloudProvider):
self._write_config(config)
def _create_ansible_core_ci(self): # type: () -> AnsibleCoreCI
def _create_ansible_core_ci(self) -> AnsibleCoreCI:
"""Return a Heztner instance of AnsibleCoreCI."""
return AnsibleCoreCI(self.args, CloudResource(platform='hetzner'))
class HcloudCloudEnvironment(CloudEnvironment):
"""Hetzner Cloud cloud environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
parser.read(self.config_path)

@ -28,14 +28,14 @@ KRB5_PASSWORD_ENV = 'KRB5_PASSWORD'
class HttptesterProvider(CloudProvider):
"""HTTP Tester provider plugin. Sets up resources before delegation."""
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
self.image = os.environ.get('ANSIBLE_HTTP_TEST_CONTAINER', 'quay.io/ansible/http-test-container:2.1.0')
self.uses_docker = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup resources before delegation."""
super().setup()
@ -82,7 +82,7 @@ class HttptesterProvider(CloudProvider):
class HttptesterEnvironment(CloudEnvironment):
"""HTTP Tester environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
return CloudEnvironmentConfig(
env_vars=dict(

@ -32,7 +32,7 @@ class NiosProvider(CloudProvider):
# https://github.com/ansible/nios-test-container
DOCKER_IMAGE = 'quay.io/ansible/nios-test-container:1.4.0'
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
self.__container_from_env = os.environ.get('ANSIBLE_NIOSSIM_CONTAINER')
@ -47,7 +47,7 @@ class NiosProvider(CloudProvider):
self.uses_docker = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup cloud resource before delegation and reg cleanup callback."""
super().setup()
@ -56,7 +56,7 @@ class NiosProvider(CloudProvider):
else:
self._setup_dynamic()
def _setup_dynamic(self): # type: () -> None
def _setup_dynamic(self) -> None:
"""Spawn a NIOS simulator within docker container."""
nios_port = 443
@ -76,13 +76,13 @@ class NiosProvider(CloudProvider):
self._set_cloud_config('NIOS_HOST', self.DOCKER_SIMULATOR_NAME)
def _setup_static(self): # type: () -> None
def _setup_static(self) -> None:
raise NotImplementedError()
class NiosEnvironment(CloudEnvironment):
"""NIOS environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
ansible_vars = dict(
nios_provider=dict(

@ -16,7 +16,7 @@ from . import (
class OpenNebulaCloudProvider(CloudProvider):
"""Checks if a configuration file has been passed or fixtures are going to be used for testing"""
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
@ -25,7 +25,7 @@ class OpenNebulaCloudProvider(CloudProvider):
self.uses_config = True
def _setup_dynamic(self): # type: () -> None
def _setup_dynamic(self) -> None:
display.info('No config file provided, will run test from fixtures')
config = self._read_config_template()
@ -42,7 +42,7 @@ class OpenNebulaCloudProvider(CloudProvider):
class OpenNebulaCloudEnvironment(CloudEnvironment):
"""Updates integration test environment after delegation. Will setup the config file as parameter."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
parser.read(self.config_path)

@ -32,7 +32,7 @@ class OpenShiftCloudProvider(CloudProvider):
"""OpenShift cloud provider plugin. Sets up cloud resources before delegation."""
DOCKER_CONTAINER_NAME = 'openshift-origin'
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args, config_extension='.kubeconfig')
# The image must be pinned to a specific version to guarantee CI passes with the version used.
@ -41,7 +41,7 @@ class OpenShiftCloudProvider(CloudProvider):
self.uses_docker = True
self.uses_config = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
@ -50,7 +50,7 @@ class OpenShiftCloudProvider(CloudProvider):
else:
self._setup_dynamic()
def _setup_static(self): # type: () -> None
def _setup_static(self) -> None:
"""Configure OpenShift tests for use with static configuration."""
config = read_text_file(self.config_static_path)
@ -59,7 +59,7 @@ class OpenShiftCloudProvider(CloudProvider):
if not match:
display.warning('Could not find OpenShift endpoint in kubeconfig.')
def _setup_dynamic(self): # type: () -> None
def _setup_dynamic(self) -> None:
"""Create a OpenShift container using docker."""
port = 8443
@ -90,7 +90,7 @@ class OpenShiftCloudProvider(CloudProvider):
self._write_config(config)
def _get_config(self, container_name, server): # type: (str, str) -> str
def _get_config(self, container_name: str, server: str) -> str:
"""Get OpenShift config from container."""
stdout = wait_for_file(self.args, container_name, '/var/lib/origin/openshift.local.config/master/admin.kubeconfig', sleep=10, tries=30)
@ -103,7 +103,7 @@ class OpenShiftCloudProvider(CloudProvider):
class OpenShiftCloudEnvironment(CloudEnvironment):
"""OpenShift cloud environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
env_vars = dict(
K8S_AUTH_KUBECONFIG=self.config_path,

@ -20,12 +20,12 @@ from . import (
class ScalewayCloudProvider(CloudProvider):
"""Checks if a configuration file has been passed or fixtures are going to be used for testing"""
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
self.uses_config = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
@ -34,7 +34,7 @@ class ScalewayCloudProvider(CloudProvider):
class ScalewayCloudEnvironment(CloudEnvironment):
"""Updates integration test environment after delegation. Will setup the config file as parameter."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
parser.read(self.config_path)

@ -29,7 +29,7 @@ class VcenterProvider(CloudProvider):
"""VMware vcenter/esx plugin. Sets up cloud resources for tests."""
DOCKER_SIMULATOR_NAME = 'vcenter-simulator'
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
# The simulator must be pinned to a specific version to guarantee CI passes with the version used.
@ -49,7 +49,7 @@ class VcenterProvider(CloudProvider):
self.uses_docker = False
self.uses_config = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
@ -64,7 +64,7 @@ class VcenterProvider(CloudProvider):
else:
raise ApplicationError('Unknown vmware_test_platform: %s' % self.vmware_test_platform)
def _setup_dynamic_simulator(self): # type: () -> None
def _setup_dynamic_simulator(self) -> None:
"""Create a vcenter simulator using docker."""
ports = [
443,
@ -85,14 +85,14 @@ class VcenterProvider(CloudProvider):
self._set_cloud_config('vcenter_hostname', self.DOCKER_SIMULATOR_NAME)
def _setup_static(self): # type: () -> None
def _setup_static(self) -> None:
if not os.path.exists(self.config_static_path):
raise ApplicationError('Configuration file does not exist: %s' % self.config_static_path)
class VcenterEnvironment(CloudEnvironment):
"""VMware vcenter/esx environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
try:
# We may be in a container, so we cannot just reach VMWARE_TEST_PLATFORM,

@ -20,12 +20,12 @@ from . import (
class VultrCloudProvider(CloudProvider):
"""Checks if a configuration file has been passed or fixtures are going to be used for testing"""
def __init__(self, args): # type: (IntegrationConfig) -> None
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
self.uses_config = True
def setup(self): # type: () -> None
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
@ -34,7 +34,7 @@ class VultrCloudProvider(CloudProvider):
class VultrCloudEnvironment(CloudEnvironment):
"""Updates integration test environment after delegation. Will setup the config file as parameter."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
parser.read(self.config_path)

@ -81,13 +81,13 @@ THostConfig = t.TypeVar('THostConfig', bound=HostConfig)
class CoverageHandler(t.Generic[THostConfig], metaclass=abc.ABCMeta):
"""Base class for configuring hosts for integration test code coverage."""
def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None
def __init__(self, args: IntegrationConfig, host_state: HostState, inventory_path: str) -> None:
self.args = args
self.host_state = host_state
self.inventory_path = inventory_path
self.profiles = self.get_profiles()
def get_profiles(self): # type: () -> t.List[HostProfile]
def get_profiles(self) -> t.List[HostProfile]:
"""Return a list of profiles relevant for this handler."""
profile_type = get_generic_type(type(self), HostConfig)
profiles = [profile for profile in self.host_state.target_profiles if isinstance(profile.config, profile_type)]
@ -96,19 +96,19 @@ class CoverageHandler(t.Generic[THostConfig], metaclass=abc.ABCMeta):
@property
@abc.abstractmethod
def is_active(self): # type: () -> bool
def is_active(self) -> bool:
"""True if the handler should be used, otherwise False."""
@abc.abstractmethod
def setup(self): # type: () -> None
def setup(self) -> None:
"""Perform setup for code coverage."""
@abc.abstractmethod
def teardown(self): # type: () -> None
def teardown(self) -> None:
"""Perform teardown for code coverage."""
@abc.abstractmethod
def create_inventory(self): # type: () -> None
def create_inventory(self) -> None:
"""Create inventory, if needed."""
@abc.abstractmethod
@ -123,13 +123,13 @@ class CoverageHandler(t.Generic[THostConfig], metaclass=abc.ABCMeta):
class PosixCoverageHandler(CoverageHandler[PosixConfig]):
"""Configure integration test code coverage for POSIX hosts."""
def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None
def __init__(self, args: IntegrationConfig, host_state: HostState, inventory_path: str) -> None:
super().__init__(args, host_state, inventory_path)
# Common temporary directory used on all POSIX hosts that will be created world writeable.
self.common_temp_path = f'/tmp/ansible-test-{generate_name()}'
def get_profiles(self): # type: () -> t.List[HostProfile]
def get_profiles(self) -> t.List[HostProfile]:
"""Return a list of profiles relevant for this handler."""
profiles = super().get_profiles()
profiles = [profile for profile in profiles if not isinstance(profile, ControllerProfile) or
@ -138,21 +138,21 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]):
return profiles
@property
def is_active(self): # type: () -> bool
def is_active(self) -> bool:
"""True if the handler should be used, otherwise False."""
return True
@property
def target_profile(self): # type: () -> t.Optional[PosixProfile]
def target_profile(self) -> t.Optional[PosixProfile]:
"""The POSIX target profile, if it uses a different Python interpreter than the controller, otherwise None."""
return t.cast(PosixProfile, self.profiles[0]) if self.profiles else None
def setup(self): # type: () -> None
def setup(self) -> None:
"""Perform setup for code coverage."""
self.setup_controller()
self.setup_target()
def teardown(self): # type: () -> None
def teardown(self) -> None:
"""Perform teardown for code coverage."""
self.teardown_controller()
self.teardown_target()
@ -180,7 +180,7 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]):
self.run_playbook('posix_coverage_setup.yml', self.get_playbook_variables())
def teardown_controller(self): # type: () -> None
def teardown_controller(self) -> None:
"""Perform teardown for code coverage on the controller."""
coverage_temp_path = os.path.join(self.common_temp_path, ResultType.COVERAGE.name)
platform = get_coverage_platform(self.args.controller)
@ -190,7 +190,7 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]):
remove_tree(self.common_temp_path)
def teardown_target(self): # type: () -> None
def teardown_target(self) -> None:
"""Perform teardown for code coverage on the target."""
if not self.target_profile:
return
@ -243,11 +243,11 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]):
return variables
def create_inventory(self): # type: () -> None
def create_inventory(self) -> None:
"""Create inventory."""
create_posix_inventory(self.args, self.inventory_path, self.host_state.target_profiles)
def get_playbook_variables(self): # type: () -> t.Dict[str, str]
def get_playbook_variables(self) -> t.Dict[str, str]:
"""Return a dictionary of variables for setup and teardown of POSIX coverage."""
return dict(
common_temp_dir=self.common_temp_path,
@ -262,22 +262,22 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]):
class WindowsCoverageHandler(CoverageHandler[WindowsConfig]):
"""Configure integration test code coverage for Windows hosts."""
def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None
def __init__(self, args: IntegrationConfig, host_state: HostState, inventory_path: str) -> None:
super().__init__(args, host_state, inventory_path)
# Common temporary directory used on all Windows hosts that will be created writable by everyone.
self.remote_temp_path = f'C:\\ansible_test_coverage_{generate_name()}'
@property
def is_active(self): # type: () -> bool
def is_active(self) -> bool:
"""True if the handler should be used, otherwise False."""
return bool(self.profiles) and not self.args.coverage_check
def setup(self): # type: () -> None
def setup(self) -> None:
"""Perform setup for code coverage."""
self.run_playbook('windows_coverage_setup.yml', self.get_playbook_variables())
def teardown(self): # type: () -> None
def teardown(self) -> None:
"""Perform teardown for code coverage."""
with tempfile.TemporaryDirectory() as local_temp_path:
variables = self.get_playbook_variables()
@ -320,11 +320,11 @@ class WindowsCoverageHandler(CoverageHandler[WindowsConfig]):
return variables
def create_inventory(self): # type: () -> None
def create_inventory(self) -> None:
"""Create inventory."""
create_windows_inventory(self.args, self.inventory_path, self.host_state.target_profiles)
def get_playbook_variables(self): # type: () -> t.Dict[str, str]
def get_playbook_variables(self) -> t.Dict[str, str]:
"""Return a dictionary of variables for setup and teardown of Windows coverage."""
return dict(
remote_temp_path=self.remote_temp_path,
@ -333,7 +333,7 @@ class WindowsCoverageHandler(CoverageHandler[WindowsConfig]):
class CoverageManager:
"""Manager for code coverage configuration and state."""
def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None
def __init__(self, args: IntegrationConfig, host_state: HostState, inventory_path: str) -> None:
self.args = args
self.host_state = host_state
self.inventory_path = inventory_path
@ -348,7 +348,7 @@ class CoverageManager:
self.handlers = [handler for handler in handlers if handler.is_active]
def setup(self): # type: () -> None
def setup(self) -> None:
"""Perform setup for code coverage."""
if not self.args.coverage:
return
@ -356,7 +356,7 @@ class CoverageManager:
for handler in self.handlers:
handler.setup()
def teardown(self): # type: () -> None
def teardown(self) -> None:
"""Perform teardown for code coverage."""
if not self.args.coverage:
return
@ -378,12 +378,12 @@ class CoverageManager:
@cache
def get_config_handler_type_map(): # type: () -> t.Dict[t.Type[HostConfig], t.Type[CoverageHandler]]
def get_config_handler_type_map() -> t.Dict[t.Type[HostConfig], t.Type[CoverageHandler]]:
"""Create and return a mapping of HostConfig types to CoverageHandler types."""
return get_type_map(CoverageHandler, HostConfig)
def get_handler_type(config_type): # type: (t.Type[HostConfig]) -> t.Optional[t.Type[CoverageHandler]]
def get_handler_type(config_type: t.Type[HostConfig]) -> t.Optional[t.Type[CoverageHandler]]:
"""Return the coverage handler type associated with the given host config type if found, otherwise return None."""
queue = [config_type]
type_map = get_config_handler_type_map()
@ -400,7 +400,7 @@ def get_handler_type(config_type): # type: (t.Type[HostConfig]) -> t.Optional[t
return None
def update_coverage_filename(original_filename, platform): # type: (str, str) -> str
def update_coverage_filename(original_filename: str, platform: str) -> str:
"""Validate the given filename and insert the specified platform, then return the result."""
parts = original_filename.split('=')
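get_handler_type above resolves a coverage handler by starting a queue with the host config type and consulting the cached type map; its elided body is not reproduced here. A standalone sketch of one way such a queue-based lookup can work, where walking base classes is an assumption for illustration and the class names are placeholders rather than the real host configs:

import typing as t

class HostConfig: ...
class PosixConfig(HostConfig): ...
class DockerConfig(PosixConfig): ...

HANDLERS: t.Dict[t.Type[HostConfig], str] = {PosixConfig: 'posix-coverage'}

def lookup_handler(config_type: t.Type[HostConfig]) -> t.Optional[str]:
    """Walk the given type and its bases until a registered handler is found, otherwise return None."""
    queue: t.List[type] = [config_type]
    while queue:
        current = queue.pop(0)
        if current in HANDLERS:
            return HANDLERS[current]
        queue.extend(base for base in current.__bases__ if base is not object)
    return None

assert lookup_handler(DockerConfig) == 'posix-coverage'  # found via the PosixConfig base class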

@ -47,7 +47,7 @@ THostProfile = t.TypeVar('THostProfile', bound=HostProfile)
class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
"""Base class for target filters."""
def __init__(self, args, configs, controller): # type: (IntegrationConfig, t.List[THostConfig], bool) -> None
def __init__(self, args: IntegrationConfig, configs: t.List[THostConfig], controller: bool) -> None:
self.args = args
self.configs = configs
self.controller = controller
@ -59,7 +59,7 @@ class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
self.allow_destructive = args.allow_destructive
@property
def config(self): # type: () -> THostConfig
def config(self) -> THostConfig:
"""The configuration to filter. Only valid when there is a single config."""
if len(self.configs) != 1:
raise Exception()
@ -82,7 +82,7 @@ class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
self.apply_skip(f'"{skip}"', reason, skipped, exclude)
def apply_skip(self, marked, reason, skipped, exclude): # type: (str, str, t.List[str], t.Set[str]) -> None
def apply_skip(self, marked: str, reason: str, skipped: t.List[str], exclude: t.Set[str]) -> None:
"""Apply the provided skips to the given exclude list."""
if not skipped:
return
@ -90,12 +90,12 @@ class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
exclude.update(skipped)
display.warning(f'Excluding {self.host_type} tests marked {marked} {reason}: {", ".join(skipped)}')
def filter_profiles(self, profiles, target): # type: (t.List[THostProfile], IntegrationTarget) -> t.List[THostProfile]
def filter_profiles(self, profiles: t.List[THostProfile], target: IntegrationTarget) -> t.List[THostProfile]:
"""Filter the list of profiles, returning only those which are not skipped for the given target."""
del target
return profiles
def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None
def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None:
"""Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
if self.controller and self.args.host_settings.controller_fallback and targets:
affected_targets = [target.name for target in targets]
@ -138,7 +138,7 @@ class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
class PosixTargetFilter(TargetFilter[TPosixConfig]):
"""Target filter for POSIX hosts."""
def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None
def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None:
"""Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
super().filter_targets(targets, exclude)
@ -151,7 +151,7 @@ class PosixTargetFilter(TargetFilter[TPosixConfig]):
class DockerTargetFilter(PosixTargetFilter[DockerConfig]):
"""Target filter for docker hosts."""
def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None
def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None:
"""Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
super().filter_targets(targets, exclude)
@ -167,7 +167,7 @@ class PosixSshTargetFilter(PosixTargetFilter[PosixSshConfig]):
class RemoteTargetFilter(TargetFilter[TRemoteConfig]):
"""Target filter for remote Ansible Core CI managed hosts."""
def filter_profiles(self, profiles, target): # type: (t.List[THostProfile], IntegrationTarget) -> t.List[THostProfile]
def filter_profiles(self, profiles: t.List[THostProfile], target: IntegrationTarget) -> t.List[THostProfile]:
"""Filter the list of profiles, returning only those which are not skipped for the given target."""
profiles = super().filter_profiles(profiles, target)
@ -181,7 +181,7 @@ class RemoteTargetFilter(TargetFilter[TRemoteConfig]):
return profiles
def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None
def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None:
"""Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
super().filter_targets(targets, exclude)
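The apply_skip conversion earlier in this file keeps its behaviour: return early when nothing was skipped, add the skipped names to the exclude set, then warn. A self-contained sketch of that flow, with display.warning replaced by print and with placeholder target names (both assumptions made only for this illustration):

import typing as t

def apply_skip(marked: str, reason: str, skipped: t.List[str], exclude: t.Set[str]) -> None:
    """Add skipped target names to the exclude set and report why they were excluded."""
    if not skipped:
        return
    exclude.update(skipped)
    print(f'Excluding tests marked {marked} {reason}: {", ".join(skipped)}')

excluded: t.Set[str] = set()
apply_skip('"skip/docker"', 'which cannot run in docker', ['target_a', 'target_b'], excluded)
assert excluded == {'target_a', 'target_b'}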
@ -224,7 +224,7 @@ class NetworkInventoryTargetFilter(TargetFilter[NetworkInventoryConfig]):
class OriginTargetFilter(PosixTargetFilter[OriginConfig]):
"""Target filter for localhost."""
def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None
def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None:
"""Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
super().filter_targets(targets, exclude)
@ -235,12 +235,12 @@ class OriginTargetFilter(PosixTargetFilter[OriginConfig]):
@cache
def get_host_target_type_map(): # type: () -> t.Dict[t.Type[HostConfig], t.Type[TargetFilter]]
def get_host_target_type_map() -> t.Dict[t.Type[HostConfig], t.Type[TargetFilter]]:
"""Create and return a mapping of HostConfig types to TargetFilter types."""
return get_type_map(TargetFilter, HostConfig)
def get_target_filter(args, configs, controller): # type: (IntegrationConfig, t.List[HostConfig], bool) -> TargetFilter
def get_target_filter(args: IntegrationConfig, configs: t.List[HostConfig], controller: bool) -> TargetFilter:
"""Return an integration test target filter instance for the provided host configurations."""
target_type = type(configs[0])
@ -254,12 +254,12 @@ def get_target_filter(args, configs, controller): # type: (IntegrationConfig, t
return filter_instance
def get_remote_skip_aliases(config): # type: (RemoteConfig) -> t.Dict[str, str]
def get_remote_skip_aliases(config: RemoteConfig) -> t.Dict[str, str]:
"""Return a dictionary of skip aliases and the reason why they apply."""
return get_platform_skip_aliases(config.platform, config.version, config.arch)
def get_platform_skip_aliases(platform, version, arch): # type: (str, str, t.Optional[str]) -> t.Dict[str, str]
def get_platform_skip_aliases(platform: str, version: str, arch: t.Optional[str]) -> t.Dict[str, str]:
"""Return a dictionary of skip aliases and the reason why they apply."""
skips = {
f'skip/{platform}': platform,

@ -39,7 +39,7 @@ from ...host_configs import (
)
def command_network_integration(args): # type: (NetworkIntegrationConfig) -> None
def command_network_integration(args: NetworkIntegrationConfig) -> None:
"""Entry point for the `network-integration` command."""
handle_layout_messages(data_context().content.integration_messages)

@ -32,7 +32,7 @@ from ...data import (
)
def command_posix_integration(args): # type: (PosixIntegrationConfig) -> None
def command_posix_integration(args: PosixIntegrationConfig) -> None:
"""Entry point for the `integration` command."""
handle_layout_messages(data_context().content.integration_messages)

@ -45,7 +45,7 @@ from ...data import (
)
def command_windows_integration(args): # type: (WindowsIntegrationConfig) -> None
def command_windows_integration(args: WindowsIntegrationConfig) -> None:
"""Entry point for the `windows-integration` command."""
handle_layout_messages(data_context().content.integration_messages)

@ -131,7 +131,7 @@ DOCUMENTABLE_PLUGINS = (
created_venvs = [] # type: t.List[str]
def command_sanity(args): # type: (SanityConfig) -> None
def command_sanity(args: SanityConfig) -> None:
"""Run sanity tests."""
create_result_directories(args)
@ -307,7 +307,7 @@ def command_sanity(args): # type: (SanityConfig) -> None
@cache
def collect_code_smell_tests(): # type: () -> t.Tuple[SanityTest, ...]
def collect_code_smell_tests() -> t.Tuple[SanityTest, ...]:
"""Return a tuple of available code smell sanity tests."""
paths = glob.glob(os.path.join(SANITY_ROOT, 'code-smell', '*.py'))
@ -326,7 +326,7 @@ class SanityIgnoreParser:
"""Parser for the consolidated sanity test ignore file."""
NO_CODE = '_'
def __init__(self, args): # type: (SanityConfig) -> None
def __init__(self, args: SanityConfig) -> None:
if data_context().content.collection:
ansible_version = '%s.%s' % tuple(get_ansible_version().split('.')[:2])
@ -509,7 +509,7 @@ class SanityIgnoreParser:
self.ignores[test_name][path][error_code] = line_no
@staticmethod
def load(args): # type: (SanityConfig) -> SanityIgnoreParser
def load(args: SanityConfig) -> SanityIgnoreParser:
"""Return the current SanityIgnore instance, initializing it if needed."""
try:
return SanityIgnoreParser.instance # type: ignore[attr-defined]
@ -546,11 +546,11 @@ class SanityIgnoreProcessor:
self.skip_entries = self.parser.skips.get(full_name, {})
self.used_line_numbers = set() # type: t.Set[int]
def filter_skipped_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
def filter_skipped_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
"""Return the given targets, with any skipped paths filtered out."""
return sorted(target for target in targets if target.path not in self.skip_entries)
def process_errors(self, errors, paths): # type: (t.List[SanityMessage], t.List[str]) -> t.List[SanityMessage]
def process_errors(self, errors: t.List[SanityMessage], paths: t.List[str]) -> t.List[SanityMessage]:
"""Return the given errors filtered for ignores and with any settings related errors included."""
errors = self.filter_messages(errors)
errors.extend(self.get_errors(paths))
@ -559,7 +559,7 @@ class SanityIgnoreProcessor:
return errors
def filter_messages(self, messages): # type: (t.List[SanityMessage]) -> t.List[SanityMessage]
def filter_messages(self, messages: t.List[SanityMessage]) -> t.List[SanityMessage]:
"""Return a filtered list of the given messages using the entries that have been loaded."""
filtered = []
@ -581,7 +581,7 @@ class SanityIgnoreProcessor:
return filtered
def get_errors(self, paths): # type: (t.List[str]) -> t.List[SanityMessage]
def get_errors(self, paths: t.List[str]) -> t.List[SanityMessage]:
"""Return error messages related to issues with the file."""
messages = [] # type: t.List[SanityMessage]
@ -617,13 +617,13 @@ class SanityIgnoreProcessor:
class SanitySuccess(TestSuccess):
"""Sanity test success."""
def __init__(self, test, python_version=None): # type: (str, t.Optional[str]) -> None
def __init__(self, test: str, python_version: t.Optional[str] = None) -> None:
super().__init__(COMMAND, test, python_version)
class SanitySkipped(TestSkipped):
"""Sanity test skipped."""
def __init__(self, test, python_version=None): # type: (str, t.Optional[str]) -> None
def __init__(self, test: str, python_version: t.Optional[str] = None) -> None:
super().__init__(COMMAND, test, python_version)
@ -650,14 +650,14 @@ class SanityTargets:
self.include = include
@staticmethod
def create(include, exclude, require): # type: (t.List[str], t.List[str], t.List[str]) -> SanityTargets
def create(include: t.List[str], exclude: t.List[str], require: t.List[str]) -> SanityTargets:
"""Create a SanityTargets instance from the given include, exclude and require lists."""
_targets = SanityTargets.get_targets()
_include = walk_internal_targets(_targets, include, exclude, require)
return SanityTargets(_targets, _include)
@staticmethod
def filter_and_inject_targets(test, targets): # type: (SanityTest, t.Iterable[TestTarget]) -> t.List[TestTarget]
def filter_and_inject_targets(test: SanityTest, targets: t.Iterable[TestTarget]) -> t.List[TestTarget]:
"""Filter and inject targets based on test requirements and the given target list."""
test_targets = list(targets)
@ -680,7 +680,7 @@ class SanityTargets:
return test_targets
@staticmethod
def get_targets(): # type: () -> t.Tuple[TestTarget, ...]
def get_targets() -> t.Tuple[TestTarget, ...]:
"""Return a tuple of sanity test targets. Uses a cached version when available."""
try:
return SanityTargets.get_targets.targets # type: ignore[attr-defined]
@ -696,7 +696,7 @@ class SanityTest(metaclass=abc.ABCMeta):
"""Sanity test base class."""
ansible_only = False
def __init__(self, name=None): # type: (t.Optional[str]) -> None
def __init__(self, name: t.Optional[str] = None) -> None:
if not name:
name = self.__class__.__name__
name = re.sub(r'Test$', '', name) # drop Test suffix
@ -712,58 +712,58 @@ class SanityTest(metaclass=abc.ABCMeta):
self.optional_error_codes = set() # type: t.Set[str]
@property
def error_code(self): # type: () -> t.Optional[str]
def error_code(self) -> t.Optional[str]:
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return None
@property
def can_ignore(self): # type: () -> bool
def can_ignore(self) -> bool:
"""True if the test supports ignore entries."""
return True
@property
def can_skip(self): # type: () -> bool
def can_skip(self) -> bool:
"""True if the test supports skip entries."""
return not self.all_targets and not self.no_targets
@property
def all_targets(self): # type: () -> bool
def all_targets(self) -> bool:
"""True if test targets will not be filtered using includes, excludes, requires or changes. Mutually exclusive with no_targets."""
return False
@property
def no_targets(self): # type: () -> bool
def no_targets(self) -> bool:
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return False
@property
def include_directories(self): # type: () -> bool
def include_directories(self) -> bool:
"""True if the test targets should include directories."""
return False
@property
def include_symlinks(self): # type: () -> bool
def include_symlinks(self) -> bool:
"""True if the test targets should include symlinks."""
return False
@property
def py2_compat(self): # type: () -> bool
def py2_compat(self) -> bool:
"""True if the test only applies to code that runs on Python 2.x."""
return False
@property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]:
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return CONTROLLER_PYTHON_VERSIONS
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] # pylint: disable=unused-argument
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]: # pylint: disable=unused-argument
"""Return the given list of test targets, filtered to include only those relevant for the test."""
if self.no_targets:
return []
raise NotImplementedError('Sanity test "%s" must implement "filter_targets" or set "no_targets" to True.' % self.name)
def filter_targets_by_version(self, args, targets, python_version): # type: (SanityConfig, t.List[TestTarget], str) -> t.List[TestTarget]
def filter_targets_by_version(self, args: SanityConfig, targets: t.List[TestTarget], python_version: str) -> t.List[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test, taking into account the Python version."""
del python_version # python_version is not used here, but derived classes may make use of it
@ -785,7 +785,7 @@ class SanityTest(metaclass=abc.ABCMeta):
return targets
@staticmethod
def filter_remote_targets(targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
def filter_remote_targets(targets: t.List[TestTarget]) -> t.List[TestTarget]:
"""Return a filtered list of the given targets, including only those that require support for remote-only Python versions."""
targets = [target for target in targets if (
is_subdir(target.path, data_context().content.module_path) or
@ -811,15 +811,15 @@ class SanityTest(metaclass=abc.ABCMeta):
class SanitySingleVersion(SanityTest, metaclass=abc.ABCMeta):
"""Base class for sanity test plugins which should run on a single python version."""
@property
def require_libyaml(self): # type: () -> bool
def require_libyaml(self) -> bool:
"""True if the test requires PyYAML to have libyaml support."""
return False
@abc.abstractmethod
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
"""Run the sanity test and return the result."""
def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor
def load_processor(self, args: SanityConfig) -> SanityIgnoreProcessor:
"""Load the ignore processor for this sanity test."""
return SanityIgnoreProcessor(args, self, None)
@ -890,32 +890,32 @@ class SanityCodeSmellTest(SanitySingleVersion):
raise ApplicationError('Sanity test "%s" option "no_targets" is mutually exclusive with options: %s' % (self.name, ', '.join(problems)))
@property
def all_targets(self): # type: () -> bool
def all_targets(self) -> bool:
"""True if test targets will not be filtered using includes, excludes, requires or changes. Mutually exclusive with no_targets."""
return self.__all_targets
@property
def no_targets(self): # type: () -> bool
def no_targets(self) -> bool:
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return self.__no_targets
@property
def include_directories(self): # type: () -> bool
def include_directories(self) -> bool:
"""True if the test targets should include directories."""
return self.__include_directories
@property
def include_symlinks(self): # type: () -> bool
def include_symlinks(self) -> bool:
"""True if the test targets should include symlinks."""
return self.__include_symlinks
@property
def py2_compat(self): # type: () -> bool
def py2_compat(self) -> bool:
"""True if the test only applies to code that runs on Python 2.x."""
return self.__py2_compat
@property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]:
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
versions = super().supported_python_versions
@ -927,7 +927,7 @@ class SanityCodeSmellTest(SanitySingleVersion):
return versions
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
if self.no_targets:
return []
@ -954,7 +954,7 @@ class SanityCodeSmellTest(SanitySingleVersion):
return targets
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
"""Run the sanity test and return the result."""
cmd = [python.path, self.path]
@ -1022,7 +1022,7 @@ class SanityCodeSmellTest(SanitySingleVersion):
return SanitySuccess(self.name)
def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor
def load_processor(self, args: SanityConfig) -> SanityIgnoreProcessor:
"""Load the ignore processor for this sanity test."""
return SanityIgnoreProcessor(args, self, None)
@ -1030,15 +1030,15 @@ class SanityCodeSmellTest(SanitySingleVersion):
class SanityVersionNeutral(SanityTest, metaclass=abc.ABCMeta):
"""Base class for sanity test plugins which are idependent of the python version being used."""
@abc.abstractmethod
def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult:
"""Run the sanity test and return the result."""
def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor
def load_processor(self, args: SanityConfig) -> SanityIgnoreProcessor:
"""Load the ignore processor for this sanity test."""
return SanityIgnoreProcessor(args, self, None)
@property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]:
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return None
@ -1046,24 +1046,24 @@ class SanityVersionNeutral(SanityTest, metaclass=abc.ABCMeta):
class SanityMultipleVersion(SanityTest, metaclass=abc.ABCMeta):
"""Base class for sanity test plugins which should run on multiple python versions."""
@abc.abstractmethod
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
"""Run the sanity test and return the result."""
def load_processor(self, args, python_version): # type: (SanityConfig, str) -> SanityIgnoreProcessor
def load_processor(self, args: SanityConfig, python_version: str) -> SanityIgnoreProcessor:
"""Load the ignore processor for this sanity test."""
return SanityIgnoreProcessor(args, self, python_version)
@property
def needs_pypi(self): # type: () -> bool
def needs_pypi(self) -> bool:
"""True if the test requires PyPI, otherwise False."""
return False
@property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]:
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return SUPPORTED_PYTHON_VERSIONS
def filter_targets_by_version(self, args, targets, python_version): # type: (SanityConfig, t.List[TestTarget], str) -> t.List[TestTarget]
def filter_targets_by_version(self, args: SanityConfig, targets: t.List[TestTarget], python_version: str) -> t.List[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test, taking into account the Python version."""
if not python_version:
raise Exception('python_version is required to filter multi-version tests')
@ -1084,7 +1084,7 @@ class SanityMultipleVersion(SanityTest, metaclass=abc.ABCMeta):
@cache
def sanity_get_tests(): # type: () -> t.Tuple[SanityTest, ...]
def sanity_get_tests() -> t.Tuple[SanityTest, ...]:
"""Return a tuple of the available sanity tests."""
import_plugins('commands/sanity')
sanity_plugins = {} # type: t.Dict[str, t.Type[SanityTest]]
@ -1163,7 +1163,7 @@ def create_sanity_virtualenv(
return virtualenv_python
def check_sanity_virtualenv_yaml(python): # type: (VirtualPythonConfig) -> t.Optional[bool]
def check_sanity_virtualenv_yaml(python: VirtualPythonConfig) -> t.Optional[bool]:
"""Return True if PyYAML has libyaml support for the given sanity virtual environment, False if it does not and None if it was not found."""
virtualenv_path = os.path.dirname(os.path.dirname(python.path))
meta_yaml = os.path.join(virtualenv_path, 'meta.yaml.json')

@ -49,7 +49,7 @@ from ...host_configs import (
class AnsibleDocTest(SanitySingleVersion):
"""Sanity test for ansible-doc."""
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
plugin_paths = [plugin_path for plugin_type, plugin_path in data_context().content.plugin_paths.items() if plugin_type in DOCUMENTABLE_PLUGINS]
@ -59,7 +59,7 @@ class AnsibleDocTest(SanitySingleVersion):
and any(is_subdir(target.path, path) for path in plugin_paths)
]
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
settings = self.load_processor(args)
paths = [target.path for target in targets.include]

@ -42,16 +42,16 @@ class BinSymlinksTest(SanityVersionNeutral):
ansible_only = True
@property
def can_ignore(self): # type: () -> bool
def can_ignore(self) -> bool:
"""True if the test supports ignore entries."""
return False
@property
def no_targets(self): # type: () -> bool
def no_targets(self) -> bool:
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return True
def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult:
bin_root = ANSIBLE_BIN_PATH
bin_names = os.listdir(bin_root)
bin_paths = sorted(os.path.join(bin_root, path) for path in bin_names)

@ -44,11 +44,11 @@ from ...host_configs import (
class CompileTest(SanityMultipleVersion):
"""Sanity test for proper python syntax."""
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
if args.prime_venvs:
return SanitySkipped(self.name, python_version=python.version)

@ -27,16 +27,16 @@ from ...config import (
class IgnoresTest(SanityVersionNeutral):
"""Sanity test for sanity test ignore entries."""
@property
def can_ignore(self): # type: () -> bool
def can_ignore(self) -> bool:
"""True if the test supports ignore entries."""
return False
@property
def no_targets(self): # type: () -> bool
def no_targets(self) -> bool:
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return True
def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult:
sanity_ignore = SanityIgnoreParser.load(args)
messages = [] # type: t.List[SanityMessage]

@ -73,7 +73,7 @@ from ...venv import (
)
def _get_module_test(module_restrictions): # type: (bool) -> t.Callable[[str], bool]
def _get_module_test(module_restrictions: bool) -> t.Callable[[str], bool]:
"""Create a predicate which tests whether a path can be used by modules or not."""
module_path = data_context().content.module_path
module_utils_path = data_context().content.module_utils_path
@ -84,7 +84,7 @@ def _get_module_test(module_restrictions): # type: (bool) -> t.Callable[[str],
class ImportTest(SanityMultipleVersion):
"""Sanity test for proper import exception handling."""
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
if data_context().content.is_ansible:
# all of ansible-core must pass the import test, not just plugins/modules
@ -99,11 +99,11 @@ class ImportTest(SanityMultipleVersion):
any(is_subdir(target.path, path) for path in paths)]
@property
def needs_pypi(self): # type: () -> bool
def needs_pypi(self) -> bool:
"""True if the test requires PyPI, otherwise False."""
return True
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
settings = self.load_processor(args, python.version)
paths = [target.path for target in targets.include]
@ -208,7 +208,7 @@ class ImportTest(SanityMultipleVersion):
@cache
def get_ansible_test_python_path(): # type: () -> str
def get_ansible_test_python_path() -> str:
"""
Return a directory usable for PYTHONPATH, containing only the ansible-test collection loader.
The temporary directory created will be cached for the lifetime of the process and cleaned up at exit.

@ -108,16 +108,16 @@ class IntegrationAliasesTest(SanitySingleVersion):
self._ci_test_groups = {} # type: t.Dict[str, t.List[int]]
@property
def can_ignore(self): # type: () -> bool
def can_ignore(self) -> bool:
"""True if the test supports ignore entries."""
return False
@property
def no_targets(self): # type: () -> bool
def no_targets(self) -> bool:
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return True
def load_ci_config(self, python): # type: (PythonConfig) -> t.Dict[str, t.Any]
def load_ci_config(self, python: PythonConfig) -> t.Dict[str, t.Any]:
"""Load and return the CI YAML configuration."""
if not self._ci_config:
self._ci_config = self.load_yaml(python, self.CI_YML)
@ -125,7 +125,7 @@ class IntegrationAliasesTest(SanitySingleVersion):
return self._ci_config
@property
def ci_test_groups(self): # type: () -> t.Dict[str, t.List[int]]
def ci_test_groups(self) -> t.Dict[str, t.List[int]]:
"""Return a dictionary of CI test names and their group(s)."""
if not self._ci_test_groups:
test_groups = {} # type: t.Dict[str, t.Set[int]]
@ -171,7 +171,7 @@ class IntegrationAliasesTest(SanitySingleVersion):
return self._ci_test_groups
def format_test_group_alias(self, name, fallback=''): # type: (str, str) -> str
def format_test_group_alias(self, name: str, fallback: str = '') -> str:
"""Return a test group alias using the given name and fallback."""
group_numbers = self.ci_test_groups.get(name, None)
@ -195,12 +195,12 @@ class IntegrationAliasesTest(SanitySingleVersion):
return alias
def load_yaml(self, python, path): # type: (PythonConfig, str) -> t.Dict[str, t.Any]
def load_yaml(self, python: PythonConfig, path: str) -> t.Dict[str, t.Any]:
"""Load the specified YAML file and return the contents."""
yaml_to_json_path = os.path.join(SANITY_ROOT, self.name, 'yaml_to_json.py')
return json.loads(raw_command([python.path, yaml_to_json_path], data=read_text_file(path), capture=True)[0])
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
if args.explain:
return SanitySuccess(self.name)
@ -230,7 +230,7 @@ class IntegrationAliasesTest(SanitySingleVersion):
return SanitySuccess(self.name)
def check_posix_targets(self, args): # type: (SanityConfig) -> t.List[SanityMessage]
def check_posix_targets(self, args: SanityConfig) -> t.List[SanityMessage]:
"""Check POSIX integration test targets and return messages with any issues found."""
posix_targets = tuple(walk_posix_integration_targets())
@ -325,7 +325,7 @@ class IntegrationAliasesTest(SanitySingleVersion):
return messages
def check_changes(self, args, results): # type: (SanityConfig, Results) -> None
def check_changes(self, args: SanityConfig, results: Results) -> None:
"""Check changes and store results in the provided result dictionary."""
integration_targets = list(walk_integration_targets())
module_targets = list(walk_module_targets())
@ -373,7 +373,7 @@ class IntegrationAliasesTest(SanitySingleVersion):
results.comments += comments
results.labels.update(labels)
def format_comment(self, template, targets): # type: (str, t.List[str]) -> t.Optional[str]
def format_comment(self, template: str, targets: t.List[str]) -> t.Optional[str]:
"""Format and return a comment based on the given template and targets, or None if there are no targets."""
if not targets:
return None
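
The `load_yaml` helper above runs a `yaml_to_json.py` script under the target Python and parses the JSON it prints, which keeps PyYAML out of the controller process. A rough sketch of that round trip, using `subprocess.run` in place of ansible-test's `raw_command` and assuming the helper emits JSON for a YAML mapping:

import json
import subprocess
import typing as t


def load_yaml_via_helper(python_path: str, yaml_to_json_script: str, yaml_text: str) -> t.Dict[str, t.Any]:
    """Run the helper script under the given Python, feeding it YAML on stdin and parsing JSON from stdout."""
    completed = subprocess.run(
        [python_path, yaml_to_json_script],
        input=yaml_text,
        capture_output=True,
        text=True,
        check=True,
    )
    return json.loads(completed.stdout)  # assumes the YAML document is a mapping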

@ -67,23 +67,23 @@ class MypyTest(SanityMultipleVersion):
'lib/ansible/module_utils/compat/_selectors2.py',
)
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and target.path not in self.vendored_paths and (
target.path.startswith('lib/ansible/') or target.path.startswith('test/lib/ansible_test/_internal/')
or target.path.startswith('test/lib/ansible_test/_util/target/sanity/import/'))]
@property
def error_code(self): # type: () -> t.Optional[str]
def error_code(self) -> t.Optional[str]:
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'ansible-test'
@property
def needs_pypi(self): # type: () -> bool
def needs_pypi(self) -> bool:
"""True if the test requires PyPI, otherwise False."""
return True
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
if sys.version_info >= (3, 11):
display.warning(f'Skipping sanity test "{self.name}" which can test Python {args.controller_python.version}, but cannot run under that version.')
return SanitySkipped(self.name, python.version)

@ -44,15 +44,15 @@ from ...host_configs import (
class Pep8Test(SanitySingleVersion):
"""Sanity test for PEP 8 style guidelines using pycodestyle."""
@property
def error_code(self): # type: () -> t.Optional[str]
def error_code(self) -> t.Optional[str]:
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'A100'
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
current_ignore_file = os.path.join(SANITY_ROOT, 'pep8', 'current-ignore.txt')
current_ignore = sorted(read_lines_without_comments(current_ignore_file, remove_blank_lines=True))

@ -46,15 +46,15 @@ from ...data import (
class PslintTest(SanityVersionNeutral):
"""Sanity test using PSScriptAnalyzer."""
@property
def error_code(self): # type: () -> t.Optional[str]
def error_code(self) -> t.Optional[str]:
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'AnsibleTest'
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] in ('.ps1', '.psm1', '.psd1')]
def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult:
settings = self.load_processor(args)
paths = [target.path for target in targets.include]

@ -70,20 +70,20 @@ class PylintTest(SanitySingleVersion):
])
@property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]:
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return tuple(version for version in CONTROLLER_PYTHON_VERSIONS if str_to_version(version) < (3, 11))
@property
def error_code(self): # type: () -> t.Optional[str]
def error_code(self) -> t.Optional[str]:
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'ansible-test'
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
plugin_dir = os.path.join(SANITY_ROOT, 'pylint', 'plugins')
plugin_names = sorted(p[0] for p in [
os.path.splitext(p) for p in os.listdir(plugin_dir)] if p[1] == '.py' and p[0] != '__init__')
@ -113,9 +113,9 @@ class PylintTest(SanitySingleVersion):
contexts.append((context_name, sorted(filtered_paths)))
available_paths -= filtered_paths
def filter_path(path_filter=None): # type: (str) -> t.Callable[[str], bool]
def filter_path(path_filter: str = None) -> t.Callable[[str], bool]:
"""Return a function that filters out paths which are not a subdirectory of the given path."""
def context_filter(path_to_filter): # type: (str) -> bool
def context_filter(path_to_filter: str) -> bool:
"""Return true if the given path matches, otherwise return False."""
return is_subdir(path_to_filter, path_filter)
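
Like `_get_module_test` in the import test above, `filter_path` returns a closure, so the part of the annotation doing the real work is the `t.Callable[[str], bool]` return type. A self-contained sketch of that shape (hypothetical helper, not code from this change):

import typing as t


def make_path_filter(prefix: t.Optional[str] = None) -> t.Callable[[str], bool]:
    """Return a predicate which accepts paths under the given prefix, or every path when no prefix is given."""
    def path_matches(path: str) -> bool:
        """Return True if the path falls under the configured prefix."""
        return prefix is None or path == prefix or path.startswith(prefix.rstrip('/') + '/')

    return path_matches


accepts_tests = make_path_filter('test/')
assert accepts_tests('test/units/example.py')
assert not accepts_tests('lib/ansible/cli.py')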

@ -30,16 +30,16 @@ class SanityDocsTest(SanityVersionNeutral):
ansible_only = True
@property
def can_ignore(self): # type: () -> bool
def can_ignore(self) -> bool:
"""True if the test supports ignore entries."""
return False
@property
def no_targets(self): # type: () -> bool
def no_targets(self) -> bool:
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return True
def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult:
sanity_dir = 'docs/docsite/rst/dev_guide/testing/sanity'
sanity_docs = set(part[0] for part in (os.path.splitext(os.path.basename(path)) for path in data_context().content.get_files(sanity_dir))
if part[1] == '.rst')

@ -45,15 +45,15 @@ from ...config import (
class ShellcheckTest(SanityVersionNeutral):
"""Sanity test using shellcheck."""
@property
def error_code(self): # type: () -> t.Optional[str]
def error_code(self) -> t.Optional[str]:
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'AT1000'
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.sh']
def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult:
exclude_file = os.path.join(SANITY_ROOT, 'shellcheck', 'exclude.txt')
exclude = set(read_lines_without_comments(exclude_file, remove_blank_lines=True, optional=True))

@ -78,11 +78,11 @@ class ValidateModulesTest(SanitySingleVersion):
self._exclusions.add('lib/ansible/plugins/cache/base.py')
@property
def error_code(self): # type: () -> t.Optional[str]
def error_code(self) -> t.Optional[str]:
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'A100'
def get_plugin_type(self, target): # type: (TestTarget) -> t.Optional[str]
def get_plugin_type(self, target: TestTarget) -> t.Optional[str]:
"""Return the plugin type of the given target, or None if it is not a plugin or module."""
if target.path.endswith('/__init__.py'):
return None
@ -96,11 +96,11 @@ class ValidateModulesTest(SanitySingleVersion):
return None
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if self.get_plugin_type(target) is not None]
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
env = ansible_environment(args, color=False)
settings = self.load_processor(args)

@ -48,16 +48,16 @@ from ...host_configs import (
class YamllintTest(SanitySingleVersion):
"""Sanity test using yamllint."""
@property
def error_code(self): # type: () -> t.Optional[str]
def error_code(self) -> t.Optional[str]:
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'ansible-test'
@property
def require_libyaml(self): # type: () -> bool
def require_libyaml(self) -> bool:
"""True if the test requires PyYAML to have libyaml support."""
return True
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
yaml_targets = [target for target in targets if os.path.splitext(target.path)[1] in ('.yml', '.yaml')]
@ -72,7 +72,7 @@ class YamllintTest(SanitySingleVersion):
return yaml_targets
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
settings = self.load_processor(args)
paths = [target.path for target in targets.include]
@ -86,7 +86,7 @@ class YamllintTest(SanitySingleVersion):
return SanitySuccess(self.name)
@staticmethod
def test_paths(args, paths, python): # type: (SanityConfig, t.List[str], PythonConfig) -> t.List[SanityMessage]
def test_paths(args: SanityConfig, paths: t.List[str], python: PythonConfig) -> t.List[SanityMessage]:
"""Test the specified paths using the given Python and return the results."""
cmd = [
python.path,

@ -46,7 +46,7 @@ from ...inventory import (
)
def command_shell(args): # type: (ShellConfig) -> None
def command_shell(args: ShellConfig) -> None:
"""Entry point for the `shell` command."""
if args.raw and isinstance(args.targets[0], ControllerConfig):
raise ApplicationError('The --raw option has no effect on the controller.')

@ -93,7 +93,7 @@ class TestContext:
module_utils = 'module_utils'
def command_units(args): # type: (UnitsConfig) -> None
def command_units(args: UnitsConfig) -> None:
"""Run unit tests."""
handle_layout_messages(data_context().content.unit_messages)
@ -297,7 +297,7 @@ def command_units(args): # type: (UnitsConfig) -> None
raise
def get_units_ansible_python_path(args, test_context): # type: (UnitsConfig, str) -> str
def get_units_ansible_python_path(args: UnitsConfig, test_context: str) -> str:
"""
Return a directory usable for PYTHONPATH, containing only the modules and module_utils portion of the ansible package.
The temporary directory created will be cached for the lifetime of the process and cleaned up at exit.

@ -42,21 +42,21 @@ class PosixCompletionConfig(CompletionConfig, metaclass=abc.ABCMeta):
"""Base class for completion configuration of POSIX environments."""
@property
@abc.abstractmethod
def supported_pythons(self): # type: () -> t.List[str]
def supported_pythons(self) -> t.List[str]:
"""Return a list of the supported Python versions."""
@abc.abstractmethod
def get_python_path(self, version): # type: (str) -> str
def get_python_path(self, version: str) -> str:
"""Return the path of the requested Python version."""
def get_default_python(self, controller): # type: (bool) -> str
def get_default_python(self, controller: bool) -> str:
"""Return the default Python version for a controller or target as specified."""
context_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS
version = [python for python in self.supported_pythons if python in context_pythons][0]
return version
@property
def controller_supported(self): # type: () -> bool
def controller_supported(self) -> bool:
"""True if at least one Python version is provided which supports the controller, otherwise False."""
return any(version in CONTROLLER_PYTHON_VERSIONS for version in self.supported_pythons)
@ -68,13 +68,13 @@ class PythonCompletionConfig(PosixCompletionConfig, metaclass=abc.ABCMeta):
python_dir: str = '/usr/bin'
@property
def supported_pythons(self): # type: () -> t.List[str]
def supported_pythons(self) -> t.List[str]:
"""Return a list of the supported Python versions."""
versions = self.python.split(',') if self.python else []
versions = [version for version in versions if version in SUPPORTED_PYTHON_VERSIONS]
return versions
def get_python_path(self, version): # type: (str) -> str
def get_python_path(self, version: str) -> str:
"""Return the path of the requested Python version."""
return os.path.join(self.python_dir, f'python{version}')
@ -111,11 +111,11 @@ class RemoteCompletionConfig(CompletionConfig):
@dataclasses.dataclass(frozen=True)
class InventoryCompletionConfig(CompletionConfig):
"""Configuration for inventory files."""
def __init__(self): # type: () -> None
def __init__(self) -> None:
super().__init__(name='inventory')
@property
def is_default(self): # type: () -> bool
def is_default(self) -> bool:
"""True if the completion entry is only used for defaults, otherwise False."""
return False
@ -123,14 +123,14 @@ class InventoryCompletionConfig(CompletionConfig):
@dataclasses.dataclass(frozen=True)
class PosixSshCompletionConfig(PythonCompletionConfig):
"""Configuration for a POSIX host reachable over SSH."""
def __init__(self, user, host): # type: (str, str) -> None
def __init__(self, user: str, host: str) -> None:
super().__init__(
name=f'{user}@{host}',
python=','.join(SUPPORTED_PYTHON_VERSIONS),
)
@property
def is_default(self): # type: () -> bool
def is_default(self) -> bool:
"""True if the completion entry is only used for defaults, otherwise False."""
return False
@ -196,7 +196,7 @@ class WindowsRemoteCompletionConfig(RemoteCompletionConfig):
TCompletionConfig = t.TypeVar('TCompletionConfig', bound=CompletionConfig)
def load_completion(name, completion_type): # type: (str, t.Type[TCompletionConfig]) -> t.Dict[str, TCompletionConfig]
def load_completion(name: str, completion_type: t.Type[TCompletionConfig]) -> t.Dict[str, TCompletionConfig]:
"""Load the named completion entries, returning them in dictionary form using the specified completion type."""
lines = read_lines_without_comments(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', '%s.txt' % name), remove_blank_lines=True)
@ -216,7 +216,7 @@ def load_completion(name, completion_type): # type: (str, t.Type[TCompletionCon
return completion
def parse_completion_entry(value): # type: (str) -> t.Tuple[str, t.Dict[str, str]]
def parse_completion_entry(value: str) -> t.Tuple[str, t.Dict[str, str]]:
"""Parse the given completion entry, returning the entry name and a dictionary of key/value settings."""
values = value.split()
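
The docstring and the `value.split()` call suggest completion entries of the form `name key=value key=value`. A plausible continuation of the body, consistent with that docstring but not taken from the actual implementation, might be:

import typing as t


def parse_completion_entry(value: str) -> t.Tuple[str, t.Dict[str, str]]:
    """Parse a completion entry of the assumed form 'name key=value key=value' into a name and a settings dict."""
    values = value.split()
    name = values[0]
    data = {parts[0]: parts[1] if len(parts) > 1 else '' for parts in (item.split('=', 1) for item in values[1:])}
    return name, data


name, settings = parse_completion_entry('alpine3 python=3.10 image=example.invalid/alpine3')  # made-up entry
assert name == 'alpine3' and settings['python'] == '3.10'
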
@ -242,24 +242,24 @@ def filter_completion(
@cache
def docker_completion(): # type: () -> t.Dict[str, DockerCompletionConfig]
def docker_completion() -> t.Dict[str, DockerCompletionConfig]:
"""Return docker completion entries."""
return load_completion('docker', DockerCompletionConfig)
@cache
def remote_completion(): # type: () -> t.Dict[str, PosixRemoteCompletionConfig]
def remote_completion() -> t.Dict[str, PosixRemoteCompletionConfig]:
"""Return remote completion entries."""
return load_completion('remote', PosixRemoteCompletionConfig)
@cache
def windows_completion(): # type: () -> t.Dict[str, WindowsRemoteCompletionConfig]
def windows_completion() -> t.Dict[str, WindowsRemoteCompletionConfig]:
"""Return windows completion entries."""
return load_completion('windows', WindowsRemoteCompletionConfig)
@cache
def network_completion(): # type: () -> t.Dict[str, NetworkRemoteCompletionConfig]
def network_completion() -> t.Dict[str, NetworkRemoteCompletionConfig]:
"""Return network completion entries."""
return load_completion('network', NetworkRemoteCompletionConfig)
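
The `load_completion` signature converted above pairs the bound `TCompletionConfig` TypeVar with a `t.Type[...]` argument, so each wrapper such as `docker_completion` returns a dictionary with a concrete value type. A minimal sketch of the pattern with stand-in config classes (names and fields are illustrative only; the real wrappers are additionally decorated with `@cache`):

import dataclasses
import typing as t


@dataclasses.dataclass(frozen=True)
class CompletionConfig:
    """Stand-in base class for completion configuration."""
    name: str


@dataclasses.dataclass(frozen=True)
class DockerCompletionConfig(CompletionConfig):
    """Hypothetical docker completion entry."""
    image: str = ''


TCompletionConfig = t.TypeVar('TCompletionConfig', bound=CompletionConfig)


def load_completion(name: str, completion_type: t.Type[TCompletionConfig]) -> t.Dict[str, TCompletionConfig]:
    """Return completion entries of the requested type, keyed by entry name (toy data in place of completion files)."""
    return {name: completion_type(name=name)}


def docker_completion() -> t.Dict[str, DockerCompletionConfig]:
    """Return docker completion entries with a precise value type thanks to the bound TypeVar."""
    return load_completion('docker', DockerCompletionConfig)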

@ -67,7 +67,7 @@ class ContentConfig:
class EnvironmentConfig(CommonConfig):
"""Configuration common to all commands which execute in an environment."""
def __init__(self, args, command): # type: (t.Any, str) -> None
def __init__(self, args: t.Any, command: str) -> None:
super().__init__(args, command)
self.host_settings = args.host_settings # type: HostSettings
@ -133,16 +133,16 @@ class EnvironmentConfig(CommonConfig):
display.warning('The --no-pip-check option is deprecated and has no effect. It will be removed in a future version of ansible-test.')
@property
def controller(self): # type: () -> ControllerHostConfig
def controller(self) -> ControllerHostConfig:
"""Host configuration for the controller."""
return self.host_settings.controller
@property
def targets(self): # type: () -> t.List[HostConfig]
def targets(self) -> t.List[HostConfig]:
"""Host configuration for the targets."""
return self.host_settings.targets
def only_target(self, target_type): # type: (t.Type[THostConfig]) -> THostConfig
def only_target(self, target_type: t.Type[THostConfig]) -> THostConfig:
"""
Return the host configuration for the target.
Requires that there is exactly one target of the specified type.
@ -159,7 +159,7 @@ class EnvironmentConfig(CommonConfig):
return target
def only_targets(self, target_type): # type: (t.Type[THostConfig]) -> t.List[THostConfig]
def only_targets(self, target_type: t.Type[THostConfig]) -> t.List[THostConfig]:
"""
Return a list of target host configurations.
Requires that there are one or more targets, all the specified type.
@ -172,7 +172,7 @@ class EnvironmentConfig(CommonConfig):
return t.cast(t.List[THostConfig], self.targets)
@property
def target_type(self): # type: () -> t.Type[HostConfig]
def target_type(self) -> t.Type[HostConfig]:
"""
The true type of the target(s).
If the target is the controller, the controller type is returned.
@ -193,7 +193,7 @@ class EnvironmentConfig(CommonConfig):
class TestConfig(EnvironmentConfig):
"""Configuration common to all test commands."""
def __init__(self, args, command): # type: (t.Any, str) -> None
def __init__(self, args: t.Any, command: str) -> None:
super().__init__(args, command)
self.coverage = args.coverage # type: bool
@ -234,7 +234,7 @@ class TestConfig(EnvironmentConfig):
class ShellConfig(EnvironmentConfig):
"""Configuration for the shell command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'shell')
self.cmd = args.cmd # type: t.List[str]
@ -247,7 +247,7 @@ class ShellConfig(EnvironmentConfig):
class SanityConfig(TestConfig):
"""Configuration for the sanity command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'sanity')
self.test = args.test # type: t.List[str]
@ -272,7 +272,7 @@ class SanityConfig(TestConfig):
class IntegrationConfig(TestConfig):
"""Configuration for the integration command."""
def __init__(self, args, command): # type: (t.Any, str) -> None
def __init__(self, args: t.Any, command: str) -> None:
super().__init__(args, command)
self.start_at = args.start_at # type: str
@ -299,7 +299,7 @@ class IntegrationConfig(TestConfig):
self.explain = True
self.display_stderr = True
def get_ansible_config(self): # type: () -> str
def get_ansible_config(self) -> str:
"""Return the path to the Ansible config for the given config."""
ansible_config_relative_path = os.path.join(data_context().content.integration_path, '%s.cfg' % self.command)
ansible_config_path = os.path.join(data_context().content.root, ansible_config_relative_path)
@ -316,19 +316,19 @@ TIntegrationConfig = t.TypeVar('TIntegrationConfig', bound=IntegrationConfig)
class PosixIntegrationConfig(IntegrationConfig):
"""Configuration for the posix integration command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'integration')
class WindowsIntegrationConfig(IntegrationConfig):
"""Configuration for the windows integration command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'windows-integration')
class NetworkIntegrationConfig(IntegrationConfig):
"""Configuration for the network integration command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'network-integration')
self.testcase = args.testcase # type: str
@ -336,7 +336,7 @@ class NetworkIntegrationConfig(IntegrationConfig):
class UnitsConfig(TestConfig):
"""Configuration for the units command."""
def __init__(self, args): # type: (t.Any) -> None
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'units')
self.collect_only = args.collect_only # type: bool
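
`only_target` and `only_targets` in `EnvironmentConfig` combine a bound `THostConfig` TypeVar with `t.cast` so callers get back the concrete host config type they asked for. A condensed sketch of that narrowing pattern, using made-up host classes:

import typing as t


class HostConfig:
    """Stand-in base class for host configuration."""


class ControllerConfig(HostConfig):
    """Hypothetical controller host configuration."""


THostConfig = t.TypeVar('THostConfig', bound=HostConfig)


class EnvironmentConfig:
    """Minimal container for a list of target host configurations."""
    def __init__(self, targets: t.List[HostConfig]) -> None:
        self.targets = targets

    def only_targets(self, target_type: t.Type[THostConfig]) -> t.List[THostConfig]:
        """Return the targets, verified to all be of the requested type."""
        if not all(isinstance(target, target_type) for target in self.targets):
            raise TypeError(f'All targets must be of type {target_type.__name__}.')
        return t.cast(t.List[THostConfig], self.targets)


config = EnvironmentConfig([ControllerConfig()])
controllers: t.List[ControllerConfig] = config.only_targets(ControllerConfig)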

@ -89,7 +89,7 @@ class Connection(metaclass=abc.ABCMeta):
class LocalConnection(Connection):
"""Connect to localhost."""
def __init__(self, args): # type: (EnvironmentConfig) -> None
def __init__(self, args: EnvironmentConfig) -> None:
self.args = args
def run(self,
@ -116,7 +116,7 @@ class LocalConnection(Connection):
class SshConnection(Connection):
"""Connect to a host using SSH."""
def __init__(self, args, settings, become=None): # type: (EnvironmentConfig, SshConnectionDetail, t.Optional[Become]) -> None
def __init__(self, args: EnvironmentConfig, settings: SshConnectionDetail, become: t.Optional[Become] = None) -> None:
self.args = args
self.settings = settings
self.become = become
@ -163,7 +163,7 @@ class SshConnection(Connection):
options.append(f'{self.settings.user}@{self.settings.host}')
options.append(shlex.join(command))
def error_callback(ex): # type: (SubprocessError) -> None
def error_callback(ex: SubprocessError) -> None:
"""Error handler."""
self.capture_log_details(ssh_logfile.name, ex)
@ -180,7 +180,7 @@ class SshConnection(Connection):
)
@staticmethod
def capture_log_details(path, ex): # type: (str, SubprocessError) -> None
def capture_log_details(path: str, ex: SubprocessError) -> None:
"""Read the specified SSH debug log and add relevant details to the provided exception."""
if ex.status != 255:
return
@ -211,7 +211,7 @@ class SshConnection(Connection):
class DockerConnection(Connection):
"""Connect to a host using Docker."""
def __init__(self, args, container_id, user=None): # type: (EnvironmentConfig, str, t.Optional[str]) -> None
def __init__(self, args: EnvironmentConfig, container_id: str, user: t.Optional[str] = None) -> None:
self.args = args
self.container_id = container_id
self.user = user # type: t.Optional[str]
@ -247,10 +247,10 @@ class DockerConnection(Connection):
output_stream=output_stream,
)
def inspect(self): # type: () -> DockerInspect
def inspect(self) -> DockerInspect:
"""Inspect the container and return a DockerInspect instance with the results."""
return docker_inspect(self.args, self.container_id)
def disconnect_network(self, network): # type: (str) -> None
def disconnect_network(self, network: str) -> None:
"""Disconnect the container from the specified network."""
docker_network_disconnect(self.args, self.container_id, network)

@ -220,7 +220,7 @@ def run_support_container(
return descriptor
def get_container_database(args): # type: (EnvironmentConfig) -> ContainerDatabase
def get_container_database(args: EnvironmentConfig) -> ContainerDatabase:
"""Return the current container database, creating it as needed, or returning the one provided on the command line through delegation."""
try:
return get_container_database.database # type: ignore[attr-defined]
@ -260,7 +260,7 @@ class ContainerAccess:
# port redirections to create through host_ip -- if not set, no port redirections will be used
self.forwards = forwards
def port_map(self): # type: () -> t.List[t.Tuple[int, int]]
def port_map(self) -> t.List[t.Tuple[int, int]]:
"""Return a port map for accessing this container."""
if self.forwards:
ports = list(self.forwards.items())
@ -284,7 +284,7 @@ class ContainerAccess:
forwards=forwards,
)
def to_dict(self): # type: () -> t.Dict[str, t.Any]
def to_dict(self) -> t.Dict[str, t.Any]:
"""Return a dict of the current instance."""
value: t.Dict[str, t.Any] = dict(
host_ip=self.host_ip,
@ -315,7 +315,7 @@ class ContainerDatabase:
for context_name, containers in contexts.items()))
for access_name, contexts in data.items()))
def to_dict(self): # type: () -> t.Dict[str, t.Any]
def to_dict(self) -> t.Dict[str, t.Any]:
"""Return a dict of the current instance."""
return dict((access_name,
dict((context_name,
@ -325,12 +325,12 @@ class ContainerDatabase:
for access_name, contexts in self.data.items())
def local_ssh(args, python): # type: (EnvironmentConfig, PythonConfig) -> SshConnectionDetail
def local_ssh(args: EnvironmentConfig, python: PythonConfig) -> SshConnectionDetail:
"""Return SSH connection details for localhost, connecting as root to the default SSH port."""
return SshConnectionDetail('localhost', 'localhost', None, 'root', SshKey(args).key, python.path)
def root_ssh(ssh): # type: (SshConnection) -> SshConnectionDetail
def root_ssh(ssh: SshConnection) -> SshConnectionDetail:
"""Return the SSH connection details from the given SSH connection. If become was specified, the user will be changed to `root`."""
settings = ssh.settings.__dict__.copy()
@ -342,7 +342,7 @@ def root_ssh(ssh): # type: (SshConnection) -> SshConnectionDetail
return SshConnectionDetail(**settings)
def create_container_database(args): # type: (EnvironmentConfig) -> ContainerDatabase
def create_container_database(args: EnvironmentConfig) -> ContainerDatabase:
"""Create and return a container database with information necessary for all test hosts to make use of relevant support containers."""
origin = {} # type: t.Dict[str, t.Dict[str, ContainerAccess]]
control = {} # type: t.Dict[str, t.Dict[str, ContainerAccess]]
@ -441,11 +441,11 @@ def create_container_database(args): # type: (EnvironmentConfig) -> ContainerDa
class SupportContainerContext:
"""Context object for tracking information relating to access of support containers."""
def __init__(self, containers, process): # type: (ContainerDatabase, t.Optional[SshProcess]) -> None
def __init__(self, containers: ContainerDatabase, process: t.Optional[SshProcess]) -> None:
self.containers = containers
self.process = process
def close(self): # type: () -> None
def close(self) -> None:
"""Close the process maintaining the port forwards."""
if not self.process:
return # forwarding not in use
@ -569,13 +569,13 @@ class ContainerDescriptor:
self.env = env
self.details = None # type: t.Optional[SupportContainer]
def start(self, args): # type: (EnvironmentConfig) -> None
def start(self, args: EnvironmentConfig) -> None:
"""Start the container. Used for containers which are created, but not started."""
docker_start(args, self.name)
self.register(args)
def register(self, args): # type: (EnvironmentConfig) -> SupportContainer
def register(self, args: EnvironmentConfig) -> SupportContainer:
"""Record the container's runtime details. Must be used after the container has been started."""
if self.details:
raise Exception('Container already registered: %s' % self.name)
@ -657,7 +657,7 @@ def wait_for_file(args, # type: EnvironmentConfig
raise ApplicationError('Timeout waiting for container "%s" to provide file: %s' % (container_name, path))
def cleanup_containers(args): # type: (EnvironmentConfig) -> None
def cleanup_containers(args: EnvironmentConfig) -> None:
"""Clean up containers."""
for container in support_containers.values():
if container.cleanup == CleanupMode.YES:

@ -85,7 +85,7 @@ def parse_content_config(data: t.Any) -> ContentConfig:
)
def load_config(path): # type: (str) -> t.Optional[ContentConfig]
def load_config(path: str) -> t.Optional[ContentConfig]:
"""Load and parse the specified config file and return the result or None if loading/parsing failed."""
if YAML_IMPORT_ERROR:
raise ApplicationError('The "PyYAML" module is required to parse config: %s' % YAML_IMPORT_ERROR)
@ -112,7 +112,7 @@ def load_config(path): # type: (str) -> t.Optional[ContentConfig]
return config
def get_content_config(args): # type: (EnvironmentConfig) -> ContentConfig
def get_content_config(args: EnvironmentConfig) -> ContentConfig:
"""
Parse and return the content configuration (if any) for the current collection.
For ansible-core, a default configuration is used.
@ -149,7 +149,7 @@ def get_content_config(args): # type: (EnvironmentConfig) -> ContentConfig
return config
def parse_python_requires(value): # type: (t.Any) -> tuple[str, ...]
def parse_python_requires(value: t.Any) -> tuple[str, ...]:
"""Parse the given 'python_requires' version specifier and return the matching Python versions."""
if not isinstance(value, str):
raise ValueError('python_requires must be of type `str` not type `%s`' % type(value))

@ -206,7 +206,7 @@ class AnsibleCoreCI:
raise self._create_http_error(response)
def get(self, tries=3, sleep=15, always_raise_on=None): # type: (int, int, t.Optional[t.List[int]]) -> t.Optional[InstanceConnection]
def get(self, tries: int = 3, sleep: int = 15, always_raise_on: t.Optional[t.List[int]] = None) -> t.Optional[InstanceConnection]:
"""Get instance connection information."""
if not self.started:
display.info(f'Skipping invalid {self.label} instance.', verbosity=1)
@ -270,7 +270,7 @@ class AnsibleCoreCI:
return self.connection
def wait(self, iterations=90): # type: (t.Optional[int]) -> None
def wait(self, iterations: t.Optional[int] = 90) -> None:
"""Wait for the instance to become ready."""
for _iteration in range(1, iterations):
if self.get().running:
@ -378,7 +378,7 @@ class AnsibleCoreCI:
return True
def _save(self): # type: () -> None
def _save(self) -> None:
"""Save instance information."""
if self.args.explain:
return
@ -387,7 +387,7 @@ class AnsibleCoreCI:
write_json_file(self.path, config, create_directories=True)
def save(self): # type: () -> t.Dict[str, str]
def save(self) -> t.Dict[str, str]:
"""Save instance details and return as a dictionary."""
return dict(
label=self.resource.get_label(),
@ -396,7 +396,7 @@ class AnsibleCoreCI:
)
@staticmethod
def _create_http_error(response): # type: (HttpResponse) -> ApplicationError
def _create_http_error(response: HttpResponse) -> ApplicationError:
"""Return an exception created from the given HTTP response."""
response_json = response.json()
stack_trace = ''
@ -423,7 +423,7 @@ class AnsibleCoreCI:
class CoreHttpError(HttpError):
"""HTTP response as an error."""
def __init__(self, status, remote_message, remote_stack_trace): # type: (int, str, str) -> None
def __init__(self, status: int, remote_message: str, remote_stack_trace: str) -> None:
super().__init__(status, f'{remote_message}{remote_stack_trace}')
self.remote_message = remote_message
@ -437,7 +437,7 @@ class SshKey:
PUB_NAME = f'{KEY_NAME}.pub'
@mutex
def __init__(self, args): # type: (EnvironmentConfig) -> None
def __init__(self, args: EnvironmentConfig) -> None:
key_pair = self.get_key_pair()
if not key_pair:
@ -466,7 +466,7 @@ class SshKey:
self.key_contents = read_text_file(self.key).strip()
@staticmethod
def get_relative_in_tree_private_key_path(): # type: () -> str
def get_relative_in_tree_private_key_path() -> str:
"""Return the ansible-test SSH private key path relative to the content tree."""
temp_dir = ResultType.TMP.relative_path
@ -474,7 +474,7 @@ class SshKey:
return key
def get_in_tree_key_pair_paths(self): # type: () -> t.Optional[t.Tuple[str, str]]
def get_in_tree_key_pair_paths(self) -> t.Optional[t.Tuple[str, str]]:
"""Return the ansible-test SSH key pair paths from the content tree."""
temp_dir = ResultType.TMP.path
@ -483,7 +483,7 @@ class SshKey:
return key, pub
def get_source_key_pair_paths(self): # type: () -> t.Optional[t.Tuple[str, str]]
def get_source_key_pair_paths(self) -> t.Optional[t.Tuple[str, str]]:
"""Return the ansible-test SSH key pair paths for the current user."""
base_dir = os.path.expanduser('~/.ansible/test/')
@ -492,7 +492,7 @@ class SshKey:
return key, pub
def get_key_pair(self): # type: () -> t.Optional[t.Tuple[str, str]]
def get_key_pair(self) -> t.Optional[t.Tuple[str, str]]:
"""Return the ansible-test SSH key pair paths if present, otherwise return None."""
key, pub = self.get_in_tree_key_pair_paths()
@ -506,7 +506,7 @@ class SshKey:
return None
def generate_key_pair(self, args): # type: (EnvironmentConfig) -> t.Tuple[str, str]
def generate_key_pair(self, args: EnvironmentConfig) -> t.Tuple[str, str]:
"""Generate an SSH key pair for use by all ansible-test invocations for the current user."""
key, pub = self.get_source_key_pair_paths()

@ -155,7 +155,7 @@ def cover_python(
return intercept_python(args, python, cmd, env, capture, data, cwd)
def get_coverage_platform(config): # type: (HostConfig) -> str
def get_coverage_platform(config: HostConfig) -> str:
"""Return the platform label for the given host config."""
if isinstance(config, PosixRemoteConfig):
platform = f'remote-{sanitize_host_name(config.name)}'
@ -203,7 +203,7 @@ def get_coverage_environment(
return env
def get_coverage_config(args): # type: (TestConfig) -> str
def get_coverage_config(args: TestConfig) -> str:
"""Return the path to the coverage config, creating the config if it does not already exist."""
try:
return get_coverage_config.path # type: ignore[attr-defined]
@ -228,7 +228,7 @@ def get_coverage_config(args): # type: (TestConfig) -> str
return path
def generate_coverage_config(args): # type: (TestConfig) -> str
def generate_coverage_config(args: TestConfig) -> str:
"""Generate code coverage configuration for tests."""
if data_context().content.collection:
coverage_config = generate_collection_coverage_config(args)
@ -238,7 +238,7 @@ def generate_coverage_config(args): # type: (TestConfig) -> str
return coverage_config
def generate_ansible_coverage_config(): # type: () -> str
def generate_ansible_coverage_config() -> str:
"""Generate code coverage configuration for Ansible tests."""
coverage_config = '''
[run]
@ -259,7 +259,7 @@ omit =
return coverage_config
def generate_collection_coverage_config(args): # type: (TestConfig) -> str
def generate_collection_coverage_config(args: TestConfig) -> str:
"""Generate code coverage configuration for Ansible Collection tests."""
coverage_config = '''
[run]

@ -73,7 +73,7 @@ class DataContext:
self.content = content # type: ContentLayout
def create_collection_layouts(self): # type: () -> t.List[ContentLayout]
def create_collection_layouts(self) -> t.List[ContentLayout]:
"""
Return a list of collection layouts, one for each collection in the same collection root as the current collection layout.
An empty list is returned if the current content layout is not a collection layout.
@ -165,7 +165,7 @@ class DataContext:
return tuple((os.path.join(source_provider.root, path), path) for path in source_provider.get_paths(source_provider.root))
@property
def ansible_source(self): # type: () -> t.Tuple[t.Tuple[str, str], ...]
def ansible_source(self) -> t.Tuple[t.Tuple[str, str], ...]:
"""Return a tuple of Ansible source files with both absolute and relative paths."""
if not self.__ansible_source:
self.__ansible_source = self.__create_ansible_source()
@ -220,7 +220,7 @@ class DataContext:
@cache
def data_context(): # type: () -> DataContext
def data_context() -> DataContext:
"""Initialize provider plugins."""
provider_types = (
'layout',
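
`data_context`, like `sanity_get_tests` and several other zero-argument functions in this change, is wrapped in `@cache`, so the annotated return type is effectively the type of a process-wide singleton. A minimal sketch of the idiom, substituting `functools.lru_cache` for ansible-test's own `cache` helper:

import typing as t
from functools import lru_cache


class DataContext:
    """Stand-in for the real context object."""
    def __init__(self) -> None:
        self.providers: t.Tuple[str, ...] = ('layout', 'source')


@lru_cache(maxsize=None)
def data_context() -> DataContext:
    """Create the context on first use and return the same instance on every later call."""
    return DataContext()


assert data_context() is data_context()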

@ -78,7 +78,7 @@ from .content_config import (
@contextlib.contextmanager
def delegation_context(args, host_state): # type: (EnvironmentConfig, HostState) -> t.Iterator[None]
def delegation_context(args: EnvironmentConfig, host_state: HostState) -> t.Iterator[None]:
"""Context manager for serialized host state during delegation."""
make_dirs(ResultType.TMP.path)
@ -99,7 +99,7 @@ def delegation_context(args, host_state): # type: (EnvironmentConfig, HostState
args.host_path = None
def delegate(args, host_state, exclude, require): # type: (CommonConfig, HostState, t.List[str], t.List[str]) -> None
def delegate(args: CommonConfig, host_state: HostState, exclude: t.List[str], require: t.List[str]) -> None:
"""Delegate execution of ansible-test to another environment."""
assert isinstance(args, EnvironmentConfig)
@ -121,7 +121,7 @@ def delegate(args, host_state, exclude, require): # type: (CommonConfig, HostSt
delegate_command(args, host_state, exclude, require)
def delegate_command(args, host_state, exclude, require): # type: (EnvironmentConfig, HostState, t.List[str], t.List[str]) -> None
def delegate_command(args: EnvironmentConfig, host_state: HostState, exclude: t.List[str], require: t.List[str]) -> None:
"""Delegate execution based on the provided host state."""
con = host_state.controller_profile.get_origin_controller_connection()
working_directory = host_state.controller_profile.get_working_directory()
@ -227,7 +227,7 @@ def insert_options(command, options):
return result
def download_results(args, con, content_root, success): # type: (EnvironmentConfig, Connection, str, bool) -> None
def download_results(args: EnvironmentConfig, con: Connection, content_root: str, success: bool) -> None:
"""Download results from a delegated controller."""
remote_results_root = os.path.join(content_root, data_context().content.results_path)
local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))

@ -11,32 +11,32 @@ from .util import (
)
def parse_diff(lines): # type: (t.List[str]) -> t.List[FileDiff]
def parse_diff(lines: t.List[str]) -> t.List[FileDiff]:
"""Parse the given diff lines and return a list of FileDiff objects representing the changes of each file."""
return DiffParser(lines).files
class FileDiff:
"""Parsed diff for a single file."""
def __init__(self, old_path, new_path): # type: (str, str) -> None
def __init__(self, old_path: str, new_path: str) -> None:
self.old = DiffSide(old_path, new=False)
self.new = DiffSide(new_path, new=True)
self.headers = [] # type: t.List[str]
self.binary = False
def append_header(self, line): # type: (str) -> None
def append_header(self, line: str) -> None:
"""Append the given line to the list of headers for this file."""
self.headers.append(line)
@property
def is_complete(self): # type: () -> bool
def is_complete(self) -> bool:
"""True if the diff is complete, otherwise False."""
return self.old.is_complete and self.new.is_complete
class DiffSide:
"""Parsed diff for a single 'side' of a single file."""
def __init__(self, path, new): # type: (str, bool) -> None
def __init__(self, path: str, new: bool) -> None:
self.path = path
self.new = new
self.prefix = '+' if self.new else '-'
@ -51,13 +51,13 @@ class DiffSide:
self._lines_remaining = 0
self._range_start = 0
def set_start(self, line_start, line_count): # type: (int, int) -> None
def set_start(self, line_start: int, line_count: int) -> None:
"""Set the starting line and line count."""
self._next_line_number = line_start
self._lines_remaining = line_count
self._range_start = 0
def append(self, line): # type: (str) -> None
def append(self, line: str) -> None:
"""Append the given line."""
if self._lines_remaining <= 0:
raise Exception('Diff range overflow.')
@ -93,11 +93,11 @@ class DiffSide:
self._next_line_number += 1
@property
def is_complete(self): # type: () -> bool
def is_complete(self) -> bool:
"""True if the diff is complete, otherwise False."""
return self._lines_remaining == 0
def format_lines(self, context=True): # type: (bool) -> t.List[str]
def format_lines(self, context: bool = True) -> t.List[str]:
"""Format the diff and return a list of lines, optionally including context."""
if context:
lines = self.lines_and_context
@ -109,7 +109,7 @@ class DiffSide:
class DiffParser:
"""Parse diff lines."""
def __init__(self, lines): # type: (t.List[str]) -> None
def __init__(self, lines: t.List[str]) -> None:
self.lines = lines
self.files = [] # type: t.List[FileDiff]
@ -146,7 +146,7 @@ class DiffParser:
self.complete_file()
def process_start(self): # type: () -> None
def process_start(self) -> None:
"""Process a diff start line."""
self.complete_file()
@ -158,7 +158,7 @@ class DiffParser:
self.file = FileDiff(match.group('old_path'), match.group('new_path'))
self.action = self.process_continue
def process_range(self): # type: () -> None
def process_range(self) -> None:
"""Process a diff range line."""
match = re.search(r'^@@ -((?P<old_start>[0-9]+),)?(?P<old_count>[0-9]+) \+((?P<new_start>[0-9]+),)?(?P<new_count>[0-9]+) @@', self.line)
@ -169,7 +169,7 @@ class DiffParser:
self.file.new.set_start(int(match.group('new_start') or 1), int(match.group('new_count')))
self.action = self.process_content
def process_continue(self): # type: () -> None
def process_continue(self) -> None:
"""Process a diff start, range or header line."""
if self.line.startswith('diff '):
self.process_start()
@ -178,7 +178,7 @@ class DiffParser:
else:
self.process_header()
def process_header(self): # type: () -> None
def process_header(self) -> None:
"""Process a diff header line."""
if self.line.startswith('Binary files '):
self.file.binary = True
@ -189,7 +189,7 @@ class DiffParser:
else:
self.file.append_header(self.line)
def process_content(self): # type: () -> None
def process_content(self) -> None:
"""Process a diff content line."""
if self.line == r'\ No newline at end of file':
if self.previous_line.startswith(' '):
@ -218,7 +218,7 @@ class DiffParser:
else:
raise Exception('Unexpected diff content line.')
def complete_file(self): # type: () -> None
def complete_file(self) -> None:
"""Complete processing of the current file, if any."""
if not self.file:
return
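
`DiffParser` advances by reassigning `self.action` to one of its own bound methods, so each line is handled by whichever state is current. A stripped-down sketch of that dispatch style (a toy header/body parser, not the real class):

import typing as t


class LineParser:
    """Toy line-driven parser that uses a bound method as its current state."""
    def __init__(self, lines: t.List[str]) -> None:
        self.lines = lines
        self.line = ''
        self.headers: t.List[str] = []
        self.content: t.List[str] = []
        self.action: t.Callable[[], None] = self.process_header

    def run(self) -> None:
        """Feed every line to whichever handler is currently active."""
        for self.line in self.lines:
            self.action()

    def process_header(self) -> None:
        """Collect header lines until a blank line switches to content mode."""
        if not self.line:
            self.action = self.process_content
            return
        self.headers.append(self.line)

    def process_content(self) -> None:
        """Collect content lines."""
        self.content.append(self.line)


parser = LineParser(['Subject: example', '', 'body line'])
parser.run()
assert parser.headers == ['Subject: example'] and parser.content == ['body line']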

@ -45,13 +45,13 @@ MAX_NUM_OPEN_FILES = 10240
class DockerCommand:
"""Details about the available docker command."""
def __init__(self, command, executable, version): # type: (str, str, str) -> None
def __init__(self, command: str, executable: str, version: str) -> None:
self.command = command
self.executable = executable
self.version = version
@staticmethod
def detect(): # type: () -> t.Optional[DockerCommand]
def detect() -> t.Optional[DockerCommand]:
"""Detect and return the available docker command, or None."""
if os.environ.get('ANSIBLE_TEST_PREFER_PODMAN'):
commands = list(reversed(DOCKER_COMMANDS))
@ -74,7 +74,7 @@ class DockerCommand:
return None
def require_docker(): # type: () -> DockerCommand
def require_docker() -> DockerCommand:
"""Return the docker command to invoke. Raises an exception if docker is not available."""
if command := get_docker_command():
return command
@ -83,18 +83,18 @@ def require_docker(): # type: () -> DockerCommand
@cache
def get_docker_command(): # type: () -> t.Optional[DockerCommand]
def get_docker_command() -> t.Optional[DockerCommand]:
"""Return the docker command to invoke, or None if docker is not available."""
return DockerCommand.detect()
def docker_available(): # type: () -> bool
def docker_available() -> bool:
"""Return True if docker is available, otherwise return False."""
return bool(get_docker_command())
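For orientation, a hedged sketch of how the detection helpers above are typically used (the import path is an assumption about the internal layout):

# Sketch only; the import path is an assumption.
from ansible_test._internal.docker_util import docker_available, require_docker

if docker_available():
    command = require_docker()  # per the docstring above, raises if no container runtime is found
    print(f'using {command.command} {command.version} ({command.executable})')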
@cache
def get_docker_host_ip(): # type: () -> str
def get_docker_host_ip() -> str:
"""Return the IP of the Docker host."""
docker_host_ip = socket.gethostbyname(get_docker_hostname())
@ -104,7 +104,7 @@ def get_docker_host_ip(): # type: () -> str
@cache
def get_docker_hostname(): # type: () -> str
def get_docker_hostname() -> str:
"""Return the hostname of the Docker service."""
docker_host = os.environ.get('DOCKER_HOST')
@ -123,7 +123,7 @@ def get_docker_hostname(): # type: () -> str
@cache
def get_podman_host_ip(): # type: () -> str
def get_podman_host_ip() -> str:
"""Return the IP of the Podman host."""
podman_host_ip = socket.gethostbyname(get_podman_hostname())
@ -133,7 +133,7 @@ def get_podman_host_ip(): # type: () -> str
@cache
def get_podman_default_hostname(): # type: () -> t.Optional[str]
def get_podman_default_hostname() -> t.Optional[str]:
"""
Return the default hostname of the Podman service.
@ -160,7 +160,7 @@ def get_podman_default_hostname(): # type: () -> t.Optional[str]
@cache
def _get_podman_remote(): # type: () -> t.Optional[str]
def _get_podman_remote() -> t.Optional[str]:
# URL value resolution precedence:
# - command line value
# - environment variable CONTAINER_HOST
@ -183,7 +183,7 @@ def _get_podman_remote(): # type: () -> t.Optional[str]
@cache
def get_podman_hostname(): # type: () -> str
def get_podman_hostname() -> str:
"""Return the hostname of the Podman service."""
hostname = _get_podman_remote()
@ -195,7 +195,7 @@ def get_podman_hostname(): # type: () -> str
@cache
def get_docker_container_id(): # type: () -> t.Optional[str]
def get_docker_container_id() -> t.Optional[str]:
"""Return the current container ID if running in a container, otherwise return None."""
path = '/proc/self/cpuset'
container_id = None
@ -219,7 +219,7 @@ def get_docker_container_id(): # type: () -> t.Optional[str]
return container_id
def get_docker_preferred_network_name(args): # type: (EnvironmentConfig) -> str
def get_docker_preferred_network_name(args: EnvironmentConfig) -> str:
"""
Return the preferred network name for use with Docker. The selection logic is:
- the network selected by the user with `--docker-network`
@ -249,12 +249,12 @@ def get_docker_preferred_network_name(args): # type: (EnvironmentConfig) -> str
return network
def is_docker_user_defined_network(network): # type: (str) -> bool
def is_docker_user_defined_network(network: str) -> bool:
"""Return True if the network being used is a user-defined network."""
return bool(network) and network != 'bridge'
def docker_pull(args, image): # type: (EnvironmentConfig, str) -> None
def docker_pull(args: EnvironmentConfig, image: str) -> None:
"""
Pull the specified image if it is not available.
Images without a tag or digest will not be pulled.
@ -279,7 +279,7 @@ def docker_pull(args, image): # type: (EnvironmentConfig, str) -> None
raise ApplicationError('Failed to pull docker image "%s".' % image)
def docker_cp_to(args, container_id, src, dst): # type: (EnvironmentConfig, str, str, str) -> None
def docker_cp_to(args: EnvironmentConfig, container_id: str, src: str, dst: str) -> None:
"""Copy a file to the specified container."""
docker_command(args, ['cp', src, '%s:%s' % (container_id, dst)], capture=True)
@ -327,7 +327,7 @@ def docker_run(
raise ApplicationError('Failed to run docker image "%s".' % image)
def docker_start(args, container_id, options=None): # type: (EnvironmentConfig, str, t.Optional[t.List[str]]) -> t.Tuple[t.Optional[str], t.Optional[str]]
def docker_start(args: EnvironmentConfig, container_id: str, options: t.Optional[t.List[str]] = None) -> t.Tuple[t.Optional[str], t.Optional[str]]:
"""
Start a docker container by name or ID
"""
@ -345,7 +345,7 @@ def docker_start(args, container_id, options=None): # type: (EnvironmentConfig,
raise ApplicationError('Failed to run docker container "%s".' % container_id)
def docker_rm(args, container_id): # type: (EnvironmentConfig, str) -> None
def docker_rm(args: EnvironmentConfig, container_id: str) -> None:
"""Remove the specified container."""
try:
docker_command(args, ['rm', '-f', container_id], capture=True)
@ -377,70 +377,70 @@ class DockerInspect:
# primary properties
@property
def id(self): # type: () -> str
def id(self) -> str:
"""Return the ID of the container."""
return self.inspection['Id']
@property
def network_settings(self): # type: () -> t.Dict[str, t.Any]
def network_settings(self) -> t.Dict[str, t.Any]:
"""Return a dictionary of the container network settings."""
return self.inspection['NetworkSettings']
@property
def state(self): # type: () -> t.Dict[str, t.Any]
def state(self) -> t.Dict[str, t.Any]:
"""Return a dictionary of the container state."""
return self.inspection['State']
@property
def config(self): # type: () -> t.Dict[str, t.Any]
def config(self) -> t.Dict[str, t.Any]:
"""Return a dictionary of the container configuration."""
return self.inspection['Config']
# nested properties
@property
def ports(self): # type: () -> t.Dict[str, t.List[t.Dict[str, str]]]
def ports(self) -> t.Dict[str, t.List[t.Dict[str, str]]]:
"""Return a dictionary of ports the container has published."""
return self.network_settings['Ports']
@property
def networks(self): # type: () -> t.Optional[t.Dict[str, t.Dict[str, t.Any]]]
def networks(self) -> t.Optional[t.Dict[str, t.Dict[str, t.Any]]]:
"""Return a dictionary of the networks the container is attached to, or None if running under podman, which does not support networks."""
return self.network_settings.get('Networks')
@property
def running(self): # type: () -> bool
def running(self) -> bool:
"""Return True if the container is running, otherwise False."""
return self.state['Running']
@property
def env(self): # type: () -> t.List[str]
def env(self) -> t.List[str]:
"""Return a list of the environment variables used to create the container."""
return self.config['Env']
@property
def image(self): # type: () -> str
def image(self) -> str:
"""Return the image used to create the container."""
return self.config['Image']
# functions
def env_dict(self): # type: () -> t.Dict[str, str]
def env_dict(self) -> t.Dict[str, str]:
"""Return a dictionary of the environment variables used to create the container."""
return dict((item[0], item[1]) for item in [e.split('=', 1) for e in self.env])
def get_tcp_port(self, port): # type: (int) -> t.Optional[t.List[t.Dict[str, str]]]
def get_tcp_port(self, port: int) -> t.Optional[t.List[t.Dict[str, str]]]:
"""Return a list of the endpoints published by the container for the specified TCP port, or None if it is not published."""
return self.ports.get('%d/tcp' % port)
def get_network_names(self): # type: () -> t.Optional[t.List[str]]
def get_network_names(self) -> t.Optional[t.List[str]]:
"""Return a list of the network names the container is attached to."""
if self.networks is None:
return None
return sorted(self.networks)
def get_network_name(self): # type: () -> str
def get_network_name(self) -> str:
"""Return the network name the container is attached to. Raises an exception if no network, or more than one, is attached."""
networks = self.get_network_names()
@ -452,7 +452,7 @@ class DockerInspect:
return networks[0]
def get_ip_address(self): # type: () -> t.Optional[str]
def get_ip_address(self) -> t.Optional[str]:
"""Return the IP address of the container for the preferred docker network."""
if self.networks:
network_name = get_docker_preferred_network_name(self.args)
@ -472,7 +472,7 @@ class DockerInspect:
return ipaddress
def docker_inspect(args, identifier, always=False): # type: (EnvironmentConfig, str, bool) -> DockerInspect
def docker_inspect(args: EnvironmentConfig, identifier: str, always: bool = False) -> DockerInspect:
"""
Return the results of `docker container inspect` for the specified container.
Raises a ContainerNotFoundError if the container was not found.
@ -493,12 +493,12 @@ def docker_inspect(args, identifier, always=False): # type: (EnvironmentConfig,
raise ContainerNotFoundError(identifier)
def docker_network_disconnect(args, container_id, network): # type: (EnvironmentConfig, str, str) -> None
def docker_network_disconnect(args: EnvironmentConfig, container_id: str, network: str) -> None:
"""Disconnect the specified docker container from the given network."""
docker_command(args, ['network', 'disconnect', network, container_id], capture=True)
def docker_image_exists(args, image): # type: (EnvironmentConfig, str) -> bool
def docker_image_exists(args: EnvironmentConfig, image: str) -> bool:
"""Return True if the image exists, otherwise False."""
try:
docker_command(args, ['image', 'inspect', image], capture=True)
@ -531,13 +531,13 @@ def docker_exec(
output_stream=output_stream, data=data)
def docker_info(args): # type: (CommonConfig) -> t.Dict[str, t.Any]
def docker_info(args: CommonConfig) -> t.Dict[str, t.Any]:
"""Return a dictionary containing details from the `docker info` command."""
stdout, _dummy = docker_command(args, ['info', '--format', '{{json .}}'], capture=True, always=True)
return json.loads(stdout)
def docker_version(args): # type: (CommonConfig) -> t.Dict[str, t.Any]
def docker_version(args: CommonConfig) -> t.Dict[str, t.Any]:
"""Return a dictionary containing details from the `docker version` command."""
stdout, _dummy = docker_command(args, ['version', '--format', '{{json .}}'], capture=True, always=True)
return json.loads(stdout)
@ -565,7 +565,7 @@ def docker_command(
output_stream=output_stream, data=data)
def docker_environment(): # type: () -> t.Dict[str, str]
def docker_environment() -> t.Dict[str, str]:
"""Return a dictionary of docker related environment variables found in the current environment."""
env = common_environment()
env.update(dict((key, os.environ[key]) for key in os.environ if key.startswith('DOCKER_') or key.startswith('CONTAINER_')))
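And a hedged sketch tying the inspection helpers together; it assumes an EnvironmentConfig instance named args that ansible-test has already built, an example container name, and an assumed import path:

# Sketch only; `args` is an existing EnvironmentConfig and the container name is an example.
from ansible_test._internal.docker_util import docker_inspect

inspection = docker_inspect(args, 'test-container')  # raises ContainerNotFoundError if absent

if inspection.running:
    print(inspection.get_network_names())  # networks attached to the container, or None under podman
    print(inspection.get_tcp_port(22))      # published endpoints for TCP port 22, or None
    print(inspection.get_ip_address())      # IP address on the preferred docker network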
@ -6,17 +6,17 @@ import typing as t
ENCODING = 'utf-8'
def to_optional_bytes(value, errors='strict'): # type: (t.Optional[t.AnyStr], str) -> t.Optional[bytes]
def to_optional_bytes(value: t.Optional[t.AnyStr], errors: str = 'strict') -> t.Optional[bytes]:
"""Return the given value as bytes encoded using UTF-8 if not already bytes, or None if the value is None."""
return None if value is None else to_bytes(value, errors)
def to_optional_text(value, errors='strict'): # type: (t.Optional[t.AnyStr], str) -> t.Optional[str]
def to_optional_text(value: t.Optional[t.AnyStr], errors: str = 'strict') -> t.Optional[str]:
"""Return the given value as text decoded using UTF-8 if not already text, or None if the value is None."""
return None if value is None else to_text(value, errors)
def to_bytes(value, errors='strict'): # type: (t.AnyStr, str) -> bytes
def to_bytes(value: t.AnyStr, errors: str = 'strict') -> bytes:
"""Return the given value as bytes encoded using UTF-8 if not already bytes."""
if isinstance(value, bytes):
return value
@ -27,7 +27,7 @@ def to_bytes(value, errors='strict'): # type: (t.AnyStr, str) -> bytes
raise Exception('value is not bytes or text: %s' % type(value))
def to_text(value, errors='strict'): # type: (t.AnyStr, str) -> str
def to_text(value: t.AnyStr, errors: str = 'strict') -> str:
"""Return the given value as text decoded using UTF-8 if not already text."""
if isinstance(value, bytes):
return value.decode(ENCODING, errors)
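The four helpers above are symmetric; a small round-trip sketch (the import path is an assumption):

# Sketch only; the import path is an assumption.
from ansible_test._internal.encoding import to_bytes, to_optional_text, to_text

assert to_bytes('ansible') == b'ansible'          # text is encoded as UTF-8
assert to_text(b'ansible') == 'ansible'           # bytes are decoded as UTF-8
assert to_text('already text') == 'already text'  # values that are already text pass through
assert to_optional_text(None) is None             # the optional variants preserve None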
@ -33,7 +33,7 @@ from .provisioning import (
)
def get_changes_filter(args): # type: (TestConfig) -> t.List[str]
def get_changes_filter(args: TestConfig) -> t.List[str]:
"""Return a list of targets which should be tested based on the changes made."""
paths = detect_changes(args)
@ -57,7 +57,7 @@ def get_changes_filter(args): # type: (TestConfig) -> t.List[str]
return args.metadata.change_description.targets
def detect_changes(args): # type: (TestConfig) -> t.Optional[t.List[str]]
def detect_changes(args: TestConfig) -> t.Optional[t.List[str]]:
"""Return a list of changed paths."""
if args.changed:
paths = get_ci_provider().detect_changes(args)
@ -93,7 +93,7 @@ class NoTestsForChanges(ApplicationWarning):
class Delegate(Exception):
"""Trigger command delegation."""
def __init__(self, host_state, exclude=None, require=None): # type: (HostState, t.List[str], t.List[str]) -> None
def __init__(self, host_state: HostState, exclude: t.Optional[t.List[str]] = None, require: t.Optional[t.List[str]] = None) -> None:
super().__init__()
self.host_state = host_state
@ -103,7 +103,7 @@ class Delegate(Exception):
class ListTargets(Exception):
"""List integration test targets instead of executing them."""
def __init__(self, target_names): # type: (t.List[str]) -> None
def __init__(self, target_names: t.List[str]) -> None:
super().__init__()
self.target_names = target_names
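Delegate and ListTargets are flow-control exceptions rather than errors: commands raise them and the caller reacts. A hedged sketch of the ListTargets side follows (run_command and args are illustrative placeholders; Delegate is handled analogously, to re-run the command on the selected host):

# Sketch only; run_command and args are illustrative placeholders.
try:
    run_command(args)  # a command may raise ListTargets instead of executing the targets
except ListTargets as ex:
    for target_name in ex.target_names:
        print(target_name)  # list the integration test targets without running them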
@ -12,23 +12,23 @@ from .util import (
class Git:
"""Wrapper around git command-line tools."""
def __init__(self, root=None): # type: (t.Optional[str]) -> None
def __init__(self, root: t.Optional[str] = None) -> None:
self.git = 'git'
self.root = root
def get_diff(self, args, git_options=None): # type: (t.List[str], t.Optional[t.List[str]]) -> t.List[str]
def get_diff(self, args: t.List[str], git_options: t.Optional[t.List[str]] = None) -> t.List[str]:
"""Run `git diff` and return the result as a list."""
cmd = ['diff'] + args
if git_options is None:
git_options = ['-c', 'core.quotePath=']
return self.run_git_split(git_options + cmd, '\n', str_errors='replace')
def get_diff_names(self, args): # type: (t.List[str]) -> t.List[str]
def get_diff_names(self, args: t.List[str]) -> t.List[str]:
"""Return a list of file names from the `git diff` command."""
cmd = ['diff', '--name-only', '--no-renames', '-z'] + args
return self.run_git_split(cmd, '\0')
def get_submodule_paths(self): # type: () -> t.List[str]
def get_submodule_paths(self) -> t.List[str]:
"""Return a list of submodule paths recursively."""
cmd = ['submodule', 'status', '--recursive']
output = self.run_git_split(cmd, '\n')
@ -45,22 +45,22 @@ class Git:
return submodule_paths
def get_file_names(self, args): # type: (t.List[str]) -> t.List[str]
def get_file_names(self, args: t.List[str]) -> t.List[str]:
"""Return a list of file names from the `git ls-files` command."""
cmd = ['ls-files', '-z'] + args
return self.run_git_split(cmd, '\0')
def get_branches(self): # type: () -> t.List[str]
def get_branches(self) -> t.List[str]:
"""Return the list of branches."""
cmd = ['for-each-ref', 'refs/heads/', '--format', '%(refname:strip=2)']
return self.run_git_split(cmd)
def get_branch(self): # type: () -> str
def get_branch(self) -> str:
"""Return the current branch name."""
cmd = ['symbolic-ref', '--short', 'HEAD']
return self.run_git(cmd).strip()
def get_rev_list(self, commits=None, max_count=None): # type: (t.Optional[t.List[str]], t.Optional[int]) -> t.List[str]
def get_rev_list(self, commits: t.Optional[t.List[str]] = None, max_count: t.Optional[int] = None) -> t.List[str]:
"""Return the list of results from the `git rev-list` command."""
cmd = ['rev-list']
@ -74,12 +74,12 @@ class Git:
return self.run_git_split(cmd)
def get_branch_fork_point(self, branch): # type: (str) -> str
def get_branch_fork_point(self, branch: str) -> str:
"""Return a reference to the point at which the given branch was forked."""
cmd = ['merge-base', '--fork-point', branch]
return self.run_git(cmd).strip()
def is_valid_ref(self, ref): # type: (str) -> bool
def is_valid_ref(self, ref: str) -> bool:
"""Return True if the given reference is valid, otherwise return False."""
cmd = ['show', ref]
try:
@ -88,7 +88,7 @@ class Git:
except SubprocessError:
return False
def run_git_split(self, cmd, separator=None, str_errors='strict'): # type: (t.List[str], t.Optional[str], str) -> t.List[str]
def run_git_split(self, cmd: t.List[str], separator: t.Optional[str] = None, str_errors: str = 'strict') -> t.List[str]:
"""Run the given `git` command and return the results as a list."""
output = self.run_git(cmd, str_errors=str_errors).strip(separator)
@ -97,6 +97,6 @@ class Git:
return output.split(separator)
def run_git(self, cmd, str_errors='strict'): # type: (t.List[str], str) -> str
def run_git(self, cmd: t.List[str], str_errors: str = 'strict') -> str:
"""Run the given `git` command and return the results as a string."""
return raw_command([self.git] + cmd, cwd=self.root, capture=True, str_errors=str_errors)[0]
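A brief usage sketch for the Git wrapper above (the import path is an assumption; the revision argument is only an example):

# Sketch only; the import path and the revision argument are assumptions.
from ansible_test._internal.git import Git

git = Git()                                     # operate on the current working tree
branch = git.get_branch()                       # current branch name
changed_paths = git.get_diff_names(['HEAD~1'])  # file names changed since the previous commit
submodule_paths = git.get_submodule_paths()     # recursive submodule paths

print(branch, len(changed_paths), len(submodule_paths))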
@ -50,14 +50,14 @@ class OriginCompletionConfig(PosixCompletionConfig):
super().__init__(name='origin')
@property
def supported_pythons(self): # type: () -> t.List[str]
def supported_pythons(self) -> t.List[str]:
"""Return a list of the supported Python versions."""
current_version = version_to_str(sys.version_info[:2])
versions = [version for version in SUPPORTED_PYTHON_VERSIONS if version == current_version] + \
[version for version in SUPPORTED_PYTHON_VERSIONS if version != current_version]
return versions
def get_python_path(self, version): # type: (str) -> str
def get_python_path(self, version: str) -> str:
"""Return the path of the requested Python version."""
version = find_python(version)
return version
@ -74,7 +74,7 @@ class HostContext:
controller_config: t.Optional['PosixConfig']
@property
def controller(self): # type: () -> bool
def controller(self) -> bool:
"""True if the context is for the controller, otherwise False."""
return not self.controller_config
@ -83,15 +83,15 @@ class HostContext:
class HostConfig(metaclass=abc.ABCMeta):
"""Base class for host configuration."""
@abc.abstractmethod
def get_defaults(self, context): # type: (HostContext) -> CompletionConfig
def get_defaults(self, context: HostContext) -> CompletionConfig:
"""Return the default settings."""
@abc.abstractmethod
def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None:
"""Apply default settings."""
@property
def is_managed(self): # type: () -> bool
def is_managed(self) -> bool:
"""
True if the host is a managed instance, otherwise False.
Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user.
@ -106,16 +106,16 @@ class PythonConfig(metaclass=abc.ABCMeta):
path: t.Optional[str] = None
@property
def tuple(self): # type: () -> t.Tuple[int, ...]
def tuple(self) -> t.Tuple[int, ...]:
"""Return the Python version as a tuple."""
return str_to_version(self.version)
@property
def major_version(self): # type: () -> int
def major_version(self) -> int:
"""Return the Python major version."""
return self.tuple[0]
def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None
def apply_defaults(self, context: HostContext, defaults: PosixCompletionConfig) -> None:
"""Apply default settings."""
if self.version in (None, 'default'):
self.version = defaults.get_default_python(context.controller)
@ -130,7 +130,7 @@ class PythonConfig(metaclass=abc.ABCMeta):
@property
@abc.abstractmethod
def is_managed(self): # type: () -> bool
def is_managed(self) -> bool:
"""
True if this Python is a managed instance, otherwise False.
Managed instances are used exclusively by ansible-test and can safely have requirements installed without explicit permission from the user.
@ -141,7 +141,7 @@ class PythonConfig(metaclass=abc.ABCMeta):
class NativePythonConfig(PythonConfig):
"""Configuration for native Python."""
@property
def is_managed(self): # type: () -> bool
def is_managed(self) -> bool:
"""
True if this Python is a managed instance, otherwise False.
Managed instances are used exclusively by ansible-test and can safely have requirements installed without explicit permission from the user.
@ -154,7 +154,7 @@ class VirtualPythonConfig(PythonConfig):
"""Configuration for Python in a virtual environment."""
system_site_packages: t.Optional[bool] = None
def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None
def apply_defaults(self, context: HostContext, defaults: PosixCompletionConfig) -> None:
"""Apply default settings."""
super().apply_defaults(context, defaults)
@ -162,7 +162,7 @@ class VirtualPythonConfig(PythonConfig):
self.system_site_packages = False
@property
def is_managed(self): # type: () -> bool
def is_managed(self) -> bool:
"""
True if this Python is a managed instance, otherwise False.
Managed instances are used exclusively by ansible-test and can safely have requirements installed without explicit permission from the user.
@ -177,14 +177,14 @@ class PosixConfig(HostConfig, metaclass=abc.ABCMeta):
@property
@abc.abstractmethod
def have_root(self): # type: () -> bool
def have_root(self) -> bool:
"""True if root is available, otherwise False."""
@abc.abstractmethod
def get_defaults(self, context): # type: (HostContext) -> PosixCompletionConfig
def get_defaults(self, context: HostContext) -> PosixCompletionConfig:
"""Return the default settings."""
def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None:
"""Apply default settings."""
assert isinstance(defaults, PosixCompletionConfig)
@ -198,7 +198,7 @@ class PosixConfig(HostConfig, metaclass=abc.ABCMeta):
class ControllerHostConfig(PosixConfig, metaclass=abc.ABCMeta):
"""Base class for host configurations which support the controller."""
@abc.abstractmethod
def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig]
def get_default_targets(self, context: HostContext) -> t.List[ControllerConfig]:
"""Return the default targets for this host config."""
@ -210,16 +210,16 @@ class RemoteConfig(HostConfig, metaclass=abc.ABCMeta):
arch: t.Optional[str] = None
@property
def platform(self): # type: () -> str
def platform(self) -> str:
"""The name of the platform."""
return self.name.partition('/')[0]
@property
def version(self): # type: () -> str
def version(self) -> str:
"""The version of the platform."""
return self.name.partition('/')[2]
def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None:
"""Apply default settings."""
assert isinstance(defaults, RemoteCompletionConfig)
@ -232,7 +232,7 @@ class RemoteConfig(HostConfig, metaclass=abc.ABCMeta):
self.arch = self.arch or defaults.arch or Architecture.X86_64
@property
def is_managed(self): # type: () -> bool
def is_managed(self) -> bool:
"""
True if this host is a managed instance, otherwise False.
Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user.
@ -247,7 +247,7 @@ class PosixSshConfig(PosixConfig):
host: t.Optional[str] = None
port: t.Optional[int] = None
def get_defaults(self, context): # type: (HostContext) -> PosixSshCompletionConfig
def get_defaults(self, context: HostContext) -> PosixSshCompletionConfig:
"""Return the default settings."""
return PosixSshCompletionConfig(
user=self.user,
@ -255,7 +255,7 @@ class PosixSshConfig(PosixConfig):
)
@property
def have_root(self): # type: () -> bool
def have_root(self) -> bool:
"""True if root is available, otherwise False."""
return self.user == 'root'
@ -265,11 +265,11 @@ class InventoryConfig(HostConfig):
"""Configuration using inventory."""
path: t.Optional[str] = None
def get_defaults(self, context): # type: (HostContext) -> InventoryCompletionConfig
def get_defaults(self, context: HostContext) -> InventoryCompletionConfig:
"""Return the default settings."""
return InventoryCompletionConfig()
def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None:
"""Apply default settings."""
assert isinstance(defaults, InventoryCompletionConfig)
@ -283,7 +283,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig):
privileged: t.Optional[bool] = None
seccomp: t.Optional[str] = None
def get_defaults(self, context): # type: (HostContext) -> DockerCompletionConfig
def get_defaults(self, context: HostContext) -> DockerCompletionConfig:
"""Return the default settings."""
return filter_completion(docker_completion()).get(self.name) or DockerCompletionConfig(
name=self.name,
@ -291,7 +291,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig):
placeholder=True,
)
def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig]
def get_default_targets(self, context: HostContext) -> t.List[ControllerConfig]:
"""Return the default targets for this host config."""
if self.name in filter_completion(docker_completion()):
defaults = self.get_defaults(context)
@ -301,7 +301,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig):
return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in pythons.items()]
def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None:
"""Apply default settings."""
assert isinstance(defaults, DockerCompletionConfig)
@ -317,7 +317,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig):
self.privileged = False
@property
def is_managed(self): # type: () -> bool
def is_managed(self) -> bool:
"""
True if this host is a managed instance, otherwise False.
Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user.
@ -325,7 +325,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig):
return True
@property
def have_root(self): # type: () -> bool
def have_root(self) -> bool:
"""True if root is available, otherwise False."""
return True
@ -335,14 +335,14 @@ class PosixRemoteConfig(RemoteConfig, ControllerHostConfig, PosixConfig):
"""Configuration for a POSIX remote host."""
become: t.Optional[str] = None
def get_defaults(self, context): # type: (HostContext) -> PosixRemoteCompletionConfig
def get_defaults(self, context: HostContext) -> PosixRemoteCompletionConfig:
"""Return the default settings."""
return filter_completion(remote_completion()).get(self.name) or remote_completion().get(self.platform) or PosixRemoteCompletionConfig(
name=self.name,
placeholder=True,
)
def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig]
def get_default_targets(self, context: HostContext) -> t.List[ControllerConfig]:
"""Return the default targets for this host config."""
if self.name in filter_completion(remote_completion()):
defaults = self.get_defaults(context)
@ -352,7 +352,7 @@ class PosixRemoteConfig(RemoteConfig, ControllerHostConfig, PosixConfig):
return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in pythons.items()]
def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None:
"""Apply default settings."""
assert isinstance(defaults, PosixRemoteCompletionConfig)
@ -361,7 +361,7 @@ class PosixRemoteConfig(RemoteConfig, ControllerHostConfig, PosixConfig):
self.become = self.become or defaults.become
@property
def have_root(self): # type: () -> bool
def have_root(self) -> bool:
"""True if root is available, otherwise False."""
return True
@ -374,7 +374,7 @@ class WindowsConfig(HostConfig, metaclass=abc.ABCMeta):
@dataclasses.dataclass
class WindowsRemoteConfig(RemoteConfig, WindowsConfig):
"""Configuration for a remote Windows host."""
def get_defaults(self, context): # type: (HostContext) -> WindowsRemoteCompletionConfig
def get_defaults(self, context: HostContext) -> WindowsRemoteCompletionConfig:
"""Return the default settings."""
return filter_completion(windows_completion()).get(self.name) or windows_completion().get(self.platform)
@ -395,14 +395,14 @@ class NetworkRemoteConfig(RemoteConfig, NetworkConfig):
collection: t.Optional[str] = None
connection: t.Optional[str] = None
def get_defaults(self, context): # type: (HostContext) -> NetworkRemoteCompletionConfig
def get_defaults(self, context: HostContext) -> NetworkRemoteCompletionConfig:
"""Return the default settings."""
return filter_completion(network_completion()).get(self.name) or NetworkRemoteCompletionConfig(
name=self.name,
placeholder=True,
)
def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None:
"""Apply default settings."""
assert isinstance(defaults, NetworkRemoteCompletionConfig)
@ -420,16 +420,16 @@ class NetworkInventoryConfig(InventoryConfig, NetworkConfig):
@dataclasses.dataclass
class OriginConfig(ControllerHostConfig, PosixConfig):
"""Configuration for the origin host."""
def get_defaults(self, context): # type: (HostContext) -> OriginCompletionConfig
def get_defaults(self, context: HostContext) -> OriginCompletionConfig:
"""Return the default settings."""
return OriginCompletionConfig()
def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig]
def get_default_targets(self, context: HostContext) -> t.List[ControllerConfig]:
"""Return the default targets for this host config."""
return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in get_available_python_versions().items()]
@property
def have_root(self): # type: () -> bool
def have_root(self) -> bool:
"""True if root is available, otherwise False."""
return os.getuid() == 0
@ -439,11 +439,11 @@ class ControllerConfig(PosixConfig):
"""Configuration for the controller host."""
controller: t.Optional[PosixConfig] = None
def get_defaults(self, context): # type: (HostContext) -> PosixCompletionConfig
def get_defaults(self, context: HostContext) -> PosixCompletionConfig:
"""Return the default settings."""
return context.controller_config.get_defaults(context)
def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None:
"""Apply default settings."""
assert isinstance(defaults, PosixCompletionConfig)
@ -456,7 +456,7 @@ class ControllerConfig(PosixConfig):
super().apply_defaults(context, defaults)
@property
def is_managed(self): # type: () -> bool
def is_managed(self) -> bool:
"""
True if the host is a managed instance, otherwise False.
Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user.
@ -464,7 +464,7 @@ class ControllerConfig(PosixConfig):
return self.controller.is_managed
@property
def have_root(self): # type: () -> bool
def have_root(self) -> bool:
"""True if root is available, otherwise False."""
return self.controller.have_root
@ -491,13 +491,13 @@ class HostSettings:
filtered_args: t.List[str]
controller_fallback: t.Optional[FallbackDetail]
def serialize(self, path): # type: (str) -> None
def serialize(self, path: str) -> None:
"""Serialize the host settings to the given path."""
with open_binary_file(path, 'wb') as settings_file:
pickle.dump(self, settings_file)
@staticmethod
def deserialize(path): # type: (str) -> HostSettings
def deserialize(path: str) -> HostSettings:
"""Deserialize host settings from the path."""
with open_binary_file(path) as settings_file:
return pickle.load(settings_file)
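The serialize/deserialize pair above is a plain pickle round-trip; a hedged sketch, assuming a HostSettings instance named settings that ansible-test has already built and an assumed import path:

# Sketch only; `settings` is an existing HostSettings instance and the import path is an assumption.
import tempfile

from ansible_test._internal.host_configs import HostSettings

with tempfile.NamedTemporaryFile() as settings_file:
    settings.serialize(settings_file.name)                    # pickled for handoff during delegation
    restored = HostSettings.deserialize(settings_file.name)

assert restored.filtered_args == settings.filtered_args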
@ -120,7 +120,7 @@ class Inventory:
"""Return an inventory instance created from the given hostname and variables."""
return Inventory(host_groups=dict(all={name: variables}))
def write(self, args, path): # type: (CommonConfig, str) -> None
def write(self, args: CommonConfig, path: str) -> None:
"""Write the given inventory to the specified path on disk."""
# NOTE: Switching the inventory generation to write JSON would be nice, but is currently not possible due to the use of hard-coded inventory filenames.
@ -173,19 +173,19 @@ class HostProfile(t.Generic[THostConfig], metaclass=abc.ABCMeta):
self.cache = {} # type: t.Dict[str, t.Any]
"""Cache that must not be persisted across delegation."""
def provision(self): # type: () -> None
def provision(self) -> None:
"""Provision the host before delegation."""
def setup(self): # type: () -> None
def setup(self) -> None:
"""Perform out-of-band setup before delegation."""
def deprovision(self): # type: () -> None
def deprovision(self) -> None:
"""Deprovision the host after delegation has completed."""
def wait(self): # type: () -> None
def wait(self) -> None:
"""Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
def configure(self): # type: () -> None
def configure(self) -> None:
"""Perform in-band configuration. Executed before delegation for the controller and after delegation for targets."""
def __getstate__(self):
@ -201,7 +201,7 @@ class HostProfile(t.Generic[THostConfig], metaclass=abc.ABCMeta):
class PosixProfile(HostProfile[TPosixConfig], metaclass=abc.ABCMeta):
"""Base class for POSIX host profiles."""
@property
def python(self): # type: () -> PythonConfig
def python(self) -> PythonConfig:
"""
The Python to use for this profile.
If it is a virtual python, it will be created the first time it is requested.
@ -222,25 +222,25 @@ class PosixProfile(HostProfile[TPosixConfig], metaclass=abc.ABCMeta):
class ControllerHostProfile(PosixProfile[TControllerHostConfig], metaclass=abc.ABCMeta):
"""Base class for profiles usable as a controller."""
@abc.abstractmethod
def get_origin_controller_connection(self): # type: () -> Connection
def get_origin_controller_connection(self) -> Connection:
"""Return a connection for accessing the host as a controller from the origin."""
@abc.abstractmethod
def get_working_directory(self): # type: () -> str
def get_working_directory(self) -> str:
"""Return the working directory for the host."""
class SshTargetHostProfile(HostProfile[THostConfig], metaclass=abc.ABCMeta):
"""Base class for profiles offering SSH connectivity."""
@abc.abstractmethod
def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
def get_controller_target_connections(self) -> t.List[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta):
"""Base class for remote instance profiles."""
@property
def core_ci_state(self): # type: () -> t.Optional[t.Dict[str, str]]
def core_ci_state(self) -> t.Optional[t.Dict[str, str]]:
"""The saved Ansible Core CI state."""
return self.state.get('core_ci')
@ -249,29 +249,29 @@ class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta):
"""The saved Ansible Core CI state."""
self.state['core_ci'] = value
def provision(self): # type: () -> None
def provision(self) -> None:
"""Provision the host before delegation."""
self.core_ci = self.create_core_ci(load=True)
self.core_ci.start()
self.core_ci_state = self.core_ci.save()
def deprovision(self): # type: () -> None
def deprovision(self) -> None:
"""Deprovision the host after delegation has completed."""
if self.args.remote_terminate == TerminateMode.ALWAYS or (self.args.remote_terminate == TerminateMode.SUCCESS and self.args.success):
self.delete_instance()
@property
def core_ci(self): # type: () -> t.Optional[AnsibleCoreCI]
def core_ci(self) -> t.Optional[AnsibleCoreCI]:
"""Return the cached AnsibleCoreCI instance, if any, otherwise None."""
return self.cache.get('core_ci')
@core_ci.setter
def core_ci(self, value): # type: (AnsibleCoreCI) -> None
def core_ci(self, value: AnsibleCoreCI) -> None:
"""Cache the given AnsibleCoreCI instance."""
self.cache['core_ci'] = value
def get_instance(self): # type: () -> t.Optional[AnsibleCoreCI]
def get_instance(self) -> t.Optional[AnsibleCoreCI]:
"""Return the current AnsibleCoreCI instance, loading it if not already loaded."""
if not self.core_ci and self.core_ci_state:
self.core_ci = self.create_core_ci(load=False)
@ -288,14 +288,14 @@ class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta):
core_ci.stop()
def wait_for_instance(self): # type: () -> AnsibleCoreCI
def wait_for_instance(self) -> AnsibleCoreCI:
"""Wait for an AnsibleCoreCI VM instance to become ready."""
core_ci = self.get_instance()
core_ci.wait()
return core_ci
def create_core_ci(self, load): # type: (bool) -> AnsibleCoreCI
def create_core_ci(self, load: bool) -> AnsibleCoreCI:
"""Create and return an AnsibleCoreCI instance."""
if not self.config.arch:
raise InternalError(f'No arch specified for config: {self.config}')
@ -315,7 +315,7 @@ class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta):
class ControllerProfile(SshTargetHostProfile[ControllerConfig], PosixProfile[ControllerConfig]):
"""Host profile for the controller as a target."""
def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
def get_controller_target_connections(self) -> t.List[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
settings = SshConnectionDetail(
name='localhost',
@ -332,16 +332,16 @@ class ControllerProfile(SshTargetHostProfile[ControllerConfig], PosixProfile[Con
class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[DockerConfig]):
"""Host profile for a docker instance."""
@property
def container_name(self): # type: () -> t.Optional[str]
def container_name(self) -> t.Optional[str]:
"""Return the stored container name, if any, otherwise None."""
return self.state.get('container_name')
@container_name.setter
def container_name(self, value): # type: (str) -> None
def container_name(self, value: str) -> None:
"""Store the given container name."""
self.state['container_name'] = value
def provision(self): # type: () -> None
def provision(self) -> None:
"""Provision the host before delegation."""
container = run_support_container(
args=self.args,
@ -359,7 +359,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
self.container_name = container.name
def setup(self): # type: () -> None
def setup(self) -> None:
"""Perform out-of-band setup before delegation."""
bootstrapper = BootstrapDocker(
controller=self.controller,
@ -372,7 +372,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
docker_exec(self.args, self.container_name, [shell], data=setup_sh, capture=False)
def deprovision(self): # type: () -> None
def deprovision(self) -> None:
"""Deprovision the host after delegation has completed."""
if not self.container_name:
return # provision was never called or did not succeed, so there is no container to remove
@ -380,7 +380,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
if self.args.docker_terminate == TerminateMode.ALWAYS or (self.args.docker_terminate == TerminateMode.SUCCESS and self.args.success):
docker_rm(self.args, self.container_name)
def wait(self): # type: () -> None
def wait(self) -> None:
"""Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
if not self.controller:
con = self.get_controller_target_connections()[0]
@ -396,7 +396,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
else:
return
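Putting the DockerProfile overrides above in sequence, a hedged sketch of the container lifecycle they implement (profile is a placeholder for an already-constructed DockerProfile):

# Sketch only; `profile` is an already-constructed DockerProfile.
profile.provision()    # run_support_container() starts the container and records its name
profile.setup()        # BootstrapDocker prepares the container via docker exec
profile.wait()         # for targets, poll a controller-to-target connection until it responds
# ... tests run against the container ...
profile.deprovision()  # docker_rm() removes the container, subject to the docker_terminate setting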
def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
def get_controller_target_connections(self) -> t.List[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
containers = get_container_database(self.args)
access = containers.data[HostType.control]['__test_hosts__'][self.container_name]
@ -415,15 +415,15 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
return [SshConnection(self.args, settings)]
def get_origin_controller_connection(self): # type: () -> DockerConnection
def get_origin_controller_connection(self) -> DockerConnection:
"""Return a connection for accessing the host as a controller from the origin."""
return DockerConnection(self.args, self.container_name)
def get_working_directory(self): # type: () -> str
def get_working_directory(self) -> str:
"""Return the working directory for the host."""
return '/root'
def get_docker_run_options(self): # type: () -> t.List[str]
def get_docker_run_options(self) -> t.List[str]:
"""Return a list of options needed to run the container."""
options = [
'--volume', '/sys/fs/cgroup:/sys/fs/cgroup:ro',
@ -453,11 +453,11 @@ class NetworkInventoryProfile(HostProfile[NetworkInventoryConfig]):
class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
"""Host profile for a network remote instance."""
def wait(self): # type: () -> None
def wait(self) -> None:
"""Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
self.wait_until_ready()
def get_inventory_variables(self): # type: () -> t.Dict[str, t.Optional[t.Union[str, int]]]
def get_inventory_variables(self) -> t.Dict[str, t.Optional[t.Union[str, int]]]:
"""Return inventory variables for accessing this host."""
core_ci = self.wait_for_instance()
connection = core_ci.connection
@ -474,7 +474,7 @@ class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
return variables
def wait_until_ready(self): # type: () -> None
def wait_until_ready(self) -> None:
"""Wait for the host to respond to an Ansible module request."""
core_ci = self.wait_for_instance()
@ -501,7 +501,7 @@ class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.')
def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
def get_controller_target_connections(self) -> t.List[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
core_ci = self.wait_for_instance()
@ -518,22 +518,22 @@ class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
class OriginProfile(ControllerHostProfile[OriginConfig]):
"""Host profile for origin."""
def get_origin_controller_connection(self): # type: () -> LocalConnection
def get_origin_controller_connection(self) -> LocalConnection:
"""Return a connection for accessing the host as a controller from the origin."""
return LocalConnection(self.args)
def get_working_directory(self): # type: () -> str
def get_working_directory(self) -> str:
"""Return the working directory for the host."""
return os.getcwd()
class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile[PosixRemoteConfig]):
"""Host profile for a POSIX remote instance."""
def wait(self): # type: () -> None
def wait(self) -> None:
"""Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
self.wait_until_ready()
def configure(self): # type: () -> None
def configure(self) -> None:
"""Perform in-band configuration. Executed before delegation for the controller and after delegation for targets."""
# a target uses a single python version, but a controller may include additional versions for targets running on the controller
python_versions = [self.python.version] + [target.python.version for target in self.targets if isinstance(target, ControllerConfig)]
@ -558,7 +558,7 @@ class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile
ssh = self.get_origin_controller_connection()
ssh.run([shell], data=setup_sh, capture=False)
def get_ssh_connection(self): # type: () -> SshConnection
def get_ssh_connection(self) -> SshConnection:
"""Return an SSH connection for accessing the host."""
core_ci = self.wait_for_instance()
@ -581,7 +581,7 @@ class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile
return SshConnection(self.args, settings, become)
def wait_until_ready(self): # type: () -> str
def wait_until_ready(self) -> str:
"""Wait for instance to respond to SSH, returning the current working directory once connected."""
core_ci = self.wait_for_instance()
@ -596,15 +596,15 @@ class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile
raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.')
def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
def get_controller_target_connections(self) -> t.List[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
return [self.get_ssh_connection()]
def get_origin_controller_connection(self): # type: () -> SshConnection
def get_origin_controller_connection(self) -> SshConnection:
"""Return a connection for accessing the host as a controller from the origin."""
return self.get_ssh_connection()
def get_working_directory(self): # type: () -> str
def get_working_directory(self) -> str:
"""Return the working directory for the host."""
if not self.pwd:
ssh = self.get_origin_controller_connection()
@ -623,19 +623,19 @@ class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile
return self.pwd
@property
def pwd(self): # type: () -> t.Optional[str]
def pwd(self) -> t.Optional[str]:
"""Return the cached pwd, if any, otherwise None."""
return self.cache.get('pwd')
@pwd.setter
def pwd(self, value): # type: (str) -> None
def pwd(self, value: str) -> None:
"""Cache the given pwd."""
self.cache['pwd'] = value
class PosixSshProfile(SshTargetHostProfile[PosixSshConfig], PosixProfile[PosixSshConfig]):
"""Host profile for a POSIX SSH instance."""
def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
def get_controller_target_connections(self) -> t.List[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
settings = SshConnectionDetail(
name='target',
@ -651,7 +651,7 @@ class PosixSshProfile(SshTargetHostProfile[PosixSshConfig], PosixProfile[PosixSs
class WindowsInventoryProfile(SshTargetHostProfile[WindowsInventoryConfig]):
"""Host profile for a Windows inventory."""
def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
def get_controller_target_connections(self) -> t.List[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
inventory = parse_inventory(self.args, self.config.path)
hosts = get_hosts(inventory, 'windows')
@ -675,11 +675,11 @@ class WindowsInventoryProfile(SshTargetHostProfile[WindowsInventoryConfig]):
class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
"""Host profile for a Windows remote instance."""
def wait(self): # type: () -> None
def wait(self) -> None:
"""Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
self.wait_until_ready()
def get_inventory_variables(self): # type: () -> t.Dict[str, t.Optional[t.Union[str, int]]]
def get_inventory_variables(self) -> t.Dict[str, t.Optional[t.Union[str, int]]]:
"""Return inventory variables for accessing this host."""
core_ci = self.wait_for_instance()
connection = core_ci.connection
@ -705,7 +705,7 @@ class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
return variables
def wait_until_ready(self): # type: () -> None
def wait_until_ready(self) -> None:
"""Wait for the host to respond to an Ansible module request."""
core_ci = self.wait_for_instance()
@ -732,7 +732,7 @@ class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.')
def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
def get_controller_target_connections(self) -> t.List[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
core_ci = self.wait_for_instance()
@ -749,7 +749,7 @@ class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
@cache
def get_config_profile_type_map(): # type: () -> t.Dict[t.Type[HostConfig], t.Type[HostProfile]]
def get_config_profile_type_map() -> t.Dict[t.Type[HostConfig], t.Type[HostProfile]]:
"""Create and return a mapping of HostConfig types to HostProfile types."""
return get_type_map(HostProfile, HostConfig)
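The cached map above is how a parsed host config is matched to its profile class; a hedged lookup sketch (import paths are assumptions):

# Sketch only; import paths are assumptions.
from ansible_test._internal.host_configs import DockerConfig
from ansible_test._internal.host_profiles import get_config_profile_type_map

profile_type = get_config_profile_type_map()[DockerConfig]
print(profile_type)  # expected to be DockerProfile, per the class definitions above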
@ -22,7 +22,7 @@ from .util_common import (
class HttpClient:
"""Make HTTP requests via curl."""
def __init__(self, args, always=False, insecure=False, proxy=None): # type: (CommonConfig, bool, bool, t.Optional[str]) -> None
def __init__(self, args: CommonConfig, always: bool = False, insecure: bool = False, proxy: t.Optional[str] = None) -> None:
self.args = args
self.always = always
self.insecure = insecure
@ -31,11 +31,11 @@ class HttpClient:
self.username = None
self.password = None
def get(self, url): # type: (str) -> HttpResponse
def get(self, url: str) -> HttpResponse:
"""Perform an HTTP GET and return the response."""
return self.request('GET', url)
def delete(self, url): # type: (str) -> HttpResponse
def delete(self, url: str) -> HttpResponse:
"""Perform an HTTP DELETE and return the response."""
return self.request('DELETE', url)
@ -113,13 +113,13 @@ class HttpClient:
class HttpResponse:
"""HTTP response from curl."""
def __init__(self, method, url, status_code, response): # type: (str, str, int, str) -> None
def __init__(self, method: str, url: str, status_code: int, response: str) -> None:
self.method = method
self.url = url
self.status_code = status_code
self.response = response
def json(self): # type: () -> t.Any
def json(self) -> t.Any:
"""Return the response parsed as JSON, raising an exception if parsing fails."""
try:
return json.loads(self.response)
@ -129,6 +129,6 @@ class HttpResponse:
class HttpError(ApplicationError):
"""HTTP response as an error."""
def __init__(self, status, message): # type: (int, str) -> None
def __init__(self, status: int, message: str) -> None:
super().__init__('%s: %s' % (status, message))
self.status = status
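A closing sketch for the curl-based client above; it assumes an existing CommonConfig named args, an example URL, and an assumed import path:

# Sketch only; `args` is an existing CommonConfig and the URL is an example value.
from ansible_test._internal.http import HttpClient

client = HttpClient(args, insecure=True)  # constructor options as defined above
response = client.get('https://example.com/api/status')

if response.status_code == 200:
    data = response.json()  # raises if the body is not valid JSON, per the docstring above
    print(data)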