From 3eb0485dd92c88cc92152d3656d94492db44b183 Mon Sep 17 00:00:00 2001 From: Matt Clay Date: Wed, 3 Aug 2022 15:08:55 -0700 Subject: [PATCH] ansible-test - Use more native type hints. (#78435) * ansible-test - Use more native type hints. Simple search and replace to switch from comments to native type hints for return types of functions with no arguments. * ansible-test - Use more native type hints. Conversion of simple single-line function annotation type comments to native type hints. * ansible-test - Use more native type hints. Conversion of single-line function annotation type comments with default values to native type hints. * ansible-test - Use more native type hints. Manual conversion of type annotation comments for functions which have pylint directives. --- test/lib/ansible_test/_internal/__init__.py | 2 +- .../ansible_test/_internal/ansible_util.py | 16 +-- test/lib/ansible_test/_internal/become.py | 20 ++-- test/lib/ansible_test/_internal/bootstrap.py | 10 +- test/lib/ansible_test/_internal/cache.py | 2 +- .../lib/ansible_test/_internal/ci/__init__.py | 36 +++---- test/lib/ansible_test/_internal/ci/azp.py | 28 ++--- test/lib/ansible_test/_internal/ci/local.py | 26 ++--- .../_internal/classification/__init__.py | 34 +++--- .../_internal/classification/common.py | 2 +- .../_internal/classification/csharp.py | 8 +- .../_internal/classification/powershell.py | 8 +- .../_internal/classification/python.py | 26 ++--- .../ansible_test/_internal/cli/__init__.py | 2 +- .../lib/ansible_test/_internal/cli/actions.py | 20 ++-- .../_internal/cli/argparsing/__init__.py | 6 +- .../_internal/cli/argparsing/argcompletion.py | 6 +- .../_internal/cli/argparsing/parsers.py | 102 +++++++++--------- .../_internal/cli/commands/__init__.py | 2 +- test/lib/ansible_test/_internal/cli/compat.py | 14 +-- .../ansible_test/_internal/cli/converters.py | 4 +- .../_internal/cli/environments.py | 8 +- .../_internal/cli/parsers/__init__.py | 58 +++++----- 
.../cli/parsers/base_argument_parsers.py | 18 ++-- .../_internal/cli/parsers/helpers.py | 6 +- .../cli/parsers/host_config_parsers.py | 74 ++++++------- .../cli/parsers/key_value_parsers.py | 30 +++--- .../_internal/cli/parsers/value_parsers.py | 12 +-- .../_internal/commands/coverage/__init__.py | 24 ++--- .../commands/coverage/analyze/__init__.py | 2 +- .../coverage/analyze/targets/__init__.py | 14 +-- .../coverage/analyze/targets/combine.py | 4 +- .../coverage/analyze/targets/expand.py | 4 +- .../coverage/analyze/targets/filter.py | 4 +- .../coverage/analyze/targets/generate.py | 8 +- .../coverage/analyze/targets/missing.py | 4 +- .../_internal/commands/coverage/combine.py | 14 +-- .../_internal/commands/coverage/erase.py | 2 +- .../_internal/commands/coverage/html.py | 2 +- .../_internal/commands/coverage/report.py | 6 +- .../_internal/commands/coverage/xml.py | 4 +- .../_internal/commands/env/__init__.py | 12 +-- .../commands/integration/__init__.py | 16 +-- .../commands/integration/cloud/__init__.py | 48 ++++----- .../commands/integration/cloud/acme.py | 10 +- .../commands/integration/cloud/aws.py | 12 +-- .../commands/integration/cloud/azure.py | 14 +-- .../commands/integration/cloud/cloudscale.py | 6 +- .../commands/integration/cloud/cs.py | 12 +-- .../integration/cloud/digitalocean.py | 6 +- .../commands/integration/cloud/foreman.py | 10 +- .../commands/integration/cloud/galaxy.py | 6 +- .../commands/integration/cloud/gcp.py | 6 +- .../commands/integration/cloud/hcloud.py | 10 +- .../commands/integration/cloud/httptester.py | 6 +- .../commands/integration/cloud/nios.py | 10 +- .../commands/integration/cloud/opennebula.py | 6 +- .../commands/integration/cloud/openshift.py | 12 +-- .../commands/integration/cloud/scaleway.py | 6 +- .../commands/integration/cloud/vcenter.py | 10 +- .../commands/integration/cloud/vultr.py | 6 +- .../commands/integration/coverage.py | 56 +++++----- .../_internal/commands/integration/filters.py | 28 ++--- 
.../_internal/commands/integration/network.py | 2 +- .../_internal/commands/integration/posix.py | 2 +- .../_internal/commands/integration/windows.py | 2 +- .../_internal/commands/sanity/__init__.py | 96 ++++++++--------- .../_internal/commands/sanity/ansible_doc.py | 4 +- .../_internal/commands/sanity/bin_symlinks.py | 6 +- .../_internal/commands/sanity/compile.py | 4 +- .../_internal/commands/sanity/ignores.py | 6 +- .../_internal/commands/sanity/import.py | 10 +- .../commands/sanity/integration_aliases.py | 20 ++-- .../_internal/commands/sanity/mypy.py | 8 +- .../_internal/commands/sanity/pep8.py | 6 +- .../_internal/commands/sanity/pslint.py | 6 +- .../_internal/commands/sanity/pylint.py | 12 +-- .../_internal/commands/sanity/sanity_docs.py | 6 +- .../_internal/commands/sanity/shellcheck.py | 6 +- .../commands/sanity/validate_modules.py | 8 +- .../_internal/commands/sanity/yamllint.py | 10 +- .../_internal/commands/shell/__init__.py | 2 +- .../_internal/commands/units/__init__.py | 4 +- test/lib/ansible_test/_internal/completion.py | 32 +++--- test/lib/ansible_test/_internal/config.py | 30 +++--- .../lib/ansible_test/_internal/connections.py | 14 +-- test/lib/ansible_test/_internal/containers.py | 24 ++--- .../ansible_test/_internal/content_config.py | 6 +- test/lib/ansible_test/_internal/core_ci.py | 24 ++--- .../ansible_test/_internal/coverage_util.py | 10 +- test/lib/ansible_test/_internal/data.py | 6 +- test/lib/ansible_test/_internal/delegation.py | 8 +- test/lib/ansible_test/_internal/diff.py | 32 +++--- .../lib/ansible_test/_internal/docker_util.py | 76 ++++++------- test/lib/ansible_test/_internal/encoding.py | 8 +- test/lib/ansible_test/_internal/executor.py | 8 +- test/lib/ansible_test/_internal/git.py | 24 ++--- .../ansible_test/_internal/host_configs.py | 92 ++++++++-------- .../ansible_test/_internal/host_profiles.py | 102 +++++++++--------- test/lib/ansible_test/_internal/http.py | 12 +-- test/lib/ansible_test/_internal/inventory.py | 8 +- 
test/lib/ansible_test/_internal/io.py | 14 +-- test/lib/ansible_test/_internal/metadata.py | 14 +-- test/lib/ansible_test/_internal/payload.py | 6 +- .../_internal/provider/__init__.py | 8 +- .../_internal/provider/layout/__init__.py | 22 ++-- .../_internal/provider/layout/ansible.py | 4 +- .../_internal/provider/layout/collection.py | 10 +- .../_internal/provider/layout/unsupported.py | 4 +- .../_internal/provider/source/__init__.py | 2 +- .../_internal/provider/source/git.py | 6 +- .../_internal/provider/source/installed.py | 4 +- .../_internal/provider/source/unsupported.py | 4 +- .../_internal/provider/source/unversioned.py | 4 +- .../ansible_test/_internal/provisioning.py | 18 ++-- test/lib/ansible_test/_internal/pypi_proxy.py | 16 +-- .../_internal/python_requirements.py | 32 +++--- test/lib/ansible_test/_internal/ssh.py | 8 +- test/lib/ansible_test/_internal/target.py | 26 ++--- test/lib/ansible_test/_internal/test.py | 64 +++++------ test/lib/ansible_test/_internal/thread.py | 2 +- test/lib/ansible_test/_internal/timeout.py | 8 +- test/lib/ansible_test/_internal/util.py | 58 +++++----- .../lib/ansible_test/_internal/util_common.py | 38 +++---- test/lib/ansible_test/_internal/venv.py | 6 +- 125 files changed, 1047 insertions(+), 1047 deletions(-) diff --git a/test/lib/ansible_test/_internal/__init__.py b/test/lib/ansible_test/_internal/__init__.py index 33e773063dd..8205258a9e2 100644 --- a/test/lib/ansible_test/_internal/__init__.py +++ b/test/lib/ansible_test/_internal/__init__.py @@ -48,7 +48,7 @@ from .provisioning import ( ) -def main(cli_args=None): # type: (t.Optional[t.List[str]]) -> None +def main(cli_args: t.Optional[t.List[str]] = None) -> None: """Main program function.""" try: os.chdir(data_context().content.root) diff --git a/test/lib/ansible_test/_internal/ansible_util.py b/test/lib/ansible_test/_internal/ansible_util.py index 0fe3db26aee..a4c92aae27e 100644 --- a/test/lib/ansible_test/_internal/ansible_util.py +++ 
b/test/lib/ansible_test/_internal/ansible_util.py @@ -52,7 +52,7 @@ from .host_configs import ( ) -def parse_inventory(args, inventory_path): # type: (EnvironmentConfig, str) -> t.Dict[str, t.Any] +def parse_inventory(args: EnvironmentConfig, inventory_path: str) -> t.Dict[str, t.Any]: """Return a dict parsed from the given inventory file.""" cmd = ['ansible-inventory', '-i', inventory_path, '--list'] env = ansible_environment(args) @@ -69,7 +69,7 @@ def get_hosts(inventory, group_name): # type: (t.Dict[str, t.Any], str) -> t.Di return hosts -def ansible_environment(args, color=True, ansible_config=None): # type: (CommonConfig, bool, t.Optional[str]) -> t.Dict[str, str] +def ansible_environment(args: CommonConfig, color: bool = True, ansible_config: t.Optional[str] = None) -> t.Dict[str, str]: """Return a dictionary of environment variables to use when running Ansible commands.""" env = common_environment() path = env['PATH'] @@ -138,7 +138,7 @@ def ansible_environment(args, color=True, ansible_config=None): # type: (Common return env -def configure_plugin_paths(args): # type: (CommonConfig) -> t.Dict[str, str] +def configure_plugin_paths(args: CommonConfig) -> t.Dict[str, str]: """Return environment variables with paths to plugins relevant for the current command.""" if not isinstance(args, IntegrationConfig): return {} @@ -192,7 +192,7 @@ def configure_plugin_paths(args): # type: (CommonConfig) -> t.Dict[str, str] return env -def get_ansible_python_path(args): # type: (CommonConfig) -> str +def get_ansible_python_path(args: CommonConfig) -> str: """ Return a directory usable for PYTHONPATH, containing only the ansible package. If a temporary directory is required, it will be cached for the lifetime of the process and cleaned up at exit. 
@@ -221,7 +221,7 @@ def get_ansible_python_path(args): # type: (CommonConfig) -> str return python_path -def generate_egg_info(path): # type: (str) -> None +def generate_egg_info(path: str) -> None: """Generate an egg-info in the specified base directory.""" # minimal PKG-INFO stub following the format defined in PEP 241 # required for older setuptools versions to avoid a traceback when importing pkg_resources from packages like cryptography @@ -247,18 +247,18 @@ License: GPLv3+ class CollectionDetail: """Collection detail.""" - def __init__(self): # type: () -> None + def __init__(self) -> None: self.version = None # type: t.Optional[str] class CollectionDetailError(ApplicationError): """An error occurred retrieving collection detail.""" - def __init__(self, reason): # type: (str) -> None + def __init__(self, reason: str) -> None: super().__init__('Error collecting collection detail: %s' % reason) self.reason = reason -def get_collection_detail(python): # type: (PythonConfig) -> CollectionDetail +def get_collection_detail(python: PythonConfig) -> CollectionDetail: """Return collection detail.""" collection = data_context().content.collection directory = os.path.join(collection.root, collection.directory) diff --git a/test/lib/ansible_test/_internal/become.py b/test/lib/ansible_test/_internal/become.py index efdd39c72e9..f8606dbbab2 100644 --- a/test/lib/ansible_test/_internal/become.py +++ b/test/lib/ansible_test/_internal/become.py @@ -19,22 +19,22 @@ class Become(metaclass=abc.ABCMeta): @property @abc.abstractmethod - def method(self): # type: () -> str + def method(self) -> str: """The name of the Ansible become plugin that is equivalent to this.""" @abc.abstractmethod - def prepare_command(self, command): # type: (t.List[str]) -> t.List[str] + def prepare_command(self, command: t.List[str]) -> t.List[str]: """Return the given command, if any, with privilege escalation.""" class Doas(Become): """Become using 'doas'.""" @property - def method(self): # type: () 
-> str + def method(self) -> str: """The name of the Ansible become plugin that is equivalent to this.""" raise NotImplementedError('Ansible has no built-in doas become plugin.') - def prepare_command(self, command): # type: (t.List[str]) -> t.List[str] + def prepare_command(self, command: t.List[str]) -> t.List[str]: """Return the given command, if any, with privilege escalation.""" become = ['doas', '-n'] @@ -54,7 +54,7 @@ class DoasSudo(Doas): return 'doas_sudo' @property - def method(self): # type: () -> str + def method(self) -> str: """The name of the Ansible become plugin that is equivalent to this.""" return 'sudo' @@ -62,11 +62,11 @@ class DoasSudo(Doas): class Su(Become): """Become using 'su'.""" @property - def method(self): # type: () -> str + def method(self) -> str: """The name of the Ansible become plugin that is equivalent to this.""" return 'su' - def prepare_command(self, command): # type: (t.List[str]) -> t.List[str] + def prepare_command(self, command: t.List[str]) -> t.List[str]: """Return the given command, if any, with privilege escalation.""" become = ['su', '-l', 'root'] @@ -84,7 +84,7 @@ class SuSudo(Su): return 'su_sudo' @property - def method(self): # type: () -> str + def method(self) -> str: """The name of the Ansible become plugin that is equivalent to this.""" return 'sudo' @@ -92,11 +92,11 @@ class SuSudo(Su): class Sudo(Become): """Become using 'sudo'.""" @property - def method(self): # type: () -> str + def method(self) -> str: """The name of the Ansible become plugin that is equivalent to this.""" return 'sudo' - def prepare_command(self, command): # type: (t.List[str]) -> t.List[str] + def prepare_command(self, command: t.List[str]) -> t.List[str]: """Return the given command, if any, with privilege escalation.""" become = ['sudo', '-in'] diff --git a/test/lib/ansible_test/_internal/bootstrap.py b/test/lib/ansible_test/_internal/bootstrap.py index 326973978a9..15a12aa47a2 100644 --- a/test/lib/ansible_test/_internal/bootstrap.py 
+++ b/test/lib/ansible_test/_internal/bootstrap.py @@ -31,11 +31,11 @@ class Bootstrap: ssh_key: SshKey @property - def bootstrap_type(self): # type: () -> str + def bootstrap_type(self) -> str: """The bootstrap type to pass to the bootstrapping script.""" return self.__class__.__name__.replace('Bootstrap', '').lower() - def get_variables(self): # type: () -> t.Dict[str, t.Union[str, t.List[str]]] + def get_variables(self) -> t.Dict[str, t.Union[str, t.List[str]]]: """The variables to template in the bootstrapping script.""" return dict( bootstrap_type=self.bootstrap_type, @@ -46,7 +46,7 @@ class Bootstrap: ssh_public_key=self.ssh_key.pub_contents, ) - def get_script(self): # type: () -> str + def get_script(self) -> str: """Return a shell script to bootstrap the specified host.""" path = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'bootstrap.sh') @@ -65,7 +65,7 @@ class Bootstrap: @dataclasses.dataclass class BootstrapDocker(Bootstrap): """Bootstrap docker instances.""" - def get_variables(self): # type: () -> t.Dict[str, t.Union[str, t.List[str]]] + def get_variables(self) -> t.Dict[str, t.Union[str, t.List[str]]]: """The variables to template in the bootstrapping script.""" variables = super().get_variables() @@ -83,7 +83,7 @@ class BootstrapRemote(Bootstrap): platform: str platform_version: str - def get_variables(self): # type: () -> t.Dict[str, t.Union[str, t.List[str]]] + def get_variables(self) -> t.Dict[str, t.Union[str, t.List[str]]]: """The variables to template in the bootstrapping script.""" variables = super().get_variables() diff --git a/test/lib/ansible_test/_internal/cache.py b/test/lib/ansible_test/_internal/cache.py index 50a6f5e57b5..ec72371c166 100644 --- a/test/lib/ansible_test/_internal/cache.py +++ b/test/lib/ansible_test/_internal/cache.py @@ -12,7 +12,7 @@ TValue = t.TypeVar('TValue') class CommonCache: """Common cache.""" - def __init__(self, args): # type: (CommonConfig) -> None + def __init__(self, args: CommonConfig) -> None: 
self.args = args def get(self, key, factory): # type: (str, t.Callable[[], TValue]) -> TValue diff --git a/test/lib/ansible_test/_internal/ci/__init__.py b/test/lib/ansible_test/_internal/ci/__init__.py index 3d0f79e83af..01c6fbc0601 100644 --- a/test/lib/ansible_test/_internal/ci/__init__.py +++ b/test/lib/ansible_test/_internal/ci/__init__.py @@ -43,46 +43,46 @@ class CIProvider(metaclass=abc.ABCMeta): @staticmethod @abc.abstractmethod - def is_supported(): # type: () -> bool + def is_supported() -> bool: """Return True if this provider is supported in the current running environment.""" @property @abc.abstractmethod - def code(self): # type: () -> str + def code(self) -> str: """Return a unique code representing this provider.""" @property @abc.abstractmethod - def name(self): # type: () -> str + def name(self) -> str: """Return descriptive name for this provider.""" @abc.abstractmethod - def generate_resource_prefix(self): # type: () -> str + def generate_resource_prefix(self) -> str: """Return a resource prefix specific to this CI provider.""" @abc.abstractmethod - def get_base_branch(self): # type: () -> str + def get_base_branch(self) -> str: """Return the base branch or an empty string.""" @abc.abstractmethod - def detect_changes(self, args): # type: (TestConfig) -> t.Optional[t.List[str]] + def detect_changes(self, args: TestConfig) -> t.Optional[t.List[str]]: """Initialize change detection.""" @abc.abstractmethod - def supports_core_ci_auth(self): # type: () -> bool + def supports_core_ci_auth(self) -> bool: """Return True if Ansible Core CI is supported.""" @abc.abstractmethod - def prepare_core_ci_auth(self): # type: () -> t.Dict[str, t.Any] + def prepare_core_ci_auth(self) -> t.Dict[str, t.Any]: """Return authentication details for Ansible Core CI.""" @abc.abstractmethod - def get_git_details(self, args): # type: (CommonConfig) -> t.Optional[t.Dict[str, t.Any]] + def get_git_details(self, args: CommonConfig) -> t.Optional[t.Dict[str, t.Any]]: """Return 
details about git in the current environment.""" @cache -def get_ci_provider(): # type: () -> CIProvider +def get_ci_provider() -> CIProvider: """Return a CI provider instance for the current environment.""" provider = None @@ -111,7 +111,7 @@ class AuthHelper(metaclass=abc.ABCMeta): request.update(signature=signature) - def initialize_private_key(self): # type: () -> str + def initialize_private_key(self) -> str: """ Initialize and publish a new key pair (if needed) and return the private key. The private key is cached across ansible-test invocations, so it is only generated and published once per CI job. @@ -127,21 +127,21 @@ class AuthHelper(metaclass=abc.ABCMeta): return private_key_pem @abc.abstractmethod - def sign_bytes(self, payload_bytes): # type: (bytes) -> bytes + def sign_bytes(self, payload_bytes: bytes) -> bytes: """Sign the given payload and return the signature, initializing a new key pair if required.""" @abc.abstractmethod - def publish_public_key(self, public_key_pem): # type: (str) -> None + def publish_public_key(self, public_key_pem: str) -> None: """Publish the given public key.""" @abc.abstractmethod - def generate_private_key(self): # type: () -> str + def generate_private_key(self) -> str: """Generate a new key pair, publishing the public key and returning the private key.""" class CryptographyAuthHelper(AuthHelper, metaclass=abc.ABCMeta): """Cryptography based public key based authentication helper for Ansible Core CI.""" - def sign_bytes(self, payload_bytes): # type: (bytes) -> bytes + def sign_bytes(self, payload_bytes: bytes) -> bytes: """Sign the given payload and return the signature, initializing a new key pair if required.""" # import cryptography here to avoid overhead and failures in environments which do not use/provide it from cryptography.hazmat.backends import default_backend @@ -156,7 +156,7 @@ class CryptographyAuthHelper(AuthHelper, metaclass=abc.ABCMeta): return signature_raw_bytes - def generate_private_key(self): # 
type: () -> str + def generate_private_key(self) -> str: """Generate a new key pair, publishing the public key and returning the private key.""" # import cryptography here to avoid overhead and failures in environments which do not use/provide it from cryptography.hazmat.backends import default_backend @@ -184,7 +184,7 @@ class CryptographyAuthHelper(AuthHelper, metaclass=abc.ABCMeta): class OpenSSLAuthHelper(AuthHelper, metaclass=abc.ABCMeta): """OpenSSL based public key based authentication helper for Ansible Core CI.""" - def sign_bytes(self, payload_bytes): # type: (bytes) -> bytes + def sign_bytes(self, payload_bytes: bytes) -> bytes: """Sign the given payload and return the signature, initializing a new key pair if required.""" private_key_pem = self.initialize_private_key() @@ -202,7 +202,7 @@ class OpenSSLAuthHelper(AuthHelper, metaclass=abc.ABCMeta): return signature_raw_bytes - def generate_private_key(self): # type: () -> str + def generate_private_key(self) -> str: """Generate a new key pair, publishing the public key and returning the private key.""" private_key_pem = raw_command(['openssl', 'ecparam', '-genkey', '-name', 'secp384r1', '-noout'], capture=True)[0] public_key_pem = raw_command(['openssl', 'ec', '-pubout'], data=private_key_pem, capture=True)[0] diff --git a/test/lib/ansible_test/_internal/ci/azp.py b/test/lib/ansible_test/_internal/ci/azp.py index d5b3999ae34..0f5826d25ad 100644 --- a/test/lib/ansible_test/_internal/ci/azp.py +++ b/test/lib/ansible_test/_internal/ci/azp.py @@ -44,21 +44,21 @@ class AzurePipelines(CIProvider): self.auth = AzurePipelinesAuthHelper() @staticmethod - def is_supported(): # type: () -> bool + def is_supported() -> bool: """Return True if this provider is supported in the current running environment.""" return os.environ.get('SYSTEM_COLLECTIONURI', '').startswith('https://dev.azure.com/') @property - def code(self): # type: () -> str + def code(self) -> str: """Return a unique code representing this provider.""" 
return CODE @property - def name(self): # type: () -> str + def name(self) -> str: """Return descriptive name for this provider.""" return 'Azure Pipelines' - def generate_resource_prefix(self): # type: () -> str + def generate_resource_prefix(self) -> str: """Return a resource prefix specific to this CI provider.""" try: prefix = 'azp-%s-%s-%s' % ( @@ -71,7 +71,7 @@ class AzurePipelines(CIProvider): return prefix - def get_base_branch(self): # type: () -> str + def get_base_branch(self) -> str: """Return the base branch or an empty string.""" base_branch = os.environ.get('SYSTEM_PULLREQUEST_TARGETBRANCH') or os.environ.get('BUILD_SOURCEBRANCHNAME') @@ -80,7 +80,7 @@ class AzurePipelines(CIProvider): return base_branch or '' - def detect_changes(self, args): # type: (TestConfig) -> t.Optional[t.List[str]] + def detect_changes(self, args: TestConfig) -> t.Optional[t.List[str]]: """Initialize change detection.""" result = AzurePipelinesChanges(args) @@ -102,11 +102,11 @@ class AzurePipelines(CIProvider): return result.paths - def supports_core_ci_auth(self): # type: () -> bool + def supports_core_ci_auth(self) -> bool: """Return True if Ansible Core CI is supported.""" return True - def prepare_core_ci_auth(self): # type: () -> t.Dict[str, t.Any] + def prepare_core_ci_auth(self) -> t.Dict[str, t.Any]: """Return authentication details for Ansible Core CI.""" try: request = dict( @@ -126,7 +126,7 @@ class AzurePipelines(CIProvider): return auth - def get_git_details(self, args): # type: (CommonConfig) -> t.Optional[t.Dict[str, t.Any]] + def get_git_details(self, args: CommonConfig) -> t.Optional[t.Dict[str, t.Any]]: """Return details about git in the current environment.""" changes = AzurePipelinesChanges(args) @@ -143,7 +143,7 @@ class AzurePipelinesAuthHelper(CryptographyAuthHelper): Authentication helper for Azure Pipelines. Based on cryptography since it is provided by the default Azure Pipelines environment. 
""" - def publish_public_key(self, public_key_pem): # type: (str) -> None + def publish_public_key(self, public_key_pem: str) -> None: """Publish the given public key.""" try: agent_temp_directory = os.environ['AGENT_TEMPDIRECTORY'] @@ -162,7 +162,7 @@ class AzurePipelinesAuthHelper(CryptographyAuthHelper): class AzurePipelinesChanges: """Change information for an Azure Pipelines build.""" - def __init__(self, args): # type: (CommonConfig) -> None + def __init__(self, args: CommonConfig) -> None: self.args = args self.git = Git() @@ -213,7 +213,7 @@ class AzurePipelinesChanges: self.paths = None # act as though change detection not enabled, do not filter targets self.diff = [] - def get_successful_merge_run_commits(self): # type: () -> t.Set[str] + def get_successful_merge_run_commits(self) -> t.Set[str]: """Return a set of recent successsful merge commits from Azure Pipelines.""" parameters = dict( maxBuildsPerDefinition=100, # max 5000 @@ -241,7 +241,7 @@ class AzurePipelinesChanges: return commits - def get_last_successful_commit(self, commits): # type: (t.Set[str]) -> t.Optional[str] + def get_last_successful_commit(self, commits: t.Set[str]) -> t.Optional[str]: """Return the last successful commit from git history that is found in the given commit list, or None.""" commit_history = self.git.get_rev_list(max_count=100) ordered_successful_commits = [commit for commit in commit_history if commit in commits] @@ -249,7 +249,7 @@ class AzurePipelinesChanges: return last_successful_commit -def vso_add_attachment(file_type, file_name, path): # type: (str, str, str) -> None +def vso_add_attachment(file_type: str, file_name: str, path: str) -> None: """Upload and attach a file to the current timeline record.""" vso('task.addattachment', dict(type=file_type, name=file_name), path) diff --git a/test/lib/ansible_test/_internal/ci/local.py b/test/lib/ansible_test/_internal/ci/local.py index e1277533817..47b142df0a9 100644 --- a/test/lib/ansible_test/_internal/ci/local.py 
+++ b/test/lib/ansible_test/_internal/ci/local.py @@ -39,21 +39,21 @@ class Local(CIProvider): priority = 1000 @staticmethod - def is_supported(): # type: () -> bool + def is_supported() -> bool: """Return True if this provider is supported in the current running environment.""" return True @property - def code(self): # type: () -> str + def code(self) -> str: """Return a unique code representing this provider.""" return CODE @property - def name(self): # type: () -> str + def name(self) -> str: """Return descriptive name for this provider.""" return 'Local' - def generate_resource_prefix(self): # type: () -> str + def generate_resource_prefix(self) -> str: """Return a resource prefix specific to this CI provider.""" prefix = 'ansible-test-%d-%s' % ( random.randint(10000000, 99999999), @@ -62,11 +62,11 @@ class Local(CIProvider): return prefix - def get_base_branch(self): # type: () -> str + def get_base_branch(self) -> str: """Return the base branch or an empty string.""" return '' - def detect_changes(self, args): # type: (TestConfig) -> t.Optional[t.List[str]] + def detect_changes(self, args: TestConfig) -> t.Optional[t.List[str]]: """Initialize change detection.""" result = LocalChanges(args) @@ -116,12 +116,12 @@ class Local(CIProvider): return sorted(names) - def supports_core_ci_auth(self): # type: () -> bool + def supports_core_ci_auth(self) -> bool: """Return True if Ansible Core CI is supported.""" path = self._get_aci_key_path() return os.path.exists(path) - def prepare_core_ci_auth(self): # type: () -> t.Dict[str, t.Any] + def prepare_core_ci_auth(self) -> t.Dict[str, t.Any]: """Return authentication details for Ansible Core CI.""" path = self._get_aci_key_path() auth_key = read_text_file(path).strip() @@ -137,19 +137,19 @@ class Local(CIProvider): return auth - def get_git_details(self, args): # type: (CommonConfig) -> t.Optional[t.Dict[str, t.Any]] + def get_git_details(self, args: CommonConfig) -> t.Optional[t.Dict[str, t.Any]]: """Return details 
about git in the current environment.""" return None # not yet implemented for local @staticmethod - def _get_aci_key_path(): # type: () -> str + def _get_aci_key_path() -> str: path = os.path.expanduser('~/.ansible-core-ci.key') return path class InvalidBranch(ApplicationError): """Exception for invalid branch specification.""" - def __init__(self, branch, reason): # type: (str, str) -> None + def __init__(self, branch: str, reason: str) -> None: message = 'Invalid branch: %s\n%s' % (branch, reason) super().__init__(message) @@ -159,7 +159,7 @@ class InvalidBranch(ApplicationError): class LocalChanges: """Change information for local work.""" - def __init__(self, args): # type: (TestConfig) -> None + def __init__(self, args: TestConfig) -> None: self.args = args self.git = Git() @@ -198,7 +198,7 @@ class LocalChanges: # diff of all tracked files from fork point to working copy self.diff = self.git.get_diff([self.fork_point]) - def is_official_branch(self, name): # type: (str) -> bool + def is_official_branch(self, name: str) -> bool: """Return True if the given branch name an official branch for development or releases.""" if self.args.base_branch: return name == self.args.base_branch diff --git a/test/lib/ansible_test/_internal/classification/__init__.py b/test/lib/ansible_test/_internal/classification/__init__.py index 7a7e918b8d0..adbddb8a228 100644 --- a/test/lib/ansible_test/_internal/classification/__init__.py +++ b/test/lib/ansible_test/_internal/classification/__init__.py @@ -54,7 +54,7 @@ from ..data import ( FOCUSED_TARGET = '__focused__' -def categorize_changes(args, paths, verbose_command=None): # type: (TestConfig, t.List[str], t.Optional[str]) -> ChangeDescription +def categorize_changes(args: TestConfig, paths: t.List[str], verbose_command: t.Optional[str] = None) -> ChangeDescription: """Categorize the given list of changed paths and return a description of the changes.""" mapper = PathMapper(args) @@ -176,7 +176,7 @@ def categorize_changes(args, 
paths, verbose_command=None): # type: (TestConfig, class PathMapper: """Map file paths to test commands and targets.""" - def __init__(self, args): # type: (TestConfig) -> None + def __init__(self, args: TestConfig) -> None: self.args = args self.integration_all_target = get_integration_all_target(self.args) @@ -219,7 +219,7 @@ class PathMapper: self.paths_to_dependent_targets[path].add(target) - def get_dependent_paths(self, path): # type: (str) -> t.List[str] + def get_dependent_paths(self, path: str) -> t.List[str]: """Return a list of paths which depend on the given path, recursively expanding dependent paths as well.""" unprocessed_paths = set(self.get_dependent_paths_non_recursive(path)) paths = set() @@ -238,7 +238,7 @@ class PathMapper: return sorted(paths) - def get_dependent_paths_non_recursive(self, path): # type: (str) -> t.List[str] + def get_dependent_paths_non_recursive(self, path: str) -> t.List[str]: """Return a list of paths which depend on the given path, including dependent integration test target paths.""" paths = self.get_dependent_paths_internal(path) paths += [target.path + '/' for target in self.paths_to_dependent_targets.get(path, set())] @@ -246,7 +246,7 @@ class PathMapper: return paths - def get_dependent_paths_internal(self, path): # type: (str) -> t.List[str] + def get_dependent_paths_internal(self, path: str) -> t.List[str]: """Return a list of paths which depend on the given path.""" ext = os.path.splitext(os.path.split(path)[1])[1] @@ -265,7 +265,7 @@ class PathMapper: return [] - def get_python_module_utils_usage(self, path): # type: (str) -> t.List[str] + def get_python_module_utils_usage(self, path: str) -> t.List[str]: """Return a list of paths which depend on the given path which is a Python module_utils file.""" if not self.python_module_utils_imports: display.info('Analyzing python module_utils imports...') @@ -278,7 +278,7 @@ class PathMapper: return sorted(self.python_module_utils_imports[name]) - def 
get_powershell_module_utils_usage(self, path): # type: (str) -> t.List[str] + def get_powershell_module_utils_usage(self, path: str) -> t.List[str]: """Return a list of paths which depend on the given path which is a PowerShell module_utils file.""" if not self.powershell_module_utils_imports: display.info('Analyzing powershell module_utils imports...') @@ -291,7 +291,7 @@ class PathMapper: return sorted(self.powershell_module_utils_imports[name]) - def get_csharp_module_utils_usage(self, path): # type: (str) -> t.List[str] + def get_csharp_module_utils_usage(self, path: str) -> t.List[str]: """Return a list of paths which depend on the given path which is a C# module_utils file.""" if not self.csharp_module_utils_imports: display.info('Analyzing C# module_utils imports...') @@ -304,7 +304,7 @@ class PathMapper: return sorted(self.csharp_module_utils_imports[name]) - def get_integration_target_usage(self, path): # type: (str) -> t.List[str] + def get_integration_target_usage(self, path: str) -> t.List[str]: """Return a list of paths which depend on the given path which is an integration target file.""" target_name = path.split('/')[3] dependents = [os.path.join(data_context().content.integration_targets_path, target) + os.path.sep @@ -312,7 +312,7 @@ class PathMapper: return dependents - def classify(self, path): # type: (str) -> t.Optional[t.Dict[str, str]] + def classify(self, path: str) -> t.Optional[t.Dict[str, str]]: """Classify the given path and return an optional dictionary of the results.""" result = self._classify(path) @@ -326,7 +326,7 @@ class PathMapper: return result - def _classify(self, path): # type: (str) -> t.Optional[t.Dict[str, str]] + def _classify(self, path: str) -> t.Optional[t.Dict[str, str]]: """Return the classification for the given path.""" if data_context().content.is_ansible: return self._classify_ansible(path) @@ -336,7 +336,7 @@ class PathMapper: return None - def _classify_common(self, path): # type: (str) -> 
t.Optional[t.Dict[str, str]] + def _classify_common(self, path: str) -> t.Optional[t.Dict[str, str]]: """Return the classification for the given path using rules common to all layouts.""" dirname = os.path.dirname(path) filename = os.path.basename(path) @@ -621,7 +621,7 @@ class PathMapper: return None - def _classify_collection(self, path): # type: (str) -> t.Optional[t.Dict[str, str]] + def _classify_collection(self, path: str) -> t.Optional[t.Dict[str, str]]: """Return the classification for the given path using rules specific to collections.""" result = self._classify_common(path) @@ -659,7 +659,7 @@ class PathMapper: return None - def _classify_ansible(self, path): # type: (str) -> t.Optional[t.Dict[str, str]] + def _classify_ansible(self, path: str) -> t.Optional[t.Dict[str, str]]: """Return the classification for the given path using rules specific to Ansible.""" if path.startswith('test/units/compat/'): return { @@ -850,7 +850,7 @@ class PathMapper: return None # unknown, will result in fall-back to run all tests - def _simple_plugin_tests(self, plugin_type, plugin_name): # type: (str, str) -> t.Dict[str, t.Optional[str]] + def _simple_plugin_tests(self, plugin_type: str, plugin_name: str) -> t.Dict[str, t.Optional[str]]: """ Return tests for the given plugin type and plugin name. This function is useful for plugin types which do not require special processing. 
@@ -876,7 +876,7 @@ class PathMapper: ) -def all_tests(args, force=False): # type: (TestConfig, bool) -> t.Dict[str, str] +def all_tests(args: TestConfig, force: bool = False) -> t.Dict[str, str]: """Return the targets for each test command when all tests should be run.""" if force: integration_all_target = 'all' @@ -892,7 +892,7 @@ def all_tests(args, force=False): # type: (TestConfig, bool) -> t.Dict[str, str } -def get_integration_all_target(args): # type: (TestConfig) -> str +def get_integration_all_target(args: TestConfig) -> str: """Return the target to use when all tests should be run.""" if isinstance(args, IntegrationConfig): return args.changed_all_target diff --git a/test/lib/ansible_test/_internal/classification/common.py b/test/lib/ansible_test/_internal/classification/common.py index 406838271d7..a999b6e9345 100644 --- a/test/lib/ansible_test/_internal/classification/common.py +++ b/test/lib/ansible_test/_internal/classification/common.py @@ -8,7 +8,7 @@ from ..data import ( ) -def resolve_csharp_ps_util(import_name, path): # type: (str, str) -> str +def resolve_csharp_ps_util(import_name: str, path: str) -> str: """Return the fully qualified name of the given import if possible, otherwise return the original import name.""" if data_context().content.is_ansible or not import_name.startswith('.'): # We don't support relative paths for builtin utils, there's no point. 
diff --git a/test/lib/ansible_test/_internal/classification/csharp.py b/test/lib/ansible_test/_internal/classification/csharp.py index af7f9c7f06a..c7994454dfb 100644 --- a/test/lib/ansible_test/_internal/classification/csharp.py +++ b/test/lib/ansible_test/_internal/classification/csharp.py @@ -26,7 +26,7 @@ from ..target import ( ) -def get_csharp_module_utils_imports(powershell_targets, csharp_targets): # type: (t.List[TestTarget], t.List[TestTarget]) -> t.Dict[str, t.Set[str]] +def get_csharp_module_utils_imports(powershell_targets: t.List[TestTarget], csharp_targets: t.List[TestTarget]) -> t.Dict[str, t.Set[str]]: """Return a dictionary of module_utils names mapped to sets of powershell file paths.""" module_utils = enumerate_module_utils() @@ -51,7 +51,7 @@ def get_csharp_module_utils_imports(powershell_targets, csharp_targets): # type return imports -def get_csharp_module_utils_name(path): # type: (str) -> str +def get_csharp_module_utils_name(path: str) -> str: """Return a namespace and name from the given module_utils path.""" base_path = data_context().content.module_utils_csharp_path @@ -65,14 +65,14 @@ def get_csharp_module_utils_name(path): # type: (str) -> str return name -def enumerate_module_utils(): # type: () -> t.Set[str] +def enumerate_module_utils() -> t.Set[str]: """Return a set of available module_utils imports.""" return set(get_csharp_module_utils_name(p) for p in data_context().content.walk_files(data_context().content.module_utils_csharp_path) if os.path.splitext(p)[1] == '.cs') -def extract_csharp_module_utils_imports(path, module_utils, is_pure_csharp): # type: (str, t.Set[str], bool) -> t.Set[str] +def extract_csharp_module_utils_imports(path: str, module_utils: t.Set[str], is_pure_csharp: bool) -> t.Set[str]: """Return a set of module_utils imports found in the specified source file.""" imports = set() if is_pure_csharp: diff --git a/test/lib/ansible_test/_internal/classification/powershell.py 
b/test/lib/ansible_test/_internal/classification/powershell.py index bc73b7487c0..a714ff98c0f 100644 --- a/test/lib/ansible_test/_internal/classification/powershell.py +++ b/test/lib/ansible_test/_internal/classification/powershell.py @@ -26,7 +26,7 @@ from ..target import ( ) -def get_powershell_module_utils_imports(powershell_targets): # type: (t.List[TestTarget]) -> t.Dict[str, t.Set[str]] +def get_powershell_module_utils_imports(powershell_targets: t.List[TestTarget]) -> t.Dict[str, t.Set[str]]: """Return a dictionary of module_utils names mapped to sets of powershell file paths.""" module_utils = enumerate_module_utils() @@ -48,7 +48,7 @@ def get_powershell_module_utils_imports(powershell_targets): # type: (t.List[Te return imports -def get_powershell_module_utils_name(path): # type: (str) -> str +def get_powershell_module_utils_name(path: str) -> str: """Return a namespace and name from the given module_utils path.""" base_path = data_context().content.module_utils_powershell_path @@ -62,14 +62,14 @@ def get_powershell_module_utils_name(path): # type: (str) -> str return name -def enumerate_module_utils(): # type: () -> t.Set[str] +def enumerate_module_utils() -> t.Set[str]: """Return a set of available module_utils imports.""" return set(get_powershell_module_utils_name(p) for p in data_context().content.walk_files(data_context().content.module_utils_powershell_path) if os.path.splitext(p)[1] == '.psm1') -def extract_powershell_module_utils_imports(path, module_utils): # type: (str, t.Set[str]) -> t.Set[str] +def extract_powershell_module_utils_imports(path: str, module_utils: t.Set[str]) -> t.Set[str]: """Return a set of module_utils imports found in the specified source file.""" imports = set() diff --git a/test/lib/ansible_test/_internal/classification/python.py b/test/lib/ansible_test/_internal/classification/python.py index 3dd572e519f..14e8490a9a4 100644 --- a/test/lib/ansible_test/_internal/classification/python.py +++ 
b/test/lib/ansible_test/_internal/classification/python.py @@ -29,7 +29,7 @@ VIRTUAL_PACKAGES = { } -def get_python_module_utils_imports(compile_targets): # type: (t.List[TestTarget]) -> t.Dict[str, t.Set[str]] +def get_python_module_utils_imports(compile_targets: t.List[TestTarget]) -> t.Dict[str, t.Set[str]]: """Return a dictionary of module_utils names mapped to sets of python file paths.""" module_utils = enumerate_module_utils() @@ -41,7 +41,7 @@ def get_python_module_utils_imports(compile_targets): # type: (t.List[TestTarge for target in compile_targets: imports_by_target_path[target.path] = extract_python_module_utils_imports(target.path, module_utils) - def recurse_import(import_name, depth=0, seen=None): # type: (str, int, t.Optional[t.Set[str]]) -> t.Set[str] + def recurse_import(import_name: str, depth: int = 0, seen: t.Optional[t.Set[str]] = None) -> t.Set[str]: """Recursively expand module_utils imports from module_utils files.""" display.info('module_utils import: %s%s' % (' ' * depth, import_name), verbosity=4) @@ -126,7 +126,7 @@ def get_python_module_utils_imports(compile_targets): # type: (t.List[TestTarge return imports -def get_python_module_utils_name(path): # type: (str) -> str +def get_python_module_utils_name(path: str) -> str: """Return a namespace and name from the given module_utils path.""" base_path = data_context().content.module_utils_path @@ -163,7 +163,7 @@ def enumerate_module_utils(): return set(module_utils) -def extract_python_module_utils_imports(path, module_utils): # type: (str, t.Set[str]) -> t.Set[str] +def extract_python_module_utils_imports(path: str, module_utils: t.Set[str]) -> t.Set[str]: """Return a list of module_utils imports found in the specified source file.""" # Python code must be read as bytes to avoid a SyntaxError when the source uses comments to declare the file encoding. 
# See: https://www.python.org/dev/peps/pep-0263 @@ -183,7 +183,7 @@ def extract_python_module_utils_imports(path, module_utils): # type: (str, t.Se return finder.imports -def get_import_path(name, package=False): # type: (str, bool) -> str +def get_import_path(name: str, package: bool = False) -> str: """Return a path from an import name.""" if package: filename = os.path.join(name.replace('.', '/'), '__init__.py') @@ -202,7 +202,7 @@ def get_import_path(name, package=False): # type: (str, bool) -> str return path -def path_to_module(path): # type: (str) -> str +def path_to_module(path: str) -> str: """Convert the given path to a module name.""" module = os.path.splitext(path)[0].replace(os.path.sep, '.') @@ -212,7 +212,7 @@ def path_to_module(path): # type: (str) -> str return module -def relative_to_absolute(name, level, module, path, lineno): # type: (str, int, str, str, int) -> str +def relative_to_absolute(name: str, level: int, module: str, path: str, lineno: int) -> str: """Convert a relative import to an absolute import.""" if level <= 0: absolute_name = name @@ -233,7 +233,7 @@ def relative_to_absolute(name, level, module, path, lineno): # type: (str, int, class ModuleUtilFinder(ast.NodeVisitor): """AST visitor to find valid module_utils imports.""" - def __init__(self, path, module_utils): # type: (str, t.Set[str]) -> None + def __init__(self, path: str, module_utils: t.Set[str]) -> None: self.path = path self.module_utils = module_utils self.imports = set() # type: t.Set[str] @@ -277,7 +277,7 @@ class ModuleUtilFinder(ast.NodeVisitor): self.module = path_to_module(os.path.join(data_context().content.collection.directory, self.path)) # pylint: disable=locally-disabled, invalid-name - def visit_Import(self, node): # type: (ast.Import) -> None + def visit_Import(self, node: ast.Import) -> None: """Visit an import node.""" self.generic_visit(node) @@ -286,7 +286,7 @@ class ModuleUtilFinder(ast.NodeVisitor): self.add_imports([alias.name for alias in 
node.names], node.lineno) # pylint: disable=locally-disabled, invalid-name - def visit_ImportFrom(self, node): # type: (ast.ImportFrom) -> None + def visit_ImportFrom(self, node: ast.ImportFrom) -> None: """Visit an import from node.""" self.generic_visit(node) @@ -304,7 +304,7 @@ class ModuleUtilFinder(ast.NodeVisitor): # from ansible_collections.{ns}.{col}.plugins.module_utils.MODULE[.MODULE] import MODULE[, MODULE] self.add_imports(['%s.%s' % (module, alias.name) for alias in node.names], node.lineno) - def add_import(self, name, line_number): # type: (str, int) -> None + def add_import(self, name: str, line_number: int) -> None: """Record the specified import.""" import_name = name @@ -325,14 +325,14 @@ class ModuleUtilFinder(ast.NodeVisitor): # This error should be detected by unit or integration tests. display.warning('%s:%d Invalid module_utils import: %s' % (self.path, line_number, import_name)) - def add_imports(self, names, line_no): # type: (t.List[str], int) -> None + def add_imports(self, names: t.List[str], line_no: int) -> None: """Add the given import names if they are module_utils imports.""" for name in names: if self.is_module_util_name(name): self.add_import(name, line_no) @staticmethod - def is_module_util_name(name): # type: (str) -> bool + def is_module_util_name(name: str) -> bool: """Return True if the given name is a module_util name for the content under test. 
External module_utils are ignored.""" if data_context().content.is_ansible and name.startswith('ansible.module_utils.'): return True diff --git a/test/lib/ansible_test/_internal/cli/__init__.py b/test/lib/ansible_test/_internal/cli/__init__.py index 64280e820bc..f44a1cf1743 100644 --- a/test/lib/ansible_test/_internal/cli/__init__.py +++ b/test/lib/ansible_test/_internal/cli/__init__.py @@ -28,7 +28,7 @@ from ..util import ( ) -def parse_args(argv=None): # type: (t.Optional[t.List[str]]) -> argparse.Namespace +def parse_args(argv: t.Optional[t.List[str]] = None) -> argparse.Namespace: """Parse command line arguments.""" completer = CompositeActionCompletionFinder() diff --git a/test/lib/ansible_test/_internal/cli/actions.py b/test/lib/ansible_test/_internal/cli/actions.py index e22a7b0e59e..3359a848e43 100644 --- a/test/lib/ansible_test/_internal/cli/actions.py +++ b/test/lib/ansible_test/_internal/cli/actions.py @@ -22,69 +22,69 @@ from .parsers import ( class OriginControllerAction(CompositeAction): """Composite action parser for the controller when the only option is `origin`.""" - def create_parser(self): # type: () -> NamespaceParser + def create_parser(self) -> NamespaceParser: """Return a namespace parser to parse the argument associated with this action.""" return OriginControllerParser() class DelegatedControllerAction(CompositeAction): """Composite action parser for the controller when delegation is supported.""" - def create_parser(self): # type: () -> NamespaceParser + def create_parser(self) -> NamespaceParser: """Return a namespace parser to parse the argument associated with this action.""" return DelegatedControllerParser() class PosixTargetAction(CompositeAction): """Composite action parser for a POSIX target.""" - def create_parser(self): # type: () -> NamespaceParser + def create_parser(self) -> NamespaceParser: """Return a namespace parser to parse the argument associated with this action.""" return PosixTargetParser() class 
WindowsTargetAction(CompositeAction): """Composite action parser for a Windows target.""" - def create_parser(self): # type: () -> NamespaceParser + def create_parser(self) -> NamespaceParser: """Return a namespace parser to parse the argument associated with this action.""" return WindowsTargetParser() class NetworkTargetAction(CompositeAction): """Composite action parser for a network target.""" - def create_parser(self): # type: () -> NamespaceParser + def create_parser(self) -> NamespaceParser: """Return a namespace parser to parse the argument associated with this action.""" return NetworkTargetParser() class SanityPythonTargetAction(CompositeAction): """Composite action parser for a sanity target.""" - def create_parser(self): # type: () -> NamespaceParser + def create_parser(self) -> NamespaceParser: """Return a namespace parser to parse the argument associated with this action.""" return SanityPythonTargetParser() class UnitsPythonTargetAction(CompositeAction): """Composite action parser for a units target.""" - def create_parser(self): # type: () -> NamespaceParser + def create_parser(self) -> NamespaceParser: """Return a namespace parser to parse the argument associated with this action.""" return UnitsPythonTargetParser() class PosixSshTargetAction(CompositeAction): """Composite action parser for a POSIX SSH target.""" - def create_parser(self): # type: () -> NamespaceParser + def create_parser(self) -> NamespaceParser: """Return a namespace parser to parse the argument associated with this action.""" return PosixSshTargetParser() class WindowsSshTargetAction(CompositeAction): """Composite action parser for a Windows SSH target.""" - def create_parser(self): # type: () -> NamespaceParser + def create_parser(self) -> NamespaceParser: """Return a namespace parser to parse the argument associated with this action.""" return WindowsSshTargetParser() class NetworkSshTargetAction(CompositeAction): """Composite action parser for a network SSH target.""" - def 
create_parser(self): # type: () -> NamespaceParser + def create_parser(self) -> NamespaceParser: """Return a namespace parser to parse the argument associated with this action.""" return NetworkSshTargetParser() diff --git a/test/lib/ansible_test/_internal/cli/argparsing/__init__.py b/test/lib/ansible_test/_internal/cli/argparsing/__init__.py index 66dfc4e4a0f..9536ab07995 100644 --- a/test/lib/ansible_test/_internal/cli/argparsing/__init__.py +++ b/test/lib/ansible_test/_internal/cli/argparsing/__init__.py @@ -104,7 +104,7 @@ class CompositeAction(argparse.Action, metaclass=abc.ABCMeta): register_safe_action(type(self)) @abc.abstractmethod - def create_parser(self): # type: () -> NamespaceParser + def create_parser(self) -> NamespaceParser: """Return a namespace parser to parse the argument associated with this action.""" def __call__( @@ -163,7 +163,7 @@ class CompositeActionCompletionFinder(RegisteredCompletionFinder): return completions -def detect_file_listing(value, mode): # type: (str, ParserMode) -> bool +def detect_file_listing(value: str, mode: ParserMode) -> bool: """ Return True if Bash will show a file listing and redraw the prompt, otherwise return False. @@ -198,7 +198,7 @@ def detect_file_listing(value, mode): # type: (str, ParserMode) -> bool return listing -def detect_false_file_completion(value, mode): # type: (str, ParserMode) -> bool +def detect_false_file_completion(value: str, mode: ParserMode) -> bool: """ Return True if Bash will provide an incorrect file completion, otherwise return False. 
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py b/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py index 4e90191a596..df19b3382d9 100644 --- a/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py +++ b/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py @@ -62,18 +62,18 @@ class CompType(enum.Enum): """ @property - def list_mode(self): # type: () -> bool + def list_mode(self) -> bool: """True if completion is running in list mode, otherwise False.""" return self in (CompType.LIST, CompType.LIST_AMBIGUOUS, CompType.LIST_UNMODIFIED) -def register_safe_action(action_type): # type: (t.Type[argparse.Action]) -> None +def register_safe_action(action_type: t.Type[argparse.Action]) -> None: """Register the given action as a safe action for argcomplete to use during completion if it is not already registered.""" if argcomplete and action_type not in argcomplete.safe_actions: argcomplete.safe_actions += (action_type,) -def get_comp_type(): # type: () -> t.Optional[CompType] +def get_comp_type() -> t.Optional[CompType]: """Parse the COMP_TYPE environment variable (if present) and return the associated CompType enum value.""" value = os.environ.get('COMP_TYPE') comp_type = CompType(chr(int(value))) if value else None diff --git a/test/lib/ansible_test/_internal/cli/argparsing/parsers.py b/test/lib/ansible_test/_internal/cli/argparsing/parsers.py index cdd9956bb87..bf08d75bb21 100644 --- a/test/lib/ansible_test/_internal/cli/argparsing/parsers.py +++ b/test/lib/ansible_test/_internal/cli/argparsing/parsers.py @@ -47,7 +47,7 @@ class CompletionSuccess(Completion): matches: t.List[str] = dataclasses.field(default_factory=list) @property - def preserve(self): # type: () -> bool + def preserve(self) -> bool: """ True if argcomplete should not mangle completion values, otherwise False. Only used when more than one completion exists to avoid overwriting the word undergoing completion. 
@@ -55,7 +55,7 @@ class CompletionSuccess(Completion): return len(self.matches) > 1 and self.list_mode @property - def completions(self): # type: () -> t.List[str] + def completions(self) -> t.List[str]: """List of completion values to return to argcomplete.""" completions = self.matches continuation = '' if self.list_mode else self.continuation @@ -98,11 +98,11 @@ class ParserState: parts: t.List[str] = dataclasses.field(default_factory=list) @property - def incomplete(self): # type: () -> bool + def incomplete(self) -> bool: """True if parsing is incomplete (unparsed input remains), otherwise False.""" return self.remainder is not None - def match(self, value, choices): # type: (str, t.List[str]) -> bool + def match(self, value: str, choices: t.List[str]) -> bool: """Return True if the given value matches the provided choices, taking into account parsing boundaries, otherwise return False.""" if self.current_boundary: delimiters, delimiter = self.current_boundary.delimiters, self.current_boundary.match @@ -121,7 +121,7 @@ class ParserState: return False - def read(self): # type: () -> str + def read(self) -> str: """Read and return the next input segment, taking into account parsing boundaries.""" delimiters = "".join(boundary.delimiters for boundary in self.boundaries) @@ -154,26 +154,26 @@ class ParserState: return value @property - def root_namespace(self): # type: () -> t.Any + def root_namespace(self) -> t.Any: """THe root namespace.""" return self.namespaces[0] @property - def current_namespace(self): # type: () -> t.Any + def current_namespace(self) -> t.Any: """The current namespace.""" return self.namespaces[-1] @property - def current_boundary(self): # type: () -> t.Optional[ParserBoundary] + def current_boundary(self) -> t.Optional[ParserBoundary]: """The current parser boundary, if any, otherwise None.""" return self.boundaries[-1] if self.boundaries else None - def set_namespace(self, namespace): # type: (t.Any) -> None + def set_namespace(self, 
namespace: t.Any) -> None: """Set the current namespace.""" self.namespaces.append(namespace) @contextlib.contextmanager - def delimit(self, delimiters, required=True): # type: (str, bool) -> t.Iterator[ParserBoundary] + def delimit(self, delimiters: str, required: bool = True) -> t.Iterator[ParserBoundary]: """Context manager for delimiting parsing of input.""" boundary = ParserBoundary(delimiters=delimiters, required=required) @@ -197,10 +197,10 @@ class DocumentationState: class Parser(metaclass=abc.ABCMeta): """Base class for all composite argument parsers.""" @abc.abstractmethod - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" raise Exception(f'Undocumented parser: {type(self)}') @@ -217,22 +217,22 @@ class MatchConditions(enum.Flag): class DynamicChoicesParser(Parser, metaclass=abc.ABCMeta): """Base class for composite argument parsers which use a list of choices that can be generated during completion.""" - def __init__(self, conditions=MatchConditions.CHOICE): # type: (MatchConditions) -> None + def __init__(self, conditions: MatchConditions = MatchConditions.CHOICE) -> None: self.conditions = conditions @abc.abstractmethod - def get_choices(self, value): # type: (str) -> t.List[str] + def get_choices(self, value: str) -> t.List[str]: """Return a list of valid choices based on the given input value.""" - def no_completion_match(self, value): # type: (str) -> CompletionUnavailable # pylint: disable=unused-argument + def no_completion_match(self, value: str) -> CompletionUnavailable: # pylint: disable=unused-argument """Return an instance of CompletionUnavailable when no match was found for the given value.""" return CompletionUnavailable() 
- def no_choices_available(self, value): # type: (str) -> ParserError # pylint: disable=unused-argument + def no_choices_available(self, value: str) -> ParserError: # pylint: disable=unused-argument """Return an instance of ParserError when parsing fails and no choices are available.""" return ParserError('No choices available.') - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" value = state.read() choices = self.get_choices(value) @@ -272,16 +272,16 @@ class DynamicChoicesParser(Parser, metaclass=abc.ABCMeta): class ChoicesParser(DynamicChoicesParser): """Composite argument parser which relies on a static list of choices.""" - def __init__(self, choices, conditions=MatchConditions.CHOICE): # type: (t.List[str], MatchConditions) -> None + def __init__(self, choices: t.List[str], conditions: MatchConditions = MatchConditions.CHOICE) -> None: self.choices = choices super().__init__(conditions=conditions) - def get_choices(self, value): # type: (str) -> t.List[str] + def get_choices(self, value: str) -> t.List[str]: """Return a list of valid choices based on the given input value.""" return self.choices - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" return '|'.join(self.choices) @@ -290,12 +290,12 @@ class IntegerParser(DynamicChoicesParser): """Composite argument parser for integers.""" PATTERN = re.compile('^[1-9][0-9]*$') - def __init__(self, maximum=None): # type: (t.Optional[int]) -> None + def __init__(self, maximum: t.Optional[int] = None) -> None: self.maximum = maximum super().__init__() - def get_choices(self, value): # type: (str) -> t.List[str] + def get_choices(self, value: str) -> t.List[str]: """Return a list of valid choices based on the given input value.""" if not value: numbers 
= list(range(1, 10)) @@ -313,12 +313,12 @@ class IntegerParser(DynamicChoicesParser): return [str(n) for n in numbers] - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" value = super().parse(state) return int(value) - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" return '{integer}' @@ -328,7 +328,7 @@ class BooleanParser(ChoicesParser): def __init__(self): super().__init__(['yes', 'no']) - def parse(self, state): # type: (ParserState) -> bool + def parse(self, state: ParserState) -> bool: """Parse the input from the given state and return the result.""" value = super().parse(state) return value == 'yes' @@ -336,7 +336,7 @@ class BooleanParser(ChoicesParser): class AnyParser(ChoicesParser): """Composite argument parser which accepts any input value.""" - def __init__(self, nothing=False, no_match_message=None): # type: (bool, t.Optional[str]) -> None + def __init__(self, nothing: bool = False, no_match_message: t.Optional[str] = None) -> None: self.no_match_message = no_match_message conditions = MatchConditions.ANY @@ -346,14 +346,14 @@ class AnyParser(ChoicesParser): super().__init__([], conditions=conditions) - def no_completion_match(self, value): # type: (str) -> CompletionUnavailable + def no_completion_match(self, value: str) -> CompletionUnavailable: """Return an instance of CompletionUnavailable when no match was found for the given value.""" if self.no_match_message: return CompletionUnavailable(message=self.no_match_message) return super().no_completion_match(value) - def no_choices_available(self, value): # type: (str) -> ParserError + def no_choices_available(self, value: str) -> ParserError: """Return an instance of ParserError when parsing fails and no choices are available.""" if 
self.no_match_message: return ParserError(self.no_match_message) @@ -365,12 +365,12 @@ class RelativePathNameParser(DynamicChoicesParser): """Composite argument parser for relative path names.""" RELATIVE_NAMES = ['.', '..'] - def __init__(self, choices): # type: (t.List[str]) -> None + def __init__(self, choices: t.List[str]) -> None: self.choices = choices super().__init__() - def get_choices(self, value): # type: (str) -> t.List[str] + def get_choices(self, value: str) -> t.List[str]: """Return a list of valid choices based on the given input value.""" choices = list(self.choices) @@ -384,7 +384,7 @@ class RelativePathNameParser(DynamicChoicesParser): class FileParser(Parser): """Composite argument parser for absolute or relative file paths.""" - def parse(self, state): # type: (ParserState) -> str + def parse(self, state: ParserState) -> str: """Parse the input from the given state and return the result.""" if state.mode == ParserMode.PARSE: path = AnyParser().parse(state) @@ -416,7 +416,7 @@ class FileParser(Parser): class AbsolutePathParser(Parser): """Composite argument parser for absolute file paths. 
Paths are only verified for proper syntax, not for existence.""" - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" path = '' @@ -434,7 +434,7 @@ class AbsolutePathParser(Parser): class NamespaceParser(Parser, metaclass=abc.ABCMeta): """Base class for composite argument parsers that store their results in a namespace.""" - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" namespace = state.current_namespace current = getattr(namespace, self.dest) @@ -458,38 +458,38 @@ class NamespaceParser(Parser, metaclass=abc.ABCMeta): return value - def get_value(self, state): # type: (ParserState) -> t.Any + def get_value(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result, without storing the result in the namespace.""" return super().parse(state) @property - def use_list(self): # type: () -> bool + def use_list(self) -> bool: """True if the destination is a list, otherwise False.""" return False @property - def limit_one(self): # type: () -> bool + def limit_one(self) -> bool: """True if only one target is allowed, otherwise False.""" return not self.use_list @property @abc.abstractmethod - def dest(self): # type: () -> str + def dest(self) -> str: """The name of the attribute where the value should be stored.""" class NamespaceWrappedParser(NamespaceParser): """Composite argument parser that wraps a non-namespace parser and stores the result in a namespace.""" - def __init__(self, dest, parser): # type: (str, Parser) -> None + def __init__(self, dest: str, parser: Parser) -> None: self._dest = dest self.parser = parser - def get_value(self, state): # type: (ParserState) -> t.Any + def get_value(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result, 
without storing the result in the namespace.""" return self.parser.parse(state) @property - def dest(self): # type: () -> str + def dest(self) -> str: """The name of the attribute where the value should be stored.""" return self._dest @@ -497,10 +497,10 @@ class NamespaceWrappedParser(NamespaceParser): class KeyValueParser(Parser, metaclass=abc.ABCMeta): """Base class for key/value composite argument parsers.""" @abc.abstractmethod - def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] + def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: """Return a dictionary of key names and value parsers.""" - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" namespace = state.current_namespace parsers = self.get_parsers(state) @@ -522,7 +522,7 @@ class KeyValueParser(Parser, metaclass=abc.ABCMeta): class PairParser(Parser, metaclass=abc.ABCMeta): """Base class for composite argument parsers consisting of a left and right argument parser, with input separated by a delimiter.""" - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" namespace = self.create_namespace() @@ -537,39 +537,39 @@ class PairParser(Parser, metaclass=abc.ABCMeta): return namespace @property - def required(self): # type: () -> bool + def required(self) -> bool: """True if the delimiter (and thus right parser) is required, otherwise False.""" return False @property - def delimiter(self): # type: () -> str + def delimiter(self) -> str: """The delimiter to use between the left and right parser.""" return PAIR_DELIMITER @abc.abstractmethod - def create_namespace(self): # type: () -> t.Any + def create_namespace(self) -> t.Any: """Create and return a namespace.""" @abc.abstractmethod - def get_left_parser(self, state): # type: (ParserState) -> 
Parser + def get_left_parser(self, state: ParserState) -> Parser: """Return the parser for the left side.""" @abc.abstractmethod - def get_right_parser(self, choice): # type: (t.Any) -> Parser + def get_right_parser(self, choice: t.Any) -> Parser: """Return the parser for the right side.""" class TypeParser(Parser, metaclass=abc.ABCMeta): """Base class for composite argument parsers which parse a type name, a colon and then parse results based on the type given by the type name.""" - def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] # pylint: disable=unused-argument + def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: # pylint: disable=unused-argument """Return a dictionary of type names and type parsers.""" return self.get_stateless_parsers() @abc.abstractmethod - def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser] + def get_stateless_parsers(self) -> t.Dict[str, Parser]: """Return a dictionary of type names and type parsers.""" - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" parsers = self.get_parsers(state) diff --git a/test/lib/ansible_test/_internal/cli/commands/__init__.py b/test/lib/ansible_test/_internal/cli/commands/__init__.py index 81bb465372f..a14b5538363 100644 --- a/test/lib/ansible_test/_internal/cli/commands/__init__.py +++ b/test/lib/ansible_test/_internal/cli/commands/__init__.py @@ -227,7 +227,7 @@ def do_commands( do_units(subparsers, test, completer) -def color(value): # type: (str) -> bool +def color(value: str) -> bool: """Strict converter for color option.""" if value == 'yes': return True diff --git a/test/lib/ansible_test/_internal/cli/compat.py b/test/lib/ansible_test/_internal/cli/compat.py index 0a23c2306f3..e5af4105ab1 100644 --- a/test/lib/ansible_test/_internal/cli/compat.py +++ b/test/lib/ansible_test/_internal/cli/compat.py @@ -55,17 +55,17 @@ from ..data 
import ( ) -def filter_python(version, versions): # type: (t.Optional[str], t.Optional[t.Sequence[str]]) -> t.Optional[str] +def filter_python(version: t.Optional[str], versions: t.Optional[t.Sequence[str]]) -> t.Optional[str]: """If a Python version is given and is in the given version list, return that Python version, otherwise return None.""" return version if version in versions else None -def controller_python(version): # type: (t.Optional[str]) -> t.Optional[str] +def controller_python(version: t.Optional[str]) -> t.Optional[str]: """If a Python version is given and is supported by the controller, return that Python version, otherwise return None.""" return filter_python(version, CONTROLLER_PYTHON_VERSIONS) -def get_fallback_remote_controller(): # type: () -> str +def get_fallback_remote_controller() -> str: """Return the remote fallback platform for the controller.""" platform = 'freebsd' # lower cost than RHEL and macOS candidates = [item for item in filter_completion(remote_completion()).values() if item.controller_supported and item.platform == platform] @@ -73,7 +73,7 @@ def get_fallback_remote_controller(): # type: () -> str return fallback.name -def get_option_name(name): # type: (str) -> str +def get_option_name(name: str) -> str: """Return a command-line option name from the given option name.""" if name == 'targets': name = 'target' @@ -144,14 +144,14 @@ class LegacyHostOptions: delattr(namespace, field.name) @staticmethod - def purge_args(args): # type: (t.List[str]) -> t.List[str] + def purge_args(args: t.List[str]) -> t.List[str]: """Purge legacy host options from the given command line arguments.""" fields = dataclasses.fields(LegacyHostOptions) # type: t.Tuple[dataclasses.Field, ...] filters = {get_option_name(field.name): 0 if field.type is t.Optional[bool] else 1 for field in fields} # type: t.Dict[str, int] return filter_args(args, filters) - def get_options_used(self): # type: () -> t.Tuple[str, ...] 
+ def get_options_used(self) -> t.Tuple[str, ...]: """Return a tuple of the command line options used.""" fields = dataclasses.fields(self) # type: t.Tuple[dataclasses.Field, ...] options = tuple(sorted(get_option_name(field.name) for field in fields if getattr(self, field.name))) @@ -278,7 +278,7 @@ def controller_targets( return targets -def native_python(options): # type: (LegacyHostOptions) -> t.Optional[NativePythonConfig] +def native_python(options: LegacyHostOptions) -> t.Optional[NativePythonConfig]: """Return a NativePythonConfig for the given version if it is not None, otherwise return None.""" if not options.python and not options.python_interpreter: return None diff --git a/test/lib/ansible_test/_internal/cli/converters.py b/test/lib/ansible_test/_internal/cli/converters.py index 46562738ef0..f14ef2b3352 100644 --- a/test/lib/ansible_test/_internal/cli/converters.py +++ b/test/lib/ansible_test/_internal/cli/converters.py @@ -5,12 +5,12 @@ import argparse import typing as t -def key_value_type(value): # type: (str) -> t.Tuple[str, str] +def key_value_type(value: str) -> t.Tuple[str, str]: """Wrapper around key_value.""" return key_value(value) -def key_value(value): # type: (str) -> t.Tuple[str, str] +def key_value(value: str) -> t.Tuple[str, str]: """Type parsing and validation for argparse key/value pairs separated by an '=' character.""" parts = value.split('=') diff --git a/test/lib/ansible_test/_internal/cli/environments.py b/test/lib/ansible_test/_internal/cli/environments.py index e3e759fda52..e4fa77a370d 100644 --- a/test/lib/ansible_test/_internal/cli/environments.py +++ b/test/lib/ansible_test/_internal/cli/environments.py @@ -172,7 +172,7 @@ def add_composite_environment_options( action_types = [] # type: t.List[t.Type[CompositeAction]] - def register_action_type(action_type): # type: (t.Type[CompositeAction]) -> t.Type[CompositeAction] + def register_action_type(action_type: t.Type[CompositeAction]) -> t.Type[CompositeAction]: """Register the 
provided composite action type and return it.""" action_types.append(action_type) return action_type @@ -577,16 +577,16 @@ def complete_network_platform_connection(prefix: str, parsed_args: argparse.Name return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_connection or i not in [x[0] for x in parsed_args.platform_connection])] -def get_remote_platform_choices(controller=False): # type: (bool) -> t.List[str] +def get_remote_platform_choices(controller: bool = False) -> t.List[str]: """Return a list of supported remote platforms matching the given prefix.""" return sorted(filter_completion(remote_completion(), controller_only=controller)) -def get_windows_platform_choices(): # type: () -> t.List[str] +def get_windows_platform_choices() -> t.List[str]: """Return a list of supported Windows versions matching the given prefix.""" return sorted(f'windows/{windows.version}' for windows in filter_completion(windows_completion()).values()) -def get_windows_version_choices(): # type: () -> t.List[str] +def get_windows_version_choices() -> t.List[str]: """Return a list of supported Windows versions.""" return sorted(windows.version for windows in filter_completion(windows_completion()).values()) diff --git a/test/lib/ansible_test/_internal/cli/parsers/__init__.py b/test/lib/ansible_test/_internal/cli/parsers/__init__.py index e870d9f8cae..acffbdf83e3 100644 --- a/test/lib/ansible_test/_internal/cli/parsers/__init__.py +++ b/test/lib/ansible_test/_internal/cli/parsers/__init__.py @@ -53,13 +53,13 @@ from .base_argument_parsers import ( class OriginControllerParser(ControllerNamespaceParser, TypeParser): """Composite argument parser for the controller when delegation is not supported.""" - def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser] + def get_stateless_parsers(self) -> t.Dict[str, Parser]: """Return a dictionary of type names and type parsers.""" return dict( origin=OriginParser(), ) - def document(self, state): # type: 
(DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" section = '--controller options:' @@ -71,7 +71,7 @@ class OriginControllerParser(ControllerNamespaceParser, TypeParser): class DelegatedControllerParser(ControllerNamespaceParser, TypeParser): """Composite argument parser for the controller when delegation is supported.""" - def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser] + def get_stateless_parsers(self) -> t.Dict[str, Parser]: """Return a dictionary of type names and type parsers.""" parsers: t.Dict[str, Parser] = dict( origin=OriginParser(), @@ -85,7 +85,7 @@ class DelegatedControllerParser(ControllerNamespaceParser, TypeParser): return parsers - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" section = '--controller options:' @@ -97,7 +97,7 @@ class DelegatedControllerParser(ControllerNamespaceParser, TypeParser): class PosixTargetParser(TargetNamespaceParser, TypeParser): """Composite argument parser for a POSIX target.""" - def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser] + def get_stateless_parsers(self) -> t.Dict[str, Parser]: """Return a dictionary of type names and type parsers.""" parsers: t.Dict[str, Parser] = dict( controller=ControllerParser(), @@ -115,7 +115,7 @@ class PosixTargetParser(TargetNamespaceParser, TypeParser): return parsers - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" section = f'{self.option_name} options (choose one):' @@ -128,19 +128,19 @@ class PosixTargetParser(TargetNamespaceParser, TypeParser): class WindowsTargetParser(TargetsNamespaceParser, TypeParser): """Composite argument 
parser for a Windows target.""" @property - def allow_inventory(self): # type: () -> bool + def allow_inventory(self) -> bool: """True if inventory is allowed, otherwise False.""" return True - def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] + def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: """Return a dictionary of type names and type parsers.""" return self.get_internal_parsers(state.root_namespace.targets) - def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser] + def get_stateless_parsers(self) -> t.Dict[str, Parser]: """Return a dictionary of type names and type parsers.""" return self.get_internal_parsers([]) - def get_internal_parsers(self, targets): # type: (t.List[WindowsConfig]) -> t.Dict[str, Parser] + def get_internal_parsers(self, targets: t.List[WindowsConfig]) -> t.Dict[str, Parser]: """Return a dictionary of type names and type parsers.""" parsers = {} # type: t.Dict[str, Parser] @@ -157,7 +157,7 @@ class WindowsTargetParser(TargetsNamespaceParser, TypeParser): return parsers - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" section = f'{self.option_name} options (choose one):' @@ -170,19 +170,19 @@ class WindowsTargetParser(TargetsNamespaceParser, TypeParser): class NetworkTargetParser(TargetsNamespaceParser, TypeParser): """Composite argument parser for a network target.""" @property - def allow_inventory(self): # type: () -> bool + def allow_inventory(self) -> bool: """True if inventory is allowed, otherwise False.""" return True - def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] + def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: """Return a dictionary of type names and type parsers.""" return self.get_internal_parsers(state.root_namespace.targets) - def get_stateless_parsers(self): # type: () -> 
t.Dict[str, Parser] + def get_stateless_parsers(self) -> t.Dict[str, Parser]: """Return a dictionary of type names and type parsers.""" return self.get_internal_parsers([]) - def get_internal_parsers(self, targets): # type: (t.List[NetworkConfig]) -> t.Dict[str, Parser] + def get_internal_parsers(self, targets: t.List[NetworkConfig]) -> t.Dict[str, Parser]: """Return a dictionary of type names and type parsers.""" parsers = {} # type: t.Dict[str, Parser] @@ -199,7 +199,7 @@ class NetworkTargetParser(TargetsNamespaceParser, TypeParser): return parsers - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" section = f'{self.option_name} options (choose one):' @@ -211,17 +211,17 @@ class NetworkTargetParser(TargetsNamespaceParser, TypeParser): class PythonTargetParser(TargetsNamespaceParser, Parser): """Composite argument parser for a Python target.""" - def __init__(self, allow_venv): # type: (bool) -> None + def __init__(self, allow_venv: bool) -> None: super().__init__() self.allow_venv = allow_venv @property - def option_name(self): # type: () -> str + def option_name(self) -> str: """The option name used for this parser.""" return '--target-python' - def get_value(self, state): # type: (ParserState) -> t.Any + def get_value(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result, without storing the result in the namespace.""" versions = list(SUPPORTED_PYTHON_VERSIONS) @@ -235,7 +235,7 @@ class PythonTargetParser(TargetsNamespaceParser, Parser): return value - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" section = f'{self.option_name} options (choose one):' @@ -249,20 +249,20 @@ class 
PythonTargetParser(TargetsNamespaceParser, Parser): class SanityPythonTargetParser(PythonTargetParser): """Composite argument parser for a sanity Python target.""" - def __init__(self): # type: () -> None + def __init__(self) -> None: super().__init__(allow_venv=False) class UnitsPythonTargetParser(PythonTargetParser): """Composite argument parser for a units Python target.""" - def __init__(self): # type: () -> None + def __init__(self) -> None: super().__init__(allow_venv=True) class PosixSshTargetParser(PosixTargetParser): """Composite argument parser for a POSIX SSH target.""" @property - def option_name(self): # type: () -> str + def option_name(self) -> str: """The option name used for this parser.""" return '--target-posix' @@ -270,17 +270,17 @@ class PosixSshTargetParser(PosixTargetParser): class WindowsSshTargetParser(WindowsTargetParser): """Composite argument parser for a Windows SSH target.""" @property - def option_name(self): # type: () -> str + def option_name(self) -> str: """The option name used for this parser.""" return '--target-windows' @property - def allow_inventory(self): # type: () -> bool + def allow_inventory(self) -> bool: """True if inventory is allowed, otherwise False.""" return False @property - def limit_one(self): # type: () -> bool + def limit_one(self) -> bool: """True if only one target is allowed, otherwise False.""" return True @@ -288,16 +288,16 @@ class WindowsSshTargetParser(WindowsTargetParser): class NetworkSshTargetParser(NetworkTargetParser): """Composite argument parser for a network SSH target.""" @property - def option_name(self): # type: () -> str + def option_name(self) -> str: """The option name used for this parser.""" return '--target-network' @property - def allow_inventory(self): # type: () -> bool + def allow_inventory(self) -> bool: """True if inventory is allowed, otherwise False.""" return False @property - def limit_one(self): # type: () -> bool + def limit_one(self) -> bool: """True if only one target is 
allowed, otherwise False.""" return True diff --git a/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py index 2f17affa02b..ed933bd535c 100644 --- a/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py +++ b/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py @@ -14,11 +14,11 @@ from ..argparsing.parsers import ( class ControllerNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta): """Base class for controller namespace parsers.""" @property - def dest(self): # type: () -> str + def dest(self) -> str: """The name of the attribute where the value should be stored.""" return 'controller' - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" if state.root_namespace.targets: raise ControllerRequiredFirstError() @@ -29,22 +29,22 @@ class ControllerNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta): class TargetNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta): """Base class for target namespace parsers involving a single target.""" @property - def option_name(self): # type: () -> str + def option_name(self) -> str: """The option name used for this parser.""" return '--target' @property - def dest(self): # type: () -> str + def dest(self) -> str: """The name of the attribute where the value should be stored.""" return 'targets' @property - def use_list(self): # type: () -> bool + def use_list(self) -> bool: """True if the destination is a list, otherwise False.""" return True @property - def limit_one(self): # type: () -> bool + def limit_one(self) -> bool: """True if only one target is allowed, otherwise False.""" return True @@ -52,17 +52,17 @@ class TargetNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta): class TargetsNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta): """Base class for controller namespace 
parsers involving multiple targets.""" @property - def option_name(self): # type: () -> str + def option_name(self) -> str: """The option name used for this parser.""" return '--target' @property - def dest(self): # type: () -> str + def dest(self) -> str: """The name of the attribute where the value should be stored.""" return 'targets' @property - def use_list(self): # type: () -> bool + def use_list(self) -> bool: """True if the destination is a list, otherwise False.""" return True diff --git a/test/lib/ansible_test/_internal/cli/parsers/helpers.py b/test/lib/ansible_test/_internal/cli/parsers/helpers.py index 03f3cb79bc8..f415d421d86 100644 --- a/test/lib/ansible_test/_internal/cli/parsers/helpers.py +++ b/test/lib/ansible_test/_internal/cli/parsers/helpers.py @@ -21,7 +21,7 @@ from ...host_configs import ( ) -def get_docker_pythons(name, controller, strict): # type: (str, bool, bool) -> t.List[str] +def get_docker_pythons(name: str, controller: bool, strict: bool) -> t.List[str]: """Return a list of docker instance Python versions supported by the specified host config.""" image_config = filter_completion(docker_completion()).get(name) available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS @@ -34,7 +34,7 @@ def get_docker_pythons(name, controller, strict): # type: (str, bool, bool) -> return supported_pythons -def get_remote_pythons(name, controller, strict): # type: (str, bool, bool) -> t.List[str] +def get_remote_pythons(name: str, controller: bool, strict: bool) -> t.List[str]: """Return a list of remote instance Python versions supported by the specified host config.""" platform_config = filter_completion(remote_completion()).get(name) available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS @@ -47,7 +47,7 @@ def get_remote_pythons(name, controller, strict): # type: (str, bool, bool) -> return supported_pythons -def get_controller_pythons(controller_config, strict): # type: 
(HostConfig, bool) -> t.List[str] +def get_controller_pythons(controller_config: HostConfig, strict: bool) -> t.List[str]: """Return a list of controller Python versions supported by the specified host config.""" if isinstance(controller_config, DockerConfig): pythons = get_docker_pythons(controller_config.name, False, strict) diff --git a/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py index 8a7e0ef9645..70e89db8b4a 100644 --- a/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py +++ b/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py @@ -63,7 +63,7 @@ from .helpers import ( class OriginParser(Parser): """Composite argument parser for the origin.""" - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" namespace = OriginConfig() @@ -74,14 +74,14 @@ class OriginParser(Parser): return namespace - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" return OriginKeyValueParser().document(state) class ControllerParser(Parser): """Composite argument parser for the controller.""" - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" namespace = ControllerConfig() @@ -92,30 +92,30 @@ class ControllerParser(Parser): return namespace - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" return ControllerKeyValueParser().document(state) class DockerParser(PairParser): """Composite argument parser for a docker host.""" - def 
__init__(self, controller): # type: (bool) -> None + def __init__(self, controller: bool) -> None: self.controller = controller - def create_namespace(self): # type: () -> t.Any + def create_namespace(self) -> t.Any: """Create and return a namespace.""" return DockerConfig() - def get_left_parser(self, state): # type: (ParserState) -> Parser + def get_left_parser(self, state: ParserState) -> Parser: """Return the parser for the left side.""" return NamespaceWrappedParser('name', ChoicesParser(list(filter_completion(docker_completion(), controller_only=self.controller)), conditions=MatchConditions.CHOICE | MatchConditions.ANY)) - def get_right_parser(self, choice): # type: (t.Any) -> Parser + def get_right_parser(self, choice: t.Any) -> Parser: """Return the parser for the right side.""" return DockerKeyValueParser(choice, self.controller) - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" value = super().parse(state) # type: DockerConfig @@ -124,7 +124,7 @@ class DockerParser(PairParser): return value - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" default = 'default' content = '\n'.join([f' {image} ({", ".join(get_docker_pythons(image, self.controller, False))})' @@ -142,22 +142,22 @@ class DockerParser(PairParser): class PosixRemoteParser(PairParser): """Composite argument parser for a POSIX remote host.""" - def __init__(self, controller): # type: (bool) -> None + def __init__(self, controller: bool) -> None: self.controller = controller - def create_namespace(self): # type: () -> t.Any + def create_namespace(self) -> t.Any: """Create and return a namespace.""" return PosixRemoteConfig() - def get_left_parser(self, state): # type: (ParserState) -> Parser + def get_left_parser(self, state: 
ParserState) -> Parser: """Return the parser for the left side.""" return NamespaceWrappedParser('name', PlatformParser(list(filter_completion(remote_completion(), controller_only=self.controller)))) - def get_right_parser(self, choice): # type: (t.Any) -> Parser + def get_right_parser(self, choice: t.Any) -> Parser: """Return the parser for the right side.""" return PosixRemoteKeyValueParser(choice, self.controller) - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" value = super().parse(state) # type: PosixRemoteConfig @@ -166,7 +166,7 @@ class PosixRemoteParser(PairParser): return value - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" default = get_fallback_remote_controller() content = '\n'.join([f' {name} ({", ".join(get_remote_pythons(name, self.controller, False))})' @@ -184,11 +184,11 @@ class PosixRemoteParser(PairParser): class WindowsRemoteParser(PairParser): """Composite argument parser for a Windows remote host.""" - def create_namespace(self): # type: () -> t.Any + def create_namespace(self) -> t.Any: """Create and return a namespace.""" return WindowsRemoteConfig() - def get_left_parser(self, state): # type: (ParserState) -> Parser + def get_left_parser(self, state: ParserState) -> Parser: """Return the parser for the left side.""" names = list(filter_completion(windows_completion())) @@ -197,11 +197,11 @@ class WindowsRemoteParser(PairParser): return NamespaceWrappedParser('name', PlatformParser(names)) - def get_right_parser(self, choice): # type: (t.Any) -> Parser + def get_right_parser(self, choice: t.Any) -> Parser: """Return the parser for the right side.""" return WindowsRemoteKeyValueParser() - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def 
document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" content = '\n'.join([f' {name}' for name, item in filter_completion(windows_completion()).items()]) @@ -217,11 +217,11 @@ class WindowsRemoteParser(PairParser): class NetworkRemoteParser(PairParser): """Composite argument parser for a network remote host.""" - def create_namespace(self): # type: () -> t.Any + def create_namespace(self) -> t.Any: """Create and return a namespace.""" return NetworkRemoteConfig() - def get_left_parser(self, state): # type: (ParserState) -> Parser + def get_left_parser(self, state: ParserState) -> Parser: """Return the parser for the left side.""" names = list(filter_completion(network_completion())) @@ -230,11 +230,11 @@ class NetworkRemoteParser(PairParser): return NamespaceWrappedParser('name', PlatformParser(names)) - def get_right_parser(self, choice): # type: (t.Any) -> Parser + def get_right_parser(self, choice: t.Any) -> Parser: """Return the parser for the right side.""" return NetworkRemoteKeyValueParser() - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" content = '\n'.join([f' {name}' for name, item in filter_completion(network_completion()).items()]) @@ -250,61 +250,61 @@ class NetworkRemoteParser(PairParser): class WindowsInventoryParser(PairParser): """Composite argument parser for a Windows inventory.""" - def create_namespace(self): # type: () -> t.Any + def create_namespace(self) -> t.Any: """Create and return a namespace.""" return WindowsInventoryConfig() - def get_left_parser(self, state): # type: (ParserState) -> Parser + def get_left_parser(self, state: ParserState) -> Parser: """Return the parser for the left side.""" return NamespaceWrappedParser('path', FileParser()) - def get_right_parser(self, choice): # type: (t.Any) -> Parser + def 
get_right_parser(self, choice: t.Any) -> Parser: """Return the parser for the right side.""" return EmptyKeyValueParser() - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" return '{path} # INI format inventory file' class NetworkInventoryParser(PairParser): """Composite argument parser for a network inventory.""" - def create_namespace(self): # type: () -> t.Any + def create_namespace(self) -> t.Any: """Create and return a namespace.""" return NetworkInventoryConfig() - def get_left_parser(self, state): # type: (ParserState) -> Parser + def get_left_parser(self, state: ParserState) -> Parser: """Return the parser for the left side.""" return NamespaceWrappedParser('path', FileParser()) - def get_right_parser(self, choice): # type: (t.Any) -> Parser + def get_right_parser(self, choice: t.Any) -> Parser: """Return the parser for the right side.""" return EmptyKeyValueParser() - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" return '{path} # INI format inventory file' class PosixSshParser(PairParser): """Composite argument parser for a POSIX SSH host.""" - def create_namespace(self): # type: () -> t.Any + def create_namespace(self) -> t.Any: """Create and return a namespace.""" return PosixSshConfig() - def get_left_parser(self, state): # type: (ParserState) -> Parser + def get_left_parser(self, state: ParserState) -> Parser: """Return the parser for the left side.""" return SshConnectionParser() - def get_right_parser(self, choice): # type: (t.Any) -> Parser + def get_right_parser(self, choice: t.Any) -> Parser: """Return the parser for the right side.""" return PosixSshKeyValueParser() @property - def required(self): # type: () -> bool + def required(self) -> bool: 
"""True if the delimiter (and thus right parser) is required, otherwise False.""" return True - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" return f'{SshConnectionParser().document(state)}[,{PosixSshKeyValueParser().document(state)}]' diff --git a/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py index 0ee7d6754d6..763b6264310 100644 --- a/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py +++ b/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py @@ -46,7 +46,7 @@ from .helpers import ( class OriginKeyValueParser(KeyValueParser): """Composite argument parser for origin key/value pairs.""" - def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] + def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: """Return a dictionary of key names and value parsers.""" versions = CONTROLLER_PYTHON_VERSIONS @@ -54,7 +54,7 @@ class OriginKeyValueParser(KeyValueParser): python=PythonParser(versions=versions, allow_venv=True, allow_default=True), ) - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" python_parser = PythonParser(versions=CONTROLLER_PYTHON_VERSIONS, allow_venv=True, allow_default=True) @@ -69,7 +69,7 @@ class OriginKeyValueParser(KeyValueParser): class ControllerKeyValueParser(KeyValueParser): """Composite argument parser for controller key/value pairs.""" - def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] + def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: """Return a dictionary of key names and value parsers.""" versions = get_controller_pythons(state.root_namespace.controller, False) 
allow_default = bool(get_controller_pythons(state.root_namespace.controller, True)) @@ -79,7 +79,7 @@ class ControllerKeyValueParser(KeyValueParser): python=PythonParser(versions=versions, allow_venv=allow_venv, allow_default=allow_default), ) - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" section_name = 'controller options' @@ -98,7 +98,7 @@ class DockerKeyValueParser(KeyValueParser): self.versions = get_docker_pythons(image, controller, False) self.allow_default = bool(get_docker_pythons(image, controller, True)) - def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] + def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: """Return a dictionary of key names and value parsers.""" return dict( python=PythonParser(versions=self.versions, allow_venv=False, allow_default=self.allow_default), @@ -107,7 +107,7 @@ class DockerKeyValueParser(KeyValueParser): memory=IntegerParser(), ) - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" python_parser = PythonParser(versions=[], allow_venv=False, allow_default=self.allow_default) @@ -130,7 +130,7 @@ class PosixRemoteKeyValueParser(KeyValueParser): self.versions = get_remote_pythons(name, controller, False) self.allow_default = bool(get_remote_pythons(name, controller, True)) - def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] + def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: """Return a dictionary of key names and value parsers.""" return dict( become=ChoicesParser(list(SUPPORTED_BECOME_METHODS)), @@ -139,7 +139,7 @@ class PosixRemoteKeyValueParser(KeyValueParser): python=PythonParser(versions=self.versions, allow_venv=False, 
allow_default=self.allow_default), ) - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" python_parser = PythonParser(versions=[], allow_venv=False, allow_default=self.allow_default) @@ -157,14 +157,14 @@ class PosixRemoteKeyValueParser(KeyValueParser): class WindowsRemoteKeyValueParser(KeyValueParser): """Composite argument parser for Windows remote key/value pairs.""" - def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] + def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: """Return a dictionary of key names and value parsers.""" return dict( provider=ChoicesParser(REMOTE_PROVIDERS), arch=ChoicesParser(REMOTE_ARCHITECTURES), ) - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" section_name = 'remote options' @@ -178,7 +178,7 @@ class WindowsRemoteKeyValueParser(KeyValueParser): class NetworkRemoteKeyValueParser(KeyValueParser): """Composite argument parser for network remote key/value pairs.""" - def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] + def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: """Return a dictionary of key names and value parsers.""" return dict( provider=ChoicesParser(REMOTE_PROVIDERS), @@ -187,7 +187,7 @@ class NetworkRemoteKeyValueParser(KeyValueParser): connection=AnyParser(), ) - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" section_name = 'remote options' @@ -203,13 +203,13 @@ class NetworkRemoteKeyValueParser(KeyValueParser): class PosixSshKeyValueParser(KeyValueParser): """Composite argument parser for POSIX SSH 
host key/value pairs.""" - def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] + def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: """Return a dictionary of key names and value parsers.""" return dict( python=PythonParser(versions=list(SUPPORTED_PYTHON_VERSIONS), allow_venv=False, allow_default=False), ) - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" python_parser = PythonParser(versions=SUPPORTED_PYTHON_VERSIONS, allow_venv=False, allow_default=False) @@ -224,6 +224,6 @@ class PosixSshKeyValueParser(KeyValueParser): class EmptyKeyValueParser(KeyValueParser): """Composite argument parser when a key/value parser is required but there are no keys available.""" - def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] + def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: """Return a dictionary of key names and value parsers.""" return {} diff --git a/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py index d09ab7cc211..06dfc2b4ab2 100644 --- a/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py +++ b/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py @@ -85,7 +85,7 @@ class PythonParser(Parser): self.venv_choices = venv_choices self.venv_choices = venv_choices - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" boundary: ParserBoundary @@ -116,7 +116,7 @@ class PythonParser(Parser): return python - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" docs = '[venv/[system-site-packages/]]' if 
self.allow_venv else '' @@ -133,10 +133,10 @@ class PythonParser(Parser): class PlatformParser(ChoicesParser): """Composite argument parser for "{platform}/{version}" formatted choices.""" - def __init__(self, choices): # type: (t.List[str]) -> None + def __init__(self, choices: t.List[str]) -> None: super().__init__(choices, conditions=MatchConditions.CHOICE | MatchConditions.ANY) - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" value = super().parse(state) @@ -153,7 +153,7 @@ class SshConnectionParser(Parser): """ EXPECTED_FORMAT = '{user}@{host}[:{port}]' - def parse(self, state): # type: (ParserState) -> t.Any + def parse(self, state: ParserState) -> t.Any: """Parse the input from the given state and return the result.""" namespace = state.current_namespace @@ -173,6 +173,6 @@ class SshConnectionParser(Parser): return namespace - def document(self, state): # type: (DocumentationState) -> t.Optional[str] + def document(self, state: DocumentationState) -> t.Optional[str]: """Generate and return documentation for this parser.""" return self.EXPECTED_FORMAT diff --git a/test/lib/ansible_test/_internal/commands/coverage/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/__init__.py index 434582af9a5..d827e549d83 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/__init__.py +++ b/test/lib/ansible_test/_internal/commands/coverage/__init__.py @@ -68,11 +68,11 @@ COVERAGE_OUTPUT_FILE_NAME = 'coverage' class CoverageConfig(EnvironmentConfig): """Configuration for the coverage command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args, 'coverage') -def initialize_coverage(args, host_state): # type: (CoverageConfig, HostState) -> coverage_module +def initialize_coverage(args: CoverageConfig, host_state: HostState) -> coverage_module: """Delegate execution if 
requested, install requirements, then import and return the coverage module. Raises an exception if coverage is not available.""" configure_pypi_proxy(args, host_state.controller_profile) # coverage install_requirements(args, host_state.controller_profile.python, coverage=True) # coverage @@ -93,7 +93,7 @@ def initialize_coverage(args, host_state): # type: (CoverageConfig, HostState) return coverage -def run_coverage(args, host_state, output_file, command, cmd): # type: (CoverageConfig, HostState, str, str, t.List[str]) -> None +def run_coverage(args: CoverageConfig, host_state: HostState, output_file: str, command: str, cmd: t.List[str]) -> None: """Run the coverage cli tool with the specified options.""" env = common_environment() env.update(dict(COVERAGE_FILE=output_file)) @@ -112,22 +112,22 @@ def run_coverage(args, host_state, output_file, command, cmd): # type: (Coverag display.warning(stderr) -def get_all_coverage_files(): # type: () -> t.List[str] +def get_all_coverage_files() -> t.List[str]: """Return a list of all coverage file paths.""" return get_python_coverage_files() + get_powershell_coverage_files() -def get_python_coverage_files(path=None): # type: (t.Optional[str]) -> t.List[str] +def get_python_coverage_files(path: t.Optional[str] = None) -> t.List[str]: """Return the list of Python coverage file paths.""" return get_coverage_files('python', path) -def get_powershell_coverage_files(path=None): # type: (t.Optional[str]) -> t.List[str] +def get_powershell_coverage_files(path: t.Optional[str] = None) -> t.List[str]: """Return the list of PowerShell coverage file paths.""" return get_coverage_files('powershell', path) -def get_coverage_files(language, path=None): # type: (str, t.Optional[str]) -> t.List[str] +def get_coverage_files(language: str, path: t.Optional[str] = None) -> t.List[str]: """Return the list of coverage file paths for the given language.""" coverage_dir = path or ResultType.COVERAGE.path @@ -143,7 +143,7 @@ def 
get_coverage_files(language, path=None): # type: (str, t.Optional[str]) -> return coverage_files -def get_collection_path_regexes(): # type: () -> t.Tuple[t.Optional[t.Pattern], t.Optional[t.Pattern]] +def get_collection_path_regexes() -> t.Tuple[t.Optional[t.Pattern], t.Optional[t.Pattern]]: """Return a pair of regexes used for identifying and manipulating collection paths.""" if data_context().content.collection: collection_search_re = re.compile(r'/%s/' % data_context().content.collection.directory) @@ -155,7 +155,7 @@ def get_collection_path_regexes(): # type: () -> t.Tuple[t.Optional[t.Pattern], return collection_search_re, collection_sub_re -def get_python_modules(): # type: () -> t.Dict[str, str] +def get_python_modules() -> t.Dict[str, str]: """Return a dictionary of Ansible module names and their paths.""" return dict((target.module, target.path) for target in list(walk_module_targets()) if target.path.endswith('.py')) @@ -343,13 +343,13 @@ def sanitize_filename( class PathChecker: """Checks code coverage paths to verify they are valid and reports on the findings.""" - def __init__(self, args, collection_search_re=None): # type: (CoverageConfig, t.Optional[t.Pattern]) -> None + def __init__(self, args: CoverageConfig, collection_search_re: t.Optional[t.Pattern] = None) -> None: self.args = args self.collection_search_re = collection_search_re self.invalid_paths = [] # type: t.List[str] self.invalid_path_chars = 0 - def check_path(self, path): # type: (str) -> bool + def check_path(self, path: str) -> bool: """Return True if the given coverage path is valid, otherwise display a warning and return False.""" if os.path.isfile(to_bytes(path)): return True @@ -367,7 +367,7 @@ class PathChecker: return False - def report(self): # type: () -> None + def report(self) -> None: """Display a warning regarding invalid paths if any were found.""" if self.invalid_paths: display.warning('Ignored %d characters from %d invalid coverage path(s).' 
% (self.invalid_path_chars, len(self.invalid_paths))) diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py index 16521bef4f1..37859e8fdfe 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py +++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py @@ -9,7 +9,7 @@ from .. import ( class CoverageAnalyzeConfig(CoverageConfig): """Configuration for the `coverage analyze` command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args) # avoid mixing log messages with file output when using `/dev/stdout` for the output file on commands diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py index 267969886ea..fd20c0b80ed 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py +++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py @@ -31,7 +31,7 @@ class CoverageAnalyzeTargetsConfig(CoverageAnalyzeConfig): """Configuration for the `coverage analyze targets` command.""" -def make_report(target_indexes, arcs, lines): # type: (TargetIndexes, Arcs, Lines) -> t.Dict[str, t.Any] +def make_report(target_indexes: TargetIndexes, arcs: Arcs, lines: Lines) -> t.Dict[str, t.Any]: """Condense target indexes, arcs and lines into a compact report.""" set_indexes = {} # type: TargetSetIndexes arc_refs = dict((path, dict((format_arc(arc), get_target_set_index(indexes, set_indexes)) for arc, indexes in data.items())) for path, data in arcs.items()) @@ -65,7 +65,7 @@ def load_report(report): # type: (t.Dict[str, t.Any]) -> t.Tuple[t.List[str], A return target_indexes, arcs, lines -def read_report(path): # type: (str) -> t.Tuple[t.List[str], Arcs, Lines] +def read_report(path: str) -> 
t.Tuple[t.List[str], Arcs, Lines]: """Read a JSON report from disk.""" try: report = read_json_file(path) @@ -90,7 +90,7 @@ def write_report(args, report, path): # type: (CoverageAnalyzeTargetsConfig, t. ), verbosity=1) -def format_line(value): # type: (int) -> str +def format_line(value: int) -> str: """Format line as a string.""" return str(value) # putting this in a function keeps both pylint and mypy happy @@ -100,18 +100,18 @@ def format_arc(value): # type: (t.Tuple[int, int]) -> str return '%d:%d' % value -def parse_arc(value): # type: (str) -> t.Tuple[int, int] +def parse_arc(value: str) -> t.Tuple[int, int]: """Parse an arc string into a tuple.""" first, last = tuple(map(int, value.split(':'))) return first, last -def get_target_set_index(data, target_set_indexes): # type: (t.Set[int], TargetSetIndexes) -> int +def get_target_set_index(data: t.Set[int], target_set_indexes: TargetSetIndexes) -> int: """Find or add the target set in the result set and return the target set index.""" return target_set_indexes.setdefault(frozenset(data), len(target_set_indexes)) -def get_target_index(name, target_indexes): # type: (str, TargetIndexes) -> int +def get_target_index(name: str, target_indexes: TargetIndexes) -> int: """Find or add the target in the result set and return the target index.""" return target_indexes.setdefault(name, len(target_indexes)) @@ -136,7 +136,7 @@ def expand_indexes( return combined_data -def generate_indexes(target_indexes, data): # type: (TargetIndexes, NamedPoints) -> IndexedPoints +def generate_indexes(target_indexes: TargetIndexes, data: NamedPoints) -> IndexedPoints: """Return an indexed version of the given data (arcs or points).""" results = {} # type: IndexedPoints diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py index 1ea9d59eb4c..a76f7c02062 100644 --- 
a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py +++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py @@ -28,14 +28,14 @@ from . import ( class CoverageAnalyzeTargetsCombineConfig(CoverageAnalyzeTargetsConfig): """Configuration for the `coverage analyze targets combine` command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args) self.input_files = args.input_file # type: t.List[str] self.output_file = args.output_file # type: str -def command_coverage_analyze_targets_combine(args): # type: (CoverageAnalyzeTargetsCombineConfig) -> None +def command_coverage_analyze_targets_combine(args: CoverageAnalyzeTargetsCombineConfig) -> None: """Combine integration test target code coverage reports.""" host_state = prepare_profiles(args) # coverage analyze targets combine diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py index d9283424606..e48aa732915 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py +++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py @@ -26,14 +26,14 @@ from . 
import ( class CoverageAnalyzeTargetsExpandConfig(CoverageAnalyzeTargetsConfig): """Configuration for the `coverage analyze targets expand` command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args) self.input_file = args.input_file # type: str self.output_file = args.output_file # type: str -def command_coverage_analyze_targets_expand(args): # type: (CoverageAnalyzeTargetsExpandConfig) -> None +def command_coverage_analyze_targets_expand(args: CoverageAnalyzeTargetsExpandConfig) -> None: """Expand target names in an aggregated coverage file.""" host_state = prepare_profiles(args) # coverage analyze targets expand diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py index e5e0dff774d..a8224102ffb 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py +++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py @@ -29,7 +29,7 @@ from . 
import ( class CoverageAnalyzeTargetsFilterConfig(CoverageAnalyzeTargetsConfig): """Configuration for the `coverage analyze targets filter` command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args) self.input_file = args.input_file # type: str @@ -40,7 +40,7 @@ class CoverageAnalyzeTargetsFilterConfig(CoverageAnalyzeTargetsConfig): self.exclude_path = args.exclude_path # type: t.Optional[str] -def command_coverage_analyze_targets_filter(args): # type: (CoverageAnalyzeTargetsFilterConfig) -> None +def command_coverage_analyze_targets_filter(args: CoverageAnalyzeTargetsFilterConfig) -> None: """Filter target names in an aggregated coverage file.""" host_state = prepare_profiles(args) # coverage analyze targets filter diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py index 54b2516fc79..c1eac6b686e 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py +++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py @@ -52,14 +52,14 @@ from . 
import ( class CoverageAnalyzeTargetsGenerateConfig(CoverageAnalyzeTargetsConfig): """Configuration for the `coverage analyze targets generate` command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args) self.input_dir = args.input_dir or ResultType.COVERAGE.path # type: str self.output_file = args.output_file # type: str -def command_coverage_analyze_targets_generate(args): # type: (CoverageAnalyzeTargetsGenerateConfig) -> None +def command_coverage_analyze_targets_generate(args: CoverageAnalyzeTargetsGenerateConfig) -> None: """Analyze code coverage data to determine which integration test targets provide coverage for each arc or line.""" host_state = prepare_profiles(args) # coverage analyze targets generate @@ -148,11 +148,11 @@ def prune_invalid_filenames( del results[path] -def get_target_name(path): # type: (str) -> str +def get_target_name(path: str) -> str: """Extract the test target name from the given coverage path.""" return to_text(os.path.basename(path).split('=')[1]) -def is_integration_coverage_file(path): # type: (str) -> bool +def is_integration_coverage_file(path: str) -> bool: """Returns True if the coverage file came from integration tests, otherwise False.""" return os.path.basename(path).split('=')[0] in ('integration', 'windows-integration', 'network-integration') diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py index f3cdfe5b957..54fef64d789 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py +++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py @@ -32,7 +32,7 @@ from . 
import ( class CoverageAnalyzeTargetsMissingConfig(CoverageAnalyzeTargetsConfig): """Configuration for the `coverage analyze targets missing` command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args) self.from_file = args.from_file # type: str @@ -43,7 +43,7 @@ class CoverageAnalyzeTargetsMissingConfig(CoverageAnalyzeTargetsConfig): self.only_exists = args.only_exists # type: bool -def command_coverage_analyze_targets_missing(args): # type: (CoverageAnalyzeTargetsMissingConfig) -> None +def command_coverage_analyze_targets_missing(args: CoverageAnalyzeTargetsMissingConfig) -> None: """Identify aggregated coverage in one file missing from another.""" host_state = prepare_profiles(args) # coverage analyze targets missing diff --git a/test/lib/ansible_test/_internal/commands/coverage/combine.py b/test/lib/ansible_test/_internal/commands/coverage/combine.py index 070eef5ce91..5a262a4012f 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/combine.py +++ b/test/lib/ansible_test/_internal/commands/coverage/combine.py @@ -63,13 +63,13 @@ from . 
import ( TValue = t.TypeVar('TValue') -def command_coverage_combine(args): # type: (CoverageCombineConfig) -> None +def command_coverage_combine(args: CoverageCombineConfig) -> None: """Patch paths in coverage files and merge into a single file.""" host_state = prepare_profiles(args) # coverage combine combine_coverage_files(args, host_state) -def combine_coverage_files(args, host_state): # type: (CoverageCombineConfig, HostState) -> t.List[str] +def combine_coverage_files(args: CoverageCombineConfig, host_state: HostState) -> t.List[str]: """Combine coverage and return a list of the resulting files.""" if args.delegate: if isinstance(args.controller, (DockerConfig, RemoteConfig)): @@ -107,7 +107,7 @@ class ExportedCoverageDataNotFound(ApplicationError): 'The exported files must be in the directory: %s/' % ResultType.COVERAGE.relative_path) -def _command_coverage_combine_python(args, host_state): # type: (CoverageCombineConfig, HostState) -> t.List[str] +def _command_coverage_combine_python(args: CoverageCombineConfig, host_state: HostState) -> t.List[str]: """Combine Python coverage files and return a list of the output files.""" coverage = initialize_coverage(args, host_state) @@ -188,7 +188,7 @@ def _command_coverage_combine_python(args, host_state): # type: (CoverageCombin return sorted(output_files) -def _command_coverage_combine_powershell(args): # type: (CoverageCombineConfig) -> t.List[str] +def _command_coverage_combine_powershell(args: CoverageCombineConfig) -> t.List[str]: """Combine PowerShell coverage files and return a list of the output files.""" coverage_files = get_powershell_coverage_files() @@ -262,7 +262,7 @@ def _command_coverage_combine_powershell(args): # type: (CoverageCombineConfig) return sorted(output_files) -def _get_coverage_targets(args, walk_func): # type: (CoverageCombineConfig, t.Callable) -> t.List[t.Tuple[str, int]] +def _get_coverage_targets(args: CoverageCombineConfig, walk_func: t.Callable) -> t.List[t.Tuple[str, int]]: 
"""Return a list of files to cover and the number of lines in each file, using the given function as the source of the files.""" sources = [] @@ -316,7 +316,7 @@ def _build_stub_groups( return groups -def get_coverage_group(args, coverage_file): # type: (CoverageCombineConfig, str) -> t.Optional[str] +def get_coverage_group(args: CoverageCombineConfig, coverage_file: str) -> t.Optional[str]: """Return the name of the coverage group for the specified coverage file, or None if no group was found.""" parts = os.path.basename(coverage_file).split('=', 4) @@ -350,7 +350,7 @@ def get_coverage_group(args, coverage_file): # type: (CoverageCombineConfig, st class CoverageCombineConfig(CoverageConfig): """Configuration for the coverage combine command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args) self.group_by = frozenset(args.group_by) if args.group_by else frozenset() # type: t.FrozenSet[str] diff --git a/test/lib/ansible_test/_internal/commands/coverage/erase.py b/test/lib/ansible_test/_internal/commands/coverage/erase.py index 9a459a38a5c..70b685c5352 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/erase.py +++ b/test/lib/ansible_test/_internal/commands/coverage/erase.py @@ -20,7 +20,7 @@ from . import ( ) -def command_coverage_erase(args): # type: (CoverageEraseConfig) -> None +def command_coverage_erase(args: CoverageEraseConfig) -> None: """Erase code coverage data files collected during test runs.""" host_state = prepare_profiles(args) # coverage erase diff --git a/test/lib/ansible_test/_internal/commands/coverage/html.py b/test/lib/ansible_test/_internal/commands/coverage/html.py index 12caa179a27..e3063c0efbc 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/html.py +++ b/test/lib/ansible_test/_internal/commands/coverage/html.py @@ -29,7 +29,7 @@ from . 
import ( ) -def command_coverage_html(args): # type: (CoverageHtmlConfig) -> None +def command_coverage_html(args: CoverageHtmlConfig) -> None: """Generate an HTML coverage report.""" host_state = prepare_profiles(args) # coverage html output_files = combine_coverage_files(args, host_state) diff --git a/test/lib/ansible_test/_internal/commands/coverage/report.py b/test/lib/ansible_test/_internal/commands/coverage/report.py index 2d53362e6d6..454c025ebf3 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/report.py +++ b/test/lib/ansible_test/_internal/commands/coverage/report.py @@ -30,7 +30,7 @@ from . import ( ) -def command_coverage_report(args): # type: (CoverageReportConfig) -> None +def command_coverage_report(args: CoverageReportConfig) -> None: """Generate a console coverage report.""" host_state = prepare_profiles(args) # coverage report output_files = combine_coverage_files(args, host_state) @@ -56,7 +56,7 @@ def command_coverage_report(args): # type: (CoverageReportConfig) -> None run_coverage(args, host_state, output_file, 'report', options) -def _generate_powershell_output_report(args, coverage_file): # type: (CoverageReportConfig, str) -> str +def _generate_powershell_output_report(args: CoverageReportConfig, coverage_file: str) -> str: """Generate and return a PowerShell coverage report for the given coverage file.""" coverage_info = read_json_file(coverage_file) @@ -144,7 +144,7 @@ def _generate_powershell_output_report(args, coverage_file): # type: (CoverageR class CoverageReportConfig(CoverageCombineConfig): """Configuration for the coverage report command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args) self.show_missing = args.show_missing # type: bool diff --git a/test/lib/ansible_test/_internal/commands/coverage/xml.py b/test/lib/ansible_test/_internal/commands/coverage/xml.py index c498d1c2b25..8c77621c601 100644 --- 
a/test/lib/ansible_test/_internal/commands/coverage/xml.py +++ b/test/lib/ansible_test/_internal/commands/coverage/xml.py @@ -48,7 +48,7 @@ from . import ( ) -def command_coverage_xml(args): # type: (CoverageXmlConfig) -> None +def command_coverage_xml(args: CoverageXmlConfig) -> None: """Generate an XML coverage report.""" host_state = prepare_profiles(args) # coverage xml output_files = combine_coverage_files(args, host_state) @@ -69,7 +69,7 @@ def command_coverage_xml(args): # type: (CoverageXmlConfig) -> None run_coverage(args, host_state, output_file, 'xml', ['-i', '-o', xml_path]) -def _generate_powershell_xml(coverage_file): # type: (str) -> Element +def _generate_powershell_xml(coverage_file: str) -> Element: """Generate a PowerShell coverage report XML element from the specified coverage file and return it.""" coverage_info = read_json_file(coverage_file) diff --git a/test/lib/ansible_test/_internal/commands/env/__init__.py b/test/lib/ansible_test/_internal/commands/env/__init__.py index d8f11b87e9a..4040ff4815c 100644 --- a/test/lib/ansible_test/_internal/commands/env/__init__.py +++ b/test/lib/ansible_test/_internal/commands/env/__init__.py @@ -45,7 +45,7 @@ from ...ci import ( class EnvConfig(CommonConfig): """Configuration for the `env` command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args, 'env') self.show = args.show @@ -58,14 +58,14 @@ class EnvConfig(CommonConfig): self.show = True -def command_env(args): # type: (EnvConfig) -> None +def command_env(args: EnvConfig) -> None: """Entry point for the `env` command.""" show_dump_env(args) list_files_env(args) set_timeout(args) -def show_dump_env(args): # type: (EnvConfig) -> None +def show_dump_env(args: EnvConfig) -> None: """Show information about the current environment and/or write the information to disk.""" if not args.show and not args.dump: return @@ -107,7 +107,7 @@ def show_dump_env(args): # type: (EnvConfig) -> None 
write_json_test_results(ResultType.BOT, 'data-environment.json', data) -def list_files_env(args): # type: (EnvConfig) -> None +def list_files_env(args: EnvConfig) -> None: """List files on stdout.""" if not args.list_files: return @@ -116,7 +116,7 @@ def list_files_env(args): # type: (EnvConfig) -> None display.info(path) -def set_timeout(args): # type: (EnvConfig) -> None +def set_timeout(args: EnvConfig) -> None: """Set an execution timeout for subsequent ansible-test invocations.""" if args.timeout is None: return @@ -166,7 +166,7 @@ def show_dict(data, verbose, root_verbosity=0, path=None): # type: (t.Dict[str, display.info(indent + '%s: %s' % (key, value), verbosity=verbosity) -def get_docker_details(args): # type: (EnvConfig) -> t.Dict[str, t.Any] +def get_docker_details(args: EnvConfig) -> t.Dict[str, t.Any]: """Return details about docker.""" docker = get_docker_command() diff --git a/test/lib/ansible_test/_internal/commands/integration/__init__.py b/test/lib/ansible_test/_internal/commands/integration/__init__.py index bd0f32c2fec..aab9c5d89f1 100644 --- a/test/lib/ansible_test/_internal/commands/integration/__init__.py +++ b/test/lib/ansible_test/_internal/commands/integration/__init__.py @@ -130,7 +130,7 @@ from .coverage import ( THostProfile = t.TypeVar('THostProfile', bound=HostProfile) -def generate_dependency_map(integration_targets): # type: (t.List[IntegrationTarget]) -> t.Dict[str, t.Set[IntegrationTarget]] +def generate_dependency_map(integration_targets: t.List[IntegrationTarget]) -> t.Dict[str, t.Set[IntegrationTarget]]: """Analyze the given list of integration test targets and return a dictionary expressing target names and the targets on which they depend.""" targets_dict = dict((target.name, target) for target in integration_targets) target_dependencies = analyze_integration_target_dependencies(integration_targets) @@ -157,7 +157,7 @@ def generate_dependency_map(integration_targets): # type: (t.List[IntegrationTa return dependency_map -def 
get_files_needed(target_dependencies): # type: (t.List[IntegrationTarget]) -> t.List[str] +def get_files_needed(target_dependencies: t.List[IntegrationTarget]) -> t.List[str]: """Return a list of files needed by the given list of target dependencies.""" files_needed = [] # type: t.List[str] @@ -174,7 +174,7 @@ def get_files_needed(target_dependencies): # type: (t.List[IntegrationTarget]) return files_needed -def check_inventory(args, inventory_path): # type: (IntegrationConfig, str) -> None +def check_inventory(args: IntegrationConfig, inventory_path: str) -> None: """Check the given inventory for issues.""" if not isinstance(args.controller, OriginConfig): if os.path.exists(inventory_path): @@ -196,7 +196,7 @@ def get_inventory_absolute_path(args: IntegrationConfig, target: InventoryConfig return path -def get_inventory_relative_path(args): # type: (IntegrationConfig) -> str +def get_inventory_relative_path(args: IntegrationConfig) -> str: """Return the inventory path used for the given integration configuration relative to the content root.""" inventory_names = { PosixIntegrationConfig: 'inventory', @@ -207,7 +207,7 @@ def get_inventory_relative_path(args): # type: (IntegrationConfig) -> str return os.path.join(data_context().content.integration_path, inventory_names[type(args)]) -def delegate_inventory(args, inventory_path_src): # type: (IntegrationConfig, str) -> None +def delegate_inventory(args: IntegrationConfig, inventory_path_src: str) -> None: """Make the given inventory available during delegation.""" if isinstance(args, PosixIntegrationConfig): return @@ -839,7 +839,7 @@ class IntegrationCache(CommonCache): return self.get('dependency_map', lambda: generate_dependency_map(self.integration_targets)) -def filter_profiles_for_target(args, profiles, target): # type: (IntegrationConfig, t.List[THostProfile], IntegrationTarget) -> t.List[THostProfile] +def filter_profiles_for_target(args: IntegrationConfig, profiles: t.List[THostProfile], target: 
IntegrationTarget) -> t.List[THostProfile]: """Return a list of profiles after applying target filters.""" if target.target_type == IntegrationTargetType.CONTROLLER: profile_filter = get_target_filter(args, [args.controller], True) @@ -853,7 +853,7 @@ def filter_profiles_for_target(args, profiles, target): # type: (IntegrationCon return profiles -def get_integration_filter(args, targets): # type: (IntegrationConfig, t.List[IntegrationTarget]) -> t.Set[str] +def get_integration_filter(args: IntegrationConfig, targets: t.List[IntegrationTarget]) -> t.Set[str]: """Return a list of test targets to skip based on the host(s) that will be used to run the specified test targets.""" invalid_targets = sorted(target.name for target in targets if target.target_type not in (IntegrationTargetType.CONTROLLER, IntegrationTargetType.TARGET)) @@ -956,7 +956,7 @@ def command_integration_filter(args, # type: TIntegrationConfig return host_state, internal_targets -def requirements(args, host_state): # type: (IntegrationConfig, HostState) -> None +def requirements(args: IntegrationConfig, host_state: HostState) -> None: """Install requirements.""" target_profile = host_state.target_profiles[0] diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py b/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py index 013f022d0c0..1c137bd0619 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py @@ -55,7 +55,7 @@ from ....docker_util import ( @cache -def get_cloud_plugins(): # type: () -> t.Tuple[t.Dict[str, t.Type[CloudProvider]], t.Dict[str, t.Type[CloudEnvironment]]] +def get_cloud_plugins() -> t.Tuple[t.Dict[str, t.Type[CloudProvider]], t.Dict[str, t.Type[CloudEnvironment]]]: """Import cloud plugins and load them into the plugin dictionaries.""" import_plugins('commands/integration/cloud') @@ -69,13 +69,13 @@ def get_cloud_plugins(): # type: 
() -> t.Tuple[t.Dict[str, t.Type[CloudProvider @cache -def get_provider_plugins(): # type: () -> t.Dict[str, t.Type[CloudProvider]] +def get_provider_plugins() -> t.Dict[str, t.Type[CloudProvider]]: """Return a dictionary of the available cloud provider plugins.""" return get_cloud_plugins()[0] @cache -def get_environment_plugins(): # type: () -> t.Dict[str, t.Type[CloudEnvironment]] +def get_environment_plugins() -> t.Dict[str, t.Type[CloudEnvironment]]: """Return a dictionary of the available cloud environment plugins.""" return get_cloud_plugins()[1] @@ -96,7 +96,7 @@ def get_cloud_platforms(args, targets=None): # type: (TestConfig, t.Optional[t. return sorted(cloud_platforms) -def get_cloud_platform(target): # type: (IntegrationTarget) -> t.Optional[str] +def get_cloud_platform(target: IntegrationTarget) -> t.Optional[str]: """Return the name of the cloud platform used for the given target, or None if no cloud platform is used.""" cloud_platforms = set(a.split('/')[1] for a in target.aliases if a.startswith('cloud/') and a.endswith('/') and a != 'cloud/') @@ -119,7 +119,7 @@ def get_cloud_providers(args, targets=None): # type: (IntegrationConfig, t.Opti return [get_provider_plugins()[p](args) for p in get_cloud_platforms(args, targets)] -def get_cloud_environment(args, target): # type: (IntegrationConfig, IntegrationTarget) -> t.Optional[CloudEnvironment] +def get_cloud_environment(args: IntegrationConfig, target: IntegrationTarget) -> t.Optional[CloudEnvironment]: """Return the cloud environment for the given target, or None if no cloud environment is used for the target.""" cloud_platform = get_cloud_platform(target) @@ -185,7 +185,7 @@ class CloudBase(metaclass=abc.ABCMeta): _MANAGED = 'managed' _SETUP_EXECUTED = 'setup_executed' - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: self.args = args self.platform = self.__module__.rsplit('.', 1)[-1] @@ -204,42 +204,42 @@ class 
CloudBase(metaclass=abc.ABCMeta): data_context().register_payload_callback(config_callback) @property - def setup_executed(self): # type: () -> bool + def setup_executed(self) -> bool: """True if setup has been executed, otherwise False.""" return t.cast(bool, self._get_cloud_config(self._SETUP_EXECUTED, False)) @setup_executed.setter - def setup_executed(self, value): # type: (bool) -> None + def setup_executed(self, value: bool) -> None: """True if setup has been executed, otherwise False.""" self._set_cloud_config(self._SETUP_EXECUTED, value) @property - def config_path(self): # type: () -> str + def config_path(self) -> str: """Path to the configuration file.""" return os.path.join(data_context().content.root, str(self._get_cloud_config(self._CONFIG_PATH))) @config_path.setter - def config_path(self, value): # type: (str) -> None + def config_path(self, value: str) -> None: """Path to the configuration file.""" self._set_cloud_config(self._CONFIG_PATH, value) @property - def resource_prefix(self): # type: () -> str + def resource_prefix(self) -> str: """Resource prefix.""" return str(self._get_cloud_config(self._RESOURCE_PREFIX)) @resource_prefix.setter - def resource_prefix(self, value): # type: (str) -> None + def resource_prefix(self, value: str) -> None: """Resource prefix.""" self._set_cloud_config(self._RESOURCE_PREFIX, value) @property - def managed(self): # type: () -> bool + def managed(self) -> bool: """True if resources are managed by ansible-test, otherwise False.""" return t.cast(bool, self._get_cloud_config(self._MANAGED)) @managed.setter - def managed(self, value): # type: (bool) -> None + def managed(self, value: bool) -> None: """True if resources are managed by ansible-test, otherwise False.""" self._set_cloud_config(self._MANAGED, value) @@ -257,7 +257,7 @@ class CloudBase(metaclass=abc.ABCMeta): class CloudProvider(CloudBase): """Base class for cloud provider plugins. 
Sets up cloud resources before delegation.""" - def __init__(self, args, config_extension='.ini'): # type: (IntegrationConfig, str) -> None + def __init__(self, args: IntegrationConfig, config_extension: str = '.ini') -> None: super().__init__(args) self.ci_provider = get_ci_provider() @@ -297,19 +297,19 @@ class CloudProvider(CloudBase): display.warning('Excluding tests marked "%s" which requires container support or config (see "%s"): %s' % (skip.rstrip('/'), self.config_template_path, ', '.join(skipped))) - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" self.resource_prefix = self.ci_provider.generate_resource_prefix() self.resource_prefix = re.sub(r'[^a-zA-Z0-9]+', '-', self.resource_prefix)[:63].lower().rstrip('-') atexit.register(self.cleanup) - def cleanup(self): # type: () -> None + def cleanup(self) -> None: """Clean up the cloud resource and any temporary configuration files after tests complete.""" if self.remove_config: os.remove(self.config_path) - def _use_static_config(self): # type: () -> bool + def _use_static_config(self) -> bool: """Use a static config file if available. 
Returns True if static config is used, otherwise returns False.""" if os.path.isfile(self.config_static_path): display.info('Using existing %s cloud config: %s' % (self.platform, self.config_static_path), verbosity=1) @@ -322,7 +322,7 @@ class CloudProvider(CloudBase): return static - def _write_config(self, content): # type: (str) -> None + def _write_config(self, content: str) -> None: """Write the given content to the config file.""" prefix = '%s-' % os.path.splitext(os.path.basename(self.config_static_path))[0] @@ -337,7 +337,7 @@ class CloudProvider(CloudBase): config_fd.write(to_bytes(content)) config_fd.flush() - def _read_config_template(self): # type: () -> str + def _read_config_template(self) -> str: """Read and return the configuration template.""" lines = read_text_file(self.config_template_path).splitlines() lines = [line for line in lines if not line.startswith('#')] @@ -356,7 +356,7 @@ class CloudProvider(CloudBase): class CloudEnvironment(CloudBase): """Base class for cloud environment plugins. 
Updates integration test environment after delegation.""" - def setup_once(self): # type: () -> None + def setup_once(self) -> None: """Run setup if it has not already been run.""" if self.setup_executed: return @@ -364,14 +364,14 @@ class CloudEnvironment(CloudBase): self.setup() self.setup_executed = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup which should be done once per environment instead of once per test target.""" @abc.abstractmethod - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" - def on_failure(self, target, tries): # type: (IntegrationTarget, int) -> None + def on_failure(self, target: IntegrationTarget, tries: int) -> None: """Callback to run when an integration target fails.""" diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py index 42d6f0bc210..8a83ed2b768 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py @@ -23,7 +23,7 @@ class ACMEProvider(CloudProvider): """ACME plugin. Sets up cloud resources for tests.""" DOCKER_SIMULATOR_NAME = 'acme-simulator' - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) # The simulator must be pinned to a specific version to guarantee CI passes with the version used. 
@@ -34,7 +34,7 @@ class ACMEProvider(CloudProvider): self.uses_docker = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() @@ -43,7 +43,7 @@ class ACMEProvider(CloudProvider): else: self._setup_dynamic() - def _setup_dynamic(self): # type: () -> None + def _setup_dynamic(self) -> None: """Create a ACME test container using docker.""" ports = [ 5000, # control port for flask app in container @@ -62,13 +62,13 @@ class ACMEProvider(CloudProvider): self._set_cloud_config('acme_host', self.DOCKER_SIMULATOR_NAME) - def _setup_static(self): # type: () -> None + def _setup_static(self) -> None: raise NotImplementedError() class ACMEEnvironment(CloudEnvironment): """ACME environment plugin. Updates integration test environment after delegation.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" ansible_vars = dict( acme_host=self._get_cloud_config('acme_host'), diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py b/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py index a67a0f89a94..4c3b00f63e3 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py @@ -37,7 +37,7 @@ from . import ( class AwsCloudProvider(CloudProvider): """AWS cloud provider plugin. 
Sets up cloud resources before delegation.""" - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) self.uses_config = True @@ -51,7 +51,7 @@ class AwsCloudProvider(CloudProvider): super().filter(targets, exclude) - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() @@ -63,7 +63,7 @@ class AwsCloudProvider(CloudProvider): if not self._use_static_config(): self._setup_dynamic() - def _setup_dynamic(self): # type: () -> None + def _setup_dynamic(self) -> None: """Request AWS credentials through the Ansible Core CI service.""" display.info('Provisioning %s cloud environment.' % self.platform, verbosity=1) @@ -90,14 +90,14 @@ class AwsCloudProvider(CloudProvider): self._write_config(config) - def _create_ansible_core_ci(self): # type: () -> AnsibleCoreCI + def _create_ansible_core_ci(self) -> AnsibleCoreCI: """Return an AWS instance of AnsibleCoreCI.""" return AnsibleCoreCI(self.args, CloudResource(platform='aws')) class AwsCloudEnvironment(CloudEnvironment): """AWS cloud environment plugin. Updates integration test environment after delegation.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" parser = configparser.ConfigParser() parser.read(self.config_path) @@ -123,7 +123,7 @@ class AwsCloudEnvironment(CloudEnvironment): callback_plugins=['aws_resource_actions'], ) - def on_failure(self, target, tries): # type: (IntegrationTarget, int) -> None + def on_failure(self, target: IntegrationTarget, tries: int) -> None: """Callback to run when an integration target fails.""" if not tries and self.managed: display.notice('If %s failed due to permissions, the IAM test policy may need to be updated. 
' diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py b/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py index f67d1adf254..aa50532d138 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py @@ -31,7 +31,7 @@ from . import ( class AzureCloudProvider(CloudProvider): """Azure cloud provider plugin. Sets up cloud resources before delegation.""" - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) self.aci = None # type: t.Optional[AnsibleCoreCI] @@ -47,7 +47,7 @@ class AzureCloudProvider(CloudProvider): super().filter(targets, exclude) - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() @@ -56,14 +56,14 @@ class AzureCloudProvider(CloudProvider): get_config(self.config_path) # check required variables - def cleanup(self): # type: () -> None + def cleanup(self) -> None: """Clean up the cloud resource and any temporary configuration files after tests complete.""" if self.aci: self.aci.stop() super().cleanup() - def _setup_dynamic(self): # type: () -> None + def _setup_dynamic(self) -> None: """Request Azure credentials through ansible-core-ci.""" display.info('Provisioning %s cloud environment.' % self.platform, verbosity=1) @@ -96,14 +96,14 @@ class AzureCloudProvider(CloudProvider): self._write_config(config) - def _create_ansible_core_ci(self): # type: () -> AnsibleCoreCI + def _create_ansible_core_ci(self) -> AnsibleCoreCI: """Return an Azure instance of AnsibleCoreCI.""" return AnsibleCoreCI(self.args, CloudResource(platform='azure')) class AzureCloudEnvironment(CloudEnvironment): """Azure cloud environment plugin. 
Updates integration test environment after delegation.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" env_vars = get_config(self.config_path) @@ -121,7 +121,7 @@ class AzureCloudEnvironment(CloudEnvironment): ansible_vars=ansible_vars, ) - def on_failure(self, target, tries): # type: (IntegrationTarget, int) -> None + def on_failure(self, target: IntegrationTarget, tries: int) -> None: """Callback to run when an integration target fails.""" if not tries and self.managed: display.notice('If %s failed due to permissions, the test policy may need to be updated.' % target.name) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py b/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py index 0a17fb25466..f453ef3ead7 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py @@ -25,12 +25,12 @@ from . import ( class CloudscaleCloudProvider(CloudProvider): """Cloudscale cloud provider plugin. Sets up cloud resources before delegation.""" - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) self.uses_config = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() @@ -39,7 +39,7 @@ class CloudscaleCloudProvider(CloudProvider): class CloudscaleCloudEnvironment(CloudEnvironment): """Cloudscale cloud environment plugin. 
Updates integration test environment after delegation.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" parser = configparser.ConfigParser() parser.read(self.config_path) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py index 8ffcabfb32e..6888a384b32 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py @@ -37,7 +37,7 @@ class CsCloudProvider(CloudProvider): """CloudStack cloud provider plugin. Sets up cloud resources before delegation.""" DOCKER_SIMULATOR_NAME = 'cloudstack-sim' - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) self.image = os.environ.get('ANSIBLE_CLOUDSTACK_CONTAINER', 'quay.io/ansible/cloudstack-test-container:1.4.0') @@ -47,7 +47,7 @@ class CsCloudProvider(CloudProvider): self.uses_docker = True self.uses_config = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() @@ -56,7 +56,7 @@ class CsCloudProvider(CloudProvider): else: self._setup_dynamic() - def _setup_static(self): # type: () -> None + def _setup_static(self) -> None: """Configure CloudStack tests for use with static configuration.""" parser = configparser.ConfigParser() parser.read(self.config_static_path) @@ -81,7 +81,7 @@ class CsCloudProvider(CloudProvider): display.info('Read cs host "%s" and port %d from config: %s' % (self.host, self.port, self.config_static_path), verbosity=1) - def _setup_dynamic(self): # type: () -> None + def _setup_dynamic(self) -> None: """Create a CloudStack simulator using docker.""" 
config = self._read_config_template() @@ -129,7 +129,7 @@ class CsCloudProvider(CloudProvider): self._write_config(config) - def _get_credentials(self, container_name): # type: (str) -> t.Dict[str, t.Any] + def _get_credentials(self, container_name: str) -> t.Dict[str, t.Any]: """Wait for the CloudStack simulator to return credentials.""" def check(value): """Return True if the given configuration is valid JSON, otherwise return False.""" @@ -148,7 +148,7 @@ class CsCloudProvider(CloudProvider): class CsCloudEnvironment(CloudEnvironment): """CloudStack cloud environment plugin. Updates integration test environment after delegation.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" parser = configparser.ConfigParser() parser.read(self.config_path) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py b/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py index 00b05d75e26..a46bf70e8a1 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py @@ -20,12 +20,12 @@ from . import ( class DigitalOceanCloudProvider(CloudProvider): """Checks if a configuration file has been passed or fixtures are going to be used for testing""" - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) self.uses_config = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() @@ -34,7 +34,7 @@ class DigitalOceanCloudProvider(CloudProvider): class DigitalOceanCloudEnvironment(CloudEnvironment): """Updates integration test environment after delegation. 
Will setup the config file as parameter.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" parser = configparser.ConfigParser() parser.read(self.config_path) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py b/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py index 86a38fef24b..c2413ee8e87 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py @@ -32,7 +32,7 @@ class ForemanProvider(CloudProvider): # https://github.com/ansible/foreman-test-container DOCKER_IMAGE = 'quay.io/ansible/foreman-test-container:1.4.0' - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) self.__container_from_env = os.environ.get('ANSIBLE_FRMNSIM_CONTAINER') @@ -46,7 +46,7 @@ class ForemanProvider(CloudProvider): self.uses_docker = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup cloud resource before delegation and reg cleanup callback.""" super().setup() @@ -55,7 +55,7 @@ class ForemanProvider(CloudProvider): else: self._setup_dynamic() - def _setup_dynamic(self): # type: () -> None + def _setup_dynamic(self) -> None: """Spawn a Foreman stub within docker container.""" foreman_port = 8080 @@ -76,13 +76,13 @@ class ForemanProvider(CloudProvider): self._set_cloud_config('FOREMAN_HOST', self.DOCKER_SIMULATOR_NAME) self._set_cloud_config('FOREMAN_PORT', str(foreman_port)) - def _setup_static(self): # type: () -> None + def _setup_static(self) -> None: raise NotImplementedError() class ForemanEnvironment(CloudEnvironment): """Foreman environment plugin. 
Updates integration test environment after delegation.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" env_vars = dict( FOREMAN_HOST=str(self._get_cloud_config('FOREMAN_HOST')), diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py index 302a2919153..e180a024af7 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py @@ -77,7 +77,7 @@ class GalaxyProvider(CloudProvider): Galaxy plugin. Sets up pulp (ansible-galaxy) servers for tests. The pulp source itself resides at: https://github.com/pulp/pulp-oci-images """ - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) # Cannot use the latest container image as either galaxy_ng 4.2.0rc2 or pulp 0.5.0 has sporatic issues with @@ -91,7 +91,7 @@ class GalaxyProvider(CloudProvider): self.uses_docker = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup cloud resource before delegation and reg cleanup callback.""" super().setup() @@ -143,7 +143,7 @@ class GalaxyProvider(CloudProvider): class GalaxyEnvironment(CloudEnvironment): """Galaxy environment plugin. 
Updates integration test environment after delegation.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" pulp_user = str(self._get_cloud_config('PULP_USER')) pulp_password = str(self._get_cloud_config('PULP_PASSWORD')) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py b/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py index b23097a7be2..28ffb7b6996 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py @@ -22,12 +22,12 @@ from . import ( class GcpCloudProvider(CloudProvider): """GCP cloud provider plugin. Sets up cloud resources before delegation.""" - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) self.uses_config = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() @@ -39,7 +39,7 @@ class GcpCloudProvider(CloudProvider): class GcpCloudEnvironment(CloudEnvironment): """GCP cloud environment plugin. 
Updates integration test environment after delegation.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" parser = configparser.ConfigParser() parser.read(self.config_path) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py b/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py index 6912aff36dd..5e6b6380827 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py @@ -30,7 +30,7 @@ from . import ( class HcloudCloudProvider(CloudProvider): """Hetzner Cloud provider plugin. Sets up cloud resources before delegation.""" - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) self.uses_config = True @@ -44,14 +44,14 @@ class HcloudCloudProvider(CloudProvider): super().filter(targets, exclude) - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() if not self._use_static_config(): self._setup_dynamic() - def _setup_dynamic(self): # type: () -> None + def _setup_dynamic(self) -> None: """Request Hetzner credentials through the Ansible Core CI service.""" display.info('Provisioning %s cloud environment.' % self.platform, verbosity=1) @@ -77,14 +77,14 @@ class HcloudCloudProvider(CloudProvider): self._write_config(config) - def _create_ansible_core_ci(self): # type: () -> AnsibleCoreCI + def _create_ansible_core_ci(self) -> AnsibleCoreCI: """Return a Heztner instance of AnsibleCoreCI.""" return AnsibleCoreCI(self.args, CloudResource(platform='hetzner')) class HcloudCloudEnvironment(CloudEnvironment): """Hetzner Cloud cloud environment plugin. 
Updates integration test environment after delegation.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" parser = configparser.ConfigParser() parser.read(self.config_path) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py index b74b24570e7..e250eed773c 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py @@ -28,14 +28,14 @@ KRB5_PASSWORD_ENV = 'KRB5_PASSWORD' class HttptesterProvider(CloudProvider): """HTTP Tester provider plugin. Sets up resources before delegation.""" - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) self.image = os.environ.get('ANSIBLE_HTTP_TEST_CONTAINER', 'quay.io/ansible/http-test-container:2.1.0') self.uses_docker = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup resources before delegation.""" super().setup() @@ -82,7 +82,7 @@ class HttptesterProvider(CloudProvider): class HttptesterEnvironment(CloudEnvironment): """HTTP Tester environment plugin. 
Updates integration test environment after delegation.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" return CloudEnvironmentConfig( env_vars=dict( diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py index dee73aa68fa..df0ebb0eebd 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py @@ -32,7 +32,7 @@ class NiosProvider(CloudProvider): # https://github.com/ansible/nios-test-container DOCKER_IMAGE = 'quay.io/ansible/nios-test-container:1.4.0' - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) self.__container_from_env = os.environ.get('ANSIBLE_NIOSSIM_CONTAINER') @@ -47,7 +47,7 @@ class NiosProvider(CloudProvider): self.uses_docker = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup cloud resource before delegation and reg cleanup callback.""" super().setup() @@ -56,7 +56,7 @@ class NiosProvider(CloudProvider): else: self._setup_dynamic() - def _setup_dynamic(self): # type: () -> None + def _setup_dynamic(self) -> None: """Spawn a NIOS simulator within docker container.""" nios_port = 443 @@ -76,13 +76,13 @@ class NiosProvider(CloudProvider): self._set_cloud_config('NIOS_HOST', self.DOCKER_SIMULATOR_NAME) - def _setup_static(self): # type: () -> None + def _setup_static(self) -> None: raise NotImplementedError() class NiosEnvironment(CloudEnvironment): """NIOS environment plugin. 
Updates integration test environment after delegation.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" ansible_vars = dict( nios_provider=dict( diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py b/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py index 3019f3102bc..d005a3ca570 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py @@ -16,7 +16,7 @@ from . import ( class OpenNebulaCloudProvider(CloudProvider): """Checks if a configuration file has been passed or fixtures are going to be used for testing""" - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() @@ -25,7 +25,7 @@ class OpenNebulaCloudProvider(CloudProvider): self.uses_config = True - def _setup_dynamic(self): # type: () -> None + def _setup_dynamic(self) -> None: display.info('No config file provided, will run test from fixtures') config = self._read_config_template() @@ -42,7 +42,7 @@ class OpenNebulaCloudProvider(CloudProvider): class OpenNebulaCloudEnvironment(CloudEnvironment): """Updates integration test environment after delegation. 
Will setup the config file as parameter.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" parser = configparser.ConfigParser() parser.read(self.config_path) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py index 10f63ac05aa..da930c01ee6 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py @@ -32,7 +32,7 @@ class OpenShiftCloudProvider(CloudProvider): """OpenShift cloud provider plugin. Sets up cloud resources before delegation.""" DOCKER_CONTAINER_NAME = 'openshift-origin' - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args, config_extension='.kubeconfig') # The image must be pinned to a specific version to guarantee CI passes with the version used. 
@@ -41,7 +41,7 @@ class OpenShiftCloudProvider(CloudProvider): self.uses_docker = True self.uses_config = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() @@ -50,7 +50,7 @@ class OpenShiftCloudProvider(CloudProvider): else: self._setup_dynamic() - def _setup_static(self): # type: () -> None + def _setup_static(self) -> None: """Configure OpenShift tests for use with static configuration.""" config = read_text_file(self.config_static_path) @@ -59,7 +59,7 @@ class OpenShiftCloudProvider(CloudProvider): if not match: display.warning('Could not find OpenShift endpoint in kubeconfig.') - def _setup_dynamic(self): # type: () -> None + def _setup_dynamic(self) -> None: """Create a OpenShift container using docker.""" port = 8443 @@ -90,7 +90,7 @@ class OpenShiftCloudProvider(CloudProvider): self._write_config(config) - def _get_config(self, container_name, server): # type: (str, str) -> str + def _get_config(self, container_name: str, server: str) -> str: """Get OpenShift config from container.""" stdout = wait_for_file(self.args, container_name, '/var/lib/origin/openshift.local.config/master/admin.kubeconfig', sleep=10, tries=30) @@ -103,7 +103,7 @@ class OpenShiftCloudProvider(CloudProvider): class OpenShiftCloudEnvironment(CloudEnvironment): """OpenShift cloud environment plugin. 
Updates integration test environment after delegation.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" env_vars = dict( K8S_AUTH_KUBECONFIG=self.config_path, diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py b/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py index 1ef158b2b2c..04c2d89b99c 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py @@ -20,12 +20,12 @@ from . import ( class ScalewayCloudProvider(CloudProvider): """Checks if a configuration file has been passed or fixtures are going to be used for testing""" - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) self.uses_config = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() @@ -34,7 +34,7 @@ class ScalewayCloudProvider(CloudProvider): class ScalewayCloudEnvironment(CloudEnvironment): """Updates integration test environment after delegation. 
Will setup the config file as parameter.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" parser = configparser.ConfigParser() parser.read(self.config_path) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py index 36a65b7b415..df1651f92f7 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py @@ -29,7 +29,7 @@ class VcenterProvider(CloudProvider): """VMware vcenter/esx plugin. Sets up cloud resources for tests.""" DOCKER_SIMULATOR_NAME = 'vcenter-simulator' - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) # The simulator must be pinned to a specific version to guarantee CI passes with the version used. 
@@ -49,7 +49,7 @@ class VcenterProvider(CloudProvider): self.uses_docker = False self.uses_config = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() @@ -64,7 +64,7 @@ class VcenterProvider(CloudProvider): else: raise ApplicationError('Unknown vmware_test_platform: %s' % self.vmware_test_platform) - def _setup_dynamic_simulator(self): # type: () -> None + def _setup_dynamic_simulator(self) -> None: """Create a vcenter simulator using docker.""" ports = [ 443, @@ -85,14 +85,14 @@ class VcenterProvider(CloudProvider): self._set_cloud_config('vcenter_hostname', self.DOCKER_SIMULATOR_NAME) - def _setup_static(self): # type: () -> None + def _setup_static(self) -> None: if not os.path.exists(self.config_static_path): raise ApplicationError('Configuration file does not exist: %s' % self.config_static_path) class VcenterEnvironment(CloudEnvironment): """VMware vcenter/esx environment plugin. Updates integration test environment after delegation.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" try: # We may be in a container, so we cannot just reach VMWARE_TEST_PLATFORM, diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py b/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py index 2e8b1b3fac3..1993cdabed6 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py @@ -20,12 +20,12 @@ from . 
import ( class VultrCloudProvider(CloudProvider): """Checks if a configuration file has been passed or fixtures are going to be used for testing""" - def __init__(self, args): # type: (IntegrationConfig) -> None + def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) self.uses_config = True - def setup(self): # type: () -> None + def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() @@ -34,7 +34,7 @@ class VultrCloudProvider(CloudProvider): class VultrCloudEnvironment(CloudEnvironment): """Updates integration test environment after delegation. Will setup the config file as parameter.""" - def get_environment_config(self): # type: () -> CloudEnvironmentConfig + def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" parser = configparser.ConfigParser() parser.read(self.config_path) diff --git a/test/lib/ansible_test/_internal/commands/integration/coverage.py b/test/lib/ansible_test/_internal/commands/integration/coverage.py index 3146181afcf..63bfe9e33d0 100644 --- a/test/lib/ansible_test/_internal/commands/integration/coverage.py +++ b/test/lib/ansible_test/_internal/commands/integration/coverage.py @@ -81,13 +81,13 @@ THostConfig = t.TypeVar('THostConfig', bound=HostConfig) class CoverageHandler(t.Generic[THostConfig], metaclass=abc.ABCMeta): """Base class for configuring hosts for integration test code coverage.""" - def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None + def __init__(self, args: IntegrationConfig, host_state: HostState, inventory_path: str) -> None: self.args = args self.host_state = host_state self.inventory_path = inventory_path self.profiles = self.get_profiles() - def get_profiles(self): # type: () -> t.List[HostProfile] + def get_profiles(self) -> t.List[HostProfile]: """Return a list of profiles relevant for 
this handler.""" profile_type = get_generic_type(type(self), HostConfig) profiles = [profile for profile in self.host_state.target_profiles if isinstance(profile.config, profile_type)] @@ -96,19 +96,19 @@ class CoverageHandler(t.Generic[THostConfig], metaclass=abc.ABCMeta): @property @abc.abstractmethod - def is_active(self): # type: () -> bool + def is_active(self) -> bool: """True if the handler should be used, otherwise False.""" @abc.abstractmethod - def setup(self): # type: () -> None + def setup(self) -> None: """Perform setup for code coverage.""" @abc.abstractmethod - def teardown(self): # type: () -> None + def teardown(self) -> None: """Perform teardown for code coverage.""" @abc.abstractmethod - def create_inventory(self): # type: () -> None + def create_inventory(self) -> None: """Create inventory, if needed.""" @abc.abstractmethod @@ -123,13 +123,13 @@ class CoverageHandler(t.Generic[THostConfig], metaclass=abc.ABCMeta): class PosixCoverageHandler(CoverageHandler[PosixConfig]): """Configure integration test code coverage for POSIX hosts.""" - def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None + def __init__(self, args: IntegrationConfig, host_state: HostState, inventory_path: str) -> None: super().__init__(args, host_state, inventory_path) # Common temporary directory used on all POSIX hosts that will be created world writeable. 
self.common_temp_path = f'/tmp/ansible-test-{generate_name()}' - def get_profiles(self): # type: () -> t.List[HostProfile] + def get_profiles(self) -> t.List[HostProfile]: """Return a list of profiles relevant for this handler.""" profiles = super().get_profiles() profiles = [profile for profile in profiles if not isinstance(profile, ControllerProfile) or @@ -138,21 +138,21 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]): return profiles @property - def is_active(self): # type: () -> bool + def is_active(self) -> bool: """True if the handler should be used, otherwise False.""" return True @property - def target_profile(self): # type: () -> t.Optional[PosixProfile] + def target_profile(self) -> t.Optional[PosixProfile]: """The POSIX target profile, if it uses a different Python interpreter than the controller, otherwise None.""" return t.cast(PosixProfile, self.profiles[0]) if self.profiles else None - def setup(self): # type: () -> None + def setup(self) -> None: """Perform setup for code coverage.""" self.setup_controller() self.setup_target() - def teardown(self): # type: () -> None + def teardown(self) -> None: """Perform teardown for code coverage.""" self.teardown_controller() self.teardown_target() @@ -180,7 +180,7 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]): self.run_playbook('posix_coverage_setup.yml', self.get_playbook_variables()) - def teardown_controller(self): # type: () -> None + def teardown_controller(self) -> None: """Perform teardown for code coverage on the controller.""" coverage_temp_path = os.path.join(self.common_temp_path, ResultType.COVERAGE.name) platform = get_coverage_platform(self.args.controller) @@ -190,7 +190,7 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]): remove_tree(self.common_temp_path) - def teardown_target(self): # type: () -> None + def teardown_target(self) -> None: """Perform teardown for code coverage on the target.""" if not self.target_profile: return @@ -243,11 +243,11 @@ class 
PosixCoverageHandler(CoverageHandler[PosixConfig]): return variables - def create_inventory(self): # type: () -> None + def create_inventory(self) -> None: """Create inventory.""" create_posix_inventory(self.args, self.inventory_path, self.host_state.target_profiles) - def get_playbook_variables(self): # type: () -> t.Dict[str, str] + def get_playbook_variables(self) -> t.Dict[str, str]: """Return a dictionary of variables for setup and teardown of POSIX coverage.""" return dict( common_temp_dir=self.common_temp_path, @@ -262,22 +262,22 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]): class WindowsCoverageHandler(CoverageHandler[WindowsConfig]): """Configure integration test code coverage for Windows hosts.""" - def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None + def __init__(self, args: IntegrationConfig, host_state: HostState, inventory_path: str) -> None: super().__init__(args, host_state, inventory_path) # Common temporary directory used on all Windows hosts that will be created writable by everyone. 
self.remote_temp_path = f'C:\\ansible_test_coverage_{generate_name()}' @property - def is_active(self): # type: () -> bool + def is_active(self) -> bool: """True if the handler should be used, otherwise False.""" return bool(self.profiles) and not self.args.coverage_check - def setup(self): # type: () -> None + def setup(self) -> None: """Perform setup for code coverage.""" self.run_playbook('windows_coverage_setup.yml', self.get_playbook_variables()) - def teardown(self): # type: () -> None + def teardown(self) -> None: """Perform teardown for code coverage.""" with tempfile.TemporaryDirectory() as local_temp_path: variables = self.get_playbook_variables() @@ -320,11 +320,11 @@ class WindowsCoverageHandler(CoverageHandler[WindowsConfig]): return variables - def create_inventory(self): # type: () -> None + def create_inventory(self) -> None: """Create inventory.""" create_windows_inventory(self.args, self.inventory_path, self.host_state.target_profiles) - def get_playbook_variables(self): # type: () -> t.Dict[str, str] + def get_playbook_variables(self) -> t.Dict[str, str]: """Return a dictionary of variables for setup and teardown of Windows coverage.""" return dict( remote_temp_path=self.remote_temp_path, @@ -333,7 +333,7 @@ class WindowsCoverageHandler(CoverageHandler[WindowsConfig]): class CoverageManager: """Manager for code coverage configuration and state.""" - def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None + def __init__(self, args: IntegrationConfig, host_state: HostState, inventory_path: str) -> None: self.args = args self.host_state = host_state self.inventory_path = inventory_path @@ -348,7 +348,7 @@ class CoverageManager: self.handlers = [handler for handler in handlers if handler.is_active] - def setup(self): # type: () -> None + def setup(self) -> None: """Perform setup for code coverage.""" if not self.args.coverage: return @@ -356,7 +356,7 @@ class CoverageManager: for handler in 
self.handlers: handler.setup() - def teardown(self): # type: () -> None + def teardown(self) -> None: """Perform teardown for code coverage.""" if not self.args.coverage: return @@ -378,12 +378,12 @@ class CoverageManager: @cache -def get_config_handler_type_map(): # type: () -> t.Dict[t.Type[HostConfig], t.Type[CoverageHandler]] +def get_config_handler_type_map() -> t.Dict[t.Type[HostConfig], t.Type[CoverageHandler]]: """Create and return a mapping of HostConfig types to CoverageHandler types.""" return get_type_map(CoverageHandler, HostConfig) -def get_handler_type(config_type): # type: (t.Type[HostConfig]) -> t.Optional[t.Type[CoverageHandler]] +def get_handler_type(config_type: t.Type[HostConfig]) -> t.Optional[t.Type[CoverageHandler]]: """Return the coverage handler type associated with the given host config type if found, otherwise return None.""" queue = [config_type] type_map = get_config_handler_type_map() @@ -400,7 +400,7 @@ def get_handler_type(config_type): # type: (t.Type[HostConfig]) -> t.Optional[t return None -def update_coverage_filename(original_filename, platform): # type: (str, str) -> str +def update_coverage_filename(original_filename: str, platform: str) -> str: """Validate the given filename and insert the specified platform, then return the result.""" parts = original_filename.split('=') diff --git a/test/lib/ansible_test/_internal/commands/integration/filters.py b/test/lib/ansible_test/_internal/commands/integration/filters.py index 35acae52c83..6b8c46f230b 100644 --- a/test/lib/ansible_test/_internal/commands/integration/filters.py +++ b/test/lib/ansible_test/_internal/commands/integration/filters.py @@ -47,7 +47,7 @@ THostProfile = t.TypeVar('THostProfile', bound=HostProfile) class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta): """Base class for target filters.""" - def __init__(self, args, configs, controller): # type: (IntegrationConfig, t.List[THostConfig], bool) -> None + def __init__(self, args: IntegrationConfig, 
configs: t.List[THostConfig], controller: bool) -> None: self.args = args self.configs = configs self.controller = controller @@ -59,7 +59,7 @@ class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta): self.allow_destructive = args.allow_destructive @property - def config(self): # type: () -> THostConfig + def config(self) -> THostConfig: """The configuration to filter. Only valid when there is a single config.""" if len(self.configs) != 1: raise Exception() @@ -82,7 +82,7 @@ class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta): self.apply_skip(f'"{skip}"', reason, skipped, exclude) - def apply_skip(self, marked, reason, skipped, exclude): # type: (str, str, t.List[str], t.Set[str]) -> None + def apply_skip(self, marked: str, reason: str, skipped: t.List[str], exclude: t.Set[str]) -> None: """Apply the provided skips to the given exclude list.""" if not skipped: return @@ -90,12 +90,12 @@ class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta): exclude.update(skipped) display.warning(f'Excluding {self.host_type} tests marked {marked} {reason}: {", ".join(skipped)}') - def filter_profiles(self, profiles, target): # type: (t.List[THostProfile], IntegrationTarget) -> t.List[THostProfile] + def filter_profiles(self, profiles: t.List[THostProfile], target: IntegrationTarget) -> t.List[THostProfile]: """Filter the list of profiles, returning only those which are not skipped for the given target.""" del target return profiles - def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None + def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None: """Filter the list of targets, adding any which this host profile cannot support to the provided exclude list.""" if self.controller and self.args.host_settings.controller_fallback and targets: affected_targets = [target.name for target in targets] @@ -138,7 +138,7 @@ class TargetFilter(t.Generic[THostConfig], 
metaclass=abc.ABCMeta): class PosixTargetFilter(TargetFilter[TPosixConfig]): """Target filter for POSIX hosts.""" - def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None + def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None: """Filter the list of targets, adding any which this host profile cannot support to the provided exclude list.""" super().filter_targets(targets, exclude) @@ -151,7 +151,7 @@ class PosixTargetFilter(TargetFilter[TPosixConfig]): class DockerTargetFilter(PosixTargetFilter[DockerConfig]): """Target filter for docker hosts.""" - def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None + def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None: """Filter the list of targets, adding any which this host profile cannot support to the provided exclude list.""" super().filter_targets(targets, exclude) @@ -167,7 +167,7 @@ class PosixSshTargetFilter(PosixTargetFilter[PosixSshConfig]): class RemoteTargetFilter(TargetFilter[TRemoteConfig]): """Target filter for remote Ansible Core CI managed hosts.""" - def filter_profiles(self, profiles, target): # type: (t.List[THostProfile], IntegrationTarget) -> t.List[THostProfile] + def filter_profiles(self, profiles: t.List[THostProfile], target: IntegrationTarget) -> t.List[THostProfile]: """Filter the list of profiles, returning only those which are not skipped for the given target.""" profiles = super().filter_profiles(profiles, target) @@ -181,7 +181,7 @@ class RemoteTargetFilter(TargetFilter[TRemoteConfig]): return profiles - def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None + def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None: """Filter the list of targets, adding any which this host profile cannot support to the provided exclude list.""" 
super().filter_targets(targets, exclude) @@ -224,7 +224,7 @@ class NetworkInventoryTargetFilter(TargetFilter[NetworkInventoryConfig]): class OriginTargetFilter(PosixTargetFilter[OriginConfig]): """Target filter for localhost.""" - def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None + def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None: """Filter the list of targets, adding any which this host profile cannot support to the provided exclude list.""" super().filter_targets(targets, exclude) @@ -235,12 +235,12 @@ class OriginTargetFilter(PosixTargetFilter[OriginConfig]): @cache -def get_host_target_type_map(): # type: () -> t.Dict[t.Type[HostConfig], t.Type[TargetFilter]] +def get_host_target_type_map() -> t.Dict[t.Type[HostConfig], t.Type[TargetFilter]]: """Create and return a mapping of HostConfig types to TargetFilter types.""" return get_type_map(TargetFilter, HostConfig) -def get_target_filter(args, configs, controller): # type: (IntegrationConfig, t.List[HostConfig], bool) -> TargetFilter +def get_target_filter(args: IntegrationConfig, configs: t.List[HostConfig], controller: bool) -> TargetFilter: """Return an integration test target filter instance for the provided host configurations.""" target_type = type(configs[0]) @@ -254,12 +254,12 @@ def get_target_filter(args, configs, controller): # type: (IntegrationConfig, t return filter_instance -def get_remote_skip_aliases(config): # type: (RemoteConfig) -> t.Dict[str, str] +def get_remote_skip_aliases(config: RemoteConfig) -> t.Dict[str, str]: """Return a dictionary of skip aliases and the reason why they apply.""" return get_platform_skip_aliases(config.platform, config.version, config.arch) -def get_platform_skip_aliases(platform, version, arch): # type: (str, str, t.Optional[str]) -> t.Dict[str, str] +def get_platform_skip_aliases(platform: str, version: str, arch: t.Optional[str]) -> t.Dict[str, str]: """Return a dictionary 
of skip aliases and the reason why they apply.""" skips = { f'skip/{platform}': platform, diff --git a/test/lib/ansible_test/_internal/commands/integration/network.py b/test/lib/ansible_test/_internal/commands/integration/network.py index 778384f41c0..d28416c5551 100644 --- a/test/lib/ansible_test/_internal/commands/integration/network.py +++ b/test/lib/ansible_test/_internal/commands/integration/network.py @@ -39,7 +39,7 @@ from ...host_configs import ( ) -def command_network_integration(args): # type: (NetworkIntegrationConfig) -> None +def command_network_integration(args: NetworkIntegrationConfig) -> None: """Entry point for the `network-integration` command.""" handle_layout_messages(data_context().content.integration_messages) diff --git a/test/lib/ansible_test/_internal/commands/integration/posix.py b/test/lib/ansible_test/_internal/commands/integration/posix.py index be78359c4e1..d4c50d34d8d 100644 --- a/test/lib/ansible_test/_internal/commands/integration/posix.py +++ b/test/lib/ansible_test/_internal/commands/integration/posix.py @@ -32,7 +32,7 @@ from ...data import ( ) -def command_posix_integration(args): # type: (PosixIntegrationConfig) -> None +def command_posix_integration(args: PosixIntegrationConfig) -> None: """Entry point for the `integration` command.""" handle_layout_messages(data_context().content.integration_messages) diff --git a/test/lib/ansible_test/_internal/commands/integration/windows.py b/test/lib/ansible_test/_internal/commands/integration/windows.py index d14ae11bc03..aa201c423c3 100644 --- a/test/lib/ansible_test/_internal/commands/integration/windows.py +++ b/test/lib/ansible_test/_internal/commands/integration/windows.py @@ -45,7 +45,7 @@ from ...data import ( ) -def command_windows_integration(args): # type: (WindowsIntegrationConfig) -> None +def command_windows_integration(args: WindowsIntegrationConfig) -> None: """Entry point for the `windows-integration` command.""" 
handle_layout_messages(data_context().content.integration_messages) diff --git a/test/lib/ansible_test/_internal/commands/sanity/__init__.py b/test/lib/ansible_test/_internal/commands/sanity/__init__.py index 6b31fa088a1..83a191f80ec 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/__init__.py +++ b/test/lib/ansible_test/_internal/commands/sanity/__init__.py @@ -131,7 +131,7 @@ DOCUMENTABLE_PLUGINS = ( created_venvs = [] # type: t.List[str] -def command_sanity(args): # type: (SanityConfig) -> None +def command_sanity(args: SanityConfig) -> None: """Run sanity tests.""" create_result_directories(args) @@ -307,7 +307,7 @@ def command_sanity(args): # type: (SanityConfig) -> None @cache -def collect_code_smell_tests(): # type: () -> t.Tuple[SanityTest, ...] +def collect_code_smell_tests() -> t.Tuple[SanityTest, ...]: """Return a tuple of available code smell sanity tests.""" paths = glob.glob(os.path.join(SANITY_ROOT, 'code-smell', '*.py')) @@ -326,7 +326,7 @@ class SanityIgnoreParser: """Parser for the consolidated sanity test ignore file.""" NO_CODE = '_' - def __init__(self, args): # type: (SanityConfig) -> None + def __init__(self, args: SanityConfig) -> None: if data_context().content.collection: ansible_version = '%s.%s' % tuple(get_ansible_version().split('.')[:2]) @@ -509,7 +509,7 @@ class SanityIgnoreParser: self.ignores[test_name][path][error_code] = line_no @staticmethod - def load(args): # type: (SanityConfig) -> SanityIgnoreParser + def load(args: SanityConfig) -> SanityIgnoreParser: """Return the current SanityIgnore instance, initializing it if needed.""" try: return SanityIgnoreParser.instance # type: ignore[attr-defined] @@ -546,11 +546,11 @@ class SanityIgnoreProcessor: self.skip_entries = self.parser.skips.get(full_name, {}) self.used_line_numbers = set() # type: t.Set[int] - def filter_skipped_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] + def filter_skipped_targets(self, targets: t.List[TestTarget]) -> 
t.List[TestTarget]: """Return the given targets, with any skipped paths filtered out.""" return sorted(target for target in targets if target.path not in self.skip_entries) - def process_errors(self, errors, paths): # type: (t.List[SanityMessage], t.List[str]) -> t.List[SanityMessage] + def process_errors(self, errors: t.List[SanityMessage], paths: t.List[str]) -> t.List[SanityMessage]: """Return the given errors filtered for ignores and with any settings related errors included.""" errors = self.filter_messages(errors) errors.extend(self.get_errors(paths)) @@ -559,7 +559,7 @@ class SanityIgnoreProcessor: return errors - def filter_messages(self, messages): # type: (t.List[SanityMessage]) -> t.List[SanityMessage] + def filter_messages(self, messages: t.List[SanityMessage]) -> t.List[SanityMessage]: """Return a filtered list of the given messages using the entries that have been loaded.""" filtered = [] @@ -581,7 +581,7 @@ class SanityIgnoreProcessor: return filtered - def get_errors(self, paths): # type: (t.List[str]) -> t.List[SanityMessage] + def get_errors(self, paths: t.List[str]) -> t.List[SanityMessage]: """Return error messages related to issues with the file.""" messages = [] # type: t.List[SanityMessage] @@ -617,13 +617,13 @@ class SanityIgnoreProcessor: class SanitySuccess(TestSuccess): """Sanity test success.""" - def __init__(self, test, python_version=None): # type: (str, t.Optional[str]) -> None + def __init__(self, test: str, python_version: t.Optional[str] = None) -> None: super().__init__(COMMAND, test, python_version) class SanitySkipped(TestSkipped): """Sanity test skipped.""" - def __init__(self, test, python_version=None): # type: (str, t.Optional[str]) -> None + def __init__(self, test: str, python_version: t.Optional[str] = None) -> None: super().__init__(COMMAND, test, python_version) @@ -650,14 +650,14 @@ class SanityTargets: self.include = include @staticmethod - def create(include, exclude, require): # type: (t.List[str], t.List[str], 
t.List[str]) -> SanityTargets + def create(include: t.List[str], exclude: t.List[str], require: t.List[str]) -> SanityTargets: """Create a SanityTargets instance from the given include, exclude and require lists.""" _targets = SanityTargets.get_targets() _include = walk_internal_targets(_targets, include, exclude, require) return SanityTargets(_targets, _include) @staticmethod - def filter_and_inject_targets(test, targets): # type: (SanityTest, t.Iterable[TestTarget]) -> t.List[TestTarget] + def filter_and_inject_targets(test: SanityTest, targets: t.Iterable[TestTarget]) -> t.List[TestTarget]: """Filter and inject targets based on test requirements and the given target list.""" test_targets = list(targets) @@ -680,7 +680,7 @@ class SanityTargets: return test_targets @staticmethod - def get_targets(): # type: () -> t.Tuple[TestTarget, ...] + def get_targets() -> t.Tuple[TestTarget, ...]: """Return a tuple of sanity test targets. Uses a cached version when available.""" try: return SanityTargets.get_targets.targets # type: ignore[attr-defined] @@ -696,7 +696,7 @@ class SanityTest(metaclass=abc.ABCMeta): """Sanity test base class.""" ansible_only = False - def __init__(self, name=None): # type: (t.Optional[str]) -> None + def __init__(self, name: t.Optional[str] = None) -> None: if not name: name = self.__class__.__name__ name = re.sub(r'Test$', '', name) # drop Test suffix @@ -712,58 +712,58 @@ class SanityTest(metaclass=abc.ABCMeta): self.optional_error_codes = set() # type: t.Set[str] @property - def error_code(self): # type: () -> t.Optional[str] + def error_code(self) -> t.Optional[str]: """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes.""" return None @property - def can_ignore(self): # type: () -> bool + def can_ignore(self) -> bool: """True if the test supports ignore entries.""" return True @property - def can_skip(self): # type: () -> bool + def can_skip(self) -> bool: 
"""True if the test supports skip entries.""" return not self.all_targets and not self.no_targets @property - def all_targets(self): # type: () -> bool + def all_targets(self) -> bool: """True if test targets will not be filtered using includes, excludes, requires or changes. Mutually exclusive with no_targets.""" return False @property - def no_targets(self): # type: () -> bool + def no_targets(self) -> bool: """True if the test does not use test targets. Mutually exclusive with all_targets.""" return False @property - def include_directories(self): # type: () -> bool + def include_directories(self) -> bool: """True if the test targets should include directories.""" return False @property - def include_symlinks(self): # type: () -> bool + def include_symlinks(self) -> bool: """True if the test targets should include symlinks.""" return False @property - def py2_compat(self): # type: () -> bool + def py2_compat(self) -> bool: """True if the test only applies to code that runs on Python 2.x.""" return False @property - def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]] + def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]: """A tuple of supported Python versions or None if the test does not depend on specific Python versions.""" return CONTROLLER_PYTHON_VERSIONS - def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] # pylint: disable=unused-argument + def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]: # pylint: disable=unused-argument """Return the given list of test targets, filtered to include only those relevant for the test.""" if self.no_targets: return [] raise NotImplementedError('Sanity test "%s" must implement "filter_targets" or set "no_targets" to True.' 
% self.name) - def filter_targets_by_version(self, args, targets, python_version): # type: (SanityConfig, t.List[TestTarget], str) -> t.List[TestTarget] + def filter_targets_by_version(self, args: SanityConfig, targets: t.List[TestTarget], python_version: str) -> t.List[TestTarget]: """Return the given list of test targets, filtered to include only those relevant for the test, taking into account the Python version.""" del python_version # python_version is not used here, but derived classes may make use of it @@ -785,7 +785,7 @@ class SanityTest(metaclass=abc.ABCMeta): return targets @staticmethod - def filter_remote_targets(targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] + def filter_remote_targets(targets: t.List[TestTarget]) -> t.List[TestTarget]: """Return a filtered list of the given targets, including only those that require support for remote-only Python versions.""" targets = [target for target in targets if ( is_subdir(target.path, data_context().content.module_path) or @@ -811,15 +811,15 @@ class SanityTest(metaclass=abc.ABCMeta): class SanitySingleVersion(SanityTest, metaclass=abc.ABCMeta): """Base class for sanity test plugins which should run on a single python version.""" @property - def require_libyaml(self): # type: () -> bool + def require_libyaml(self) -> bool: """True if the test requires PyYAML to have libyaml support.""" return False @abc.abstractmethod - def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult: """Run the sanity test and return the result.""" - def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor + def load_processor(self, args: SanityConfig) -> SanityIgnoreProcessor: """Load the ignore processor for this sanity test.""" return SanityIgnoreProcessor(args, self, None) @@ -890,32 +890,32 @@ class SanityCodeSmellTest(SanitySingleVersion): raise 
ApplicationError('Sanity test "%s" option "no_targets" is mutually exclusive with options: %s' % (self.name, ', '.join(problems))) @property - def all_targets(self): # type: () -> bool + def all_targets(self) -> bool: """True if test targets will not be filtered using includes, excludes, requires or changes. Mutually exclusive with no_targets.""" return self.__all_targets @property - def no_targets(self): # type: () -> bool + def no_targets(self) -> bool: """True if the test does not use test targets. Mutually exclusive with all_targets.""" return self.__no_targets @property - def include_directories(self): # type: () -> bool + def include_directories(self) -> bool: """True if the test targets should include directories.""" return self.__include_directories @property - def include_symlinks(self): # type: () -> bool + def include_symlinks(self) -> bool: """True if the test targets should include symlinks.""" return self.__include_symlinks @property - def py2_compat(self): # type: () -> bool + def py2_compat(self) -> bool: """True if the test only applies to code that runs on Python 2.x.""" return self.__py2_compat @property - def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]] + def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]: """A tuple of supported Python versions or None if the test does not depend on specific Python versions.""" versions = super().supported_python_versions @@ -927,7 +927,7 @@ class SanityCodeSmellTest(SanitySingleVersion): return versions - def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] + def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]: """Return the given list of test targets, filtered to include only those relevant for the test.""" if self.no_targets: return [] @@ -954,7 +954,7 @@ class SanityCodeSmellTest(SanitySingleVersion): return targets - def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, 
PythonConfig) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult: """Run the sanity test and return the result.""" cmd = [python.path, self.path] @@ -1022,7 +1022,7 @@ class SanityCodeSmellTest(SanitySingleVersion): return SanitySuccess(self.name) - def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor + def load_processor(self, args: SanityConfig) -> SanityIgnoreProcessor: """Load the ignore processor for this sanity test.""" return SanityIgnoreProcessor(args, self, None) @@ -1030,15 +1030,15 @@ class SanityCodeSmellTest(SanitySingleVersion): class SanityVersionNeutral(SanityTest, metaclass=abc.ABCMeta): """Base class for sanity test plugins which are idependent of the python version being used.""" @abc.abstractmethod - def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult: """Run the sanity test and return the result.""" - def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor + def load_processor(self, args: SanityConfig) -> SanityIgnoreProcessor: """Load the ignore processor for this sanity test.""" return SanityIgnoreProcessor(args, self, None) @property - def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]] + def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]: """A tuple of supported Python versions or None if the test does not depend on specific Python versions.""" return None @@ -1046,24 +1046,24 @@ class SanityVersionNeutral(SanityTest, metaclass=abc.ABCMeta): class SanityMultipleVersion(SanityTest, metaclass=abc.ABCMeta): """Base class for sanity test plugins which should run on multiple python versions.""" @abc.abstractmethod - def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets, python: 
PythonConfig) -> TestResult: """Run the sanity test and return the result.""" - def load_processor(self, args, python_version): # type: (SanityConfig, str) -> SanityIgnoreProcessor + def load_processor(self, args: SanityConfig, python_version: str) -> SanityIgnoreProcessor: """Load the ignore processor for this sanity test.""" return SanityIgnoreProcessor(args, self, python_version) @property - def needs_pypi(self): # type: () -> bool + def needs_pypi(self) -> bool: """True if the test requires PyPI, otherwise False.""" return False @property - def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]] + def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]: """A tuple of supported Python versions or None if the test does not depend on specific Python versions.""" return SUPPORTED_PYTHON_VERSIONS - def filter_targets_by_version(self, args, targets, python_version): # type: (SanityConfig, t.List[TestTarget], str) -> t.List[TestTarget] + def filter_targets_by_version(self, args: SanityConfig, targets: t.List[TestTarget], python_version: str) -> t.List[TestTarget]: """Return the given list of test targets, filtered to include only those relevant for the test, taking into account the Python version.""" if not python_version: raise Exception('python_version is required to filter multi-version tests') @@ -1084,7 +1084,7 @@ class SanityMultipleVersion(SanityTest, metaclass=abc.ABCMeta): @cache -def sanity_get_tests(): # type: () -> t.Tuple[SanityTest, ...] 
+def sanity_get_tests() -> t.Tuple[SanityTest, ...]: """Return a tuple of the available sanity tests.""" import_plugins('commands/sanity') sanity_plugins = {} # type: t.Dict[str, t.Type[SanityTest]] @@ -1163,7 +1163,7 @@ def create_sanity_virtualenv( return virtualenv_python -def check_sanity_virtualenv_yaml(python): # type: (VirtualPythonConfig) -> t.Optional[bool] +def check_sanity_virtualenv_yaml(python: VirtualPythonConfig) -> t.Optional[bool]: """Return True if PyYAML has libyaml support for the given sanity virtual environment, False if it does not and None if it was not found.""" virtualenv_path = os.path.dirname(os.path.dirname(python.path)) meta_yaml = os.path.join(virtualenv_path, 'meta.yaml.json') diff --git a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py index 96de8130a9a..d00925ffcf8 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py +++ b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py @@ -49,7 +49,7 @@ from ...host_configs import ( class AnsibleDocTest(SanitySingleVersion): """Sanity test for ansible-doc.""" - def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] + def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]: """Return the given list of test targets, filtered to include only those relevant for the test.""" plugin_paths = [plugin_path for plugin_type, plugin_path in data_context().content.plugin_paths.items() if plugin_type in DOCUMENTABLE_PLUGINS] @@ -59,7 +59,7 @@ class AnsibleDocTest(SanitySingleVersion): and any(is_subdir(target.path, path) for path in plugin_paths) ] - def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult: settings = self.load_processor(args) paths = [target.path for target in targets.include] diff 
--git a/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py b/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py index 5dc582fa0ee..3516ab2c510 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py +++ b/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py @@ -42,16 +42,16 @@ class BinSymlinksTest(SanityVersionNeutral): ansible_only = True @property - def can_ignore(self): # type: () -> bool + def can_ignore(self) -> bool: """True if the test supports ignore entries.""" return False @property - def no_targets(self): # type: () -> bool + def no_targets(self) -> bool: """True if the test does not use test targets. Mutually exclusive with all_targets.""" return True - def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult: bin_root = ANSIBLE_BIN_PATH bin_names = os.listdir(bin_root) bin_paths = sorted(os.path.join(bin_root, path) for path in bin_names) diff --git a/test/lib/ansible_test/_internal/commands/sanity/compile.py b/test/lib/ansible_test/_internal/commands/sanity/compile.py index 292f89cc54a..f087c84b8ad 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/compile.py +++ b/test/lib/ansible_test/_internal/commands/sanity/compile.py @@ -44,11 +44,11 @@ from ...host_configs import ( class CompileTest(SanityMultipleVersion): """Sanity test for proper python syntax.""" - def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] + def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]: """Return the given list of test targets, filtered to include only those relevant for the test.""" return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')] - def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult + def test(self, args: SanityConfig, targets: 
SanityTargets, python: PythonConfig) -> TestResult: if args.prime_venvs: return SanitySkipped(self.name, python_version=python.version) diff --git a/test/lib/ansible_test/_internal/commands/sanity/ignores.py b/test/lib/ansible_test/_internal/commands/sanity/ignores.py index 867243adfee..d425053341c 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/ignores.py +++ b/test/lib/ansible_test/_internal/commands/sanity/ignores.py @@ -27,16 +27,16 @@ from ...config import ( class IgnoresTest(SanityVersionNeutral): """Sanity test for sanity test ignore entries.""" @property - def can_ignore(self): # type: () -> bool + def can_ignore(self) -> bool: """True if the test supports ignore entries.""" return False @property - def no_targets(self): # type: () -> bool + def no_targets(self) -> bool: """True if the test does not use test targets. Mutually exclusive with all_targets.""" return True - def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult: sanity_ignore = SanityIgnoreParser.load(args) messages = [] # type: t.List[SanityMessage] diff --git a/test/lib/ansible_test/_internal/commands/sanity/import.py b/test/lib/ansible_test/_internal/commands/sanity/import.py index 9098c5e6d30..c3ba0648552 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/import.py +++ b/test/lib/ansible_test/_internal/commands/sanity/import.py @@ -73,7 +73,7 @@ from ...venv import ( ) -def _get_module_test(module_restrictions): # type: (bool) -> t.Callable[[str], bool] +def _get_module_test(module_restrictions: bool) -> t.Callable[[str], bool]: """Create a predicate which tests whether a path can be used by modules or not.""" module_path = data_context().content.module_path module_utils_path = data_context().content.module_utils_path @@ -84,7 +84,7 @@ def _get_module_test(module_restrictions): # type: (bool) -> t.Callable[[str], class ImportTest(SanityMultipleVersion): """Sanity 
test for proper import exception handling.""" - def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] + def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]: """Return the given list of test targets, filtered to include only those relevant for the test.""" if data_context().content.is_ansible: # all of ansible-core must pass the import test, not just plugins/modules @@ -99,11 +99,11 @@ class ImportTest(SanityMultipleVersion): any(is_subdir(target.path, path) for path in paths)] @property - def needs_pypi(self): # type: () -> bool + def needs_pypi(self) -> bool: """True if the test requires PyPI, otherwise False.""" return True - def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult: settings = self.load_processor(args, python.version) paths = [target.path for target in targets.include] @@ -208,7 +208,7 @@ class ImportTest(SanityMultipleVersion): @cache -def get_ansible_test_python_path(): # type: () -> str +def get_ansible_test_python_path() -> str: """ Return a directory usable for PYTHONPATH, containing only the ansible-test collection loader. The temporary directory created will be cached for the lifetime of the process and cleaned up at exit. 
diff --git a/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py b/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py index 091d00c060a..c16fdadb8f3 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py +++ b/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py @@ -108,16 +108,16 @@ class IntegrationAliasesTest(SanitySingleVersion): self._ci_test_groups = {} # type: t.Dict[str, t.List[int]] @property - def can_ignore(self): # type: () -> bool + def can_ignore(self) -> bool: """True if the test supports ignore entries.""" return False @property - def no_targets(self): # type: () -> bool + def no_targets(self) -> bool: """True if the test does not use test targets. Mutually exclusive with all_targets.""" return True - def load_ci_config(self, python): # type: (PythonConfig) -> t.Dict[str, t.Any] + def load_ci_config(self, python: PythonConfig) -> t.Dict[str, t.Any]: """Load and return the CI YAML configuration.""" if not self._ci_config: self._ci_config = self.load_yaml(python, self.CI_YML) @@ -125,7 +125,7 @@ class IntegrationAliasesTest(SanitySingleVersion): return self._ci_config @property - def ci_test_groups(self): # type: () -> t.Dict[str, t.List[int]] + def ci_test_groups(self) -> t.Dict[str, t.List[int]]: """Return a dictionary of CI test names and their group(s).""" if not self._ci_test_groups: test_groups = {} # type: t.Dict[str, t.Set[int]] @@ -171,7 +171,7 @@ class IntegrationAliasesTest(SanitySingleVersion): return self._ci_test_groups - def format_test_group_alias(self, name, fallback=''): # type: (str, str) -> str + def format_test_group_alias(self, name: str, fallback: str = '') -> str: """Return a test group alias using the given name and fallback.""" group_numbers = self.ci_test_groups.get(name, None) @@ -195,12 +195,12 @@ class IntegrationAliasesTest(SanitySingleVersion): return alias - def load_yaml(self, python, path): # type: (PythonConfig, str) -> t.Dict[str, 
t.Any] + def load_yaml(self, python: PythonConfig, path: str) -> t.Dict[str, t.Any]: """Load the specified YAML file and return the contents.""" yaml_to_json_path = os.path.join(SANITY_ROOT, self.name, 'yaml_to_json.py') return json.loads(raw_command([python.path, yaml_to_json_path], data=read_text_file(path), capture=True)[0]) - def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult: if args.explain: return SanitySuccess(self.name) @@ -230,7 +230,7 @@ class IntegrationAliasesTest(SanitySingleVersion): return SanitySuccess(self.name) - def check_posix_targets(self, args): # type: (SanityConfig) -> t.List[SanityMessage] + def check_posix_targets(self, args: SanityConfig) -> t.List[SanityMessage]: """Check POSIX integration test targets and return messages with any issues found.""" posix_targets = tuple(walk_posix_integration_targets()) @@ -325,7 +325,7 @@ class IntegrationAliasesTest(SanitySingleVersion): return messages - def check_changes(self, args, results): # type: (SanityConfig, Results) -> None + def check_changes(self, args: SanityConfig, results: Results) -> None: """Check changes and store results in the provided result dictionary.""" integration_targets = list(walk_integration_targets()) module_targets = list(walk_module_targets()) @@ -373,7 +373,7 @@ class IntegrationAliasesTest(SanitySingleVersion): results.comments += comments results.labels.update(labels) - def format_comment(self, template, targets): # type: (str, t.List[str]) -> t.Optional[str] + def format_comment(self, template: str, targets: t.List[str]) -> t.Optional[str]: """Format and return a comment based on the given template and targets, or None if there are no targets.""" if not targets: return None diff --git a/test/lib/ansible_test/_internal/commands/sanity/mypy.py b/test/lib/ansible_test/_internal/commands/sanity/mypy.py index 
a5ba8bf1b50..e82d1eee688 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/mypy.py +++ b/test/lib/ansible_test/_internal/commands/sanity/mypy.py @@ -67,23 +67,23 @@ class MypyTest(SanityMultipleVersion): 'lib/ansible/module_utils/compat/_selectors2.py', ) - def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] + def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]: """Return the given list of test targets, filtered to include only those relevant for the test.""" return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and target.path not in self.vendored_paths and ( target.path.startswith('lib/ansible/') or target.path.startswith('test/lib/ansible_test/_internal/') or target.path.startswith('test/lib/ansible_test/_util/target/sanity/import/'))] @property - def error_code(self): # type: () -> t.Optional[str] + def error_code(self) -> t.Optional[str]: """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes.""" return 'ansible-test' @property - def needs_pypi(self): # type: () -> bool + def needs_pypi(self) -> bool: """True if the test requires PyPI, otherwise False.""" return True - def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult: if sys.version_info >= (3, 11): display.warning(f'Skipping sanity test "{self.name}" which can test Python {args.controller_python.version}, but cannot run under that version.') return SanitySkipped(self.name, python.version) diff --git a/test/lib/ansible_test/_internal/commands/sanity/pep8.py b/test/lib/ansible_test/_internal/commands/sanity/pep8.py index 2610e730d91..f1c3e0357bb 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/pep8.py +++ b/test/lib/ansible_test/_internal/commands/sanity/pep8.py @@ -44,15 
+44,15 @@ from ...host_configs import ( class Pep8Test(SanitySingleVersion): """Sanity test for PEP 8 style guidelines using pycodestyle.""" @property - def error_code(self): # type: () -> t.Optional[str] + def error_code(self) -> t.Optional[str]: """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes.""" return 'A100' - def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] + def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]: """Return the given list of test targets, filtered to include only those relevant for the test.""" return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')] - def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult: current_ignore_file = os.path.join(SANITY_ROOT, 'pep8', 'current-ignore.txt') current_ignore = sorted(read_lines_without_comments(current_ignore_file, remove_blank_lines=True)) diff --git a/test/lib/ansible_test/_internal/commands/sanity/pslint.py b/test/lib/ansible_test/_internal/commands/sanity/pslint.py index 6eb2e5d60b1..38a3399d321 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/pslint.py +++ b/test/lib/ansible_test/_internal/commands/sanity/pslint.py @@ -46,15 +46,15 @@ from ...data import ( class PslintTest(SanityVersionNeutral): """Sanity test using PSScriptAnalyzer.""" @property - def error_code(self): # type: () -> t.Optional[str] + def error_code(self) -> t.Optional[str]: """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes.""" return 'AnsibleTest' - def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] + def filter_targets(self, targets: t.List[TestTarget]) 
-> t.List[TestTarget]: """Return the given list of test targets, filtered to include only those relevant for the test.""" return [target for target in targets if os.path.splitext(target.path)[1] in ('.ps1', '.psm1', '.psd1')] - def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult: settings = self.load_processor(args) paths = [target.path for target in targets.include] diff --git a/test/lib/ansible_test/_internal/commands/sanity/pylint.py b/test/lib/ansible_test/_internal/commands/sanity/pylint.py index edc50e86f9a..6a6b0126bb8 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/pylint.py +++ b/test/lib/ansible_test/_internal/commands/sanity/pylint.py @@ -70,20 +70,20 @@ class PylintTest(SanitySingleVersion): ]) @property - def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]] + def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]: """A tuple of supported Python versions or None if the test does not depend on specific Python versions.""" return tuple(version for version in CONTROLLER_PYTHON_VERSIONS if str_to_version(version) < (3, 11)) @property - def error_code(self): # type: () -> t.Optional[str] + def error_code(self) -> t.Optional[str]: """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes.""" return 'ansible-test' - def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] + def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]: """Return the given list of test targets, filtered to include only those relevant for the test.""" return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')] - def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult + def test(self, args: 
SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult: plugin_dir = os.path.join(SANITY_ROOT, 'pylint', 'plugins') plugin_names = sorted(p[0] for p in [ os.path.splitext(p) for p in os.listdir(plugin_dir)] if p[1] == '.py' and p[0] != '__init__') @@ -113,9 +113,9 @@ class PylintTest(SanitySingleVersion): contexts.append((context_name, sorted(filtered_paths))) available_paths -= filtered_paths - def filter_path(path_filter=None): # type: (str) -> t.Callable[[str], bool] + def filter_path(path_filter: str = None) -> t.Callable[[str], bool]: """Return a function that filters out paths which are not a subdirectory of the given path.""" - def context_filter(path_to_filter): # type: (str) -> bool + def context_filter(path_to_filter: str) -> bool: """Return true if the given path matches, otherwise return False.""" return is_subdir(path_to_filter, path_filter) diff --git a/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py b/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py index a663bd96f39..4f14a3a2f2b 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py +++ b/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py @@ -30,16 +30,16 @@ class SanityDocsTest(SanityVersionNeutral): ansible_only = True @property - def can_ignore(self): # type: () -> bool + def can_ignore(self) -> bool: """True if the test supports ignore entries.""" return False @property - def no_targets(self): # type: () -> bool + def no_targets(self) -> bool: """True if the test does not use test targets. 
Mutually exclusive with all_targets.""" return True - def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult: sanity_dir = 'docs/docsite/rst/dev_guide/testing/sanity' sanity_docs = set(part[0] for part in (os.path.splitext(os.path.basename(path)) for path in data_context().content.get_files(sanity_dir)) if part[1] == '.rst') diff --git a/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py b/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py index 19805ea9aac..b4e4bd325f1 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py +++ b/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py @@ -45,15 +45,15 @@ from ...config import ( class ShellcheckTest(SanityVersionNeutral): """Sanity test using shellcheck.""" @property - def error_code(self): # type: () -> t.Optional[str] + def error_code(self) -> t.Optional[str]: """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes.""" return 'AT1000' - def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] + def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]: """Return the given list of test targets, filtered to include only those relevant for the test.""" return [target for target in targets if os.path.splitext(target.path)[1] == '.sh'] - def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult: exclude_file = os.path.join(SANITY_ROOT, 'shellcheck', 'exclude.txt') exclude = set(read_lines_without_comments(exclude_file, remove_blank_lines=True, optional=True)) diff --git a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py index f1d448804c1..6df28dcc65d 100644 --- 
a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py +++ b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py @@ -78,11 +78,11 @@ class ValidateModulesTest(SanitySingleVersion): self._exclusions.add('lib/ansible/plugins/cache/base.py') @property - def error_code(self): # type: () -> t.Optional[str] + def error_code(self) -> t.Optional[str]: """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes.""" return 'A100' - def get_plugin_type(self, target): # type: (TestTarget) -> t.Optional[str] + def get_plugin_type(self, target: TestTarget) -> t.Optional[str]: """Return the plugin type of the given target, or None if it is not a plugin or module.""" if target.path.endswith('/__init__.py'): return None @@ -96,11 +96,11 @@ class ValidateModulesTest(SanitySingleVersion): return None - def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] + def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]: """Return the given list of test targets, filtered to include only those relevant for the test.""" return [target for target in targets if self.get_plugin_type(target) is not None] - def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult: env = ansible_environment(args, color=False) settings = self.load_processor(args) diff --git a/test/lib/ansible_test/_internal/commands/sanity/yamllint.py b/test/lib/ansible_test/_internal/commands/sanity/yamllint.py index 4ca6dfe8c1a..007d25c9b2e 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/yamllint.py +++ b/test/lib/ansible_test/_internal/commands/sanity/yamllint.py @@ -48,16 +48,16 @@ from ...host_configs import ( class YamllintTest(SanitySingleVersion): """Sanity test using yamllint.""" @property - def error_code(self): 
# type: () -> t.Optional[str] + def error_code(self) -> t.Optional[str]: """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes.""" return 'ansible-test' @property - def require_libyaml(self): # type: () -> bool + def require_libyaml(self) -> bool: """True if the test requires PyYAML to have libyaml support.""" return True - def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] + def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]: """Return the given list of test targets, filtered to include only those relevant for the test.""" yaml_targets = [target for target in targets if os.path.splitext(target.path)[1] in ('.yml', '.yaml')] @@ -72,7 +72,7 @@ class YamllintTest(SanitySingleVersion): return yaml_targets - def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult + def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult: settings = self.load_processor(args) paths = [target.path for target in targets.include] @@ -86,7 +86,7 @@ class YamllintTest(SanitySingleVersion): return SanitySuccess(self.name) @staticmethod - def test_paths(args, paths, python): # type: (SanityConfig, t.List[str], PythonConfig) -> t.List[SanityMessage] + def test_paths(args: SanityConfig, paths: t.List[str], python: PythonConfig) -> t.List[SanityMessage]: """Test the specified paths using the given Python and return the results.""" cmd = [ python.path, diff --git a/test/lib/ansible_test/_internal/commands/shell/__init__.py b/test/lib/ansible_test/_internal/commands/shell/__init__.py index e62437ead7e..a95092a8f61 100644 --- a/test/lib/ansible_test/_internal/commands/shell/__init__.py +++ b/test/lib/ansible_test/_internal/commands/shell/__init__.py @@ -46,7 +46,7 @@ from ...inventory import ( ) -def command_shell(args): # type: (ShellConfig) -> None +def command_shell(args: 
ShellConfig) -> None: """Entry point for the `shell` command.""" if args.raw and isinstance(args.targets[0], ControllerConfig): raise ApplicationError('The --raw option has no effect on the controller.') diff --git a/test/lib/ansible_test/_internal/commands/units/__init__.py b/test/lib/ansible_test/_internal/commands/units/__init__.py index 42330a3bd43..4ea4f810cba 100644 --- a/test/lib/ansible_test/_internal/commands/units/__init__.py +++ b/test/lib/ansible_test/_internal/commands/units/__init__.py @@ -93,7 +93,7 @@ class TestContext: module_utils = 'module_utils' -def command_units(args): # type: (UnitsConfig) -> None +def command_units(args: UnitsConfig) -> None: """Run unit tests.""" handle_layout_messages(data_context().content.unit_messages) @@ -297,7 +297,7 @@ def command_units(args): # type: (UnitsConfig) -> None raise -def get_units_ansible_python_path(args, test_context): # type: (UnitsConfig, str) -> str +def get_units_ansible_python_path(args: UnitsConfig, test_context: str) -> str: """ Return a directory usable for PYTHONPATH, containing only the modules and module_utils portion of the ansible package. The temporary directory created will be cached for the lifetime of the process and cleaned up at exit. 
diff --git a/test/lib/ansible_test/_internal/completion.py b/test/lib/ansible_test/_internal/completion.py index fd94ef10444..254cd0bb3c4 100644 --- a/test/lib/ansible_test/_internal/completion.py +++ b/test/lib/ansible_test/_internal/completion.py @@ -42,21 +42,21 @@ class PosixCompletionConfig(CompletionConfig, metaclass=abc.ABCMeta): """Base class for completion configuration of POSIX environments.""" @property @abc.abstractmethod - def supported_pythons(self): # type: () -> t.List[str] + def supported_pythons(self) -> t.List[str]: """Return a list of the supported Python versions.""" @abc.abstractmethod - def get_python_path(self, version): # type: (str) -> str + def get_python_path(self, version: str) -> str: """Return the path of the requested Python version.""" - def get_default_python(self, controller): # type: (bool) -> str + def get_default_python(self, controller: bool) -> str: """Return the default Python version for a controller or target as specified.""" context_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS version = [python for python in self.supported_pythons if python in context_pythons][0] return version @property - def controller_supported(self): # type: () -> bool + def controller_supported(self) -> bool: """True if at least one Python version is provided which supports the controller, otherwise False.""" return any(version in CONTROLLER_PYTHON_VERSIONS for version in self.supported_pythons) @@ -68,13 +68,13 @@ class PythonCompletionConfig(PosixCompletionConfig, metaclass=abc.ABCMeta): python_dir: str = '/usr/bin' @property - def supported_pythons(self): # type: () -> t.List[str] + def supported_pythons(self) -> t.List[str]: """Return a list of the supported Python versions.""" versions = self.python.split(',') if self.python else [] versions = [version for version in versions if version in SUPPORTED_PYTHON_VERSIONS] return versions - def get_python_path(self, version): # type: (str) -> str + def 
get_python_path(self, version: str) -> str: """Return the path of the requested Python version.""" return os.path.join(self.python_dir, f'python{version}') @@ -111,11 +111,11 @@ class RemoteCompletionConfig(CompletionConfig): @dataclasses.dataclass(frozen=True) class InventoryCompletionConfig(CompletionConfig): """Configuration for inventory files.""" - def __init__(self): # type: () -> None + def __init__(self) -> None: super().__init__(name='inventory') @property - def is_default(self): # type: () -> bool + def is_default(self) -> bool: """True if the completion entry is only used for defaults, otherwise False.""" return False @@ -123,14 +123,14 @@ class InventoryCompletionConfig(CompletionConfig): @dataclasses.dataclass(frozen=True) class PosixSshCompletionConfig(PythonCompletionConfig): """Configuration for a POSIX host reachable over SSH.""" - def __init__(self, user, host): # type: (str, str) -> None + def __init__(self, user: str, host: str) -> None: super().__init__( name=f'{user}@{host}', python=','.join(SUPPORTED_PYTHON_VERSIONS), ) @property - def is_default(self): # type: () -> bool + def is_default(self) -> bool: """True if the completion entry is only used for defaults, otherwise False.""" return False @@ -196,7 +196,7 @@ class WindowsRemoteCompletionConfig(RemoteCompletionConfig): TCompletionConfig = t.TypeVar('TCompletionConfig', bound=CompletionConfig) -def load_completion(name, completion_type): # type: (str, t.Type[TCompletionConfig]) -> t.Dict[str, TCompletionConfig] +def load_completion(name: str, completion_type: t.Type[TCompletionConfig]) -> t.Dict[str, TCompletionConfig]: """Load the named completion entries, returning them in dictionary form using the specified completion type.""" lines = read_lines_without_comments(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', '%s.txt' % name), remove_blank_lines=True) @@ -216,7 +216,7 @@ def load_completion(name, completion_type): # type: (str, t.Type[TCompletionCon return completion -def 
parse_completion_entry(value): # type: (str) -> t.Tuple[str, t.Dict[str, str]] +def parse_completion_entry(value: str) -> t.Tuple[str, t.Dict[str, str]]: """Parse the given completion entry, returning the entry name and a dictionary of key/value settings.""" values = value.split() @@ -242,24 +242,24 @@ def filter_completion( @cache -def docker_completion(): # type: () -> t.Dict[str, DockerCompletionConfig] +def docker_completion() -> t.Dict[str, DockerCompletionConfig]: """Return docker completion entries.""" return load_completion('docker', DockerCompletionConfig) @cache -def remote_completion(): # type: () -> t.Dict[str, PosixRemoteCompletionConfig] +def remote_completion() -> t.Dict[str, PosixRemoteCompletionConfig]: """Return remote completion entries.""" return load_completion('remote', PosixRemoteCompletionConfig) @cache -def windows_completion(): # type: () -> t.Dict[str, WindowsRemoteCompletionConfig] +def windows_completion() -> t.Dict[str, WindowsRemoteCompletionConfig]: """Return windows completion entries.""" return load_completion('windows', WindowsRemoteCompletionConfig) @cache -def network_completion(): # type: () -> t.Dict[str, NetworkRemoteCompletionConfig] +def network_completion() -> t.Dict[str, NetworkRemoteCompletionConfig]: """Return network completion entries.""" return load_completion('network', NetworkRemoteCompletionConfig) diff --git a/test/lib/ansible_test/_internal/config.py b/test/lib/ansible_test/_internal/config.py index 64e504cbd90..ce6c842c4c3 100644 --- a/test/lib/ansible_test/_internal/config.py +++ b/test/lib/ansible_test/_internal/config.py @@ -67,7 +67,7 @@ class ContentConfig: class EnvironmentConfig(CommonConfig): """Configuration common to all commands which execute in an environment.""" - def __init__(self, args, command): # type: (t.Any, str) -> None + def __init__(self, args: t.Any, command: str) -> None: super().__init__(args, command) self.host_settings = args.host_settings # type: HostSettings @@ -133,16 +133,16 @@ 
class EnvironmentConfig(CommonConfig): display.warning('The --no-pip-check option is deprecated and has no effect. It will be removed in a future version of ansible-test.') @property - def controller(self): # type: () -> ControllerHostConfig + def controller(self) -> ControllerHostConfig: """Host configuration for the controller.""" return self.host_settings.controller @property - def targets(self): # type: () -> t.List[HostConfig] + def targets(self) -> t.List[HostConfig]: """Host configuration for the targets.""" return self.host_settings.targets - def only_target(self, target_type): # type: (t.Type[THostConfig]) -> THostConfig + def only_target(self, target_type: t.Type[THostConfig]) -> THostConfig: """ Return the host configuration for the target. Requires that there is exactly one target of the specified type. @@ -159,7 +159,7 @@ class EnvironmentConfig(CommonConfig): return target - def only_targets(self, target_type): # type: (t.Type[THostConfig]) -> t.List[THostConfig] + def only_targets(self, target_type: t.Type[THostConfig]) -> t.List[THostConfig]: """ Return a list of target host configurations. Requires that there are one or more targets, all the specified type. @@ -172,7 +172,7 @@ class EnvironmentConfig(CommonConfig): return t.cast(t.List[THostConfig], self.targets) @property - def target_type(self): # type: () -> t.Type[HostConfig] + def target_type(self) -> t.Type[HostConfig]: """ The true type of the target(s). If the target is the controller, the controller type is returned. 
@@ -193,7 +193,7 @@ class EnvironmentConfig(CommonConfig): class TestConfig(EnvironmentConfig): """Configuration common to all test commands.""" - def __init__(self, args, command): # type: (t.Any, str) -> None + def __init__(self, args: t.Any, command: str) -> None: super().__init__(args, command) self.coverage = args.coverage # type: bool @@ -234,7 +234,7 @@ class TestConfig(EnvironmentConfig): class ShellConfig(EnvironmentConfig): """Configuration for the shell command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args, 'shell') self.cmd = args.cmd # type: t.List[str] @@ -247,7 +247,7 @@ class ShellConfig(EnvironmentConfig): class SanityConfig(TestConfig): """Configuration for the sanity command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args, 'sanity') self.test = args.test # type: t.List[str] @@ -272,7 +272,7 @@ class SanityConfig(TestConfig): class IntegrationConfig(TestConfig): """Configuration for the integration command.""" - def __init__(self, args, command): # type: (t.Any, str) -> None + def __init__(self, args: t.Any, command: str) -> None: super().__init__(args, command) self.start_at = args.start_at # type: str @@ -299,7 +299,7 @@ class IntegrationConfig(TestConfig): self.explain = True self.display_stderr = True - def get_ansible_config(self): # type: () -> str + def get_ansible_config(self) -> str: """Return the path to the Ansible config for the given config.""" ansible_config_relative_path = os.path.join(data_context().content.integration_path, '%s.cfg' % self.command) ansible_config_path = os.path.join(data_context().content.root, ansible_config_relative_path) @@ -316,19 +316,19 @@ TIntegrationConfig = t.TypeVar('TIntegrationConfig', bound=IntegrationConfig) class PosixIntegrationConfig(IntegrationConfig): """Configuration for the posix integration command.""" - def __init__(self, args): # type: (t.Any) 
-> None + def __init__(self, args: t.Any) -> None: super().__init__(args, 'integration') class WindowsIntegrationConfig(IntegrationConfig): """Configuration for the windows integration command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args, 'windows-integration') class NetworkIntegrationConfig(IntegrationConfig): """Configuration for the network integration command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args, 'network-integration') self.testcase = args.testcase # type: str @@ -336,7 +336,7 @@ class NetworkIntegrationConfig(IntegrationConfig): class UnitsConfig(TestConfig): """Configuration for the units command.""" - def __init__(self, args): # type: (t.Any) -> None + def __init__(self, args: t.Any) -> None: super().__init__(args, 'units') self.collect_only = args.collect_only # type: bool diff --git a/test/lib/ansible_test/_internal/connections.py b/test/lib/ansible_test/_internal/connections.py index 0bcdb346aeb..0e527634234 100644 --- a/test/lib/ansible_test/_internal/connections.py +++ b/test/lib/ansible_test/_internal/connections.py @@ -89,7 +89,7 @@ class Connection(metaclass=abc.ABCMeta): class LocalConnection(Connection): """Connect to localhost.""" - def __init__(self, args): # type: (EnvironmentConfig) -> None + def __init__(self, args: EnvironmentConfig) -> None: self.args = args def run(self, @@ -116,7 +116,7 @@ class LocalConnection(Connection): class SshConnection(Connection): """Connect to a host using SSH.""" - def __init__(self, args, settings, become=None): # type: (EnvironmentConfig, SshConnectionDetail, t.Optional[Become]) -> None + def __init__(self, args: EnvironmentConfig, settings: SshConnectionDetail, become: t.Optional[Become] = None) -> None: self.args = args self.settings = settings self.become = become @@ -163,7 +163,7 @@ class SshConnection(Connection): 
options.append(f'{self.settings.user}@{self.settings.host}') options.append(shlex.join(command)) - def error_callback(ex): # type: (SubprocessError) -> None + def error_callback(ex: SubprocessError) -> None: """Error handler.""" self.capture_log_details(ssh_logfile.name, ex) @@ -180,7 +180,7 @@ class SshConnection(Connection): ) @staticmethod - def capture_log_details(path, ex): # type: (str, SubprocessError) -> None + def capture_log_details(path: str, ex: SubprocessError) -> None: """Read the specified SSH debug log and add relevant details to the provided exception.""" if ex.status != 255: return @@ -211,7 +211,7 @@ class SshConnection(Connection): class DockerConnection(Connection): """Connect to a host using Docker.""" - def __init__(self, args, container_id, user=None): # type: (EnvironmentConfig, str, t.Optional[str]) -> None + def __init__(self, args: EnvironmentConfig, container_id: str, user: t.Optional[str] = None) -> None: self.args = args self.container_id = container_id self.user = user # type: t.Optional[str] @@ -247,10 +247,10 @@ class DockerConnection(Connection): output_stream=output_stream, ) - def inspect(self): # type: () -> DockerInspect + def inspect(self) -> DockerInspect: """Inspect the container and return a DockerInspect instance with the results.""" return docker_inspect(self.args, self.container_id) - def disconnect_network(self, network): # type: (str) -> None + def disconnect_network(self, network: str) -> None: """Disconnect the container from the specified network.""" docker_network_disconnect(self.args, self.container_id, network) diff --git a/test/lib/ansible_test/_internal/containers.py b/test/lib/ansible_test/_internal/containers.py index f4d16714a65..7b36def1235 100644 --- a/test/lib/ansible_test/_internal/containers.py +++ b/test/lib/ansible_test/_internal/containers.py @@ -220,7 +220,7 @@ def run_support_container( return descriptor -def get_container_database(args): # type: (EnvironmentConfig) -> ContainerDatabase +def 
get_container_database(args: EnvironmentConfig) -> ContainerDatabase: """Return the current container database, creating it as needed, or returning the one provided on the command line through delegation.""" try: return get_container_database.database # type: ignore[attr-defined] @@ -260,7 +260,7 @@ class ContainerAccess: # port redirections to create through host_ip -- if not set, no port redirections will be used self.forwards = forwards - def port_map(self): # type: () -> t.List[t.Tuple[int, int]] + def port_map(self) -> t.List[t.Tuple[int, int]]: """Return a port map for accessing this container.""" if self.forwards: ports = list(self.forwards.items()) @@ -284,7 +284,7 @@ class ContainerAccess: forwards=forwards, ) - def to_dict(self): # type: () -> t.Dict[str, t.Any] + def to_dict(self) -> t.Dict[str, t.Any]: """Return a dict of the current instance.""" value: t.Dict[str, t.Any] = dict( host_ip=self.host_ip, @@ -315,7 +315,7 @@ class ContainerDatabase: for context_name, containers in contexts.items())) for access_name, contexts in data.items())) - def to_dict(self): # type: () -> t.Dict[str, t.Any] + def to_dict(self) -> t.Dict[str, t.Any]: """Return a dict of the current instance.""" return dict((access_name, dict((context_name, @@ -325,12 +325,12 @@ class ContainerDatabase: for access_name, contexts in self.data.items()) -def local_ssh(args, python): # type: (EnvironmentConfig, PythonConfig) -> SshConnectionDetail +def local_ssh(args: EnvironmentConfig, python: PythonConfig) -> SshConnectionDetail: """Return SSH connection details for localhost, connecting as root to the default SSH port.""" return SshConnectionDetail('localhost', 'localhost', None, 'root', SshKey(args).key, python.path) -def root_ssh(ssh): # type: (SshConnection) -> SshConnectionDetail +def root_ssh(ssh: SshConnection) -> SshConnectionDetail: """Return the SSH connection details from the given SSH connection. 
If become was specified, the user will be changed to `root`.""" settings = ssh.settings.__dict__.copy() @@ -342,7 +342,7 @@ def root_ssh(ssh): # type: (SshConnection) -> SshConnectionDetail return SshConnectionDetail(**settings) -def create_container_database(args): # type: (EnvironmentConfig) -> ContainerDatabase +def create_container_database(args: EnvironmentConfig) -> ContainerDatabase: """Create and return a container database with information necessary for all test hosts to make use of relevant support containers.""" origin = {} # type: t.Dict[str, t.Dict[str, ContainerAccess]] control = {} # type: t.Dict[str, t.Dict[str, ContainerAccess]] @@ -441,11 +441,11 @@ def create_container_database(args): # type: (EnvironmentConfig) -> ContainerDa class SupportContainerContext: """Context object for tracking information relating to access of support containers.""" - def __init__(self, containers, process): # type: (ContainerDatabase, t.Optional[SshProcess]) -> None + def __init__(self, containers: ContainerDatabase, process: t.Optional[SshProcess]) -> None: self.containers = containers self.process = process - def close(self): # type: () -> None + def close(self) -> None: """Close the process maintaining the port forwards.""" if not self.process: return # forwarding not in use @@ -569,13 +569,13 @@ class ContainerDescriptor: self.env = env self.details = None # type: t.Optional[SupportContainer] - def start(self, args): # type: (EnvironmentConfig) -> None + def start(self, args: EnvironmentConfig) -> None: """Start the container. Used for containers which are created, but not started.""" docker_start(args, self.name) self.register(args) - def register(self, args): # type: (EnvironmentConfig) -> SupportContainer + def register(self, args: EnvironmentConfig) -> SupportContainer: """Record the container's runtime details. 
Must be used after the container has been started.""" if self.details: raise Exception('Container already registered: %s' % self.name) @@ -657,7 +657,7 @@ def wait_for_file(args, # type: EnvironmentConfig raise ApplicationError('Timeout waiting for container "%s" to provide file: %s' % (container_name, path)) -def cleanup_containers(args): # type: (EnvironmentConfig) -> None +def cleanup_containers(args: EnvironmentConfig) -> None: """Clean up containers.""" for container in support_containers.values(): if container.cleanup == CleanupMode.YES: diff --git a/test/lib/ansible_test/_internal/content_config.py b/test/lib/ansible_test/_internal/content_config.py index 39a8d4125ca..7ac1876c09e 100644 --- a/test/lib/ansible_test/_internal/content_config.py +++ b/test/lib/ansible_test/_internal/content_config.py @@ -85,7 +85,7 @@ def parse_content_config(data: t.Any) -> ContentConfig: ) -def load_config(path): # type: (str) -> t.Optional[ContentConfig] +def load_config(path: str) -> t.Optional[ContentConfig]: """Load and parse the specified config file and return the result or None if loading/parsing failed.""" if YAML_IMPORT_ERROR: raise ApplicationError('The "PyYAML" module is required to parse config: %s' % YAML_IMPORT_ERROR) @@ -112,7 +112,7 @@ def load_config(path): # type: (str) -> t.Optional[ContentConfig] return config -def get_content_config(args): # type: (EnvironmentConfig) -> ContentConfig +def get_content_config(args: EnvironmentConfig) -> ContentConfig: """ Parse and return the content configuration (if any) for the current collection. For ansible-core, a default configuration is used. @@ -149,7 +149,7 @@ def get_content_config(args): # type: (EnvironmentConfig) -> ContentConfig return config -def parse_python_requires(value): # type: (t.Any) -> tuple[str, ...] 
+def parse_python_requires(value: t.Any) -> tuple[str, ...]: """Parse the given 'python_requires' version specifier and return the matching Python versions.""" if not isinstance(value, str): raise ValueError('python_requires must must be of type `str` not type `%s`' % type(value)) diff --git a/test/lib/ansible_test/_internal/core_ci.py b/test/lib/ansible_test/_internal/core_ci.py index c53c0bfb0fe..5bf4c75a448 100644 --- a/test/lib/ansible_test/_internal/core_ci.py +++ b/test/lib/ansible_test/_internal/core_ci.py @@ -206,7 +206,7 @@ class AnsibleCoreCI: raise self._create_http_error(response) - def get(self, tries=3, sleep=15, always_raise_on=None): # type: (int, int, t.Optional[t.List[int]]) -> t.Optional[InstanceConnection] + def get(self, tries: int = 3, sleep: int = 15, always_raise_on: t.Optional[t.List[int]] = None) -> t.Optional[InstanceConnection]: """Get instance connection information.""" if not self.started: display.info(f'Skipping invalid {self.label} instance.', verbosity=1) @@ -270,7 +270,7 @@ class AnsibleCoreCI: return self.connection - def wait(self, iterations=90): # type: (t.Optional[int]) -> None + def wait(self, iterations: t.Optional[int] = 90) -> None: """Wait for the instance to become ready.""" for _iteration in range(1, iterations): if self.get().running: @@ -378,7 +378,7 @@ class AnsibleCoreCI: return True - def _save(self): # type: () -> None + def _save(self) -> None: """Save instance information.""" if self.args.explain: return @@ -387,7 +387,7 @@ class AnsibleCoreCI: write_json_file(self.path, config, create_directories=True) - def save(self): # type: () -> t.Dict[str, str] + def save(self) -> t.Dict[str, str]: """Save instance details and return as a dictionary.""" return dict( label=self.resource.get_label(), @@ -396,7 +396,7 @@ class AnsibleCoreCI: ) @staticmethod - def _create_http_error(response): # type: (HttpResponse) -> ApplicationError + def _create_http_error(response: HttpResponse) -> ApplicationError: """Return an 
exception created from the given HTTP response.""" response_json = response.json() stack_trace = '' @@ -423,7 +423,7 @@ class AnsibleCoreCI: class CoreHttpError(HttpError): """HTTP response as an error.""" - def __init__(self, status, remote_message, remote_stack_trace): # type: (int, str, str) -> None + def __init__(self, status: int, remote_message: str, remote_stack_trace: str) -> None: super().__init__(status, f'{remote_message}{remote_stack_trace}') self.remote_message = remote_message @@ -437,7 +437,7 @@ class SshKey: PUB_NAME = f'{KEY_NAME}.pub' @mutex - def __init__(self, args): # type: (EnvironmentConfig) -> None + def __init__(self, args: EnvironmentConfig) -> None: key_pair = self.get_key_pair() if not key_pair: @@ -466,7 +466,7 @@ class SshKey: self.key_contents = read_text_file(self.key).strip() @staticmethod - def get_relative_in_tree_private_key_path(): # type: () -> str + def get_relative_in_tree_private_key_path() -> str: """Return the ansible-test SSH private key path relative to the content tree.""" temp_dir = ResultType.TMP.relative_path @@ -474,7 +474,7 @@ class SshKey: return key - def get_in_tree_key_pair_paths(self): # type: () -> t.Optional[t.Tuple[str, str]] + def get_in_tree_key_pair_paths(self) -> t.Optional[t.Tuple[str, str]]: """Return the ansible-test SSH key pair paths from the content tree.""" temp_dir = ResultType.TMP.path @@ -483,7 +483,7 @@ class SshKey: return key, pub - def get_source_key_pair_paths(self): # type: () -> t.Optional[t.Tuple[str, str]] + def get_source_key_pair_paths(self) -> t.Optional[t.Tuple[str, str]]: """Return the ansible-test SSH key pair paths for the current user.""" base_dir = os.path.expanduser('~/.ansible/test/') @@ -492,7 +492,7 @@ class SshKey: return key, pub - def get_key_pair(self): # type: () -> t.Optional[t.Tuple[str, str]] + def get_key_pair(self) -> t.Optional[t.Tuple[str, str]]: """Return the ansible-test SSH key pair paths if present, otherwise return None.""" key, pub = 
self.get_in_tree_key_pair_paths() @@ -506,7 +506,7 @@ class SshKey: return None - def generate_key_pair(self, args): # type: (EnvironmentConfig) -> t.Tuple[str, str] + def generate_key_pair(self, args: EnvironmentConfig) -> t.Tuple[str, str]: """Generate an SSH key pair for use by all ansible-test invocations for the current user.""" key, pub = self.get_source_key_pair_paths() diff --git a/test/lib/ansible_test/_internal/coverage_util.py b/test/lib/ansible_test/_internal/coverage_util.py index 3f77c601698..5ad92cea8e4 100644 --- a/test/lib/ansible_test/_internal/coverage_util.py +++ b/test/lib/ansible_test/_internal/coverage_util.py @@ -155,7 +155,7 @@ def cover_python( return intercept_python(args, python, cmd, env, capture, data, cwd) -def get_coverage_platform(config): # type: (HostConfig) -> str +def get_coverage_platform(config: HostConfig) -> str: """Return the platform label for the given host config.""" if isinstance(config, PosixRemoteConfig): platform = f'remote-{sanitize_host_name(config.name)}' @@ -203,7 +203,7 @@ def get_coverage_environment( return env -def get_coverage_config(args): # type: (TestConfig) -> str +def get_coverage_config(args: TestConfig) -> str: """Return the path to the coverage config, creating the config if it does not already exist.""" try: return get_coverage_config.path # type: ignore[attr-defined] @@ -228,7 +228,7 @@ def get_coverage_config(args): # type: (TestConfig) -> str return path -def generate_coverage_config(args): # type: (TestConfig) -> str +def generate_coverage_config(args: TestConfig) -> str: """Generate code coverage configuration for tests.""" if data_context().content.collection: coverage_config = generate_collection_coverage_config(args) @@ -238,7 +238,7 @@ def generate_coverage_config(args): # type: (TestConfig) -> str return coverage_config -def generate_ansible_coverage_config(): # type: () -> str +def generate_ansible_coverage_config() -> str: """Generate code coverage configuration for Ansible tests.""" 
coverage_config = ''' [run] @@ -259,7 +259,7 @@ omit = return coverage_config -def generate_collection_coverage_config(args): # type: (TestConfig) -> str +def generate_collection_coverage_config(args: TestConfig) -> str: """Generate code coverage configuration for Ansible Collection tests.""" coverage_config = ''' [run] diff --git a/test/lib/ansible_test/_internal/data.py b/test/lib/ansible_test/_internal/data.py index a7e3bc0fec1..243b6d71623 100644 --- a/test/lib/ansible_test/_internal/data.py +++ b/test/lib/ansible_test/_internal/data.py @@ -73,7 +73,7 @@ class DataContext: self.content = content # type: ContentLayout - def create_collection_layouts(self): # type: () -> t.List[ContentLayout] + def create_collection_layouts(self) -> t.List[ContentLayout]: """ Return a list of collection layouts, one for each collection in the same collection root as the current collection layout. An empty list is returned if the current content layout is not a collection layout. @@ -165,7 +165,7 @@ class DataContext: return tuple((os.path.join(source_provider.root, path), path) for path in source_provider.get_paths(source_provider.root)) @property - def ansible_source(self): # type: () -> t.Tuple[t.Tuple[str, str], ...] 
+ def ansible_source(self) -> t.Tuple[t.Tuple[str, str], ...]: """Return a tuple of Ansible source files with both absolute and relative paths.""" if not self.__ansible_source: self.__ansible_source = self.__create_ansible_source() @@ -220,7 +220,7 @@ class DataContext: @cache -def data_context(): # type: () -> DataContext +def data_context() -> DataContext: """Initialize provider plugins.""" provider_types = ( 'layout', diff --git a/test/lib/ansible_test/_internal/delegation.py b/test/lib/ansible_test/_internal/delegation.py index fb092d5f49c..43807be3e0e 100644 --- a/test/lib/ansible_test/_internal/delegation.py +++ b/test/lib/ansible_test/_internal/delegation.py @@ -78,7 +78,7 @@ from .content_config import ( @contextlib.contextmanager -def delegation_context(args, host_state): # type: (EnvironmentConfig, HostState) -> t.Iterator[None] +def delegation_context(args: EnvironmentConfig, host_state: HostState) -> t.Iterator[None]: """Context manager for serialized host state during delegation.""" make_dirs(ResultType.TMP.path) @@ -99,7 +99,7 @@ def delegation_context(args, host_state): # type: (EnvironmentConfig, HostState args.host_path = None -def delegate(args, host_state, exclude, require): # type: (CommonConfig, HostState, t.List[str], t.List[str]) -> None +def delegate(args: CommonConfig, host_state: HostState, exclude: t.List[str], require: t.List[str]) -> None: """Delegate execution of ansible-test to another environment.""" assert isinstance(args, EnvironmentConfig) @@ -121,7 +121,7 @@ def delegate(args, host_state, exclude, require): # type: (CommonConfig, HostSt delegate_command(args, host_state, exclude, require) -def delegate_command(args, host_state, exclude, require): # type: (EnvironmentConfig, HostState, t.List[str], t.List[str]) -> None +def delegate_command(args: EnvironmentConfig, host_state: HostState, exclude: t.List[str], require: t.List[str]) -> None: """Delegate execution based on the provided host state.""" con = 
host_state.controller_profile.get_origin_controller_connection() working_directory = host_state.controller_profile.get_working_directory() @@ -227,7 +227,7 @@ def insert_options(command, options): return result -def download_results(args, con, content_root, success): # type: (EnvironmentConfig, Connection, str, bool) -> None +def download_results(args: EnvironmentConfig, con: Connection, content_root: str, success: bool) -> None: """Download results from a delegated controller.""" remote_results_root = os.path.join(content_root, data_context().content.results_path) local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path)) diff --git a/test/lib/ansible_test/_internal/diff.py b/test/lib/ansible_test/_internal/diff.py index a8e1c113a13..7c2b471d313 100644 --- a/test/lib/ansible_test/_internal/diff.py +++ b/test/lib/ansible_test/_internal/diff.py @@ -11,32 +11,32 @@ from .util import ( ) -def parse_diff(lines): # type: (t.List[str]) -> t.List[FileDiff] +def parse_diff(lines: t.List[str]) -> t.List[FileDiff]: """Parse the given diff lines and return a list of FileDiff objects representing the changes of each file.""" return DiffParser(lines).files class FileDiff: """Parsed diff for a single file.""" - def __init__(self, old_path, new_path): # type: (str, str) -> None + def __init__(self, old_path: str, new_path: str) -> None: self.old = DiffSide(old_path, new=False) self.new = DiffSide(new_path, new=True) self.headers = [] # type: t.List[str] self.binary = False - def append_header(self, line): # type: (str) -> None + def append_header(self, line: str) -> None: """Append the given line to the list of headers for this file.""" self.headers.append(line) @property - def is_complete(self): # type: () -> bool + def is_complete(self) -> bool: """True if the diff is complete, otherwise False.""" return self.old.is_complete and self.new.is_complete class DiffSide: """Parsed diff for a single 'side' of a single file.""" - 
def __init__(self, path, new): # type: (str, bool) -> None + def __init__(self, path: str, new: bool) -> None: self.path = path self.new = new self.prefix = '+' if self.new else '-' @@ -51,13 +51,13 @@ class DiffSide: self._lines_remaining = 0 self._range_start = 0 - def set_start(self, line_start, line_count): # type: (int, int) -> None + def set_start(self, line_start: int, line_count: int) -> None: """Set the starting line and line count.""" self._next_line_number = line_start self._lines_remaining = line_count self._range_start = 0 - def append(self, line): # type: (str) -> None + def append(self, line: str) -> None: """Append the given line.""" if self._lines_remaining <= 0: raise Exception('Diff range overflow.') @@ -93,11 +93,11 @@ class DiffSide: self._next_line_number += 1 @property - def is_complete(self): # type: () -> bool + def is_complete(self) -> bool: """True if the diff is complete, otherwise False.""" return self._lines_remaining == 0 - def format_lines(self, context=True): # type: (bool) -> t.List[str] + def format_lines(self, context: bool = True) -> t.List[str]: """Format the diff and return a list of lines, optionally including context.""" if context: lines = self.lines_and_context @@ -109,7 +109,7 @@ class DiffSide: class DiffParser: """Parse diff lines.""" - def __init__(self, lines): # type: (t.List[str]) -> None + def __init__(self, lines: t.List[str]) -> None: self.lines = lines self.files = [] # type: t.List[FileDiff] @@ -146,7 +146,7 @@ class DiffParser: self.complete_file() - def process_start(self): # type: () -> None + def process_start(self) -> None: """Process a diff start line.""" self.complete_file() @@ -158,7 +158,7 @@ class DiffParser: self.file = FileDiff(match.group('old_path'), match.group('new_path')) self.action = self.process_continue - def process_range(self): # type: () -> None + def process_range(self) -> None: """Process a diff range line.""" match = re.search(r'^@@ -((?P[0-9]+),)?(?P[0-9]+) \+((?P[0-9]+),)?(?P[0-9]+) 
@@', self.line) @@ -169,7 +169,7 @@ class DiffParser: self.file.new.set_start(int(match.group('new_start') or 1), int(match.group('new_count'))) self.action = self.process_content - def process_continue(self): # type: () -> None + def process_continue(self) -> None: """Process a diff start, range or header line.""" if self.line.startswith('diff '): self.process_start() @@ -178,7 +178,7 @@ class DiffParser: else: self.process_header() - def process_header(self): # type: () -> None + def process_header(self) -> None: """Process a diff header line.""" if self.line.startswith('Binary files '): self.file.binary = True @@ -189,7 +189,7 @@ class DiffParser: else: self.file.append_header(self.line) - def process_content(self): # type: () -> None + def process_content(self) -> None: """Process a diff content line.""" if self.line == r'\ No newline at end of file': if self.previous_line.startswith(' '): @@ -218,7 +218,7 @@ class DiffParser: else: raise Exception('Unexpected diff content line.') - def complete_file(self): # type: () -> None + def complete_file(self) -> None: """Complete processing of the current file, if any.""" if not self.file: return diff --git a/test/lib/ansible_test/_internal/docker_util.py b/test/lib/ansible_test/_internal/docker_util.py index 6de2547938b..4604403d682 100644 --- a/test/lib/ansible_test/_internal/docker_util.py +++ b/test/lib/ansible_test/_internal/docker_util.py @@ -45,13 +45,13 @@ MAX_NUM_OPEN_FILES = 10240 class DockerCommand: """Details about the available docker command.""" - def __init__(self, command, executable, version): # type: (str, str, str) -> None + def __init__(self, command: str, executable: str, version: str) -> None: self.command = command self.executable = executable self.version = version @staticmethod - def detect(): # type: () -> t.Optional[DockerCommand] + def detect() -> t.Optional[DockerCommand]: """Detect and return the available docker command, or None.""" if os.environ.get('ANSIBLE_TEST_PREFER_PODMAN'): 
commands = list(reversed(DOCKER_COMMANDS)) @@ -74,7 +74,7 @@ class DockerCommand: return None -def require_docker(): # type: () -> DockerCommand +def require_docker() -> DockerCommand: """Return the docker command to invoke. Raises an exception if docker is not available.""" if command := get_docker_command(): return command @@ -83,18 +83,18 @@ def require_docker(): # type: () -> DockerCommand @cache -def get_docker_command(): # type: () -> t.Optional[DockerCommand] +def get_docker_command() -> t.Optional[DockerCommand]: """Return the docker command to invoke, or None if docker is not available.""" return DockerCommand.detect() -def docker_available(): # type: () -> bool +def docker_available() -> bool: """Return True if docker is available, otherwise return False.""" return bool(get_docker_command()) @cache -def get_docker_host_ip(): # type: () -> str +def get_docker_host_ip() -> str: """Return the IP of the Docker host.""" docker_host_ip = socket.gethostbyname(get_docker_hostname()) @@ -104,7 +104,7 @@ def get_docker_host_ip(): # type: () -> str @cache -def get_docker_hostname(): # type: () -> str +def get_docker_hostname() -> str: """Return the hostname of the Docker service.""" docker_host = os.environ.get('DOCKER_HOST') @@ -123,7 +123,7 @@ def get_docker_hostname(): # type: () -> str @cache -def get_podman_host_ip(): # type: () -> str +def get_podman_host_ip() -> str: """Return the IP of the Podman host.""" podman_host_ip = socket.gethostbyname(get_podman_hostname()) @@ -133,7 +133,7 @@ def get_podman_host_ip(): # type: () -> str @cache -def get_podman_default_hostname(): # type: () -> t.Optional[str] +def get_podman_default_hostname() -> t.Optional[str]: """ Return the default hostname of the Podman service. 
@@ -160,7 +160,7 @@ def get_podman_default_hostname(): # type: () -> t.Optional[str] @cache -def _get_podman_remote(): # type: () -> t.Optional[str] +def _get_podman_remote() -> t.Optional[str]: # URL value resolution precedence: # - command line value # - environment variable CONTAINER_HOST @@ -183,7 +183,7 @@ def _get_podman_remote(): # type: () -> t.Optional[str] @cache -def get_podman_hostname(): # type: () -> str +def get_podman_hostname() -> str: """Return the hostname of the Podman service.""" hostname = _get_podman_remote() @@ -195,7 +195,7 @@ def get_podman_hostname(): # type: () -> str @cache -def get_docker_container_id(): # type: () -> t.Optional[str] +def get_docker_container_id() -> t.Optional[str]: """Return the current container ID if running in a container, otherwise return None.""" path = '/proc/self/cpuset' container_id = None @@ -219,7 +219,7 @@ def get_docker_container_id(): # type: () -> t.Optional[str] return container_id -def get_docker_preferred_network_name(args): # type: (EnvironmentConfig) -> str +def get_docker_preferred_network_name(args: EnvironmentConfig) -> str: """ Return the preferred network name for use with Docker. The selection logic is: - the network selected by the user with `--docker-network` @@ -249,12 +249,12 @@ def get_docker_preferred_network_name(args): # type: (EnvironmentConfig) -> str return network -def is_docker_user_defined_network(network): # type: (str) -> bool +def is_docker_user_defined_network(network: str) -> bool: """Return True if the network being used is a user-defined network.""" return bool(network) and network != 'bridge' -def docker_pull(args, image): # type: (EnvironmentConfig, str) -> None +def docker_pull(args: EnvironmentConfig, image: str) -> None: """ Pull the specified image if it is not available. Images without a tag or digest will not be pulled. 
@@ -279,7 +279,7 @@ def docker_pull(args, image): # type: (EnvironmentConfig, str) -> None raise ApplicationError('Failed to pull docker image "%s".' % image) -def docker_cp_to(args, container_id, src, dst): # type: (EnvironmentConfig, str, str, str) -> None +def docker_cp_to(args: EnvironmentConfig, container_id: str, src: str, dst: str) -> None: """Copy a file to the specified container.""" docker_command(args, ['cp', src, '%s:%s' % (container_id, dst)], capture=True) @@ -327,7 +327,7 @@ def docker_run( raise ApplicationError('Failed to run docker image "%s".' % image) -def docker_start(args, container_id, options=None): # type: (EnvironmentConfig, str, t.Optional[t.List[str]]) -> t.Tuple[t.Optional[str], t.Optional[str]] +def docker_start(args: EnvironmentConfig, container_id: str, options: t.Optional[t.List[str]] = None) -> t.Tuple[t.Optional[str], t.Optional[str]]: """ Start a docker container by name or ID """ @@ -345,7 +345,7 @@ def docker_start(args, container_id, options=None): # type: (EnvironmentConfig, raise ApplicationError('Failed to run docker container "%s".' 
% container_id) -def docker_rm(args, container_id): # type: (EnvironmentConfig, str) -> None +def docker_rm(args: EnvironmentConfig, container_id: str) -> None: """Remove the specified container.""" try: docker_command(args, ['rm', '-f', container_id], capture=True) @@ -377,70 +377,70 @@ class DockerInspect: # primary properties @property - def id(self): # type: () -> str + def id(self) -> str: """Return the ID of the container.""" return self.inspection['Id'] @property - def network_settings(self): # type: () -> t.Dict[str, t.Any] + def network_settings(self) -> t.Dict[str, t.Any]: """Return a dictionary of the container network settings.""" return self.inspection['NetworkSettings'] @property - def state(self): # type: () -> t.Dict[str, t.Any] + def state(self) -> t.Dict[str, t.Any]: """Return a dictionary of the container state.""" return self.inspection['State'] @property - def config(self): # type: () -> t.Dict[str, t.Any] + def config(self) -> t.Dict[str, t.Any]: """Return a dictionary of the container configuration.""" return self.inspection['Config'] # nested properties @property - def ports(self): # type: () -> t.Dict[str, t.List[t.Dict[str, str]]] + def ports(self) -> t.Dict[str, t.List[t.Dict[str, str]]]: """Return a dictionary of ports the container has published.""" return self.network_settings['Ports'] @property - def networks(self): # type: () -> t.Optional[t.Dict[str, t.Dict[str, t.Any]]] + def networks(self) -> t.Optional[t.Dict[str, t.Dict[str, t.Any]]]: """Return a dictionary of the networks the container is attached to, or None if running under podman, which does not support networks.""" return self.network_settings.get('Networks') @property - def running(self): # type: () -> bool + def running(self) -> bool: """Return True if the container is running, otherwise False.""" return self.state['Running'] @property - def env(self): # type: () -> t.List[str] + def env(self) -> t.List[str]: """Return a list of the environment variables used to create 
the container.""" return self.config['Env'] @property - def image(self): # type: () -> str + def image(self) -> str: """Return the image used to create the container.""" return self.config['Image'] # functions - def env_dict(self): # type: () -> t.Dict[str, str] + def env_dict(self) -> t.Dict[str, str]: """Return a dictionary of the environment variables used to create the container.""" return dict((item[0], item[1]) for item in [e.split('=', 1) for e in self.env]) - def get_tcp_port(self, port): # type: (int) -> t.Optional[t.List[t.Dict[str, str]]] + def get_tcp_port(self, port: int) -> t.Optional[t.List[t.Dict[str, str]]]: """Return a list of the endpoints published by the container for the specified TCP port, or None if it is not published.""" return self.ports.get('%d/tcp' % port) - def get_network_names(self): # type: () -> t.Optional[t.List[str]] + def get_network_names(self) -> t.Optional[t.List[str]]: """Return a list of the network names the container is attached to.""" if self.networks is None: return None return sorted(self.networks) - def get_network_name(self): # type: () -> str + def get_network_name(self) -> str: """Return the network name the container is attached to. Raises an exception if no network, or more than one, is attached.""" networks = self.get_network_names() @@ -452,7 +452,7 @@ class DockerInspect: return networks[0] - def get_ip_address(self): # type: () -> t.Optional[str] + def get_ip_address(self) -> t.Optional[str]: """Return the IP address of the container for the preferred docker network.""" if self.networks: network_name = get_docker_preferred_network_name(self.args) @@ -472,7 +472,7 @@ class DockerInspect: return ipaddress -def docker_inspect(args, identifier, always=False): # type: (EnvironmentConfig, str, bool) -> DockerInspect +def docker_inspect(args: EnvironmentConfig, identifier: str, always: bool = False) -> DockerInspect: """ Return the results of `docker container inspect` for the specified container. 
Raises a ContainerNotFoundError if the container was not found. @@ -493,12 +493,12 @@ def docker_inspect(args, identifier, always=False): # type: (EnvironmentConfig, raise ContainerNotFoundError(identifier) -def docker_network_disconnect(args, container_id, network): # type: (EnvironmentConfig, str, str) -> None +def docker_network_disconnect(args: EnvironmentConfig, container_id: str, network: str) -> None: """Disconnect the specified docker container from the given network.""" docker_command(args, ['network', 'disconnect', network, container_id], capture=True) -def docker_image_exists(args, image): # type: (EnvironmentConfig, str) -> bool +def docker_image_exists(args: EnvironmentConfig, image: str) -> bool: """Return True if the image exists, otherwise False.""" try: docker_command(args, ['image', 'inspect', image], capture=True) @@ -531,13 +531,13 @@ def docker_exec( output_stream=output_stream, data=data) -def docker_info(args): # type: (CommonConfig) -> t.Dict[str, t.Any] +def docker_info(args: CommonConfig) -> t.Dict[str, t.Any]: """Return a dictionary containing details from the `docker info` command.""" stdout, _dummy = docker_command(args, ['info', '--format', '{{json .}}'], capture=True, always=True) return json.loads(stdout) -def docker_version(args): # type: (CommonConfig) -> t.Dict[str, t.Any] +def docker_version(args: CommonConfig) -> t.Dict[str, t.Any]: """Return a dictionary containing details from the `docker version` command.""" stdout, _dummy = docker_command(args, ['version', '--format', '{{json .}}'], capture=True, always=True) return json.loads(stdout) @@ -565,7 +565,7 @@ def docker_command( output_stream=output_stream, data=data) -def docker_environment(): # type: () -> t.Dict[str, str] +def docker_environment() -> t.Dict[str, str]: """Return a dictionary of docker related environment variables found in the current environment.""" env = common_environment() env.update(dict((key, os.environ[key]) for key in os.environ if 
key.startswith('DOCKER_') or key.startswith('CONTAINER_'))) diff --git a/test/lib/ansible_test/_internal/encoding.py b/test/lib/ansible_test/_internal/encoding.py index 3c6314dfbf8..11f0d75c26b 100644 --- a/test/lib/ansible_test/_internal/encoding.py +++ b/test/lib/ansible_test/_internal/encoding.py @@ -6,17 +6,17 @@ import typing as t ENCODING = 'utf-8' -def to_optional_bytes(value, errors='strict'): # type: (t.Optional[t.AnyStr], str) -> t.Optional[bytes] +def to_optional_bytes(value: t.Optional[t.AnyStr], errors: str = 'strict') -> t.Optional[bytes]: """Return the given value as bytes encoded using UTF-8 if not already bytes, or None if the value is None.""" return None if value is None else to_bytes(value, errors) -def to_optional_text(value, errors='strict'): # type: (t.Optional[t.AnyStr], str) -> t.Optional[str] +def to_optional_text(value: t.Optional[t.AnyStr], errors: str = 'strict') -> t.Optional[str]: """Return the given value as text decoded using UTF-8 if not already text, or None if the value is None.""" return None if value is None else to_text(value, errors) -def to_bytes(value, errors='strict'): # type: (t.AnyStr, str) -> bytes +def to_bytes(value: t.AnyStr, errors: str = 'strict') -> bytes: """Return the given value as bytes encoded using UTF-8 if not already bytes.""" if isinstance(value, bytes): return value @@ -27,7 +27,7 @@ def to_bytes(value, errors='strict'): # type: (t.AnyStr, str) -> bytes raise Exception('value is not bytes or text: %s' % type(value)) -def to_text(value, errors='strict'): # type: (t.AnyStr, str) -> str +def to_text(value: t.AnyStr, errors: str = 'strict') -> str: """Return the given value as text decoded using UTF-8 if not already text.""" if isinstance(value, bytes): return value.decode(ENCODING, errors) diff --git a/test/lib/ansible_test/_internal/executor.py b/test/lib/ansible_test/_internal/executor.py index 2dd53de81d3..9a9b08984d5 100644 --- a/test/lib/ansible_test/_internal/executor.py +++ 
b/test/lib/ansible_test/_internal/executor.py @@ -33,7 +33,7 @@ from .provisioning import ( ) -def get_changes_filter(args): # type: (TestConfig) -> t.List[str] +def get_changes_filter(args: TestConfig) -> t.List[str]: """Return a list of targets which should be tested based on the changes made.""" paths = detect_changes(args) @@ -57,7 +57,7 @@ def get_changes_filter(args): # type: (TestConfig) -> t.List[str] return args.metadata.change_description.targets -def detect_changes(args): # type: (TestConfig) -> t.Optional[t.List[str]] +def detect_changes(args: TestConfig) -> t.Optional[t.List[str]]: """Return a list of changed paths.""" if args.changed: paths = get_ci_provider().detect_changes(args) @@ -93,7 +93,7 @@ class NoTestsForChanges(ApplicationWarning): class Delegate(Exception): """Trigger command delegation.""" - def __init__(self, host_state, exclude=None, require=None): # type: (HostState, t.List[str], t.List[str]) -> None + def __init__(self, host_state: HostState, exclude: t.List[str] = None, require: t.List[str] = None) -> None: super().__init__() self.host_state = host_state @@ -103,7 +103,7 @@ class Delegate(Exception): class ListTargets(Exception): """List integration test targets instead of executing them.""" - def __init__(self, target_names): # type: (t.List[str]) -> None + def __init__(self, target_names: t.List[str]) -> None: super().__init__() self.target_names = target_names diff --git a/test/lib/ansible_test/_internal/git.py b/test/lib/ansible_test/_internal/git.py index 038f3988376..3db6d422b25 100644 --- a/test/lib/ansible_test/_internal/git.py +++ b/test/lib/ansible_test/_internal/git.py @@ -12,23 +12,23 @@ from .util import ( class Git: """Wrapper around git command-line tools.""" - def __init__(self, root=None): # type: (t.Optional[str]) -> None + def __init__(self, root: t.Optional[str] = None) -> None: self.git = 'git' self.root = root - def get_diff(self, args, git_options=None): # type: (t.List[str], t.Optional[t.List[str]]) -> 
t.List[str] + def get_diff(self, args: t.List[str], git_options: t.Optional[t.List[str]] = None) -> t.List[str]: """Run `git diff` and return the result as a list.""" cmd = ['diff'] + args if git_options is None: git_options = ['-c', 'core.quotePath='] return self.run_git_split(git_options + cmd, '\n', str_errors='replace') - def get_diff_names(self, args): # type: (t.List[str]) -> t.List[str] + def get_diff_names(self, args: t.List[str]) -> t.List[str]: """Return a list of file names from the `git diff` command.""" cmd = ['diff', '--name-only', '--no-renames', '-z'] + args return self.run_git_split(cmd, '\0') - def get_submodule_paths(self): # type: () -> t.List[str] + def get_submodule_paths(self) -> t.List[str]: """Return a list of submodule paths recursively.""" cmd = ['submodule', 'status', '--recursive'] output = self.run_git_split(cmd, '\n') @@ -45,22 +45,22 @@ class Git: return submodule_paths - def get_file_names(self, args): # type: (t.List[str]) -> t.List[str] + def get_file_names(self, args: t.List[str]) -> t.List[str]: """Return a list of file names from the `git ls-files` command.""" cmd = ['ls-files', '-z'] + args return self.run_git_split(cmd, '\0') - def get_branches(self): # type: () -> t.List[str] + def get_branches(self) -> t.List[str]: """Return the list of branches.""" cmd = ['for-each-ref', 'refs/heads/', '--format', '%(refname:strip=2)'] return self.run_git_split(cmd) - def get_branch(self): # type: () -> str + def get_branch(self) -> str: """Return the current branch name.""" cmd = ['symbolic-ref', '--short', 'HEAD'] return self.run_git(cmd).strip() - def get_rev_list(self, commits=None, max_count=None): # type: (t.Optional[t.List[str]], t.Optional[int]) -> t.List[str] + def get_rev_list(self, commits: t.Optional[t.List[str]] = None, max_count: t.Optional[int] = None) -> t.List[str]: """Return the list of results from the `git rev-list` command.""" cmd = ['rev-list'] @@ -74,12 +74,12 @@ class Git: return self.run_git_split(cmd) - def 
get_branch_fork_point(self, branch): # type: (str) -> str + def get_branch_fork_point(self, branch: str) -> str: """Return a reference to the point at which the given branch was forked.""" cmd = ['merge-base', '--fork-point', branch] return self.run_git(cmd).strip() - def is_valid_ref(self, ref): # type: (str) -> bool + def is_valid_ref(self, ref: str) -> bool: """Return True if the given reference is valid, otherwise return False.""" cmd = ['show', ref] try: @@ -88,7 +88,7 @@ class Git: except SubprocessError: return False - def run_git_split(self, cmd, separator=None, str_errors='strict'): # type: (t.List[str], t.Optional[str], str) -> t.List[str] + def run_git_split(self, cmd: t.List[str], separator: t.Optional[str] = None, str_errors: str = 'strict') -> t.List[str]: """Run the given `git` command and return the results as a list.""" output = self.run_git(cmd, str_errors=str_errors).strip(separator) @@ -97,6 +97,6 @@ class Git: return output.split(separator) - def run_git(self, cmd, str_errors='strict'): # type: (t.List[str], str) -> str + def run_git(self, cmd: t.List[str], str_errors: str = 'strict') -> str: """Run the given `git` command and return the results as a string.""" return raw_command([self.git] + cmd, cwd=self.root, capture=True, str_errors=str_errors)[0] diff --git a/test/lib/ansible_test/_internal/host_configs.py b/test/lib/ansible_test/_internal/host_configs.py index 6234090fb3b..fa08bf93130 100644 --- a/test/lib/ansible_test/_internal/host_configs.py +++ b/test/lib/ansible_test/_internal/host_configs.py @@ -50,14 +50,14 @@ class OriginCompletionConfig(PosixCompletionConfig): super().__init__(name='origin') @property - def supported_pythons(self): # type: () -> t.List[str] + def supported_pythons(self) -> t.List[str]: """Return a list of the supported Python versions.""" current_version = version_to_str(sys.version_info[:2]) versions = [version for version in SUPPORTED_PYTHON_VERSIONS if version == current_version] + \ [version for version in 
SUPPORTED_PYTHON_VERSIONS if version != current_version] return versions - def get_python_path(self, version): # type: (str) -> str + def get_python_path(self, version: str) -> str: """Return the path of the requested Python version.""" version = find_python(version) return version @@ -74,7 +74,7 @@ class HostContext: controller_config: t.Optional['PosixConfig'] @property - def controller(self): # type: () -> bool + def controller(self) -> bool: """True if the context is for the controller, otherwise False.""" return not self.controller_config @@ -83,15 +83,15 @@ class HostContext: class HostConfig(metaclass=abc.ABCMeta): """Base class for host configuration.""" @abc.abstractmethod - def get_defaults(self, context): # type: (HostContext) -> CompletionConfig + def get_defaults(self, context: HostContext) -> CompletionConfig: """Return the default settings.""" @abc.abstractmethod - def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None + def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None: """Apply default settings.""" @property - def is_managed(self): # type: () -> bool + def is_managed(self) -> bool: """ True if the host is a managed instance, otherwise False. Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user. @@ -106,16 +106,16 @@ class PythonConfig(metaclass=abc.ABCMeta): path: t.Optional[str] = None @property - def tuple(self): # type: () -> t.Tuple[int, ...] 
+ def tuple(self) -> t.Tuple[int, ...]: """Return the Python version as a tuple.""" return str_to_version(self.version) @property - def major_version(self): # type: () -> int + def major_version(self) -> int: """Return the Python major version.""" return self.tuple[0] - def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None + def apply_defaults(self, context: HostContext, defaults: PosixCompletionConfig) -> None: """Apply default settings.""" if self.version in (None, 'default'): self.version = defaults.get_default_python(context.controller) @@ -130,7 +130,7 @@ class PythonConfig(metaclass=abc.ABCMeta): @property @abc.abstractmethod - def is_managed(self): # type: () -> bool + def is_managed(self) -> bool: """ True if this Python is a managed instance, otherwise False. Managed instances are used exclusively by ansible-test and can safely have requirements installed without explicit permission from the user. @@ -141,7 +141,7 @@ class PythonConfig(metaclass=abc.ABCMeta): class NativePythonConfig(PythonConfig): """Configuration for native Python.""" @property - def is_managed(self): # type: () -> bool + def is_managed(self) -> bool: """ True if this Python is a managed instance, otherwise False. Managed instances are used exclusively by ansible-test and can safely have requirements installed without explicit permission from the user. 
@@ -154,7 +154,7 @@ class VirtualPythonConfig(PythonConfig): """Configuration for Python in a virtual environment.""" system_site_packages: t.Optional[bool] = None - def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None + def apply_defaults(self, context: HostContext, defaults: PosixCompletionConfig) -> None: """Apply default settings.""" super().apply_defaults(context, defaults) @@ -162,7 +162,7 @@ class VirtualPythonConfig(PythonConfig): self.system_site_packages = False @property - def is_managed(self): # type: () -> bool + def is_managed(self) -> bool: """ True if this Python is a managed instance, otherwise False. Managed instances are used exclusively by ansible-test and can safely have requirements installed without explicit permission from the user. @@ -177,14 +177,14 @@ class PosixConfig(HostConfig, metaclass=abc.ABCMeta): @property @abc.abstractmethod - def have_root(self): # type: () -> bool + def have_root(self) -> bool: """True if root is available, otherwise False.""" @abc.abstractmethod - def get_defaults(self, context): # type: (HostContext) -> PosixCompletionConfig + def get_defaults(self, context: HostContext) -> PosixCompletionConfig: """Return the default settings.""" - def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None + def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None: """Apply default settings.""" assert isinstance(defaults, PosixCompletionConfig) @@ -198,7 +198,7 @@ class PosixConfig(HostConfig, metaclass=abc.ABCMeta): class ControllerHostConfig(PosixConfig, metaclass=abc.ABCMeta): """Base class for host configurations which support the controller.""" @abc.abstractmethod - def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig] + def get_default_targets(self, context: HostContext) -> t.List[ControllerConfig]: """Return the default targets for this host config.""" @@ -210,16 +210,16 @@ class 
RemoteConfig(HostConfig, metaclass=abc.ABCMeta): arch: t.Optional[str] = None @property - def platform(self): # type: () -> str + def platform(self) -> str: """The name of the platform.""" return self.name.partition('/')[0] @property - def version(self): # type: () -> str + def version(self) -> str: """The version of the platform.""" return self.name.partition('/')[2] - def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None + def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None: """Apply default settings.""" assert isinstance(defaults, RemoteCompletionConfig) @@ -232,7 +232,7 @@ class RemoteConfig(HostConfig, metaclass=abc.ABCMeta): self.arch = self.arch or defaults.arch or Architecture.X86_64 @property - def is_managed(self): # type: () -> bool + def is_managed(self) -> bool: """ True if this host is a managed instance, otherwise False. Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user. 
@@ -247,7 +247,7 @@ class PosixSshConfig(PosixConfig): host: t.Optional[str] = None port: t.Optional[int] = None - def get_defaults(self, context): # type: (HostContext) -> PosixSshCompletionConfig + def get_defaults(self, context: HostContext) -> PosixSshCompletionConfig: """Return the default settings.""" return PosixSshCompletionConfig( user=self.user, @@ -255,7 +255,7 @@ class PosixSshConfig(PosixConfig): ) @property - def have_root(self): # type: () -> bool + def have_root(self) -> bool: """True if root is available, otherwise False.""" return self.user == 'root' @@ -265,11 +265,11 @@ class InventoryConfig(HostConfig): """Configuration using inventory.""" path: t.Optional[str] = None - def get_defaults(self, context): # type: (HostContext) -> InventoryCompletionConfig + def get_defaults(self, context: HostContext) -> InventoryCompletionConfig: """Return the default settings.""" return InventoryCompletionConfig() - def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None + def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None: """Apply default settings.""" assert isinstance(defaults, InventoryCompletionConfig) @@ -283,7 +283,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig): privileged: t.Optional[bool] = None seccomp: t.Optional[str] = None - def get_defaults(self, context): # type: (HostContext) -> DockerCompletionConfig + def get_defaults(self, context: HostContext) -> DockerCompletionConfig: """Return the default settings.""" return filter_completion(docker_completion()).get(self.name) or DockerCompletionConfig( name=self.name, @@ -291,7 +291,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig): placeholder=True, ) - def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig] + def get_default_targets(self, context: HostContext) -> t.List[ControllerConfig]: """Return the default targets for this host config.""" if self.name in 
filter_completion(docker_completion()): defaults = self.get_defaults(context) @@ -301,7 +301,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig): return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in pythons.items()] - def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None + def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None: """Apply default settings.""" assert isinstance(defaults, DockerCompletionConfig) @@ -317,7 +317,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig): self.privileged = False @property - def is_managed(self): # type: () -> bool + def is_managed(self) -> bool: """ True if this host is a managed instance, otherwise False. Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user. @@ -325,7 +325,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig): return True @property - def have_root(self): # type: () -> bool + def have_root(self) -> bool: """True if root is available, otherwise False.""" return True @@ -335,14 +335,14 @@ class PosixRemoteConfig(RemoteConfig, ControllerHostConfig, PosixConfig): """Configuration for a POSIX remote host.""" become: t.Optional[str] = None - def get_defaults(self, context): # type: (HostContext) -> PosixRemoteCompletionConfig + def get_defaults(self, context: HostContext) -> PosixRemoteCompletionConfig: """Return the default settings.""" return filter_completion(remote_completion()).get(self.name) or remote_completion().get(self.platform) or PosixRemoteCompletionConfig( name=self.name, placeholder=True, ) - def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig] + def get_default_targets(self, context: HostContext) -> t.List[ControllerConfig]: """Return the default targets for this host config.""" if self.name in filter_completion(remote_completion()): 
defaults = self.get_defaults(context) @@ -352,7 +352,7 @@ class PosixRemoteConfig(RemoteConfig, ControllerHostConfig, PosixConfig): return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in pythons.items()] - def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None + def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None: """Apply default settings.""" assert isinstance(defaults, PosixRemoteCompletionConfig) @@ -361,7 +361,7 @@ class PosixRemoteConfig(RemoteConfig, ControllerHostConfig, PosixConfig): self.become = self.become or defaults.become @property - def have_root(self): # type: () -> bool + def have_root(self) -> bool: """True if root is available, otherwise False.""" return True @@ -374,7 +374,7 @@ class WindowsConfig(HostConfig, metaclass=abc.ABCMeta): @dataclasses.dataclass class WindowsRemoteConfig(RemoteConfig, WindowsConfig): """Configuration for a remote Windows host.""" - def get_defaults(self, context): # type: (HostContext) -> WindowsRemoteCompletionConfig + def get_defaults(self, context: HostContext) -> WindowsRemoteCompletionConfig: """Return the default settings.""" return filter_completion(windows_completion()).get(self.name) or windows_completion().get(self.platform) @@ -395,14 +395,14 @@ class NetworkRemoteConfig(RemoteConfig, NetworkConfig): collection: t.Optional[str] = None connection: t.Optional[str] = None - def get_defaults(self, context): # type: (HostContext) -> NetworkRemoteCompletionConfig + def get_defaults(self, context: HostContext) -> NetworkRemoteCompletionConfig: """Return the default settings.""" return filter_completion(network_completion()).get(self.name) or NetworkRemoteCompletionConfig( name=self.name, placeholder=True, ) - def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None + def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None: """Apply default 
settings.""" assert isinstance(defaults, NetworkRemoteCompletionConfig) @@ -420,16 +420,16 @@ class NetworkInventoryConfig(InventoryConfig, NetworkConfig): @dataclasses.dataclass class OriginConfig(ControllerHostConfig, PosixConfig): """Configuration for the origin host.""" - def get_defaults(self, context): # type: (HostContext) -> OriginCompletionConfig + def get_defaults(self, context: HostContext) -> OriginCompletionConfig: """Return the default settings.""" return OriginCompletionConfig() - def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig] + def get_default_targets(self, context: HostContext) -> t.List[ControllerConfig]: """Return the default targets for this host config.""" return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in get_available_python_versions().items()] @property - def have_root(self): # type: () -> bool + def have_root(self) -> bool: """True if root is available, otherwise False.""" return os.getuid() == 0 @@ -439,11 +439,11 @@ class ControllerConfig(PosixConfig): """Configuration for the controller host.""" controller: t.Optional[PosixConfig] = None - def get_defaults(self, context): # type: (HostContext) -> PosixCompletionConfig + def get_defaults(self, context: HostContext) -> PosixCompletionConfig: """Return the default settings.""" return context.controller_config.get_defaults(context) - def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None + def apply_defaults(self, context: HostContext, defaults: CompletionConfig) -> None: """Apply default settings.""" assert isinstance(defaults, PosixCompletionConfig) @@ -456,7 +456,7 @@ class ControllerConfig(PosixConfig): super().apply_defaults(context, defaults) @property - def is_managed(self): # type: () -> bool + def is_managed(self) -> bool: """ True if the host is a managed instance, otherwise False. 
Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user. @@ -464,7 +464,7 @@ class ControllerConfig(PosixConfig): return self.controller.is_managed @property - def have_root(self): # type: () -> bool + def have_root(self) -> bool: """True if root is available, otherwise False.""" return self.controller.have_root @@ -491,13 +491,13 @@ class HostSettings: filtered_args: t.List[str] controller_fallback: t.Optional[FallbackDetail] - def serialize(self, path): # type: (str) -> None + def serialize(self, path: str) -> None: """Serialize the host settings to the given path.""" with open_binary_file(path, 'wb') as settings_file: pickle.dump(self, settings_file) @staticmethod - def deserialize(path): # type: (str) -> HostSettings + def deserialize(path: str) -> HostSettings: """Deserialize host settings from the path.""" with open_binary_file(path) as settings_file: return pickle.load(settings_file) diff --git a/test/lib/ansible_test/_internal/host_profiles.py b/test/lib/ansible_test/_internal/host_profiles.py index 7f2b11e9181..eb3ab2a85d6 100644 --- a/test/lib/ansible_test/_internal/host_profiles.py +++ b/test/lib/ansible_test/_internal/host_profiles.py @@ -120,7 +120,7 @@ class Inventory: """Return an inventory instance created from the given hostname and variables.""" return Inventory(host_groups=dict(all={name: variables})) - def write(self, args, path): # type: (CommonConfig, str) -> None + def write(self, args: CommonConfig, path: str) -> None: """Write the given inventory to the specified path on disk.""" # NOTE: Switching the inventory generation to write JSON would be nice, but is currently not possible due to the use of hard-coded inventory filenames. 
@@ -173,19 +173,19 @@ class HostProfile(t.Generic[THostConfig], metaclass=abc.ABCMeta): self.cache = {} # type: t.Dict[str, t.Any] """Cache that must not be persisted across delegation.""" - def provision(self): # type: () -> None + def provision(self) -> None: """Provision the host before delegation.""" - def setup(self): # type: () -> None + def setup(self) -> None: """Perform out-of-band setup before delegation.""" - def deprovision(self): # type: () -> None + def deprovision(self) -> None: """Deprovision the host after delegation has completed.""" - def wait(self): # type: () -> None + def wait(self) -> None: """Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets.""" - def configure(self): # type: () -> None + def configure(self) -> None: """Perform in-band configuration. Executed before delegation for the controller and after delegation for targets.""" def __getstate__(self): @@ -201,7 +201,7 @@ class HostProfile(t.Generic[THostConfig], metaclass=abc.ABCMeta): class PosixProfile(HostProfile[TPosixConfig], metaclass=abc.ABCMeta): """Base class for POSIX host profiles.""" @property - def python(self): # type: () -> PythonConfig + def python(self) -> PythonConfig: """ The Python to use for this profile. If it is a virtual python, it will be created the first time it is requested. 
@@ -222,25 +222,25 @@ class PosixProfile(HostProfile[TPosixConfig], metaclass=abc.ABCMeta): class ControllerHostProfile(PosixProfile[TControllerHostConfig], metaclass=abc.ABCMeta): """Base class for profiles usable as a controller.""" @abc.abstractmethod - def get_origin_controller_connection(self): # type: () -> Connection + def get_origin_controller_connection(self) -> Connection: """Return a connection for accessing the host as a controller from the origin.""" @abc.abstractmethod - def get_working_directory(self): # type: () -> str + def get_working_directory(self) -> str: """Return the working directory for the host.""" class SshTargetHostProfile(HostProfile[THostConfig], metaclass=abc.ABCMeta): """Base class for profiles offering SSH connectivity.""" @abc.abstractmethod - def get_controller_target_connections(self): # type: () -> t.List[SshConnection] + def get_controller_target_connections(self) -> t.List[SshConnection]: """Return SSH connection(s) for accessing the host as a target from the controller.""" class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta): """Base class for remote instance profiles.""" @property - def core_ci_state(self): # type: () -> t.Optional[t.Dict[str, str]] + def core_ci_state(self) -> t.Optional[t.Dict[str, str]]: """The saved Ansible Core CI state.""" return self.state.get('core_ci') @@ -249,29 +249,29 @@ class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta): """The saved Ansible Core CI state.""" self.state['core_ci'] = value - def provision(self): # type: () -> None + def provision(self) -> None: """Provision the host before delegation.""" self.core_ci = self.create_core_ci(load=True) self.core_ci.start() self.core_ci_state = self.core_ci.save() - def deprovision(self): # type: () -> None + def deprovision(self) -> None: """Deprovision the host after delegation has completed.""" if self.args.remote_terminate == TerminateMode.ALWAYS or (self.args.remote_terminate == 
TerminateMode.SUCCESS and self.args.success): self.delete_instance() @property - def core_ci(self): # type: () -> t.Optional[AnsibleCoreCI] + def core_ci(self) -> t.Optional[AnsibleCoreCI]: """Return the cached AnsibleCoreCI instance, if any, otherwise None.""" return self.cache.get('core_ci') @core_ci.setter - def core_ci(self, value): # type: (AnsibleCoreCI) -> None + def core_ci(self, value: AnsibleCoreCI) -> None: """Cache the given AnsibleCoreCI instance.""" self.cache['core_ci'] = value - def get_instance(self): # type: () -> t.Optional[AnsibleCoreCI] + def get_instance(self) -> t.Optional[AnsibleCoreCI]: """Return the current AnsibleCoreCI instance, loading it if not already loaded.""" if not self.core_ci and self.core_ci_state: self.core_ci = self.create_core_ci(load=False) @@ -288,14 +288,14 @@ class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta): core_ci.stop() - def wait_for_instance(self): # type: () -> AnsibleCoreCI + def wait_for_instance(self) -> AnsibleCoreCI: """Wait for an AnsibleCoreCI VM instance to become ready.""" core_ci = self.get_instance() core_ci.wait() return core_ci - def create_core_ci(self, load): # type: (bool) -> AnsibleCoreCI + def create_core_ci(self, load: bool) -> AnsibleCoreCI: """Create and return an AnsibleCoreCI instance.""" if not self.config.arch: raise InternalError(f'No arch specified for config: {self.config}') @@ -315,7 +315,7 @@ class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta): class ControllerProfile(SshTargetHostProfile[ControllerConfig], PosixProfile[ControllerConfig]): """Host profile for the controller as a target.""" - def get_controller_target_connections(self): # type: () -> t.List[SshConnection] + def get_controller_target_connections(self) -> t.List[SshConnection]: """Return SSH connection(s) for accessing the host as a target from the controller.""" settings = SshConnectionDetail( name='localhost', @@ -332,16 +332,16 @@ class 
ControllerProfile(SshTargetHostProfile[ControllerConfig], PosixProfile[Con class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[DockerConfig]): """Host profile for a docker instance.""" @property - def container_name(self): # type: () -> t.Optional[str] + def container_name(self) -> t.Optional[str]: """Return the stored container name, if any, otherwise None.""" return self.state.get('container_name') @container_name.setter - def container_name(self, value): # type: (str) -> None + def container_name(self, value: str) -> None: """Store the given container name.""" self.state['container_name'] = value - def provision(self): # type: () -> None + def provision(self) -> None: """Provision the host before delegation.""" container = run_support_container( args=self.args, @@ -359,7 +359,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do self.container_name = container.name - def setup(self): # type: () -> None + def setup(self) -> None: """Perform out-of-band setup before delegation.""" bootstrapper = BootstrapDocker( controller=self.controller, @@ -372,7 +372,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do docker_exec(self.args, self.container_name, [shell], data=setup_sh, capture=False) - def deprovision(self): # type: () -> None + def deprovision(self) -> None: """Deprovision the host after delegation has completed.""" if not self.container_name: return # provision was never called or did not succeed, so there is no container to remove @@ -380,7 +380,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do if self.args.docker_terminate == TerminateMode.ALWAYS or (self.args.docker_terminate == TerminateMode.SUCCESS and self.args.success): docker_rm(self.args, self.container_name) - def wait(self): # type: () -> None + def wait(self) -> None: """Wait for the instance to be ready. 
Executed before delegation for the controller and after delegation for targets.""" if not self.controller: con = self.get_controller_target_connections()[0] @@ -396,7 +396,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do else: return - def get_controller_target_connections(self): # type: () -> t.List[SshConnection] + def get_controller_target_connections(self) -> t.List[SshConnection]: """Return SSH connection(s) for accessing the host as a target from the controller.""" containers = get_container_database(self.args) access = containers.data[HostType.control]['__test_hosts__'][self.container_name] @@ -415,15 +415,15 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do return [SshConnection(self.args, settings)] - def get_origin_controller_connection(self): # type: () -> DockerConnection + def get_origin_controller_connection(self) -> DockerConnection: """Return a connection for accessing the host as a controller from the origin.""" return DockerConnection(self.args, self.container_name) - def get_working_directory(self): # type: () -> str + def get_working_directory(self) -> str: """Return the working directory for the host.""" return '/root' - def get_docker_run_options(self): # type: () -> t.List[str] + def get_docker_run_options(self) -> t.List[str]: """Return a list of options needed to run the container.""" options = [ '--volume', '/sys/fs/cgroup:/sys/fs/cgroup:ro', @@ -453,11 +453,11 @@ class NetworkInventoryProfile(HostProfile[NetworkInventoryConfig]): class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]): """Host profile for a network remote instance.""" - def wait(self): # type: () -> None + def wait(self) -> None: """Wait for the instance to be ready. 
Executed before delegation for the controller and after delegation for targets.""" self.wait_until_ready() - def get_inventory_variables(self): # type: () -> t.Dict[str, t.Optional[t.Union[str, int]]] + def get_inventory_variables(self) -> t.Dict[str, t.Optional[t.Union[str, int]]]: """Return inventory variables for accessing this host.""" core_ci = self.wait_for_instance() connection = core_ci.connection @@ -474,7 +474,7 @@ class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]): return variables - def wait_until_ready(self): # type: () -> None + def wait_until_ready(self) -> None: """Wait for the host to respond to an Ansible module request.""" core_ci = self.wait_for_instance() @@ -501,7 +501,7 @@ class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]): raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.') - def get_controller_target_connections(self): # type: () -> t.List[SshConnection] + def get_controller_target_connections(self) -> t.List[SshConnection]: """Return SSH connection(s) for accessing the host as a target from the controller.""" core_ci = self.wait_for_instance() @@ -518,22 +518,22 @@ class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]): class OriginProfile(ControllerHostProfile[OriginConfig]): """Host profile for origin.""" - def get_origin_controller_connection(self): # type: () -> LocalConnection + def get_origin_controller_connection(self) -> LocalConnection: """Return a connection for accessing the host as a controller from the origin.""" return LocalConnection(self.args) - def get_working_directory(self): # type: () -> str + def get_working_directory(self) -> str: """Return the working directory for the host.""" return os.getcwd() class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile[PosixRemoteConfig]): """Host profile for a POSIX remote instance.""" - def wait(self): # type: () -> None + def wait(self) -> None: """Wait for the instance to be 
ready. Executed before delegation for the controller and after delegation for targets.""" self.wait_until_ready() - def configure(self): # type: () -> None + def configure(self) -> None: """Perform in-band configuration. Executed before delegation for the controller and after delegation for targets.""" # a target uses a single python version, but a controller may include additional versions for targets running on the controller python_versions = [self.python.version] + [target.python.version for target in self.targets if isinstance(target, ControllerConfig)] @@ -558,7 +558,7 @@ class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile ssh = self.get_origin_controller_connection() ssh.run([shell], data=setup_sh, capture=False) - def get_ssh_connection(self): # type: () -> SshConnection + def get_ssh_connection(self) -> SshConnection: """Return an SSH connection for accessing the host.""" core_ci = self.wait_for_instance() @@ -581,7 +581,7 @@ class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile return SshConnection(self.args, settings, become) - def wait_until_ready(self): # type: () -> str + def wait_until_ready(self) -> str: """Wait for instance to respond to SSH, returning the current working directory once connected.""" core_ci = self.wait_for_instance() @@ -596,15 +596,15 @@ class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.') - def get_controller_target_connections(self): # type: () -> t.List[SshConnection] + def get_controller_target_connections(self) -> t.List[SshConnection]: """Return SSH connection(s) for accessing the host as a target from the controller.""" return [self.get_ssh_connection()] - def get_origin_controller_connection(self): # type: () -> SshConnection + def get_origin_controller_connection(self) -> SshConnection: """Return a connection for accessing the host as a 
controller from the origin.""" return self.get_ssh_connection() - def get_working_directory(self): # type: () -> str + def get_working_directory(self) -> str: """Return the working directory for the host.""" if not self.pwd: ssh = self.get_origin_controller_connection() @@ -623,19 +623,19 @@ class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile return self.pwd @property - def pwd(self): # type: () -> t.Optional[str] + def pwd(self) -> t.Optional[str]: """Return the cached pwd, if any, otherwise None.""" return self.cache.get('pwd') @pwd.setter - def pwd(self, value): # type: (str) -> None + def pwd(self, value: str) -> None: """Cache the given pwd.""" self.cache['pwd'] = value class PosixSshProfile(SshTargetHostProfile[PosixSshConfig], PosixProfile[PosixSshConfig]): """Host profile for a POSIX SSH instance.""" - def get_controller_target_connections(self): # type: () -> t.List[SshConnection] + def get_controller_target_connections(self) -> t.List[SshConnection]: """Return SSH connection(s) for accessing the host as a target from the controller.""" settings = SshConnectionDetail( name='target', @@ -651,7 +651,7 @@ class PosixSshProfile(SshTargetHostProfile[PosixSshConfig], PosixProfile[PosixSs class WindowsInventoryProfile(SshTargetHostProfile[WindowsInventoryConfig]): """Host profile for a Windows inventory.""" - def get_controller_target_connections(self): # type: () -> t.List[SshConnection] + def get_controller_target_connections(self) -> t.List[SshConnection]: """Return SSH connection(s) for accessing the host as a target from the controller.""" inventory = parse_inventory(self.args, self.config.path) hosts = get_hosts(inventory, 'windows') @@ -675,11 +675,11 @@ class WindowsInventoryProfile(SshTargetHostProfile[WindowsInventoryConfig]): class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]): """Host profile for a Windows remote instance.""" - def wait(self): # type: () -> None + def wait(self) -> None: """Wait for the instance 
to be ready. Executed before delegation for the controller and after delegation for targets.""" self.wait_until_ready() - def get_inventory_variables(self): # type: () -> t.Dict[str, t.Optional[t.Union[str, int]]] + def get_inventory_variables(self) -> t.Dict[str, t.Optional[t.Union[str, int]]]: """Return inventory variables for accessing this host.""" core_ci = self.wait_for_instance() connection = core_ci.connection @@ -705,7 +705,7 @@ class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]): return variables - def wait_until_ready(self): # type: () -> None + def wait_until_ready(self) -> None: """Wait for the host to respond to an Ansible module request.""" core_ci = self.wait_for_instance() @@ -732,7 +732,7 @@ class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]): raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.') - def get_controller_target_connections(self): # type: () -> t.List[SshConnection] + def get_controller_target_connections(self) -> t.List[SshConnection]: """Return SSH connection(s) for accessing the host as a target from the controller.""" core_ci = self.wait_for_instance() @@ -749,7 +749,7 @@ class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]): @cache -def get_config_profile_type_map(): # type: () -> t.Dict[t.Type[HostConfig], t.Type[HostProfile]] +def get_config_profile_type_map() -> t.Dict[t.Type[HostConfig], t.Type[HostProfile]]: """Create and return a mapping of HostConfig types to HostProfile types.""" return get_type_map(HostProfile, HostConfig) diff --git a/test/lib/ansible_test/_internal/http.py b/test/lib/ansible_test/_internal/http.py index dbe310698ef..6802d712e3a 100644 --- a/test/lib/ansible_test/_internal/http.py +++ b/test/lib/ansible_test/_internal/http.py @@ -22,7 +22,7 @@ from .util_common import ( class HttpClient: """Make HTTP requests via curl.""" - def __init__(self, args, always=False, insecure=False, proxy=None): # type: (CommonConfig, bool, bool, 
t.Optional[str]) -> None + def __init__(self, args: CommonConfig, always: bool = False, insecure: bool = False, proxy: t.Optional[str] = None) -> None: self.args = args self.always = always self.insecure = insecure @@ -31,11 +31,11 @@ class HttpClient: self.username = None self.password = None - def get(self, url): # type: (str) -> HttpResponse + def get(self, url: str) -> HttpResponse: """Perform an HTTP GET and return the response.""" return self.request('GET', url) - def delete(self, url): # type: (str) -> HttpResponse + def delete(self, url: str) -> HttpResponse: """Perform an HTTP DELETE and return the response.""" return self.request('DELETE', url) @@ -113,13 +113,13 @@ class HttpClient: class HttpResponse: """HTTP response from curl.""" - def __init__(self, method, url, status_code, response): # type: (str, str, int, str) -> None + def __init__(self, method: str, url: str, status_code: int, response: str) -> None: self.method = method self.url = url self.status_code = status_code self.response = response - def json(self): # type: () -> t.Any + def json(self) -> t.Any: """Return the response parsed as JSON, raising an exception if parsing fails.""" try: return json.loads(self.response) @@ -129,6 +129,6 @@ class HttpResponse: class HttpError(ApplicationError): """HTTP response as an error.""" - def __init__(self, status, message): # type: (int, str) -> None + def __init__(self, status: int, message: str) -> None: super().__init__('%s: %s' % (status, message)) self.status = status diff --git a/test/lib/ansible_test/_internal/inventory.py b/test/lib/ansible_test/_internal/inventory.py index f5a245ec890..5528ac43ebe 100644 --- a/test/lib/ansible_test/_internal/inventory.py +++ b/test/lib/ansible_test/_internal/inventory.py @@ -26,7 +26,7 @@ from .host_profiles import ( ) -def create_controller_inventory(args, path, controller_host): # type: (EnvironmentConfig, str, ControllerHostProfile) -> None +def create_controller_inventory(args: EnvironmentConfig, path: str, 
controller_host: ControllerHostProfile) -> None: """Create and return inventory for use in controller-only integration tests.""" inventory = Inventory( host_groups=dict( @@ -43,7 +43,7 @@ def create_controller_inventory(args, path, controller_host): # type: (Environm inventory.write(args, path) -def create_windows_inventory(args, path, target_hosts): # type: (EnvironmentConfig, str, t.List[HostProfile]) -> None +def create_windows_inventory(args: EnvironmentConfig, path: str, target_hosts: t.List[HostProfile]) -> None: """Create and return inventory for use in target Windows integration tests.""" first = target_hosts[0] @@ -78,7 +78,7 @@ def create_windows_inventory(args, path, target_hosts): # type: (EnvironmentCon inventory.write(args, path) -def create_network_inventory(args, path, target_hosts): # type: (EnvironmentConfig, str, t.List[HostProfile]) -> None +def create_network_inventory(args: EnvironmentConfig, path: str, target_hosts: t.List[HostProfile]) -> None: """Create and return inventory for use in target network integration tests.""" first = target_hosts[0] @@ -112,7 +112,7 @@ def create_network_inventory(args, path, target_hosts): # type: (EnvironmentCon inventory.write(args, path) -def create_posix_inventory(args, path, target_hosts, needs_ssh=False): # type: (EnvironmentConfig, str, t.List[HostProfile], bool) -> None +def create_posix_inventory(args: EnvironmentConfig, path: str, target_hosts: t.List[HostProfile], needs_ssh: bool = False) -> None: """Create and return inventory for use in POSIX integration tests.""" target_hosts = t.cast(t.List[SshTargetHostProfile], target_hosts) diff --git a/test/lib/ansible_test/_internal/io.py b/test/lib/ansible_test/_internal/io.py index e2ef7a50f84..e29a1a56a54 100644 --- a/test/lib/ansible_test/_internal/io.py +++ b/test/lib/ansible_test/_internal/io.py @@ -14,23 +14,23 @@ from .encoding import ( ) -def read_json_file(path): # type: (str) -> t.Any +def read_json_file(path: str) -> t.Any: """Parse and return 
the json content from the specified path.""" return json.loads(read_text_file(path)) -def read_text_file(path): # type: (str) -> str +def read_text_file(path: str) -> str: """Return the contents of the specified path as text.""" return to_text(read_binary_file(path)) -def read_binary_file(path): # type: (str) -> bytes +def read_binary_file(path: str) -> bytes: """Return the contents of the specified path as bytes.""" with open_binary_file(path) as file_obj: return file_obj.read() -def make_dirs(path): # type: (str) -> None +def make_dirs(path: str) -> None: """Create a directory at path, including any necessary parent directories.""" try: os.makedirs(to_bytes(path)) @@ -58,7 +58,7 @@ def write_json_file(path, # type: str return text_content -def write_text_file(path, content, create_directories=False): # type: (str, str, bool) -> None +def write_text_file(path: str, content: str, create_directories: bool = False) -> None: """Write the given text content to the specified path, optionally creating missing directories.""" if create_directories: make_dirs(os.path.dirname(path)) @@ -67,7 +67,7 @@ def write_text_file(path, content, create_directories=False): # type: (str, str file_obj.write(to_bytes(content)) -def open_text_file(path, mode='r'): # type: (str, str) -> t.IO[str] +def open_text_file(path: str, mode: str = 'r') -> t.IO[str]: """Open the given path for text access.""" if 'b' in mode: raise Exception('mode cannot include "b" for text files: %s' % mode) @@ -75,7 +75,7 @@ def open_text_file(path, mode='r'): # type: (str, str) -> t.IO[str] return io.open(to_bytes(path), mode, encoding=ENCODING) # pylint: disable=consider-using-with -def open_binary_file(path, mode='rb'): # type: (str, str) -> t.IO[bytes] +def open_binary_file(path: str, mode: str = 'rb') -> t.IO[bytes]: """Open the given path for binary access.""" if 'b' not in mode: raise Exception('mode must include "b" for binary files: %s' % mode) diff --git a/test/lib/ansible_test/_internal/metadata.py 
b/test/lib/ansible_test/_internal/metadata.py index e7f82b0aacb..5ade408581b 100644 --- a/test/lib/ansible_test/_internal/metadata.py +++ b/test/lib/ansible_test/_internal/metadata.py @@ -26,7 +26,7 @@ class Metadata: self.change_description = None # type: t.Optional[ChangeDescription] self.ci_provider = None # type: t.Optional[str] - def populate_changes(self, diff): # type: (t.Optional[t.List[str]]) -> None + def populate_changes(self, diff: t.Optional[t.List[str]]) -> None: """Populate the changeset using the given diff.""" patches = parse_diff(diff) patches = sorted(patches, key=lambda k: k.new.path) # type: t.List[FileDiff] @@ -45,7 +45,7 @@ class Metadata: # failed tests involving deleted files should be using line 0 since there is no content remaining self.changes[path] = ((0, 0),) - def to_dict(self): # type: () -> t.Dict[str, t.Any] + def to_dict(self) -> t.Dict[str, t.Any]: """Return a dictionary representation of the metadata.""" return dict( changes=self.changes, @@ -54,7 +54,7 @@ class Metadata: change_description=self.change_description.to_dict(), ) - def to_file(self, path): # type: (str) -> None + def to_file(self, path: str) -> None: """Write the metadata to the specified file.""" data = self.to_dict() @@ -63,7 +63,7 @@ class Metadata: write_json_file(path, data) @staticmethod - def from_file(path): # type: (str) -> Metadata + def from_file(path: str) -> Metadata: """Return metadata loaded from the specified file.""" data = read_json_file(path) return Metadata.from_dict(data) @@ -91,16 +91,16 @@ class ChangeDescription: self.no_integration_paths = [] # type: t.List[str] @property - def targets(self): # type: () -> t.Optional[t.List[str]] + def targets(self) -> t.Optional[t.List[str]]: """Optional list of target names.""" return self.regular_command_targets.get(self.command) @property - def focused_targets(self): # type: () -> t.Optional[t.List[str]] + def focused_targets(self) -> t.Optional[t.List[str]]: """Optional list of focused target names.""" 
return self.focused_command_targets.get(self.command) - def to_dict(self): # type: () -> t.Dict[str, t.Any] + def to_dict(self) -> t.Dict[str, t.Any]: """Return a dictionary representation of the change description.""" return dict( command=self.command, diff --git a/test/lib/ansible_test/_internal/payload.py b/test/lib/ansible_test/_internal/payload.py index e6ccc6ed5ff..3f342fd2ecf 100644 --- a/test/lib/ansible_test/_internal/payload.py +++ b/test/lib/ansible_test/_internal/payload.py @@ -38,7 +38,7 @@ tarfile.pwd = None # type: ignore[attr-defined] # undocumented attribute tarfile.grp = None # type: ignore[attr-defined] # undocumented attribute -def create_payload(args, dst_path): # type: (CommonConfig, str) -> None +def create_payload(args: CommonConfig, dst_path: str) -> None: """Create a payload for delegation.""" if args.explain: return @@ -46,7 +46,7 @@ def create_payload(args, dst_path): # type: (CommonConfig, str) -> None files = list(data_context().ansible_source) filters = {} - def make_executable(tar_info): # type: (tarfile.TarInfo) -> t.Optional[tarfile.TarInfo] + def make_executable(tar_info: tarfile.TarInfo) -> t.Optional[tarfile.TarInfo]: """Make the given file executable.""" tar_info.mode |= stat.S_IXUSR | stat.S_IXOTH | stat.S_IXGRP return tar_info @@ -117,7 +117,7 @@ def create_payload(args, dst_path): # type: (CommonConfig, str) -> None display.info('Created a %d byte payload archive containing %d files in %d seconds.' % (payload_size_bytes, len(files), duration), verbosity=1) -def create_temporary_bin_files(args): # type: (CommonConfig) -> t.Tuple[t.Tuple[str, str], ...] 
+def create_temporary_bin_files(args: CommonConfig) -> t.Tuple[t.Tuple[str, str], ...]: """Create a temporary ansible bin directory populated using the symlink map.""" if args.explain: temp_path = '/tmp/ansible-tmp-bin' diff --git a/test/lib/ansible_test/_internal/provider/__init__.py b/test/lib/ansible_test/_internal/provider/__init__.py index 78346142658..22978a5fbd9 100644 --- a/test/lib/ansible_test/_internal/provider/__init__.py +++ b/test/lib/ansible_test/_internal/provider/__init__.py @@ -11,7 +11,7 @@ from ..util import ( ) -def get_path_provider_classes(provider_type): # type: (t.Type[TPathProvider]) -> t.List[t.Type[TPathProvider]] +def get_path_provider_classes(provider_type: t.Type[TPathProvider]) -> t.List[t.Type[TPathProvider]]: """Return a list of path provider classes of the given type.""" return sorted(get_subclasses(provider_type), key=lambda c: (c.priority, c.__name__)) @@ -48,7 +48,7 @@ def find_path_provider(provider_type, # type: t.Type[TPathProvider] class ProviderNotFoundForPath(ApplicationError): """Exception generated when a path based provider cannot be found for a given path.""" - def __init__(self, provider_type, path): # type: (t.Type, str) -> None + def __init__(self, provider_type: t.Type, path: str) -> None: super().__init__('No %s found for path: %s' % (provider_type.__name__, path)) self.provider_type = provider_type @@ -60,12 +60,12 @@ class PathProvider(metaclass=abc.ABCMeta): sequence = 500 priority = 500 - def __init__(self, root): # type: (str) -> None + def __init__(self, root: str) -> None: self.root = root @staticmethod @abc.abstractmethod - def is_content_root(path): # type: (str) -> bool + def is_content_root(path: str) -> bool: """Return True if the given path is a content root for this provider.""" diff --git a/test/lib/ansible_test/_internal/provider/layout/__init__.py b/test/lib/ansible_test/_internal/provider/layout/__init__.py index 9fd13550e5e..35ce95ea9b3 100644 --- 
a/test/lib/ansible_test/_internal/provider/layout/__init__.py +++ b/test/lib/ansible_test/_internal/provider/layout/__init__.py @@ -28,14 +28,14 @@ class Layout: self.__paths_tree = paths_to_tree(self.__paths) self.__files_tree = paths_to_tree(self.__files) - def all_files(self, include_symlinked_directories=False): # type: (bool) -> t.List[str] + def all_files(self, include_symlinked_directories: bool = False) -> t.List[str]: """Return a list of all file paths.""" if include_symlinked_directories: return self.__paths return self.__files - def walk_files(self, directory, include_symlinked_directories=False): # type: (str, bool) -> t.List[str] + def walk_files(self, directory: str, include_symlinked_directories: bool = False) -> t.List[str]: """Return a list of file paths found recursively under the given directory.""" if include_symlinked_directories: tree = self.__paths_tree @@ -59,13 +59,13 @@ class Layout: return files - def get_dirs(self, directory): # type: (str) -> t.List[str] + def get_dirs(self, directory: str) -> t.List[str]: """Return a list directory paths found directly under the given directory.""" parts = directory.rstrip(os.path.sep).split(os.path.sep) item = get_tree_item(self.__files_tree, parts) return [os.path.join(directory, key) for key in item[0].keys()] if item else [] - def get_files(self, directory): # type: (str) -> t.List[str] + def get_files(self, directory: str) -> t.List[str]: """Return a list of file paths found directly under the given directory.""" parts = directory.rstrip(os.path.sep).split(os.path.sep) item = get_tree_item(self.__files_tree, parts) @@ -114,7 +114,7 @@ class ContentLayout(Layout): self.is_ansible = root == ANSIBLE_SOURCE_ROOT @property - def prefix(self): # type: () -> str + def prefix(self) -> str: """Return the collection prefix or an empty string if not a collection.""" if self.collection: return self.collection.prefix @@ -122,17 +122,17 @@ class ContentLayout(Layout): return '' @property - def 
module_path(self): # type: () -> t.Optional[str] + def module_path(self) -> t.Optional[str]: """Return the path where modules are found, if any.""" return self.plugin_paths.get('modules') @property - def module_utils_path(self): # type: () -> t.Optional[str] + def module_utils_path(self) -> t.Optional[str]: """Return the path where module_utils are found, if any.""" return self.plugin_paths.get('module_utils') @property - def module_utils_powershell_path(self): # type: () -> t.Optional[str] + def module_utils_powershell_path(self) -> t.Optional[str]: """Return the path where powershell module_utils are found, if any.""" if self.is_ansible: return os.path.join(self.plugin_paths['module_utils'], 'powershell') @@ -140,7 +140,7 @@ class ContentLayout(Layout): return self.plugin_paths.get('module_utils') @property - def module_utils_csharp_path(self): # type: () -> t.Optional[str] + def module_utils_csharp_path(self) -> t.Optional[str]: """Return the path where csharp module_utils are found, if any.""" if self.is_ansible: return os.path.join(self.plugin_paths['module_utils'], 'csharp') @@ -200,11 +200,11 @@ class LayoutProvider(PathProvider): ) @abc.abstractmethod - def create(self, root, paths): # type: (str, t.List[str]) -> ContentLayout + def create(self, root: str, paths: t.List[str]) -> ContentLayout: """Create a layout using the given root and paths.""" -def paths_to_tree(paths): # type: (t.List[str]) -> t.Tuple[t.Dict[str, t.Any], t.List[str]] +def paths_to_tree(paths: t.List[str]) -> t.Tuple[t.Dict[str, t.Any], t.List[str]]: """Return a filesystem tree from the given list of paths.""" tree = {}, [] # type: t.Tuple[t.Dict[str, t.Any], t.List[str]] diff --git a/test/lib/ansible_test/_internal/provider/layout/ansible.py b/test/lib/ansible_test/_internal/provider/layout/ansible.py index 345faa7c504..40f906e65f2 100644 --- a/test/lib/ansible_test/_internal/provider/layout/ansible.py +++ b/test/lib/ansible_test/_internal/provider/layout/ansible.py @@ -13,11 +13,11 @@ 
from . import ( class AnsibleLayout(LayoutProvider): """Layout provider for Ansible source.""" @staticmethod - def is_content_root(path): # type: (str) -> bool + def is_content_root(path: str) -> bool: """Return True if the given path is a content root for this provider.""" return os.path.exists(os.path.join(path, 'setup.py')) and os.path.exists(os.path.join(path, 'bin/ansible-test')) - def create(self, root, paths): # type: (str, t.List[str]) -> ContentLayout + def create(self, root: str, paths: t.List[str]) -> ContentLayout: """Create a Layout using the given root and paths.""" plugin_paths = dict((p, os.path.join('lib/ansible/plugins', p)) for p in self.PLUGIN_TYPES) diff --git a/test/lib/ansible_test/_internal/provider/layout/collection.py b/test/lib/ansible_test/_internal/provider/layout/collection.py index 6b826ee4a30..ec4fac4e927 100644 --- a/test/lib/ansible_test/_internal/provider/layout/collection.py +++ b/test/lib/ansible_test/_internal/provider/layout/collection.py @@ -19,14 +19,14 @@ from ...util import ( class CollectionLayout(LayoutProvider): """Layout provider for Ansible collections.""" @staticmethod - def is_content_root(path): # type: (str) -> bool + def is_content_root(path: str) -> bool: """Return True if the given path is a content root for this provider.""" if os.path.basename(os.path.dirname(os.path.dirname(path))) == 'ansible_collections': return True return False - def create(self, root, paths): # type: (str, t.List[str]) -> ContentLayout + def create(self, root: str, paths: t.List[str]) -> ContentLayout: """Create a Layout using the given root and paths.""" plugin_paths = dict((p, os.path.join('plugins', p)) for p in self.PLUGIN_TYPES) @@ -77,7 +77,7 @@ class CollectionLayout(LayoutProvider): ) @staticmethod - def __check_test_path(paths, messages): # type: (t.List[str], LayoutMessages) -> None + def __check_test_path(paths: t.List[str], messages: LayoutMessages) -> None: modern_test_path = 'tests/' modern_test_path_found = 
any(path.startswith(modern_test_path) for path in paths) legacy_test_path = 'test/' @@ -89,7 +89,7 @@ class CollectionLayout(LayoutProvider): messages.warning.append('Ignoring tests in "%s" that should be in "%s".' % (legacy_test_path, modern_test_path)) @staticmethod - def __check_integration_path(paths, messages): # type: (t.List[str], LayoutMessages) -> str + def __check_integration_path(paths: t.List[str], messages: LayoutMessages) -> str: modern_integration_path = 'roles/test/' modern_integration_path_found = any(path.startswith(modern_integration_path) for path in paths) legacy_integration_path = 'tests/integration/targets/' @@ -111,7 +111,7 @@ class CollectionLayout(LayoutProvider): return integration_targets_path @staticmethod - def __check_unit_path(paths, messages): # type: (t.List[str], LayoutMessages) -> None + def __check_unit_path(paths: t.List[str], messages: LayoutMessages) -> None: modern_unit_path = 'tests/unit/' modern_unit_path_found = any(path.startswith(modern_unit_path) for path in paths) legacy_unit_path = 'tests/units/' # test/units/ will be covered by the warnings for test/ vs tests/ diff --git a/test/lib/ansible_test/_internal/provider/layout/unsupported.py b/test/lib/ansible_test/_internal/provider/layout/unsupported.py index 80a9129198b..46e10ac664b 100644 --- a/test/lib/ansible_test/_internal/provider/layout/unsupported.py +++ b/test/lib/ansible_test/_internal/provider/layout/unsupported.py @@ -14,11 +14,11 @@ class UnsupportedLayout(LayoutProvider): sequence = 0 # disable automatic detection @staticmethod - def is_content_root(path): # type: (str) -> bool + def is_content_root(path: str) -> bool: """Return True if the given path is a content root for this provider.""" return False - def create(self, root, paths): # type: (str, t.List[str]) -> ContentLayout + def create(self, root: str, paths: t.List[str]) -> ContentLayout: """Create a Layout using the given root and paths.""" plugin_paths = dict((p, p) for p in self.PLUGIN_TYPES) diff 
--git a/test/lib/ansible_test/_internal/provider/source/__init__.py b/test/lib/ansible_test/_internal/provider/source/__init__.py index 359c5d6a2e1..24ab10ffb4d 100644 --- a/test/lib/ansible_test/_internal/provider/source/__init__.py +++ b/test/lib/ansible_test/_internal/provider/source/__init__.py @@ -12,5 +12,5 @@ from .. import ( class SourceProvider(PathProvider): """Base class for source providers.""" @abc.abstractmethod - def get_paths(self, path): # type: (str) -> t.List[str] + def get_paths(self, path: str) -> t.List[str]: """Return the list of available content paths under the given path.""" diff --git a/test/lib/ansible_test/_internal/provider/source/git.py b/test/lib/ansible_test/_internal/provider/source/git.py index 96f85dc73dd..0463f8a5e1a 100644 --- a/test/lib/ansible_test/_internal/provider/source/git.py +++ b/test/lib/ansible_test/_internal/provider/source/git.py @@ -24,11 +24,11 @@ from . import ( class GitSource(SourceProvider): """Source provider for a content root managed by git version control.""" @staticmethod - def is_content_root(path): # type: (str) -> bool + def is_content_root(path: str) -> bool: """Return True if the given path is a content root for this provider.""" return os.path.exists(os.path.join(path, '.git')) - def get_paths(self, path): # type: (str) -> t.List[str] + def get_paths(self, path: str) -> t.List[str]: """Return the list of available content paths under the given path.""" paths = self.__get_paths(path) @@ -57,7 +57,7 @@ class GitSource(SourceProvider): return paths @staticmethod - def __get_paths(path): # type: (str) -> t.List[str] + def __get_paths(path: str) -> t.List[str]: """Return the list of available content paths under the given path.""" git = Git(path) paths = git.get_file_names(['--cached', '--others', '--exclude-standard']) diff --git a/test/lib/ansible_test/_internal/provider/source/installed.py b/test/lib/ansible_test/_internal/provider/source/installed.py index f4ed6f3cbc1..1a37df0e4b0 100644 --- 
a/test/lib/ansible_test/_internal/provider/source/installed.py +++ b/test/lib/ansible_test/_internal/provider/source/installed.py @@ -14,11 +14,11 @@ class InstalledSource(SourceProvider): sequence = 0 # disable automatic detection @staticmethod - def is_content_root(path): # type: (str) -> bool + def is_content_root(path: str) -> bool: """Return True if the given path is a content root for this provider.""" return False - def get_paths(self, path): # type: (str) -> t.List[str] + def get_paths(self, path: str) -> t.List[str]: """Return the list of available content paths under the given path.""" paths = [] diff --git a/test/lib/ansible_test/_internal/provider/source/unsupported.py b/test/lib/ansible_test/_internal/provider/source/unsupported.py index ff5562c62c6..e16572e7d83 100644 --- a/test/lib/ansible_test/_internal/provider/source/unsupported.py +++ b/test/lib/ansible_test/_internal/provider/source/unsupported.py @@ -13,10 +13,10 @@ class UnsupportedSource(SourceProvider): sequence = 0 # disable automatic detection @staticmethod - def is_content_root(path): # type: (str) -> bool + def is_content_root(path: str) -> bool: """Return True if the given path is a content root for this provider.""" return False - def get_paths(self, path): # type: (str) -> t.List[str] + def get_paths(self, path: str) -> t.List[str]: """Return the list of available content paths under the given path.""" return [] diff --git a/test/lib/ansible_test/_internal/provider/source/unversioned.py b/test/lib/ansible_test/_internal/provider/source/unversioned.py index a78060cf8ac..a804b9c1834 100644 --- a/test/lib/ansible_test/_internal/provider/source/unversioned.py +++ b/test/lib/ansible_test/_internal/provider/source/unversioned.py @@ -22,11 +22,11 @@ class UnversionedSource(SourceProvider): sequence = 0 # disable automatic detection @staticmethod - def is_content_root(path): # type: (str) -> bool + def is_content_root(path: str) -> bool: """Return True if the given path is a content root for 
this provider.""" return False - def get_paths(self, path): # type: (str) -> t.List[str] + def get_paths(self, path: str) -> t.List[str]: """Return the list of available content paths under the given path.""" paths = [] diff --git a/test/lib/ansible_test/_internal/provisioning.py b/test/lib/ansible_test/_internal/provisioning.py index 85d91326f18..e1f819d8c94 100644 --- a/test/lib/ansible_test/_internal/provisioning.py +++ b/test/lib/ansible_test/_internal/provisioning.py @@ -57,17 +57,17 @@ class HostState: target_profiles: t.List[HostProfile] @property - def profiles(self): # type: () -> t.List[HostProfile] + def profiles(self) -> t.List[HostProfile]: """Return all the profiles as a list.""" return [t.cast(HostProfile, self.controller_profile)] + self.target_profiles - def serialize(self, path): # type: (str) -> None + def serialize(self, path: str) -> None: """Serialize the host state to the given path.""" with open_binary_file(path, 'wb') as state_file: pickle.dump(self, state_file) @staticmethod - def deserialize(args, path): # type: (EnvironmentConfig, str) -> HostState + def deserialize(args: EnvironmentConfig, path: str) -> HostState: """Deserialize host state from the given args and path.""" with open_binary_file(path) as state_file: host_state = pickle.load(state_file) # type: HostState @@ -79,12 +79,12 @@ class HostState: return host_state - def get_controller_target_connections(self): # type: () -> t.List[SshConnection] + def get_controller_target_connections(self) -> t.List[SshConnection]: """Return SSH connection(s) for accessing all target hosts from the controller.""" return list(itertools.chain.from_iterable([target.get_controller_target_connections() for target in self.target_profiles if isinstance(target, SshTargetHostProfile)])) - def targets(self, profile_type): # type: (t.Type[THostProfile]) -> t.List[THostProfile] + def targets(self, profile_type: t.Type[THostProfile]) -> t.List[THostProfile]: """The list of target(s), verified to be of the 
specified type.""" if not self.target_profiles: raise Exception('No target profiles found.') @@ -123,7 +123,7 @@ def prepare_profiles( atexit.register(functools.partial(cleanup_profiles, host_state)) - def provision(profile): # type: (HostProfile) -> None + def provision(profile: HostProfile) -> None: """Provision the given profile.""" profile.provision() @@ -140,7 +140,7 @@ def prepare_profiles( if requirements: requirements(args, host_state) - def configure(profile): # type: (HostProfile) -> None + def configure(profile: HostProfile) -> None: """Configure the given profile.""" profile.wait() @@ -152,7 +152,7 @@ def prepare_profiles( return host_state -def check_controller_python(args, host_state): # type: (EnvironmentConfig, HostState) -> None +def check_controller_python(args: EnvironmentConfig, host_state: HostState) -> None: """Check the running environment to make sure it is what we expected.""" sys_version = version_to_str(sys.version_info[:2]) controller_python = host_state.controller_profile.python @@ -168,7 +168,7 @@ def check_controller_python(args, host_state): # type: (EnvironmentConfig, Host args.controller_python = controller_python -def cleanup_profiles(host_state): # type: (HostState) -> None +def cleanup_profiles(host_state: HostState) -> None: """Cleanup provisioned hosts when exiting.""" for profile in host_state.profiles: profile.deprovision() diff --git a/test/lib/ansible_test/_internal/pypi_proxy.py b/test/lib/ansible_test/_internal/pypi_proxy.py index 461c5a2c17f..804dde8e870 100644 --- a/test/lib/ansible_test/_internal/pypi_proxy.py +++ b/test/lib/ansible_test/_internal/pypi_proxy.py @@ -50,7 +50,7 @@ from .inventory import ( ) -def run_pypi_proxy(args, targets_use_pypi): # type: (EnvironmentConfig, bool) -> None +def run_pypi_proxy(args: EnvironmentConfig, targets_use_pypi: bool) -> None: """Run a PyPI proxy support container.""" if args.pypi_endpoint: return # user has overridden the proxy endpoint, there is nothing to provision @@ -82,7 
+82,7 @@ def run_pypi_proxy(args, targets_use_pypi): # type: (EnvironmentConfig, bool) - ) -def configure_pypi_proxy(args, profile): # type: (EnvironmentConfig, HostProfile) -> None +def configure_pypi_proxy(args: EnvironmentConfig, profile: HostProfile) -> None: """Configure the environment to use a PyPI proxy, if present.""" if args.pypi_endpoint: pypi_endpoint = args.pypi_endpoint @@ -108,13 +108,13 @@ def configure_pypi_proxy(args, profile): # type: (EnvironmentConfig, HostProfil configure_target_pypi_proxy(args, profile, pypi_endpoint, pypi_hostname) -def configure_controller_pypi_proxy(args, profile, pypi_endpoint, pypi_hostname): # type: (EnvironmentConfig, HostProfile, str, str) -> None +def configure_controller_pypi_proxy(args: EnvironmentConfig, profile: HostProfile, pypi_endpoint: str, pypi_hostname: str) -> None: """Configure the controller environment to use a PyPI proxy.""" configure_pypi_proxy_pip(args, profile, pypi_endpoint, pypi_hostname) configure_pypi_proxy_easy_install(args, profile, pypi_endpoint) -def configure_target_pypi_proxy(args, profile, pypi_endpoint, pypi_hostname): # type: (EnvironmentConfig, HostProfile, str, str) -> None +def configure_target_pypi_proxy(args: EnvironmentConfig, profile: HostProfile, pypi_endpoint: str, pypi_hostname: str) -> None: """Configure the target environment to use a PyPI proxy.""" inventory_path = process_scoped_temporary_file(args) @@ -132,7 +132,7 @@ def configure_target_pypi_proxy(args, profile, pypi_endpoint, pypi_hostname): # atexit.register(cleanup_pypi_proxy) -def configure_pypi_proxy_pip(args, profile, pypi_endpoint, pypi_hostname): # type: (EnvironmentConfig, HostProfile, str, str) -> None +def configure_pypi_proxy_pip(args: EnvironmentConfig, profile: HostProfile, pypi_endpoint: str, pypi_hostname: str) -> None: """Configure a custom index for pip based installs.""" pip_conf_path = os.path.expanduser('~/.pip/pip.conf') pip_conf = ''' @@ -141,7 +141,7 @@ index-url = {0} trusted-host = {1} 
'''.format(pypi_endpoint, pypi_hostname).strip() - def pip_conf_cleanup(): # type: () -> None + def pip_conf_cleanup() -> None: """Remove custom pip PyPI config.""" display.info('Removing custom PyPI config: %s' % pip_conf_path, verbosity=1) os.remove(pip_conf_path) @@ -157,7 +157,7 @@ trusted-host = {1} atexit.register(pip_conf_cleanup) -def configure_pypi_proxy_easy_install(args, profile, pypi_endpoint): # type: (EnvironmentConfig, HostProfile, str) -> None +def configure_pypi_proxy_easy_install(args: EnvironmentConfig, profile: HostProfile, pypi_endpoint: str) -> None: """Configure a custom index for easy_install based installs.""" pydistutils_cfg_path = os.path.expanduser('~/.pydistutils.cfg') pydistutils_cfg = ''' @@ -168,7 +168,7 @@ index_url = {0} if os.path.exists(pydistutils_cfg_path) and not profile.config.is_managed: raise ApplicationError('Refusing to overwrite existing file: %s' % pydistutils_cfg_path) - def pydistutils_cfg_cleanup(): # type: () -> None + def pydistutils_cfg_cleanup() -> None: """Remove custom PyPI config.""" display.info('Removing custom PyPI config: %s' % pydistutils_cfg_path, verbosity=1) os.remove(pydistutils_cfg_path) diff --git a/test/lib/ansible_test/_internal/python_requirements.py b/test/lib/ansible_test/_internal/python_requirements.py index d152739d28d..3206aab9c14 100644 --- a/test/lib/ansible_test/_internal/python_requirements.py +++ b/test/lib/ansible_test/_internal/python_requirements.py @@ -70,7 +70,7 @@ VIRTUALENV_VERSION = '16.7.12' class PipUnavailableError(ApplicationError): """Exception raised when pip is not available.""" - def __init__(self, python): # type: (PythonConfig) -> None + def __init__(self, python: PythonConfig) -> None: super().__init__(f'Python {python.version} at "{python.path}" does not have pip available.') @@ -78,7 +78,7 @@ class PipUnavailableError(ApplicationError): class PipCommand: """Base class for pip commands.""" - def serialize(self): # type: () -> t.Tuple[str, t.Dict[str, t.Any]] + def
serialize(self) -> t.Tuple[str, t.Dict[str, t.Any]]: """Return a serialized representation of this command.""" name = type(self).__name__[3:].lower() return name, self.__dict__ @@ -91,7 +91,7 @@ class PipInstall(PipCommand): constraints: t.List[t.Tuple[str, str]] packages: t.List[str] - def has_package(self, name): # type: (str) -> bool + def has_package(self, name: str) -> bool: """Return True if the specified package will be installed, otherwise False.""" name = name.lower() @@ -182,7 +182,7 @@ def install_requirements( check_pyyaml(python) -def collect_bootstrap(python): # type: (PythonConfig) -> t.List[PipCommand] +def collect_bootstrap(python: PythonConfig) -> t.List[PipCommand]: """Return the details necessary to bootstrap pip into an empty virtual environment.""" infrastructure_packages = get_venv_packages(python) pip_version = infrastructure_packages['pip'] @@ -298,12 +298,12 @@ def collect_general_install( return collect_install(requirements_paths, constraints_paths) -def collect_package_install(packages, constraints=True): # type: (t.List[str], bool) -> t.List[PipInstall] +def collect_package_install(packages: t.List[str], constraints: bool = True) -> t.List[PipInstall]: """Return the details necessary to install the specified packages.""" return collect_install([], [], packages, constraints=constraints) -def collect_sanity_install(sanity): # type: (str) -> t.List[PipInstall] +def collect_sanity_install(sanity: str) -> t.List[PipInstall]: """Return the details necessary for the specified sanity pip install(s).""" requirements_paths = [] # type: t.List[t.Tuple[str, str]] constraints_paths = [] # type: t.List[t.Tuple[str, str]] @@ -318,7 +318,7 @@ def collect_sanity_install(sanity): # type: (str) -> t.List[PipInstall] return collect_install(requirements_paths, constraints_paths, constraints=False) -def collect_units_install(): # type: () -> t.List[PipInstall] +def collect_units_install() -> t.List[PipInstall]: """Return details necessary for the specified 
units pip install(s).""" requirements_paths = [] # type: t.List[t.Tuple[str, str]] constraints_paths = [] # type: t.List[t.Tuple[str, str]] @@ -332,7 +332,7 @@ def collect_units_install(): # type: () -> t.List[PipInstall] return collect_install(requirements_paths, constraints_paths) -def collect_integration_install(command, controller): # type: (str, bool) -> t.List[PipInstall] +def collect_integration_install(command: str, controller: bool) -> t.List[PipInstall]: """Return details necessary for the specified integration pip install(s).""" requirements_paths = [] # type: t.List[t.Tuple[str, str]] constraints_paths = [] # type: t.List[t.Tuple[str, str]] @@ -395,7 +395,7 @@ def collect_install( return installs -def collect_uninstall(packages, ignore_errors=False): # type: (t.List[str], bool) -> t.List[PipUninstall] +def collect_uninstall(packages: t.List[str], ignore_errors: bool = False) -> t.List[PipUninstall]: """Return the details necessary for the specified pip uninstall.""" uninstall = PipUninstall( packages=packages, @@ -408,7 +408,7 @@ def collect_uninstall(packages, ignore_errors=False): # type: (t.List[str], boo # Support -def get_venv_packages(python): # type: (PythonConfig) -> t.Dict[str, str] +def get_venv_packages(python: PythonConfig) -> t.Dict[str, str]: """Return a dictionary of Python packages needed for a consistent virtual environment specific to the given Python version.""" # NOTE: This same information is needed for building the base-test-container image. @@ -443,7 +443,7 @@ def get_venv_packages(python): # type: (PythonConfig) -> t.Dict[str, str] return packages -def requirements_allowed(args, controller): # type: (EnvironmentConfig, bool) -> bool +def requirements_allowed(args: EnvironmentConfig, controller: bool) -> bool: """ Return True if requirements can be installed, otherwise return False. 
@@ -464,7 +464,7 @@ def requirements_allowed(args, controller): # type: (EnvironmentConfig, bool) - return target.is_managed or target.python.is_managed -def prepare_pip_script(commands): # type: (t.List[PipCommand]) -> str +def prepare_pip_script(commands: t.List[PipCommand]) -> str: """Generate a Python script to perform the requested pip commands.""" data = [command.serialize() for command in commands] @@ -486,7 +486,7 @@ def prepare_pip_script(commands): # type: (t.List[PipCommand]) -> str return script -def usable_pip_file(path): # type: (t.Optional[str]) -> bool +def usable_pip_file(path: t.Optional[str]) -> bool: """Return True if the specified pip file is usable, otherwise False.""" return bool(path) and os.path.exists(path) and bool(os.path.getsize(path)) @@ -494,7 +494,7 @@ def usable_pip_file(path): # type: (t.Optional[str]) -> bool # Cryptography -def is_cryptography_available(python): # type: (str) -> bool +def is_cryptography_available(python: str) -> bool: """Return True if cryptography is available for the given python.""" try: raw_command([python, '-c', 'import cryptography'], capture=True) @@ -504,7 +504,7 @@ def is_cryptography_available(python): # type: (str) -> bool return True -def get_cryptography_requirements(python): # type: (PythonConfig) -> t.List[str] +def get_cryptography_requirements(python: PythonConfig) -> t.List[str]: """ Return the correct cryptography and pyopenssl requirements for the given python version. The version of cryptography installed depends on the python version and openssl version. @@ -534,7 +534,7 @@ def get_cryptography_requirements(python): # type: (PythonConfig) -> t.List[str return requirements -def get_openssl_version(python): # type: (PythonConfig) -> t.Optional[t.Tuple[int, ...]] +def get_openssl_version(python: PythonConfig) -> t.Optional[t.Tuple[int, ...]]: """Return the openssl version.""" if not python.version.startswith('2.'): # OpenSSL version checking only works on Python 3.x. 
diff --git a/test/lib/ansible_test/_internal/ssh.py b/test/lib/ansible_test/_internal/ssh.py index c2dfb277c34..035e25a4e1b 100644 --- a/test/lib/ansible_test/_internal/ssh.py +++ b/test/lib/ansible_test/_internal/ssh.py @@ -45,13 +45,13 @@ class SshConnectionDetail: class SshProcess: """Wrapper around an SSH process.""" - def __init__(self, process): # type: (t.Optional[subprocess.Popen]) -> None + def __init__(self, process: t.Optional[subprocess.Popen]) -> None: self._process = process self.pending_forwards = None # type: t.Optional[t.List[t.Tuple[str, int]]] self.forwards = {} # type: t.Dict[t.Tuple[str, int], int] - def terminate(self): # type: () -> None + def terminate(self) -> None: """Terminate the SSH process.""" if not self._process: return # explain mode @@ -62,7 +62,7 @@ class SshProcess: except Exception: # pylint: disable=broad-except pass - def wait(self): # type: () -> None + def wait(self) -> None: """Wait for the SSH process to terminate.""" if not self._process: return # explain mode @@ -232,7 +232,7 @@ def create_ssh_port_redirects( return process -def generate_ssh_inventory(ssh_connections): # type: (t.List[SshConnectionDetail]) -> str +def generate_ssh_inventory(ssh_connections: t.List[SshConnectionDetail]) -> str: """Return an inventory file in JSON format, created from the provided SSH connection details.""" inventory = dict( all=dict( diff --git a/test/lib/ansible_test/_internal/target.py b/test/lib/ansible_test/_internal/target.py index 652359d7897..38404d39c4b 100644 --- a/test/lib/ansible_test/_internal/target.py +++ b/test/lib/ansible_test/_internal/target.py @@ -43,7 +43,7 @@ def find_target_completion(target_func, prefix, short): # type: (t.Callable[[], return ['%s' % ex] -def walk_completion_targets(targets, prefix, short=False): # type: (t.Iterable[CompletionTarget], str, bool) -> t.Tuple[str, ...] 
+def walk_completion_targets(targets: t.Iterable[CompletionTarget], prefix: str, short: bool = False) -> t.Tuple[str, ...]: """Return a tuple of targets from the given target iterable which match the given prefix.""" aliases = set(alias for target in targets for alias in target.aliases) @@ -150,48 +150,48 @@ def walk_module_targets(): yield target -def walk_units_targets(): # type: () -> t.Iterable[TestTarget] +def walk_units_targets() -> t.Iterable[TestTarget]: """Return an iterable of units targets.""" return walk_test_targets(path=data_context().content.unit_path, module_path=data_context().content.unit_module_path, extensions=('.py',), prefix='test_') -def walk_compile_targets(include_symlinks=True): # type: (bool) -> t.Iterable[TestTarget] +def walk_compile_targets(include_symlinks: bool = True) -> t.Iterable[TestTarget]: """Return an iterable of compile targets.""" return walk_test_targets(module_path=data_context().content.module_path, extensions=('.py',), extra_dirs=('bin',), include_symlinks=include_symlinks) -def walk_powershell_targets(include_symlinks=True): # type: (bool) -> t.Iterable[TestTarget] +def walk_powershell_targets(include_symlinks: bool = True) -> t.Iterable[TestTarget]: """Return an iterable of PowerShell targets.""" return walk_test_targets(module_path=data_context().content.module_path, extensions=('.ps1', '.psm1'), include_symlinks=include_symlinks) -def walk_sanity_targets(): # type: () -> t.Iterable[TestTarget] +def walk_sanity_targets() -> t.Iterable[TestTarget]: """Return an iterable of sanity targets.""" return walk_test_targets(module_path=data_context().content.module_path, include_symlinks=True, include_symlinked_directories=True) -def walk_posix_integration_targets(include_hidden=False): # type: (bool) -> t.Iterable[IntegrationTarget] +def walk_posix_integration_targets(include_hidden: bool = False) -> t.Iterable[IntegrationTarget]: """Return an iterable of POSIX integration targets.""" for target in walk_integration_targets(): 
if 'posix/' in target.aliases or (include_hidden and 'hidden/posix/' in target.aliases): yield target -def walk_network_integration_targets(include_hidden=False): # type: (bool) -> t.Iterable[IntegrationTarget] +def walk_network_integration_targets(include_hidden: bool = False) -> t.Iterable[IntegrationTarget]: """Return an iterable of network integration targets.""" for target in walk_integration_targets(): if 'network/' in target.aliases or (include_hidden and 'hidden/network/' in target.aliases): yield target -def walk_windows_integration_targets(include_hidden=False): # type: (bool) -> t.Iterable[IntegrationTarget] +def walk_windows_integration_targets(include_hidden: bool = False) -> t.Iterable[IntegrationTarget]: """Return an iterable of windows integration targets.""" for target in walk_integration_targets(): if 'windows/' in target.aliases or (include_hidden and 'hidden/windows/' in target.aliases): yield target -def walk_integration_targets(): # type: () -> t.Iterable[IntegrationTarget] +def walk_integration_targets() -> t.Iterable[IntegrationTarget]: """Return an iterable of integration targets.""" path = data_context().content.integration_targets_path modules = frozenset(target.module for target in walk_module_targets()) @@ -309,7 +309,7 @@ def walk_test_targets( yield TestTarget(file_path, module_path, prefix, path, symlink) -def analyze_integration_target_dependencies(integration_targets): # type: (t.List[IntegrationTarget]) -> t.Dict[str, t.Set[str]] +def analyze_integration_target_dependencies(integration_targets: t.List[IntegrationTarget]) -> t.Dict[str, t.Set[str]]: """Analyze the given list of integration test targets and return a dictionary expressing target names and the target names which depend on them.""" real_target_root = os.path.realpath(data_context().content.integration_targets_path) + '/' @@ -499,7 +499,7 @@ class IntegrationTargetType(enum.Enum): CONFLICT = enum.auto() -def extract_plugin_references(name, aliases): # type: (str, 
t.List[str]) -> t.List[t.Tuple[str, str]] +def extract_plugin_references(name: str, aliases: t.List[str]) -> t.List[t.Tuple[str, str]]: """Return a list of plugin references found in the given integration test target name and aliases.""" plugins = content_plugins() found = [] # type: t.List[t.Tuple[str, str]] @@ -524,7 +524,7 @@ def extract_plugin_references(name, aliases): # type: (str, t.List[str]) -> t.L return found -def categorize_integration_test(name, aliases, force_target): # type: (str, t.List[str], bool) -> t.Tuple[IntegrationTargetType, IntegrationTargetType] +def categorize_integration_test(name: str, aliases: t.List[str], force_target: bool) -> t.Tuple[IntegrationTargetType, IntegrationTargetType]: """Return the integration test target types (used and actual) based on the given target name and aliases.""" context_controller = f'context/{IntegrationTargetType.CONTROLLER.name.lower()}' in aliases context_target = f'context/{IntegrationTargetType.TARGET.name.lower()}' in aliases or force_target @@ -710,7 +710,7 @@ class IntegrationTarget(CompletionTarget): class TargetPatternsNotMatched(ApplicationError): """One or more targets were not matched when a match was required.""" - def __init__(self, patterns): # type: (t.Set[str]) -> None + def __init__(self, patterns: t.Set[str]) -> None: self.patterns = sorted(patterns) if len(patterns) > 1: diff --git a/test/lib/ansible_test/_internal/test.py b/test/lib/ansible_test/_internal/test.py index 6d2cf2a3440..c99bbe9c4a4 100644 --- a/test/lib/ansible_test/_internal/test.py +++ b/test/lib/ansible_test/_internal/test.py @@ -38,7 +38,7 @@ def calculate_best_confidence(choices, metadata): # type: (t.Tuple[t.Tuple[str, return best_confidence -def calculate_confidence(path, line, metadata): # type: (str, int, Metadata) -> int +def calculate_confidence(path: str, line: int, metadata: Metadata) -> int: """Return the confidence level for a test result associated with the given file path and line number.""" ranges = 
metadata.changes.get(path) @@ -60,7 +60,7 @@ def calculate_confidence(path, line, metadata): # type: (str, int, Metadata) -> class TestResult: """Base class for test results.""" - def __init__(self, command, test, python_version=None): # type: (str, str, t.Optional[str]) -> None + def __init__(self, command: str, test: str, python_version: t.Optional[str] = None) -> None: self.command = command self.test = test self.python_version = python_version @@ -69,7 +69,7 @@ class TestResult: if self.python_version: self.name += '-python-%s' % self.python_version - def write(self, args): # type: (TestConfig) -> None + def write(self, args: TestConfig) -> None: """Write the test results to various locations.""" self.write_console() self.write_bot(args) @@ -80,19 +80,19 @@ class TestResult: if args.junit: self.write_junit(args) - def write_console(self): # type: () -> None + def write_console(self) -> None: """Write results to console.""" - def write_lint(self): # type: () -> None + def write_lint(self) -> None: """Write lint results to stdout.""" - def write_bot(self, args): # type: (TestConfig) -> None + def write_bot(self, args: TestConfig) -> None: """Write results to a file for ansibullbot to consume.""" - def write_junit(self, args): # type: (TestConfig) -> None + def write_junit(self, args: TestConfig) -> None: """Write results to a junit XML file.""" - def create_result_name(self, extension): # type: (str) -> str + def create_result_name(self, extension: str) -> str: """Return the name of the result file using the given extension.""" name = 'ansible-test-%s' % self.command @@ -106,7 +106,7 @@ class TestResult: return name - def save_junit(self, args, test_case): # type: (TestConfig, junit_xml.TestCase) -> None + def save_junit(self, args: TestConfig, test_case: junit_xml.TestCase) -> None: """Save the given test case results to disk as JUnit XML.""" suites = junit_xml.TestSuites( suites=[ @@ -128,12 +128,12 @@ class TestResult: class TestTimeout(TestResult): """Test 
timeout.""" - def __init__(self, timeout_duration): # type: (int) -> None + def __init__(self, timeout_duration: int) -> None: super().__init__(command='timeout', test='') self.timeout_duration = timeout_duration - def write(self, args): # type: (TestConfig) -> None + def write(self, args: TestConfig) -> None: """Write the test results to various locations.""" message = 'Tests were aborted after exceeding the %d minute time limit.' % self.timeout_duration @@ -180,7 +180,7 @@ One or more of the following situations may be responsible: class TestSuccess(TestResult): """Test success.""" - def write_junit(self, args): # type: (TestConfig) -> None + def write_junit(self, args: TestConfig) -> None: """Write results to a junit XML file.""" test_case = junit_xml.TestCase(classname=self.command, name=self.name) @@ -189,19 +189,19 @@ class TestSuccess(TestResult): class TestSkipped(TestResult): """Test skipped.""" - def __init__(self, command, test, python_version=None): # type: (str, str, t.Optional[str]) -> None + def __init__(self, command: str, test: str, python_version: t.Optional[str] = None) -> None: super().__init__(command, test, python_version) self.reason = None # type: t.Optional[str] - def write_console(self): # type: () -> None + def write_console(self) -> None: """Write results to console.""" if self.reason: display.warning(self.reason) else: display.info('No tests applicable.', verbosity=1) - def write_junit(self, args): # type: (TestConfig) -> None + def write_junit(self, args: TestConfig) -> None: """Write results to a junit XML file.""" test_case = junit_xml.TestCase( classname=self.command, @@ -232,14 +232,14 @@ class TestFailure(TestResult): self.messages = messages self.summary = summary - def write(self, args): # type: (TestConfig) -> None + def write(self, args: TestConfig) -> None: """Write the test results to various locations.""" if args.metadata.changes: self.populate_confidence(args.metadata) super().write(args) - def write_console(self): # type: 
() -> None + def write_console(self) -> None: """Write results to console.""" if self.summary: display.error(self.summary) @@ -258,7 +258,7 @@ class TestFailure(TestResult): if doc_url: display.info('See documentation for help: %s' % doc_url) - def write_lint(self): # type: () -> None + def write_lint(self) -> None: """Write lint results to stdout.""" if self.summary: command = self.format_command() @@ -270,7 +270,7 @@ class TestFailure(TestResult): for message in self.messages: print(message) # display goes to stderr, this should be on stdout - def write_junit(self, args): # type: (TestConfig) -> None + def write_junit(self, args: TestConfig) -> None: """Write results to a junit XML file.""" title = self.format_title() output = self.format_block() @@ -288,7 +288,7 @@ class TestFailure(TestResult): self.save_junit(args, test_case) - def write_bot(self, args): # type: (TestConfig) -> None + def write_bot(self, args: TestConfig) -> None: """Write results to a file for ansibullbot to consume.""" docs = self.find_docs() message = self.format_title(help_link=docs) @@ -315,13 +315,13 @@ class TestFailure(TestResult): write_json_test_results(ResultType.BOT, self.create_result_name('.json'), bot_data) - def populate_confidence(self, metadata): # type: (Metadata) -> None + def populate_confidence(self, metadata: Metadata) -> None: """Populate test result confidence using the provided metadata.""" for message in self.messages: if message.confidence is None: message.confidence = calculate_confidence(message.path, message.line, metadata) - def format_command(self): # type: () -> str + def format_command(self) -> str: """Return a string representing the CLI command associated with the test failure.""" command = 'ansible-test %s' % self.command @@ -356,7 +356,7 @@ class TestFailure(TestResult): return url - def format_title(self, help_link=None): # type: (t.Optional[str]) -> str + def format_title(self, help_link: t.Optional[str] = None) -> str: """Return a string containing a 
title/heading for this test failure, including an optional help link to explain the test.""" command = self.format_command() @@ -374,7 +374,7 @@ class TestFailure(TestResult): return title - def format_block(self): # type: () -> str + def format_block(self) -> str: """Format the test summary or messages as a block of text and return the result.""" if self.summary: block = self.summary @@ -411,37 +411,37 @@ class TestMessage: self.confidence = confidence @property - def path(self): # type: () -> str + def path(self) -> str: """Return the path.""" return self.__path @property - def line(self): # type: () -> int + def line(self) -> int: """Return the line number, or 0 if none is available.""" return self.__line @property - def column(self): # type: () -> int + def column(self) -> int: """Return the column number, or 0 if none is available.""" return self.__column @property - def level(self): # type: () -> str + def level(self) -> str: """Return the level.""" return self.__level @property - def code(self): # type: () -> t.Optional[str] + def code(self) -> t.Optional[str]: """Return the code, if any.""" return self.__code @property - def message(self): # type: () -> str + def message(self) -> str: """Return the message.""" return self.__message @property - def tuple(self): # type: () -> t.Tuple[str, int, int, str, t.Optional[str], str] + def tuple(self) -> t.Tuple[str, int, int, str, t.Optional[str], str]: """Return a tuple with all the immutable values of this test message.""" return self.__path, self.__line, self.__column, self.__level, self.__code, self.__message @@ -469,7 +469,7 @@ class TestMessage: def __str__(self): return self.format() - def format(self, show_confidence=False): # type: (bool) -> str + def format(self, show_confidence: bool = False) -> str: """Return a string representation of this message, optionally including the confidence level.""" if self.__code: msg = '%s: %s' % (self.__code, self.__message) diff --git 
a/test/lib/ansible_test/_internal/thread.py b/test/lib/ansible_test/_internal/thread.py index f74b365d969..ffae33ca591 100644 --- a/test/lib/ansible_test/_internal/thread.py +++ b/test/lib/ansible_test/_internal/thread.py @@ -48,7 +48,7 @@ class WrappedThread(threading.Thread): return result -def mutex(func): # type: (TCallable) -> TCallable +def mutex(func: TCallable) -> TCallable: """Enforce exclusive access on a decorated function.""" lock = threading.Lock() diff --git a/test/lib/ansible_test/_internal/timeout.py b/test/lib/ansible_test/_internal/timeout.py index c255f5ce9f1..df982d6e5f9 100644 --- a/test/lib/ansible_test/_internal/timeout.py +++ b/test/lib/ansible_test/_internal/timeout.py @@ -35,7 +35,7 @@ from .test import ( ) -def get_timeout(): # type: () -> t.Optional[t.Dict[str, t.Any]] +def get_timeout() -> t.Optional[t.Dict[str, t.Any]]: """Return details about the currently set timeout, if any, otherwise return None.""" if not os.path.exists(TIMEOUT_PATH): return None @@ -46,13 +46,13 @@ def get_timeout(): # type: () -> t.Optional[t.Dict[str, t.Any]] return data -def configure_timeout(args): # type: (CommonConfig) -> None +def configure_timeout(args: CommonConfig) -> None: """Configure the timeout.""" if isinstance(args, TestConfig): configure_test_timeout(args) # only tests are subject to the timeout -def configure_test_timeout(args): # type: (TestConfig) -> None +def configure_test_timeout(args: TestConfig) -> None: """Configure the test timeout.""" timeout = get_timeout() @@ -81,7 +81,7 @@ def configure_test_timeout(args): # type: (TestConfig) -> None raise ApplicationError('Tests aborted after exceeding the %d minute time limit.' 
% timeout_duration) - def timeout_waiter(timeout_seconds): # type: (int) -> None + def timeout_waiter(timeout_seconds: int) -> None: """Background thread which will kill the current process if the timeout elapses.""" time.sleep(timeout_seconds) os.kill(os.getpid(), signal.SIGUSR1) diff --git a/test/lib/ansible_test/_internal/util.py b/test/lib/ansible_test/_internal/util.py index ec03c0cc6e8..2a45c8c6b1c 100644 --- a/test/lib/ansible_test/_internal/util.py +++ b/test/lib/ansible_test/_internal/util.py @@ -235,7 +235,7 @@ def filter_args(args, filters): # type: (t.List[str], t.Dict[str, int]) -> t.Li return result -def read_lines_without_comments(path, remove_blank_lines=False, optional=False): # type: (str, bool, bool) -> t.List[str] +def read_lines_without_comments(path: str, remove_blank_lines: bool = False, optional: bool = False) -> t.List[str]: """ Returns lines from the specified text file with comments removed. Comments are any content from a hash symbol to the end of a line. @@ -309,7 +309,7 @@ def find_executable(executable, cwd=None, path=None, required=True): # type: (s return match -def find_python(version, path=None, required=True): # type: (str, t.Optional[str], bool) -> t.Optional[str] +def find_python(version: str, path: t.Optional[str] = None, required: bool = True) -> t.Optional[str]: """ Find and return the full path to the specified Python version. If required, an exception will be raised not found. 
@@ -326,7 +326,7 @@ def find_python(version, path=None, required=True): # type: (str, t.Optional[st @cache -def get_ansible_version(): # type: () -> str +def get_ansible_version() -> str: """Return the Ansible version.""" # ansible may not be in our sys.path # avoids a symlink to release.py since ansible placement relative to ansible-test may change during delegation @@ -339,7 +339,7 @@ def get_ansible_version(): # type: () -> str @cache -def get_available_python_versions(): # type: () -> t.Dict[str, str] +def get_available_python_versions() -> t.Dict[str, str]: """Return a dictionary indicating which supported Python versions are available.""" return dict((version, path) for version, path in ((version, find_python(version, required=False)) for version in SUPPORTED_PYTHON_VERSIONS) if path) @@ -665,7 +665,7 @@ def report_locale() -> None: display.warning(LOCALE_WARNING) -def pass_vars(required, optional): # type: (t.Collection[str], t.Collection[str]) -> t.Dict[str, str] +def pass_vars(required: t.Collection[str], optional: t.Collection[str]) -> t.Dict[str, str]: """Return a filtered dictionary of environment variables based on the current environment.""" env = {} @@ -682,7 +682,7 @@ def pass_vars(required, optional): # type: (t.Collection[str], t.Collection[str return env -def remove_tree(path): # type: (str) -> None +def remove_tree(path: str) -> None: """Remove the specified directory, siliently continuing if the directory does not exist.""" try: shutil.rmtree(to_bytes(path)) @@ -691,7 +691,7 @@ def remove_tree(path): # type: (str) -> None raise -def is_binary_file(path): # type: (str) -> bool +def is_binary_file(path: str) -> bool: """Return True if the specified file is a binary file, otherwise return False.""" assume_text = { '.cfg', @@ -747,12 +747,12 @@ def is_binary_file(path): # type: (str) -> bool return b'\0' in path_fd.read(4096) -def generate_name(length=8): # type: (int) -> str +def generate_name(length: int = 8) -> str: """Generate and return a 
random name.""" return ''.join(random.choice(string.ascii_letters + string.digits) for _idx in range(length)) -def generate_password(): # type: () -> str +def generate_password() -> str: """Generate and return random password.""" chars = [ string.ascii_letters, @@ -801,11 +801,11 @@ class Display: if os.isatty(0): self.rows, self.columns = unpack('HHHH', fcntl.ioctl(0, TIOCGWINSZ, pack('HHHH', 0, 0, 0, 0)))[:2] - def __warning(self, message): # type: (str) -> None + def __warning(self, message: str) -> None: """Internal implementation for displaying a warning message.""" self.print_message('WARNING: %s' % message, color=self.purple) - def review_warnings(self): # type: () -> None + def review_warnings(self) -> None: """Review all warnings which previously occurred.""" if not self.warnings: return @@ -815,7 +815,7 @@ class Display: for warning in self.warnings: self.__warning(warning) - def warning(self, message, unique=False, verbosity=0): # type: (str, bool, int) -> None + def warning(self, message: str, unique: bool = False, verbosity: int = 0) -> None: """Display a warning level message.""" if verbosity > self.verbosity: return @@ -829,19 +829,19 @@ class Display: self.__warning(message) self.warnings.append(message) - def notice(self, message): # type: (str) -> None + def notice(self, message: str) -> None: """Display a notice level message.""" self.print_message('NOTICE: %s' % message, color=self.purple) - def error(self, message): # type: (str) -> None + def error(self, message: str) -> None: """Display an error level message.""" self.print_message('ERROR: %s' % message, color=self.red) - def fatal(self, message): # type: (str) -> None + def fatal(self, message: str) -> None: """Display a fatal level message.""" self.print_message('FATAL: %s' % message, color=self.red, stderr=True) - def info(self, message, verbosity=0, truncate=False): # type: (str, int, bool) -> None + def info(self, message: str, verbosity: int = 0, truncate: bool = False) -> None: 
"""Display an info level message.""" if self.verbosity >= verbosity: color = self.verbosity_colors.get(verbosity, self.yellow) @@ -929,7 +929,7 @@ class SubprocessError(ApplicationError): class MissingEnvironmentVariable(ApplicationError): """Error caused by missing environment variable.""" - def __init__(self, name): # type: (str) -> None + def __init__(self, name: str) -> None: super().__init__('Missing environment variable: %s' % name) self.name = name @@ -949,7 +949,7 @@ def retry(func, ex_type=SubprocessError, sleep=10, attempts=10, warn=True): return func() -def parse_to_list_of_dict(pattern, value): # type: (str, str) -> t.List[t.Dict[str, str]] +def parse_to_list_of_dict(pattern: str, value: str) -> t.List[t.Dict[str, str]]: """Parse lines from the given value using the specified pattern and return the extracted list of key/value pair dictionaries.""" matched = [] unmatched = [] @@ -968,7 +968,7 @@ def parse_to_list_of_dict(pattern, value): # type: (str, str) -> t.List[t.Dict[ return matched -def get_subclasses(class_type): # type: (t.Type[C]) -> t.List[t.Type[C]] +def get_subclasses(class_type: t.Type[C]) -> t.List[t.Type[C]]: """Returns a list of types that are concrete subclasses of the given type.""" subclasses = set() # type: t.Set[t.Type[C]] queue = [class_type] # type: t.List[t.Type[C]] @@ -985,7 +985,7 @@ def get_subclasses(class_type): # type: (t.Type[C]) -> t.List[t.Type[C]] return sorted(subclasses, key=lambda sc: sc.__name__) -def is_subdir(candidate_path, path): # type: (str, str) -> bool +def is_subdir(candidate_path: str, path: str) -> bool: """Returns true if candidate_path is path or a subdirectory of path.""" if not path.endswith(os.path.sep): path += os.path.sep @@ -996,7 +996,7 @@ def is_subdir(candidate_path, path): # type: (str, str) -> bool return candidate_path.startswith(path) -def paths_to_dirs(paths): # type: (t.List[str]) -> t.List[str] +def paths_to_dirs(paths: t.List[str]) -> t.List[str]: """Returns a list of directories 
extracted from the given list of paths.""" dir_names = set() @@ -1012,7 +1012,7 @@ def paths_to_dirs(paths): # type: (t.List[str]) -> t.List[str] return sorted(dir_names) -def str_to_version(version): # type: (str) -> t.Tuple[int, ...] +def str_to_version(version: str) -> t.Tuple[int, ...]: """Return a version tuple from a version string.""" return tuple(int(n) for n in version.split('.')) @@ -1022,12 +1022,12 @@ def version_to_str(version): # type: (t.Tuple[int, ...]) -> str return '.'.join(str(n) for n in version) -def sorted_versions(versions): # type: (t.List[str]) -> t.List[str] +def sorted_versions(versions: t.List[str]) -> t.List[str]: """Return a sorted copy of the given list of versions.""" return [version_to_str(version) for version in sorted(str_to_version(version) for version in versions)] -def import_plugins(directory, root=None): # type: (str, t.Optional[str]) -> None +def import_plugins(directory: str, root: t.Optional[str] = None) -> None: """ Import plugins from the given directory relative to the given root. If the root is not provided, the 'lib' directory for the test runner will be used. 
@@ -1055,7 +1055,7 @@ def load_plugins(base_type, database): # type: (t.Type[C], t.Dict[str, t.Type[C database[plugin] = plugins[plugin] -def load_module(path, name): # type: (str, str) -> None +def load_module(path: str, name: str) -> None: """Load a Python module using the given name and path.""" if name in sys.modules: return @@ -1071,24 +1071,24 @@ def sanitize_host_name(name): return re.sub('[^A-Za-z0-9]+', '-', name)[:63].strip('-') -def get_generic_type(base_type, generic_base_type): # type: (t.Type, t.Type[TValue]) -> t.Optional[t.Type[TValue]] +def get_generic_type(base_type: t.Type, generic_base_type: t.Type[TValue]) -> t.Optional[t.Type[TValue]]: """Return the generic type arg derived from the generic_base_type type that is associated with the base_type type, if any, otherwise return None.""" # noinspection PyUnresolvedReferences type_arg = t.get_args(base_type.__orig_bases__[0])[0] return None if isinstance(type_arg, generic_base_type) else type_arg -def get_type_associations(base_type, generic_base_type): # type: (t.Type[TBase], t.Type[TValue]) -> t.List[t.Tuple[t.Type[TValue], t.Type[TBase]]] +def get_type_associations(base_type: t.Type[TBase], generic_base_type: t.Type[TValue]) -> t.List[t.Tuple[t.Type[TValue], t.Type[TBase]]]: """Create and return a list of tuples associating generic_base_type derived types with a corresponding base_type derived type.""" return [item for item in [(get_generic_type(sc_type, generic_base_type), sc_type) for sc_type in get_subclasses(base_type)] if item[1]] -def get_type_map(base_type, generic_base_type): # type: (t.Type[TBase], t.Type[TValue]) -> t.Dict[t.Type[TValue], t.Type[TBase]] +def get_type_map(base_type: t.Type[TBase], generic_base_type: t.Type[TValue]) -> t.Dict[t.Type[TValue], t.Type[TBase]]: """Create and return a mapping of generic_base_type derived types to base_type derived types.""" return {item[0]: item[1] for item in get_type_associations(base_type, generic_base_type)} -def 
verify_sys_executable(path): # type: (str) -> t.Optional[str] +def verify_sys_executable(path: str) -> t.Optional[str]: """Verify that the given path references the current Python interpreter. If not, return the expected path, otherwise return None.""" if path == sys.executable: return None diff --git a/test/lib/ansible_test/_internal/util_common.py b/test/lib/ansible_test/_internal/util_common.py index f7b8bfc59b5..7e292d6852b 100644 --- a/test/lib/ansible_test/_internal/util_common.py +++ b/test/lib/ansible_test/_internal/util_common.py @@ -63,7 +63,7 @@ CHECK_YAML_VERSIONS = {} # type: t.Dict[str, t.Any] class ShellScriptTemplate: """A simple substitution template for shell scripts.""" - def __init__(self, template): # type: (str) -> None + def __init__(self, template: str) -> None: self.template = template def substitute(self, **kwargs: t.Union[str, t.List[str]]) -> str: @@ -102,20 +102,20 @@ class ResultType: ResultType.REPORTS = ResultType('reports') ResultType.TMP = ResultType('.tmp') - def __init__(self, name): # type: (str) -> None + def __init__(self, name: str) -> None: self.name = name @property - def relative_path(self): # type: () -> str + def relative_path(self) -> str: """The content relative path to the results.""" return os.path.join(data_context().content.results_path, self.name) @property - def path(self): # type: () -> str + def path(self) -> str: """The absolute path to the results.""" return os.path.join(data_context().content.root, self.relative_path) - def __str__(self): # type: () -> str + def __str__(self) -> str: return self.name @@ -125,7 +125,7 @@ ResultType._populate() # pylint: disable=protected-access class CommonConfig: """Configuration common to all commands.""" - def __init__(self, args, command): # type: (t.Any, str) -> None + def __init__(self, args: t.Any, command: str) -> None: self.command = command self.interactive = False self.check_layout = True @@ -144,12 +144,12 @@ class CommonConfig: self.cache = {} # type: t.Dict[str, 
t.Any] - def get_ansible_config(self): # type: () -> str + def get_ansible_config(self) -> str: """Return the path to the Ansible config for the given config.""" return os.path.join(ANSIBLE_TEST_DATA_ROOT, 'ansible.cfg') -def create_result_directories(args): # type: (CommonConfig) -> None +def create_result_directories(args: CommonConfig) -> None: """Create result directories.""" if args.explain: return @@ -158,7 +158,7 @@ def create_result_directories(args): # type: (CommonConfig) -> None make_dirs(ResultType.DATA.path) -def handle_layout_messages(messages): # type: (t.Optional[LayoutMessages]) -> None +def handle_layout_messages(messages: t.Optional[LayoutMessages]) -> None: """Display the given layout messages.""" if not messages: return @@ -173,7 +173,7 @@ def handle_layout_messages(messages): # type: (t.Optional[LayoutMessages]) -> N raise ApplicationError('\n'.join(messages.error)) -def process_scoped_temporary_file(args, prefix='ansible-test-', suffix=None): # type: (CommonConfig, t.Optional[str], t.Optional[str]) -> str +def process_scoped_temporary_file(args: CommonConfig, prefix: t.Optional[str] = 'ansible-test-', suffix: t.Optional[str] = None) -> str: """Return the path to a temporary file that will be automatically removed when the process exits.""" if args.explain: path = os.path.join(tempfile.gettempdir(), f'{prefix or tempfile.gettempprefix()}{generate_name()}{suffix or ""}') @@ -185,7 +185,7 @@ def process_scoped_temporary_file(args, prefix='ansible-test-', suffix=None): # return path -def process_scoped_temporary_directory(args, prefix='ansible-test-', suffix=None): # type: (CommonConfig, t.Optional[str], t.Optional[str]) -> str +def process_scoped_temporary_directory(args: CommonConfig, prefix: t.Optional[str] = 'ansible-test-', suffix: t.Optional[str] = None) -> str: """Return the path to a temporary directory that will be automatically removed when the process exits.""" if args.explain: path = os.path.join(tempfile.gettempdir(), f'{prefix or 
tempfile.gettempprefix()}{generate_name()}{suffix or ""}') @@ -197,7 +197,7 @@ def process_scoped_temporary_directory(args, prefix='ansible-test-', suffix=None @contextlib.contextmanager -def named_temporary_file(args, prefix, suffix, directory, content): # type: (CommonConfig, str, str, t.Optional[str], str) -> t.Iterator[str] +def named_temporary_file(args: CommonConfig, prefix: str, suffix: str, directory: t.Optional[str], content: str) -> t.Iterator[str]: """Context manager for a named temporary file.""" if args.explain: yield os.path.join(directory or '/tmp', '%stemp%s' % (prefix, suffix)) @@ -220,14 +220,14 @@ def write_json_test_results(category, # type: ResultType write_json_file(path, content, create_directories=True, formatted=formatted, encoder=encoder) -def write_text_test_results(category, name, content): # type: (ResultType, str, str) -> None +def write_text_test_results(category: ResultType, name: str, content: str) -> None: """Write the given text content to the specified test results path, creating directories as needed.""" path = os.path.join(category.path, name) write_text_file(path, content, create_directories=True) @cache -def get_injector_path(): # type: () -> str +def get_injector_path() -> str: """Return the path to a directory which contains a `python.py` executable and associated injector scripts.""" injector_path = tempfile.mkdtemp(prefix='ansible-test-', suffix='-injector', dir='/tmp') @@ -269,7 +269,7 @@ def get_injector_path(): # type: () -> str return injector_path -def set_shebang(script, executable): # type: (str, str) -> str +def set_shebang(script: str, executable: str) -> str: """Return the given script with the specified executable used for the shebang.""" prefix = '#!' 
shebang = prefix + executable @@ -292,7 +292,7 @@ def set_shebang(script, executable): # type: (str, str) -> str return script -def get_python_path(interpreter): # type: (str) -> str +def get_python_path(interpreter: str) -> str: """Return the path to a directory which contains a `python` executable that runs the specified interpreter.""" python_path = PYTHON_PATHS.get(interpreter) @@ -329,14 +329,14 @@ def get_python_path(interpreter): # type: (str) -> str return python_path -def create_temp_dir(prefix=None, suffix=None, base_dir=None): # type: (t.Optional[str], t.Optional[str], t.Optional[str]) -> str +def create_temp_dir(prefix: t.Optional[str] = None, suffix: t.Optional[str] = None, base_dir: t.Optional[str] = None) -> str: """Create a temporary directory that persists until the current process exits.""" temp_path = tempfile.mkdtemp(prefix=prefix or 'tmp', suffix=suffix or '', dir=base_dir) atexit.register(remove_tree, temp_path) return temp_path -def create_interpreter_wrapper(interpreter, injected_interpreter): # type: (str, str) -> None +def create_interpreter_wrapper(interpreter: str, injected_interpreter: str) -> None: """Create a wrapper for the given Python interpreter at the specified path.""" # sys.executable is used for the shebang to guarantee it is a binary instead of a script # injected_interpreter could be a script from the system or our own wrapper created for the --venv option @@ -431,7 +431,7 @@ def yamlcheck(python): return result['cloader'] -def check_pyyaml(python, required=True, quiet=False): # type: (PythonConfig, bool, bool) -> t.Optional[bool] +def check_pyyaml(python: PythonConfig, required: bool = True, quiet: bool = False) -> t.Optional[bool]: """ Return True if PyYAML has libyaml support, False if it does not and None if it was not found. The result is cached if True or required. 
diff --git a/test/lib/ansible_test/_internal/venv.py b/test/lib/ansible_test/_internal/venv.py index 21dded627cb..202f0eeac06 100644 --- a/test/lib/ansible_test/_internal/venv.py +++ b/test/lib/ansible_test/_internal/venv.py @@ -129,7 +129,7 @@ def create_virtual_environment(args, # type: EnvironmentConfig return False -def iterate_real_pythons(version): # type: (str) -> t.Iterable[str] +def iterate_real_pythons(version: str) -> t.Iterable[str]: """ Iterate through available real python interpreters of the requested version. The current interpreter will be checked and then the path will be searched. @@ -169,7 +169,7 @@ def iterate_real_pythons(version): # type: (str) -> t.Iterable[str] yield found_python -def get_python_real_prefix(python_path): # type: (str) -> t.Optional[str] +def get_python_real_prefix(python_path: str) -> t.Optional[str]: """ Return the real prefix of the specified interpreter or None if the interpreter is not a virtual environment created by 'virtualenv'. """ @@ -245,7 +245,7 @@ def run_virtualenv(args, # type: EnvironmentConfig return True -def get_virtualenv_version(args, python): # type: (EnvironmentConfig, str) -> t.Optional[t.Tuple[int, ...]] +def get_virtualenv_version(args: EnvironmentConfig, python: str) -> t.Optional[t.Tuple[int, ...]]: """Get the virtualenv version for the given python interpreter, if available, otherwise return None.""" try: cache = get_virtualenv_version.cache # type: ignore[attr-defined]