ansible-test - Avoid use of deprecated type hints. (#78456)

* ansible-test - Avoid use of deprecated type hints.

PEP 585 deprecated many container types in the `typing` module in favor of the actual types, which support subscripting as of Python 3.9.

Conversion of `t.Type` was skipped since PyCharm does not currently recognize it.

* ansible-test - Fix `t` and `c` imports/shadowing.
pull/78466/head
Matt Clay 2 years ago committed by GitHub
parent 5bee66fc5d
commit 85acf4d1e5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -48,7 +48,7 @@ from .provisioning import (
)
def main(cli_args: t.Optional[t.List[str]] = None) -> None:
def main(cli_args: t.Optional[list[str]] = None) -> None:
"""Main program function."""
try:
os.chdir(data_context().content.root)

@ -52,7 +52,7 @@ from .host_configs import (
)
def parse_inventory(args: EnvironmentConfig, inventory_path: str) -> t.Dict[str, t.Any]:
def parse_inventory(args: EnvironmentConfig, inventory_path: str) -> dict[str, t.Any]:
"""Return a dict parsed from the given inventory file."""
cmd = ['ansible-inventory', '-i', inventory_path, '--list']
env = ansible_environment(args)
@ -60,7 +60,7 @@ def parse_inventory(args: EnvironmentConfig, inventory_path: str) -> t.Dict[str,
return inventory
def get_hosts(inventory: t.Dict[str, t.Any], group_name: str) -> t.Dict[str, t.Dict[str, t.Any]]:
def get_hosts(inventory: dict[str, t.Any], group_name: str) -> dict[str, dict[str, t.Any]]:
"""Return a dict of hosts from the specified group in the given inventory."""
hostvars = inventory.get('_meta', {}).get('hostvars', {})
group = inventory.get(group_name, {})
@ -69,7 +69,7 @@ def get_hosts(inventory: t.Dict[str, t.Any], group_name: str) -> t.Dict[str, t.D
return hosts
def ansible_environment(args: CommonConfig, color: bool = True, ansible_config: t.Optional[str] = None) -> t.Dict[str, str]:
def ansible_environment(args: CommonConfig, color: bool = True, ansible_config: t.Optional[str] = None) -> dict[str, str]:
"""Return a dictionary of environment variables to use when running Ansible commands."""
env = common_environment()
path = env['PATH']
@ -138,7 +138,7 @@ def ansible_environment(args: CommonConfig, color: bool = True, ansible_config:
return env
def configure_plugin_paths(args: CommonConfig) -> t.Dict[str, str]:
def configure_plugin_paths(args: CommonConfig) -> dict[str, str]:
"""Return environment variables with paths to plugins relevant for the current command."""
if not isinstance(args, IntegrationConfig):
return {}
@ -283,7 +283,7 @@ def run_playbook(
inventory_path: str,
playbook: str,
capture: bool,
variables: t.Optional[t.Dict[str, t.Any]] = None,
variables: t.Optional[dict[str, t.Any]] = None,
) -> None:
"""Run the specified playbook using the given inventory file and playbook variables."""
playbook_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'playbooks', playbook)

@ -3,7 +3,6 @@ from __future__ import annotations
import abc
import shlex
import typing as t
from .util import (
get_subclasses,
@ -23,7 +22,7 @@ class Become(metaclass=abc.ABCMeta):
"""The name of the Ansible become plugin that is equivalent to this."""
@abc.abstractmethod
def prepare_command(self, command: t.List[str]) -> t.List[str]:
def prepare_command(self, command: list[str]) -> list[str]:
"""Return the given command, if any, with privilege escalation."""
@ -34,7 +33,7 @@ class Doas(Become):
"""The name of the Ansible become plugin that is equivalent to this."""
raise NotImplementedError('Ansible has no built-in doas become plugin.')
def prepare_command(self, command: t.List[str]) -> t.List[str]:
def prepare_command(self, command: list[str]) -> list[str]:
"""Return the given command, if any, with privilege escalation."""
become = ['doas', '-n']
@ -66,7 +65,7 @@ class Su(Become):
"""The name of the Ansible become plugin that is equivalent to this."""
return 'su'
def prepare_command(self, command: t.List[str]) -> t.List[str]:
def prepare_command(self, command: list[str]) -> list[str]:
"""Return the given command, if any, with privilege escalation."""
become = ['su', '-l', 'root']
@ -96,7 +95,7 @@ class Sudo(Become):
"""The name of the Ansible become plugin that is equivalent to this."""
return 'sudo'
def prepare_command(self, command: t.List[str]) -> t.List[str]:
def prepare_command(self, command: list[str]) -> list[str]:
"""Return the given command, if any, with privilege escalation."""
become = ['sudo', '-in']

@ -27,7 +27,7 @@ from .core_ci import (
class Bootstrap:
"""Base class for bootstrapping systems."""
controller: bool
python_versions: t.List[str]
python_versions: list[str]
ssh_key: SshKey
@property
@ -35,7 +35,7 @@ class Bootstrap:
"""The bootstrap type to pass to the bootstrapping script."""
return self.__class__.__name__.replace('Bootstrap', '').lower()
def get_variables(self) -> t.Dict[str, t.Union[str, t.List[str]]]:
def get_variables(self) -> dict[str, t.Union[str, list[str]]]:
"""The variables to template in the bootstrapping script."""
return dict(
bootstrap_type=self.bootstrap_type,
@ -65,7 +65,7 @@ class Bootstrap:
@dataclasses.dataclass
class BootstrapDocker(Bootstrap):
"""Bootstrap docker instances."""
def get_variables(self) -> t.Dict[str, t.Union[str, t.List[str]]]:
def get_variables(self) -> dict[str, t.Union[str, list[str]]]:
"""The variables to template in the bootstrapping script."""
variables = super().get_variables()
@ -83,7 +83,7 @@ class BootstrapRemote(Bootstrap):
platform: str
platform_version: str
def get_variables(self) -> t.Dict[str, t.Union[str, t.List[str]]]:
def get_variables(self) -> dict[str, t.Union[str, list[str]]]:
"""The variables to template in the bootstrapping script."""
variables = super().get_variables()

@ -1,6 +1,7 @@
"""Cache for commonly shared data that is intended to be immutable."""
from __future__ import annotations
import collections.abc as c
import typing as t
from .config import (
@ -15,14 +16,14 @@ class CommonCache:
def __init__(self, args: CommonConfig) -> None:
self.args = args
def get(self, key: str, factory: t.Callable[[], TValue]) -> TValue:
def get(self, key: str, factory: c.Callable[[], TValue]) -> TValue:
"""Return the value from the cache identified by the given key, using the specified factory method if it is not found."""
if key not in self.args.cache:
self.args.cache[key] = factory()
return self.args.cache[key]
def get_with_args(self, key: str, factory: t.Callable[[CommonConfig], TValue]) -> TValue:
def get_with_args(self, key: str, factory: c.Callable[[CommonConfig], TValue]) -> TValue:
"""Return the value from the cache identified by the given key, using the specified factory method (which accepts args) if it is not found."""
if key not in self.args.cache:
self.args.cache[key] = factory(self.args)

@ -65,7 +65,7 @@ class CIProvider(metaclass=abc.ABCMeta):
"""Return the base branch or an empty string."""
@abc.abstractmethod
def detect_changes(self, args: TestConfig) -> t.Optional[t.List[str]]:
def detect_changes(self, args: TestConfig) -> t.Optional[list[str]]:
"""Initialize change detection."""
@abc.abstractmethod
@ -73,11 +73,11 @@ class CIProvider(metaclass=abc.ABCMeta):
"""Return True if Ansible Core CI is supported."""
@abc.abstractmethod
def prepare_core_ci_auth(self) -> t.Dict[str, t.Any]:
def prepare_core_ci_auth(self) -> dict[str, t.Any]:
"""Return authentication details for Ansible Core CI."""
@abc.abstractmethod
def get_git_details(self, args: CommonConfig) -> t.Optional[t.Dict[str, t.Any]]:
def get_git_details(self, args: CommonConfig) -> t.Optional[dict[str, t.Any]]:
"""Return details about git in the current environment."""
@ -88,7 +88,7 @@ def get_ci_provider() -> CIProvider:
import_plugins('ci')
candidates = sorted(get_subclasses(CIProvider), key=lambda c: (c.priority, c.__name__))
candidates = sorted(get_subclasses(CIProvider), key=lambda subclass: (subclass.priority, subclass.__name__))
for candidate in candidates:
if candidate.is_supported():
@ -103,7 +103,7 @@ def get_ci_provider() -> CIProvider:
class AuthHelper(metaclass=abc.ABCMeta):
"""Public key based authentication helper for Ansible Core CI."""
def sign_request(self, request: t.Dict[str, t.Any]) -> None:
def sign_request(self, request: dict[str, t.Any]) -> None:
"""Sign the given auth request and make the public key available."""
payload_bytes = to_bytes(json.dumps(request, sort_keys=True))
signature_raw_bytes = self.sign_bytes(payload_bytes)

@ -80,7 +80,7 @@ class AzurePipelines(CIProvider):
return base_branch or ''
def detect_changes(self, args: TestConfig) -> t.Optional[t.List[str]]:
def detect_changes(self, args: TestConfig) -> t.Optional[list[str]]:
"""Initialize change detection."""
result = AzurePipelinesChanges(args)
@ -106,7 +106,7 @@ class AzurePipelines(CIProvider):
"""Return True if Ansible Core CI is supported."""
return True
def prepare_core_ci_auth(self) -> t.Dict[str, t.Any]:
def prepare_core_ci_auth(self) -> dict[str, t.Any]:
"""Return authentication details for Ansible Core CI."""
try:
request = dict(
@ -126,7 +126,7 @@ class AzurePipelines(CIProvider):
return auth
def get_git_details(self, args: CommonConfig) -> t.Optional[t.Dict[str, t.Any]]:
def get_git_details(self, args: CommonConfig) -> t.Optional[dict[str, t.Any]]:
"""Return details about git in the current environment."""
changes = AzurePipelinesChanges(args)
@ -213,7 +213,7 @@ class AzurePipelinesChanges:
self.paths = None # act as though change detection not enabled, do not filter targets
self.diff = []
def get_successful_merge_run_commits(self) -> t.Set[str]:
def get_successful_merge_run_commits(self) -> set[str]:
    """Return a set of recent successful merge commits from Azure Pipelines."""
parameters = dict(
maxBuildsPerDefinition=100, # max 5000
@ -241,7 +241,7 @@ class AzurePipelinesChanges:
return commits
def get_last_successful_commit(self, commits: t.Set[str]) -> t.Optional[str]:
def get_last_successful_commit(self, commits: set[str]) -> t.Optional[str]:
"""Return the last successful commit from git history that is found in the given commit list, or None."""
commit_history = self.git.get_rev_list(max_count=100)
ordered_successful_commits = [commit for commit in commit_history if commit in commits]
@ -254,7 +254,7 @@ def vso_add_attachment(file_type: str, file_name: str, path: str) -> None:
vso('task.addattachment', dict(type=file_type, name=file_name), path)
def vso(name: str, data: t.Dict[str, str], message: str) -> None:
def vso(name: str, data: dict[str, str], message: str) -> None:
"""
Write a logging command for the Azure Pipelines agent to process.
See: https://docs.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash

@ -66,7 +66,7 @@ class Local(CIProvider):
"""Return the base branch or an empty string."""
return ''
def detect_changes(self, args: TestConfig) -> t.Optional[t.List[str]]:
def detect_changes(self, args: TestConfig) -> t.Optional[list[str]]:
"""Initialize change detection."""
result = LocalChanges(args)
@ -121,7 +121,7 @@ class Local(CIProvider):
path = self._get_aci_key_path()
return os.path.exists(path)
def prepare_core_ci_auth(self) -> t.Dict[str, t.Any]:
def prepare_core_ci_auth(self) -> dict[str, t.Any]:
"""Return authentication details for Ansible Core CI."""
path = self._get_aci_key_path()
auth_key = read_text_file(path).strip()
@ -137,7 +137,7 @@ class Local(CIProvider):
return auth
def get_git_details(self, args: CommonConfig) -> t.Optional[t.Dict[str, t.Any]]:
def get_git_details(self, args: CommonConfig) -> t.Optional[dict[str, t.Any]]:
"""Return details about git in the current environment."""
return None # not yet implemented for local

@ -54,11 +54,11 @@ from ..data import (
FOCUSED_TARGET = '__focused__'
def categorize_changes(args: TestConfig, paths: t.List[str], verbose_command: t.Optional[str] = None) -> ChangeDescription:
def categorize_changes(args: TestConfig, paths: list[str], verbose_command: t.Optional[str] = None) -> ChangeDescription:
"""Categorize the given list of changed paths and return a description of the changes."""
mapper = PathMapper(args)
commands: t.Dict[str, t.Set[str]] = {
commands: dict[str, set[str]] = {
'sanity': set(),
'units': set(),
'integration': set(),
@ -68,10 +68,10 @@ def categorize_changes(args: TestConfig, paths: t.List[str], verbose_command: t.
focused_commands = collections.defaultdict(set)
deleted_paths: t.Set[str] = set()
original_paths: t.Set[str] = set()
additional_paths: t.Set[str] = set()
no_integration_paths: t.Set[str] = set()
deleted_paths: set[str] = set()
original_paths: set[str] = set()
additional_paths: set[str] = set()
no_integration_paths: set[str] = set()
for path in paths:
if not os.path.exists(path):
@ -156,8 +156,8 @@ def categorize_changes(args: TestConfig, paths: t.List[str], verbose_command: t.
if any(target == 'all' for target in targets):
commands[command] = {'all'}
sorted_commands = dict((c, sorted(targets)) for c, targets in commands.items() if targets)
focused_commands = dict((c, sorted(targets)) for c, targets in focused_commands.items())
sorted_commands = dict((cmd, sorted(targets)) for cmd, targets in commands.items() if targets)
focused_commands = dict((cmd, sorted(targets)) for cmd, targets in focused_commands.items())
for command, targets in sorted_commands.items():
if targets == ['all']:
@ -206,11 +206,11 @@ class PathMapper:
self.prefixes = load_integration_prefixes()
self.integration_dependencies = analyze_integration_target_dependencies(self.integration_targets)
self.python_module_utils_imports: t.Dict[str, t.Set[str]] = {} # populated on first use to reduce overhead when not needed
self.powershell_module_utils_imports: t.Dict[str, t.Set[str]] = {} # populated on first use to reduce overhead when not needed
self.csharp_module_utils_imports: t.Dict[str, t.Set[str]] = {} # populated on first use to reduce overhead when not needed
self.python_module_utils_imports: dict[str, set[str]] = {} # populated on first use to reduce overhead when not needed
self.powershell_module_utils_imports: dict[str, set[str]] = {} # populated on first use to reduce overhead when not needed
self.csharp_module_utils_imports: dict[str, set[str]] = {} # populated on first use to reduce overhead when not needed
self.paths_to_dependent_targets: t.Dict[str, t.Set[IntegrationTarget]] = {}
self.paths_to_dependent_targets: dict[str, set[IntegrationTarget]] = {}
for target in self.integration_targets:
for path in target.needs_file:
@ -219,7 +219,7 @@ class PathMapper:
self.paths_to_dependent_targets[path].add(target)
def get_dependent_paths(self, path: str) -> t.List[str]:
def get_dependent_paths(self, path: str) -> list[str]:
"""Return a list of paths which depend on the given path, recursively expanding dependent paths as well."""
unprocessed_paths = set(self.get_dependent_paths_non_recursive(path))
paths = set()
@ -238,7 +238,7 @@ class PathMapper:
return sorted(paths)
def get_dependent_paths_non_recursive(self, path: str) -> t.List[str]:
def get_dependent_paths_non_recursive(self, path: str) -> list[str]:
"""Return a list of paths which depend on the given path, including dependent integration test target paths."""
paths = self.get_dependent_paths_internal(path)
paths += [target.path + '/' for target in self.paths_to_dependent_targets.get(path, set())]
@ -246,7 +246,7 @@ class PathMapper:
return paths
def get_dependent_paths_internal(self, path: str) -> t.List[str]:
def get_dependent_paths_internal(self, path: str) -> list[str]:
"""Return a list of paths which depend on the given path."""
ext = os.path.splitext(os.path.split(path)[1])[1]
@ -265,7 +265,7 @@ class PathMapper:
return []
def get_python_module_utils_usage(self, path: str) -> t.List[str]:
def get_python_module_utils_usage(self, path: str) -> list[str]:
"""Return a list of paths which depend on the given path which is a Python module_utils file."""
if not self.python_module_utils_imports:
display.info('Analyzing python module_utils imports...')
@ -278,7 +278,7 @@ class PathMapper:
return sorted(self.python_module_utils_imports[name])
def get_powershell_module_utils_usage(self, path: str) -> t.List[str]:
def get_powershell_module_utils_usage(self, path: str) -> list[str]:
"""Return a list of paths which depend on the given path which is a PowerShell module_utils file."""
if not self.powershell_module_utils_imports:
display.info('Analyzing powershell module_utils imports...')
@ -291,7 +291,7 @@ class PathMapper:
return sorted(self.powershell_module_utils_imports[name])
def get_csharp_module_utils_usage(self, path: str) -> t.List[str]:
def get_csharp_module_utils_usage(self, path: str) -> list[str]:
"""Return a list of paths which depend on the given path which is a C# module_utils file."""
if not self.csharp_module_utils_imports:
display.info('Analyzing C# module_utils imports...')
@ -304,7 +304,7 @@ class PathMapper:
return sorted(self.csharp_module_utils_imports[name])
def get_integration_target_usage(self, path: str) -> t.List[str]:
def get_integration_target_usage(self, path: str) -> list[str]:
"""Return a list of paths which depend on the given path which is an integration target file."""
target_name = path.split('/')[3]
dependents = [os.path.join(data_context().content.integration_targets_path, target) + os.path.sep
@ -312,7 +312,7 @@ class PathMapper:
return dependents
def classify(self, path: str) -> t.Optional[t.Dict[str, str]]:
def classify(self, path: str) -> t.Optional[dict[str, str]]:
"""Classify the given path and return an optional dictionary of the results."""
result = self._classify(path)
@ -326,7 +326,7 @@ class PathMapper:
return result
def _classify(self, path: str) -> t.Optional[t.Dict[str, str]]:
def _classify(self, path: str) -> t.Optional[dict[str, str]]:
"""Return the classification for the given path."""
if data_context().content.is_ansible:
return self._classify_ansible(path)
@ -336,13 +336,13 @@ class PathMapper:
return None
def _classify_common(self, path: str) -> t.Optional[t.Dict[str, str]]:
def _classify_common(self, path: str) -> t.Optional[dict[str, str]]:
"""Return the classification for the given path using rules common to all layouts."""
dirname = os.path.dirname(path)
filename = os.path.basename(path)
name, ext = os.path.splitext(filename)
minimal: t.Dict[str, str] = {}
minimal: dict[str, str] = {}
if os.path.sep not in path:
if filename in (
@ -621,7 +621,7 @@ class PathMapper:
return None
def _classify_collection(self, path: str) -> t.Optional[t.Dict[str, str]]:
def _classify_collection(self, path: str) -> t.Optional[dict[str, str]]:
"""Return the classification for the given path using rules specific to collections."""
result = self._classify_common(path)
@ -631,7 +631,7 @@ class PathMapper:
filename = os.path.basename(path)
dummy, ext = os.path.splitext(filename)
minimal: t.Dict[str, str] = {}
minimal: dict[str, str] = {}
if path.startswith('changelogs/'):
return minimal
@ -659,7 +659,7 @@ class PathMapper:
return None
def _classify_ansible(self, path: str) -> t.Optional[t.Dict[str, str]]:
def _classify_ansible(self, path: str) -> t.Optional[dict[str, str]]:
"""Return the classification for the given path using rules specific to Ansible."""
if path.startswith('test/units/compat/'):
return {
@ -675,7 +675,7 @@ class PathMapper:
filename = os.path.basename(path)
name, ext = os.path.splitext(filename)
minimal: t.Dict[str, str] = {}
minimal: dict[str, str] = {}
if path.startswith('bin/'):
return all_tests(self.args) # broad impact, run all tests
@ -850,7 +850,7 @@ class PathMapper:
return None # unknown, will result in fall-back to run all tests
def _simple_plugin_tests(self, plugin_type: str, plugin_name: str) -> t.Dict[str, t.Optional[str]]:
def _simple_plugin_tests(self, plugin_type: str, plugin_name: str) -> dict[str, t.Optional[str]]:
"""
Return tests for the given plugin type and plugin name.
This function is useful for plugin types which do not require special processing.
@ -876,7 +876,7 @@ class PathMapper:
)
def all_tests(args: TestConfig, force: bool = False) -> t.Dict[str, str]:
def all_tests(args: TestConfig, force: bool = False) -> dict[str, str]:
"""Return the targets for each test command when all tests should be run."""
if force:
integration_all_target = 'all'

@ -3,7 +3,6 @@ from __future__ import annotations
import os
import re
import typing as t
from ..io import (
open_text_file,
@ -26,7 +25,7 @@ from ..target import (
)
def get_csharp_module_utils_imports(powershell_targets: t.List[TestTarget], csharp_targets: t.List[TestTarget]) -> t.Dict[str, t.Set[str]]:
def get_csharp_module_utils_imports(powershell_targets: list[TestTarget], csharp_targets: list[TestTarget]) -> dict[str, set[str]]:
"""Return a dictionary of module_utils names mapped to sets of powershell file paths."""
module_utils = enumerate_module_utils()
@ -38,7 +37,7 @@ def get_csharp_module_utils_imports(powershell_targets: t.List[TestTarget], csha
for target in csharp_targets:
imports_by_target_path[target.path] = extract_csharp_module_utils_imports(target.path, module_utils, True)
imports: t.Dict[str, t.Set[str]] = {module_util: set() for module_util in module_utils}
imports: dict[str, set[str]] = {module_util: set() for module_util in module_utils}
for target_path, modules in imports_by_target_path.items():
for module_util in modules:
@ -65,14 +64,14 @@ def get_csharp_module_utils_name(path: str) -> str:
return name
def enumerate_module_utils() -> t.Set[str]:
def enumerate_module_utils() -> set[str]:
"""Return a set of available module_utils imports."""
return set(get_csharp_module_utils_name(p)
for p in data_context().content.walk_files(data_context().content.module_utils_csharp_path)
if os.path.splitext(p)[1] == '.cs')
def extract_csharp_module_utils_imports(path: str, module_utils: t.Set[str], is_pure_csharp: bool) -> t.Set[str]:
def extract_csharp_module_utils_imports(path: str, module_utils: set[str], is_pure_csharp: bool) -> set[str]:
"""Return a set of module_utils imports found in the specified source file."""
imports = set()
if is_pure_csharp:

@ -3,7 +3,6 @@ from __future__ import annotations
import os
import re
import typing as t
from ..io import (
read_text_file,
@ -26,7 +25,7 @@ from ..target import (
)
def get_powershell_module_utils_imports(powershell_targets: t.List[TestTarget]) -> t.Dict[str, t.Set[str]]:
def get_powershell_module_utils_imports(powershell_targets: list[TestTarget]) -> dict[str, set[str]]:
"""Return a dictionary of module_utils names mapped to sets of powershell file paths."""
module_utils = enumerate_module_utils()
@ -35,7 +34,7 @@ def get_powershell_module_utils_imports(powershell_targets: t.List[TestTarget])
for target in powershell_targets:
imports_by_target_path[target.path] = extract_powershell_module_utils_imports(target.path, module_utils)
imports: t.Dict[str, t.Set[str]] = {module_util: set() for module_util in module_utils}
imports: dict[str, set[str]] = {module_util: set() for module_util in module_utils}
for target_path, modules in imports_by_target_path.items():
for module_util in modules:
@ -62,14 +61,14 @@ def get_powershell_module_utils_name(path: str) -> str:
return name
def enumerate_module_utils() -> t.Set[str]:
def enumerate_module_utils() -> set[str]:
"""Return a set of available module_utils imports."""
return set(get_powershell_module_utils_name(p)
for p in data_context().content.walk_files(data_context().content.module_utils_powershell_path)
if os.path.splitext(p)[1] == '.psm1')
def extract_powershell_module_utils_imports(path: str, module_utils: t.Set[str]) -> t.Set[str]:
def extract_powershell_module_utils_imports(path: str, module_utils: set[str]) -> set[str]:
"""Return a set of module_utils imports found in the specified source file."""
imports = set()

@ -29,7 +29,7 @@ VIRTUAL_PACKAGES = {
}
def get_python_module_utils_imports(compile_targets: t.List[TestTarget]) -> t.Dict[str, t.Set[str]]:
def get_python_module_utils_imports(compile_targets: list[TestTarget]) -> dict[str, set[str]]:
"""Return a dictionary of module_utils names mapped to sets of python file paths."""
module_utils = enumerate_module_utils()
@ -41,7 +41,7 @@ def get_python_module_utils_imports(compile_targets: t.List[TestTarget]) -> t.Di
for target in compile_targets:
imports_by_target_path[target.path] = extract_python_module_utils_imports(target.path, module_utils)
def recurse_import(import_name: str, depth: int = 0, seen: t.Optional[t.Set[str]] = None) -> t.Set[str]:
def recurse_import(import_name: str, depth: int = 0, seen: t.Optional[set[str]] = None) -> set[str]:
"""Recursively expand module_utils imports from module_utils files."""
display.info('module_utils import: %s%s' % (' ' * depth, import_name), verbosity=4)
@ -102,7 +102,7 @@ def get_python_module_utils_imports(compile_targets: t.List[TestTarget]) -> t.Di
display.info('%s inherits import %s via %s' % (target_path, module_util_import, module_util), verbosity=6)
modules.add(module_util_import)
imports: t.Dict[str, t.Set[str]] = {module_util: set() for module_util in module_utils | virtual_utils}
imports: dict[str, set[str]] = {module_util: set() for module_util in module_utils | virtual_utils}
for target_path, modules in imports_by_target_path.items():
for module_util in modules:
@ -163,7 +163,7 @@ def enumerate_module_utils():
return set(module_utils)
def extract_python_module_utils_imports(path: str, module_utils: t.Set[str]) -> t.Set[str]:
def extract_python_module_utils_imports(path: str, module_utils: set[str]) -> set[str]:
"""Return a list of module_utils imports found in the specified source file."""
# Python code must be read as bytes to avoid a SyntaxError when the source uses comments to declare the file encoding.
# See: https://www.python.org/dev/peps/pep-0263
@ -233,10 +233,10 @@ def relative_to_absolute(name: str, level: int, module: str, path: str, lineno:
class ModuleUtilFinder(ast.NodeVisitor):
"""AST visitor to find valid module_utils imports."""
def __init__(self, path: str, module_utils: t.Set[str]) -> None:
def __init__(self, path: str, module_utils: set[str]) -> None:
self.path = path
self.module_utils = module_utils
self.imports: t.Set[str] = set()
self.imports: set[str] = set()
# implicitly import parent package
@ -325,7 +325,7 @@ class ModuleUtilFinder(ast.NodeVisitor):
# This error should be detected by unit or integration tests.
display.warning('%s:%d Invalid module_utils import: %s' % (self.path, line_number, import_name))
def add_imports(self, names: t.List[str], line_no: int) -> None:
def add_imports(self, names: list[str], line_no: int) -> None:
"""Add the given import names if they are module_utils imports."""
for name in names:
if self.is_module_util_name(name):

@ -28,7 +28,7 @@ from ..util import (
)
def parse_args(argv: t.Optional[t.List[str]] = None) -> argparse.Namespace:
def parse_args(argv: t.Optional[list[str]] = None) -> argparse.Namespace:
"""Parse command line arguments."""
completer = CompositeActionCompletionFinder()

@ -37,7 +37,7 @@ class RegisteredCompletionFinder(OptionCompletionFinder):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.registered_completions: t.Optional[t.List[str]] = None
self.registered_completions: t.Optional[list[str]] = None
def completer(
self,
@ -45,7 +45,7 @@ class RegisteredCompletionFinder(OptionCompletionFinder):
action: argparse.Action,
parsed_args: argparse.Namespace,
**kwargs,
) -> t.List[str]:
) -> list[str]:
"""
Return a list of completions for the specified prefix and action.
Use this as the completer function for argcomplete.
@ -67,7 +67,7 @@ class RegisteredCompletionFinder(OptionCompletionFinder):
prefix: str,
action: argparse.Action,
parsed_args: argparse.Namespace,
) -> t.List[str]:
) -> list[str]:
"""
Return a list of completions for the specified prefix and action.
Called by the complete function.
@ -86,7 +86,7 @@ class RegisteredCompletionFinder(OptionCompletionFinder):
class CompositeAction(argparse.Action, metaclass=abc.ABCMeta):
"""Base class for actions that parse composite arguments."""
documentation_state: t.Dict[t.Type[CompositeAction], DocumentationState] = {}
documentation_state: dict[t.Type[CompositeAction], DocumentationState] = {}
def __init__(
self,
@ -139,7 +139,7 @@ class CompositeActionCompletionFinder(RegisteredCompletionFinder):
prefix: str,
action: argparse.Action,
parsed_args: argparse.Namespace,
) -> t.List[str]:
) -> list[str]:
"""Return a list of completions appropriate for the given prefix and action, taking into account the arguments that have already been parsed."""
assert isinstance(action, CompositeAction)

@ -2,6 +2,7 @@
from __future__ import annotations
import abc
import collections.abc as c
import contextlib
import dataclasses
import enum
@ -44,7 +45,7 @@ class CompletionSuccess(Completion):
list_mode: bool
consumed: str
continuation: str
matches: t.List[str] = dataclasses.field(default_factory=list)
matches: list[str] = dataclasses.field(default_factory=list)
@property
def preserve(self) -> bool:
@ -55,7 +56,7 @@ class CompletionSuccess(Completion):
return len(self.matches) > 1 and self.list_mode
@property
def completions(self) -> t.List[str]:
def completions(self) -> list[str]:
"""List of completion values to return to argcomplete."""
completions = self.matches
continuation = '' if self.list_mode else self.continuation
@ -93,16 +94,16 @@ class ParserState:
mode: ParserMode
remainder: str = ''
consumed: str = ''
boundaries: t.List[ParserBoundary] = dataclasses.field(default_factory=list)
namespaces: t.List[t.Any] = dataclasses.field(default_factory=list)
parts: t.List[str] = dataclasses.field(default_factory=list)
boundaries: list[ParserBoundary] = dataclasses.field(default_factory=list)
namespaces: list[t.Any] = dataclasses.field(default_factory=list)
parts: list[str] = dataclasses.field(default_factory=list)
@property
def incomplete(self) -> bool:
"""True if parsing is incomplete (unparsed input remains), otherwise False."""
return self.remainder is not None
def match(self, value: str, choices: t.List[str]) -> bool:
def match(self, value: str, choices: list[str]) -> bool:
"""Return True if the given value matches the provided choices, taking into account parsing boundaries, otherwise return False."""
if self.current_boundary:
delimiters, delimiter = self.current_boundary.delimiters, self.current_boundary.match
@ -173,7 +174,7 @@ class ParserState:
self.namespaces.append(namespace)
@contextlib.contextmanager
def delimit(self, delimiters: str, required: bool = True) -> t.Iterator[ParserBoundary]:
def delimit(self, delimiters: str, required: bool = True) -> c.Iterator[ParserBoundary]:
"""Context manager for delimiting parsing of input."""
boundary = ParserBoundary(delimiters=delimiters, required=required)
@ -191,7 +192,7 @@ class ParserState:
@dataclasses.dataclass
class DocumentationState:
"""State of the composite argument parser's generated documentation."""
sections: t.Dict[str, str] = dataclasses.field(default_factory=dict)
sections: dict[str, str] = dataclasses.field(default_factory=dict)
class Parser(metaclass=abc.ABCMeta):
@ -221,7 +222,7 @@ class DynamicChoicesParser(Parser, metaclass=abc.ABCMeta):
self.conditions = conditions
@abc.abstractmethod
def get_choices(self, value: str) -> t.List[str]:
def get_choices(self, value: str) -> list[str]:
"""Return a list of valid choices based on the given input value."""
def no_completion_match(self, value: str) -> CompletionUnavailable: # pylint: disable=unused-argument
@ -272,12 +273,12 @@ class DynamicChoicesParser(Parser, metaclass=abc.ABCMeta):
class ChoicesParser(DynamicChoicesParser):
"""Composite argument parser which relies on a static list of choices."""
def __init__(self, choices: t.List[str], conditions: MatchConditions = MatchConditions.CHOICE) -> None:
def __init__(self, choices: list[str], conditions: MatchConditions = MatchConditions.CHOICE) -> None:
self.choices = choices
super().__init__(conditions=conditions)
def get_choices(self, value: str) -> t.List[str]:
def get_choices(self, value: str) -> list[str]:
"""Return a list of valid choices based on the given input value."""
return self.choices
@ -295,7 +296,7 @@ class IntegerParser(DynamicChoicesParser):
super().__init__()
def get_choices(self, value: str) -> t.List[str]:
def get_choices(self, value: str) -> list[str]:
"""Return a list of valid choices based on the given input value."""
if not value:
numbers = list(range(1, 10))
@ -365,12 +366,12 @@ class RelativePathNameParser(DynamicChoicesParser):
"""Composite argument parser for relative path names."""
RELATIVE_NAMES = ['.', '..']
def __init__(self, choices: t.List[str]) -> None:
def __init__(self, choices: list[str]) -> None:
self.choices = choices
super().__init__()
def get_choices(self, value: str) -> t.List[str]:
def get_choices(self, value: str) -> list[str]:
"""Return a list of valid choices based on the given input value."""
choices = list(self.choices)
@ -399,7 +400,7 @@ class FileParser(Parser):
directory = path or '.'
try:
with os.scandir(directory) as scan: # type: t.Iterator[os.DirEntry]
with os.scandir(directory) as scan: # type: c.Iterator[os.DirEntry]
choices = [f'{item.name}{PATH_DELIMITER}' if item.is_dir() else item.name for item in scan]
except OSError:
choices = []
@ -497,7 +498,7 @@ class NamespaceWrappedParser(NamespaceParser):
class KeyValueParser(Parser, metaclass=abc.ABCMeta):
"""Base class for key/value composite argument parsers."""
@abc.abstractmethod
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
def parse(self, state: ParserState) -> t.Any:
@ -561,12 +562,12 @@ class PairParser(Parser, metaclass=abc.ABCMeta):
class TypeParser(Parser, metaclass=abc.ABCMeta):
"""Base class for composite argument parsers which parse a type name, a colon and then parse results based on the type given by the type name."""
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]: # pylint: disable=unused-argument
def get_parsers(self, state: ParserState) -> dict[str, Parser]: # pylint: disable=unused-argument
"""Return a dictionary of type names and type parsers."""
return self.get_stateless_parsers()
@abc.abstractmethod
def get_stateless_parsers(self) -> t.Dict[str, Parser]:
def get_stateless_parsers(self) -> dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
def parse(self, state: ParserState) -> t.Any:

@ -2,6 +2,7 @@
from __future__ import annotations
import argparse
import collections.abc as c
import typing as t
from ....commands.coverage.combine import (
@ -20,7 +21,7 @@ from ...environments import (
def do_combine(
subparsers,
parent: argparse.ArgumentParser,
add_coverage_common: t.Callable[[argparse.ArgumentParser], None],
add_coverage_common: c.Callable[[argparse.ArgumentParser], None],
completer: CompositeActionCompletionFinder,
) -> None:
"""Command line parsing for the `coverage combine` command."""

@ -2,6 +2,7 @@
from __future__ import annotations
import argparse
import collections.abc as c
import typing as t
from ....commands.coverage.html import (
@ -20,7 +21,7 @@ from ...environments import (
def do_html(
subparsers,
parent: argparse.ArgumentParser,
add_coverage_common: t.Callable[[argparse.ArgumentParser], None],
add_coverage_common: c.Callable[[argparse.ArgumentParser], None],
completer: CompositeActionCompletionFinder,
) -> None:
"""Command line parsing for the `coverage html` command."""

@ -2,6 +2,7 @@
from __future__ import annotations
import argparse
import collections.abc as c
import typing as t
from ....commands.coverage.report import (
@ -20,7 +21,7 @@ from ...environments import (
def do_report(
subparsers,
parent: argparse.ArgumentParser,
add_coverage_common: t.Callable[[argparse.ArgumentParser], None],
add_coverage_common: c.Callable[[argparse.ArgumentParser], None],
completer: CompositeActionCompletionFinder,
) -> None:
"""Command line parsing for the `coverage report` command."""

@ -2,6 +2,7 @@
from __future__ import annotations
import argparse
import collections.abc as c
import typing as t
from ....commands.coverage.xml import (
@ -20,7 +21,7 @@ from ...environments import (
def do_xml(
subparsers,
parent: argparse.ArgumentParser,
add_coverage_common: t.Callable[[argparse.ArgumentParser], None],
add_coverage_common: c.Callable[[argparse.ArgumentParser], None],
completer: CompositeActionCompletionFinder,
) -> None:
"""Command line parsing for the `coverage xml` command."""

@ -2,6 +2,7 @@
from __future__ import annotations
import argparse
import collections.abc as c
import os
import typing as t
@ -36,7 +37,7 @@ from ...completers import (
def do_network_integration(
subparsers,
parent: argparse.ArgumentParser,
add_integration_common: t.Callable[[argparse.ArgumentParser], None],
add_integration_common: c.Callable[[argparse.ArgumentParser], None],
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `network-integration` command."""
@ -64,7 +65,7 @@ def do_network_integration(
add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.NETWORK_INTEGRATION) # network-integration
def complete_network_testcase(prefix: str, parsed_args: argparse.Namespace, **_) -> t.List[str]:
def complete_network_testcase(prefix: str, parsed_args: argparse.Namespace, **_) -> list[str]:
"""Return a list of test cases matching the given prefix if only one target was parsed from the command line, otherwise return an empty list."""
testcases = []

@ -2,6 +2,7 @@
from __future__ import annotations
import argparse
import collections.abc as c
import typing as t
from ....commands.integration.posix import (
@ -27,7 +28,7 @@ from ...environments import (
def do_posix_integration(
subparsers,
parent: argparse.ArgumentParser,
add_integration_common: t.Callable[[argparse.ArgumentParser], None],
add_integration_common: c.Callable[[argparse.ArgumentParser], None],
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `integration` command."""

@ -2,6 +2,7 @@
from __future__ import annotations
import argparse
import collections.abc as c
import typing as t
from ....commands.integration.windows import (
@ -27,7 +28,7 @@ from ...environments import (
def do_windows_integration(
subparsers,
parent: argparse.ArgumentParser,
add_integration_common: t.Callable[[argparse.ArgumentParser], None],
add_integration_common: c.Callable[[argparse.ArgumentParser], None],
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `windows-integration` command."""

@ -2,6 +2,7 @@
from __future__ import annotations
import argparse
import collections.abc as c
import dataclasses
import enum
import os
@ -55,7 +56,7 @@ from ..data import (
)
def filter_python(version: t.Optional[str], versions: t.Optional[t.Sequence[str]]) -> t.Optional[str]:
def filter_python(version: t.Optional[str], versions: t.Optional[c.Sequence[str]]) -> t.Optional[str]:
"""If a Python version is given and is in the given version list, return that Python version, otherwise return None."""
return version if version in versions else None
@ -120,10 +121,10 @@ class LegacyHostOptions:
docker_privileged: t.Optional[bool] = None
docker_seccomp: t.Optional[str] = None
docker_memory: t.Optional[int] = None
windows: t.Optional[t.List[str]] = None
platform: t.Optional[t.List[str]] = None
platform_collection: t.Optional[t.List[t.Tuple[str, str]]] = None
platform_connection: t.Optional[t.List[t.Tuple[str, str]]] = None
windows: t.Optional[list[str]] = None
platform: t.Optional[list[str]] = None
platform_collection: t.Optional[list[tuple[str, str]]] = None
platform_connection: t.Optional[list[tuple[str, str]]] = None
inventory: t.Optional[str] = None
@staticmethod
@ -144,16 +145,16 @@ class LegacyHostOptions:
delattr(namespace, field.name)
@staticmethod
def purge_args(args: t.List[str]) -> t.List[str]:
def purge_args(args: list[str]) -> list[str]:
"""Purge legacy host options from the given command line arguments."""
fields: t.Tuple[dataclasses.Field, ...] = dataclasses.fields(LegacyHostOptions)
filters: t.Dict[str, int] = {get_option_name(field.name): 0 if field.type is t.Optional[bool] else 1 for field in fields}
fields: tuple[dataclasses.Field, ...] = dataclasses.fields(LegacyHostOptions)
filters: dict[str, int] = {get_option_name(field.name): 0 if field.type is t.Optional[bool] else 1 for field in fields}
return filter_args(args, filters)
def get_options_used(self) -> t.Tuple[str, ...]:
def get_options_used(self) -> tuple[str, ...]:
"""Return a tuple of the command line options used."""
fields: t.Tuple[dataclasses.Field, ...] = dataclasses.fields(self)
fields: tuple[dataclasses.Field, ...] = dataclasses.fields(self)
options = tuple(sorted(get_option_name(field.name) for field in fields if getattr(self, field.name)))
return options
@ -190,7 +191,7 @@ class TargetMode(enum.Enum):
def convert_legacy_args(
argv: t.List[str],
argv: list[str],
args: t.Union[argparse.Namespace, types.SimpleNamespace],
mode: TargetMode,
) -> HostSettings:
@ -241,7 +242,7 @@ def convert_legacy_args(
args.targets = targets
if used_default_pythons:
control_targets = t.cast(t.List[ControllerConfig], targets)
control_targets = t.cast(list[ControllerConfig], targets)
skipped_python_versions = sorted_versions(list(set(SUPPORTED_PYTHON_VERSIONS) - {target.python.version for target in control_targets}))
else:
skipped_python_versions = []
@ -264,11 +265,11 @@ def controller_targets(
mode: TargetMode,
options: LegacyHostOptions,
controller: ControllerHostConfig,
) -> t.List[HostConfig]:
) -> list[HostConfig]:
"""Return the configuration for controller targets."""
python = native_python(options)
targets: t.List[HostConfig]
targets: list[HostConfig]
if python:
targets = [ControllerConfig(python=python)]
@ -289,7 +290,7 @@ def native_python(options: LegacyHostOptions) -> t.Optional[NativePythonConfig]:
def get_legacy_host_config(
mode: TargetMode,
options: LegacyHostOptions,
) -> t.Tuple[ControllerHostConfig, t.List[HostConfig], t.Optional[FallbackDetail]]:
) -> tuple[ControllerHostConfig, list[HostConfig], t.Optional[FallbackDetail]]:
"""
Returns controller and target host configs derived from the provided legacy host options.
The goal is to match the original behavior, by using non-split testing whenever possible.
@ -300,10 +301,10 @@ def get_legacy_host_config(
docker_fallback = 'default'
remote_fallback = get_fallback_remote_controller()
controller_fallback: t.Optional[t.Tuple[str, str, FallbackReason]] = None
controller_fallback: t.Optional[tuple[str, str, FallbackReason]] = None
controller: t.Optional[ControllerHostConfig]
targets: t.List[HostConfig]
targets: list[HostConfig]
if options.venv:
if controller_python(options.python) or not options.python:
@ -327,7 +328,7 @@ def get_legacy_host_config(
targets = [ControllerConfig(python=VirtualPythonConfig(version=target.python.version, path=target.python.path,
system_site_packages=options.venv_system_site_packages)) for target in control_targets]
else:
targets = t.cast(t.List[HostConfig], control_targets)
targets = t.cast(list[HostConfig], control_targets)
else:
targets = [ControllerConfig(python=VirtualPythonConfig(version=options.python or 'default',
system_site_packages=options.venv_system_site_packages))]
@ -455,8 +456,8 @@ def get_legacy_host_config(
def handle_non_posix_targets(
mode: TargetMode,
options: LegacyHostOptions,
targets: t.List[HostConfig],
) -> t.List[HostConfig]:
targets: list[HostConfig],
) -> list[HostConfig]:
"""Return a list of non-POSIX targets if the target mode is non-POSIX."""
if mode == TargetMode.WINDOWS_INTEGRATION:
if options.windows:
@ -478,7 +479,7 @@ def handle_non_posix_targets(
if entry.platform == platform:
entry.connection = connection
targets = t.cast(t.List[HostConfig], network_targets)
targets = t.cast(list[HostConfig], network_targets)
else:
targets = [NetworkInventoryConfig(path=options.inventory)]
@ -488,16 +489,16 @@ def handle_non_posix_targets(
def default_targets(
mode: TargetMode,
controller: ControllerHostConfig,
) -> t.List[HostConfig]:
) -> list[HostConfig]:
"""Return a list of default targets for the given target mode."""
targets: t.List[HostConfig]
targets: list[HostConfig]
if mode == TargetMode.WINDOWS_INTEGRATION:
targets = [WindowsInventoryConfig(path=os.path.abspath(os.path.join(data_context().content.integration_path, 'inventory.winrm')))]
elif mode == TargetMode.NETWORK_INTEGRATION:
targets = [NetworkInventoryConfig(path=os.path.abspath(os.path.join(data_context().content.integration_path, 'inventory.networking')))]
elif mode.multiple_pythons:
targets = t.cast(t.List[HostConfig], controller.get_default_targets(HostContext(controller_config=controller)))
targets = t.cast(list[HostConfig], controller.get_default_targets(HostContext(controller_config=controller)))
else:
targets = [ControllerConfig()]

@ -2,7 +2,6 @@
from __future__ import annotations
import argparse
import typing as t
from ..target import (
find_target_completion,
@ -13,14 +12,14 @@ from .argparsing.argcompletion import (
)
def complete_target(completer: OptionCompletionFinder, prefix: str, parsed_args: argparse.Namespace, **_) -> t.List[str]:
def complete_target(completer: OptionCompletionFinder, prefix: str, parsed_args: argparse.Namespace, **_) -> list[str]:
"""Perform completion for the targets configured for the command being parsed."""
matches = find_target_completion(parsed_args.targets_func, prefix, completer.list_mode)
completer.disable_completion_mangling = completer.list_mode and len(matches) > 1
return matches
def complete_choices(choices: t.List[str], prefix: str, **_) -> t.List[str]:
def complete_choices(choices: list[str], prefix: str, **_) -> list[str]:
"""Perform completion using the provided choices."""
matches = [choice for choice in choices if choice.startswith(prefix)]
return matches

@ -2,15 +2,14 @@
from __future__ import annotations
import argparse
import typing as t
def key_value_type(value: str) -> t.Tuple[str, str]:
def key_value_type(value: str) -> tuple[str, str]:
"""Wrapper around key_value."""
return key_value(value)
def key_value(value: str) -> t.Tuple[str, str]:
def key_value(value: str) -> tuple[str, str]:
"""Type parsing and validation for argparse key/value pairs separated by an '=' character."""
parts = value.split('=')

@ -160,7 +160,7 @@ def add_composite_environment_options(
completer: CompositeActionCompletionFinder,
controller_mode: ControllerMode,
target_mode: TargetMode,
) -> t.List[t.Type[CompositeAction]]:
) -> list[t.Type[CompositeAction]]:
"""Add composite options for controlling the test environment."""
composite_parser = t.cast(argparse.ArgumentParser, parser.add_argument_group(
title='composite environment arguments (mutually exclusive with "environment arguments" above)'))
@ -170,7 +170,7 @@ def add_composite_environment_options(
help=argparse.SUPPRESS,
)
action_types: t.List[t.Type[CompositeAction]] = []
action_types: list[t.Type[CompositeAction]] = []
def register_action_type(action_type: t.Type[CompositeAction]) -> t.Type[CompositeAction]:
"""Register the provided composite action type and return it."""
@ -263,7 +263,7 @@ def add_environments_python(
target_mode: TargetMode,
) -> None:
"""Add environment arguments to control the Python version(s) used."""
python_versions: t.Tuple[str, ...]
python_versions: tuple[str, ...]
if target_mode.has_python:
python_versions = SUPPORTED_PYTHON_VERSIONS
@ -544,24 +544,24 @@ def add_environment_remote(
)
def complete_remote_stage(prefix: str, **_) -> t.List[str]:
def complete_remote_stage(prefix: str, **_) -> list[str]:
"""Return a list of supported stages matching the given prefix."""
return [stage for stage in ('prod', 'dev') if stage.startswith(prefix)]
def complete_windows(prefix: str, parsed_args: argparse.Namespace, **_) -> t.List[str]:
def complete_windows(prefix: str, parsed_args: argparse.Namespace, **_) -> list[str]:
"""Return a list of supported Windows versions matching the given prefix, excluding versions already parsed from the command line."""
return [i for i in get_windows_version_choices() if i.startswith(prefix) and (not parsed_args.windows or i not in parsed_args.windows)]
def complete_network_platform(prefix: str, parsed_args: argparse.Namespace, **_) -> t.List[str]:
def complete_network_platform(prefix: str, parsed_args: argparse.Namespace, **_) -> list[str]:
"""Return a list of supported network platforms matching the given prefix, excluding platforms already parsed from the command line."""
images = sorted(filter_completion(network_completion()))
return [i for i in images if i.startswith(prefix) and (not parsed_args.platform or i not in parsed_args.platform)]
def complete_network_platform_collection(prefix: str, parsed_args: argparse.Namespace, **_) -> t.List[str]:
def complete_network_platform_collection(prefix: str, parsed_args: argparse.Namespace, **_) -> list[str]:
"""Return a list of supported network platforms matching the given prefix, excluding collection platforms already parsed from the command line."""
left = prefix.split('=')[0]
images = sorted(set(image.platform for image in filter_completion(network_completion()).values()))
@ -569,7 +569,7 @@ def complete_network_platform_collection(prefix: str, parsed_args: argparse.Name
return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_collection or i not in [x[0] for x in parsed_args.platform_collection])]
def complete_network_platform_connection(prefix: str, parsed_args: argparse.Namespace, **_) -> t.List[str]:
def complete_network_platform_connection(prefix: str, parsed_args: argparse.Namespace, **_) -> list[str]:
"""Return a list of supported network platforms matching the given prefix, excluding connection platforms already parsed from the command line."""
left = prefix.split('=')[0]
images = sorted(set(image.platform for image in filter_completion(network_completion()).values()))
@ -577,16 +577,16 @@ def complete_network_platform_connection(prefix: str, parsed_args: argparse.Name
return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_connection or i not in [x[0] for x in parsed_args.platform_connection])]
def get_remote_platform_choices(controller: bool = False) -> t.List[str]:
def get_remote_platform_choices(controller: bool = False) -> list[str]:
"""Return a list of supported remote platforms matching the given prefix."""
return sorted(filter_completion(remote_completion(), controller_only=controller))
def get_windows_platform_choices() -> t.List[str]:
def get_windows_platform_choices() -> list[str]:
"""Return a list of supported Windows versions matching the given prefix."""
return sorted(f'windows/{windows.version}' for windows in filter_completion(windows_completion()).values())
def get_windows_version_choices() -> t.List[str]:
def get_windows_version_choices() -> list[str]:
"""Return a list of supported Windows versions."""
return sorted(windows.version for windows in filter_completion(windows_completion()).values())

@ -53,7 +53,7 @@ from .base_argument_parsers import (
class OriginControllerParser(ControllerNamespaceParser, TypeParser):
"""Composite argument parser for the controller when delegation is not supported."""
def get_stateless_parsers(self) -> t.Dict[str, Parser]:
def get_stateless_parsers(self) -> dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
return dict(
origin=OriginParser(),
@ -71,9 +71,9 @@ class OriginControllerParser(ControllerNamespaceParser, TypeParser):
class DelegatedControllerParser(ControllerNamespaceParser, TypeParser):
"""Composite argument parser for the controller when delegation is supported."""
def get_stateless_parsers(self) -> t.Dict[str, Parser]:
def get_stateless_parsers(self) -> dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
parsers: t.Dict[str, Parser] = dict(
parsers: dict[str, Parser] = dict(
origin=OriginParser(),
docker=DockerParser(controller=True),
)
@ -97,9 +97,9 @@ class DelegatedControllerParser(ControllerNamespaceParser, TypeParser):
class PosixTargetParser(TargetNamespaceParser, TypeParser):
"""Composite argument parser for a POSIX target."""
def get_stateless_parsers(self) -> t.Dict[str, Parser]:
def get_stateless_parsers(self) -> dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
parsers: t.Dict[str, Parser] = dict(
parsers: dict[str, Parser] = dict(
controller=ControllerParser(),
docker=DockerParser(controller=False),
)
@ -132,17 +132,17 @@ class WindowsTargetParser(TargetsNamespaceParser, TypeParser):
"""True if inventory is allowed, otherwise False."""
return True
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
return self.get_internal_parsers(state.root_namespace.targets)
def get_stateless_parsers(self) -> t.Dict[str, Parser]:
def get_stateless_parsers(self) -> dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
return self.get_internal_parsers([])
def get_internal_parsers(self, targets: t.List[WindowsConfig]) -> t.Dict[str, Parser]:
def get_internal_parsers(self, targets: list[WindowsConfig]) -> dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
parsers: t.Dict[str, Parser] = {}
parsers: dict[str, Parser] = {}
if self.allow_inventory and not targets:
parsers.update(
@ -174,17 +174,17 @@ class NetworkTargetParser(TargetsNamespaceParser, TypeParser):
"""True if inventory is allowed, otherwise False."""
return True
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
return self.get_internal_parsers(state.root_namespace.targets)
def get_stateless_parsers(self) -> t.Dict[str, Parser]:
def get_stateless_parsers(self) -> dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
return self.get_internal_parsers([])
def get_internal_parsers(self, targets: t.List[NetworkConfig]) -> t.Dict[str, Parser]:
def get_internal_parsers(self, targets: list[NetworkConfig]) -> dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
parsers: t.Dict[str, Parser] = {}
parsers: dict[str, Parser] = {}
if self.allow_inventory and not targets:
parsers.update(

@ -1,8 +1,6 @@
"""Helper functions for composite parsers."""
from __future__ import annotations
import typing as t
from ...constants import (
CONTROLLER_PYTHON_VERSIONS,
SUPPORTED_PYTHON_VERSIONS,
@ -21,7 +19,7 @@ from ...host_configs import (
)
def get_docker_pythons(name: str, controller: bool, strict: bool) -> t.List[str]:
def get_docker_pythons(name: str, controller: bool, strict: bool) -> list[str]:
"""Return a list of docker instance Python versions supported by the specified host config."""
image_config = filter_completion(docker_completion()).get(name)
available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS
@ -34,7 +32,7 @@ def get_docker_pythons(name: str, controller: bool, strict: bool) -> t.List[str]
return supported_pythons
def get_remote_pythons(name: str, controller: bool, strict: bool) -> t.List[str]:
def get_remote_pythons(name: str, controller: bool, strict: bool) -> list[str]:
"""Return a list of remote instance Python versions supported by the specified host config."""
platform_config = filter_completion(remote_completion()).get(name)
available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS
@ -47,7 +45,7 @@ def get_remote_pythons(name: str, controller: bool, strict: bool) -> t.List[str]
return supported_pythons
def get_controller_pythons(controller_config: HostConfig, strict: bool) -> t.List[str]:
def get_controller_pythons(controller_config: HostConfig, strict: bool) -> list[str]:
"""Return a list of controller Python versions supported by the specified host config."""
if isinstance(controller_config, DockerConfig):
pythons = get_docker_pythons(controller_config.name, False, strict)

@ -46,7 +46,7 @@ from .helpers import (
class OriginKeyValueParser(KeyValueParser):
"""Composite argument parser for origin key/value pairs."""
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
versions = CONTROLLER_PYTHON_VERSIONS
@ -69,7 +69,7 @@ class OriginKeyValueParser(KeyValueParser):
class ControllerKeyValueParser(KeyValueParser):
"""Composite argument parser for controller key/value pairs."""
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
versions = get_controller_pythons(state.root_namespace.controller, False)
allow_default = bool(get_controller_pythons(state.root_namespace.controller, True))
@ -98,7 +98,7 @@ class DockerKeyValueParser(KeyValueParser):
self.versions = get_docker_pythons(image, controller, False)
self.allow_default = bool(get_docker_pythons(image, controller, True))
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return dict(
python=PythonParser(versions=self.versions, allow_venv=False, allow_default=self.allow_default),
@ -130,7 +130,7 @@ class PosixRemoteKeyValueParser(KeyValueParser):
self.versions = get_remote_pythons(name, controller, False)
self.allow_default = bool(get_remote_pythons(name, controller, True))
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return dict(
become=ChoicesParser(list(SUPPORTED_BECOME_METHODS)),
@ -157,7 +157,7 @@ class PosixRemoteKeyValueParser(KeyValueParser):
class WindowsRemoteKeyValueParser(KeyValueParser):
"""Composite argument parser for Windows remote key/value pairs."""
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return dict(
provider=ChoicesParser(REMOTE_PROVIDERS),
@ -178,7 +178,7 @@ class WindowsRemoteKeyValueParser(KeyValueParser):
class NetworkRemoteKeyValueParser(KeyValueParser):
"""Composite argument parser for network remote key/value pairs."""
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return dict(
provider=ChoicesParser(REMOTE_PROVIDERS),
@ -203,7 +203,7 @@ class NetworkRemoteKeyValueParser(KeyValueParser):
class PosixSshKeyValueParser(KeyValueParser):
"""Composite argument parser for POSIX SSH host key/value pairs."""
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return dict(
python=PythonParser(versions=list(SUPPORTED_PYTHON_VERSIONS), allow_venv=False, allow_default=False),
@ -224,6 +224,6 @@ class PosixSshKeyValueParser(KeyValueParser):
class EmptyKeyValueParser(KeyValueParser):
"""Composite argument parser when a key/value parser is required but there are no keys available."""
def get_parsers(self, state: ParserState) -> t.Dict[str, Parser]:
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return {}

@ -1,6 +1,7 @@
"""Composite argument value parsers used by other parsers."""
from __future__ import annotations
import collections.abc as c
import typing as t
from ...host_configs import (
@ -60,7 +61,7 @@ class PythonParser(Parser):
The origin host and unknown environments assume all relevant Python versions are available.
"""
def __init__(self,
versions: t.Sequence[str],
versions: c.Sequence[str],
*,
allow_default: bool,
allow_venv: bool,
@ -133,7 +134,7 @@ class PythonParser(Parser):
class PlatformParser(ChoicesParser):
"""Composite argument parser for "{platform}/{version}" formatted choices."""
def __init__(self, choices: t.List[str]) -> None:
def __init__(self, choices: list[str]) -> None:
super().__init__(choices, conditions=MatchConditions.CHOICE | MatchConditions.ANY)
def parse(self, state: ParserState) -> t.Any:

@ -1,6 +1,7 @@
"""Common logic for the coverage subcommand."""
from __future__ import annotations
import collections.abc as c
import errno
import json
import os
@ -93,7 +94,7 @@ def initialize_coverage(args: CoverageConfig, host_state: HostState) -> coverage
return coverage
def run_coverage(args: CoverageConfig, host_state: HostState, output_file: str, command: str, cmd: t.List[str]) -> None:
def run_coverage(args: CoverageConfig, host_state: HostState, output_file: str, command: str, cmd: list[str]) -> None:
"""Run the coverage cli tool with the specified options."""
env = common_environment()
env.update(dict(COVERAGE_FILE=output_file))
@ -112,22 +113,22 @@ def run_coverage(args: CoverageConfig, host_state: HostState, output_file: str,
display.warning(stderr)
def get_all_coverage_files() -> t.List[str]:
def get_all_coverage_files() -> list[str]:
"""Return a list of all coverage file paths."""
return get_python_coverage_files() + get_powershell_coverage_files()
def get_python_coverage_files(path: t.Optional[str] = None) -> t.List[str]:
def get_python_coverage_files(path: t.Optional[str] = None) -> list[str]:
"""Return the list of Python coverage file paths."""
return get_coverage_files('python', path)
def get_powershell_coverage_files(path: t.Optional[str] = None) -> t.List[str]:
def get_powershell_coverage_files(path: t.Optional[str] = None) -> list[str]:
"""Return the list of PowerShell coverage file paths."""
return get_coverage_files('powershell', path)
def get_coverage_files(language: str, path: t.Optional[str] = None) -> t.List[str]:
def get_coverage_files(language: str, path: t.Optional[str] = None) -> list[str]:
"""Return the list of coverage file paths for the given language."""
coverage_dir = path or ResultType.COVERAGE.path
@ -143,7 +144,7 @@ def get_coverage_files(language: str, path: t.Optional[str] = None) -> t.List[st
return coverage_files
def get_collection_path_regexes() -> t.Tuple[t.Optional[t.Pattern], t.Optional[t.Pattern]]:
def get_collection_path_regexes() -> tuple[t.Optional[t.Pattern], t.Optional[t.Pattern]]:
"""Return a pair of regexes used for identifying and manipulating collection paths."""
if data_context().content.collection:
collection_search_re = re.compile(r'/%s/' % data_context().content.collection.directory)
@ -155,7 +156,7 @@ def get_collection_path_regexes() -> t.Tuple[t.Optional[t.Pattern], t.Optional[t
return collection_search_re, collection_sub_re
def get_python_modules() -> t.Dict[str, str]:
def get_python_modules() -> dict[str, str]:
"""Return a dictionary of Ansible module names and their paths."""
return dict((target.module, target.path) for target in list(walk_module_targets()) if target.path.endswith('.py'))
@ -163,10 +164,10 @@ def get_python_modules() -> t.Dict[str, str]:
def enumerate_python_arcs(
path: str,
coverage: coverage_module,
modules: t.Dict[str, str],
modules: dict[str, str],
collection_search_re: t.Optional[t.Pattern],
collection_sub_re: t.Optional[t.Pattern],
) -> t.Generator[t.Tuple[str, t.Set[t.Tuple[int, int]]], None, None]:
) -> c.Generator[tuple[str, set[tuple[int, int]]], None, None]:
"""Enumerate Python code coverage arcs in the given file."""
if os.path.getsize(path) == 0:
display.warning('Empty coverage file: %s' % path, verbosity=2)
@ -192,7 +193,7 @@ def enumerate_python_arcs(
yield filename, set(arcs)
PythonArcs = t.Dict[str, t.List[t.Tuple[int, int]]]
PythonArcs = dict[str, list[tuple[int, int]]]
"""Python coverage arcs."""
@ -241,7 +242,7 @@ def enumerate_powershell_lines(
path: str,
collection_search_re: t.Optional[t.Pattern],
collection_sub_re: t.Optional[t.Pattern],
) -> t.Generator[t.Tuple[str, t.Dict[int, int]], None, None]:
) -> c.Generator[tuple[str, dict[int, int]], None, None]:
"""Enumerate PowerShell code coverage lines in the given file."""
if os.path.getsize(path) == 0:
display.warning('Empty coverage file: %s' % path, verbosity=2)
@ -278,7 +279,7 @@ def enumerate_powershell_lines(
def sanitize_filename(
filename: str,
modules: t.Optional[t.Dict[str, str]] = None,
modules: t.Optional[dict[str, str]] = None,
collection_search_re: t.Optional[t.Pattern] = None,
collection_sub_re: t.Optional[t.Pattern] = None,
) -> t.Optional[str]:
@ -346,7 +347,7 @@ class PathChecker:
def __init__(self, args: CoverageConfig, collection_search_re: t.Optional[t.Pattern] = None) -> None:
self.args = args
self.collection_search_re = collection_search_re
self.invalid_paths: t.List[str] = []
self.invalid_paths: list[str] = []
self.invalid_path_chars = 0
def check_path(self, path: str) -> bool:

@ -1,6 +1,7 @@
"""Analyze integration test target code coverage."""
from __future__ import annotations
import collections.abc as c
import os
import typing as t
@ -18,20 +19,20 @@ from .. import (
CoverageAnalyzeConfig,
)
TargetKey = t.TypeVar('TargetKey', int, t.Tuple[int, int])
NamedPoints = t.Dict[str, t.Dict[TargetKey, t.Set[str]]]
IndexedPoints = t.Dict[str, t.Dict[TargetKey, t.Set[int]]]
Arcs = t.Dict[str, t.Dict[t.Tuple[int, int], t.Set[int]]]
Lines = t.Dict[str, t.Dict[int, t.Set[int]]]
TargetIndexes = t.Dict[str, int]
TargetSetIndexes = t.Dict[t.FrozenSet[int], int]
TargetKey = t.TypeVar('TargetKey', int, tuple[int, int])
NamedPoints = dict[str, dict[TargetKey, set[str]]]
IndexedPoints = dict[str, dict[TargetKey, set[int]]]
Arcs = dict[str, dict[tuple[int, int], set[int]]]
Lines = dict[str, dict[int, set[int]]]
TargetIndexes = dict[str, int]
TargetSetIndexes = dict[frozenset[int], int]
class CoverageAnalyzeTargetsConfig(CoverageAnalyzeConfig):
"""Configuration for the `coverage analyze targets` command."""
def make_report(target_indexes: TargetIndexes, arcs: Arcs, lines: Lines) -> t.Dict[str, t.Any]:
def make_report(target_indexes: TargetIndexes, arcs: Arcs, lines: Lines) -> dict[str, t.Any]:
"""Condense target indexes, arcs and lines into a compact report."""
set_indexes: TargetSetIndexes = {}
arc_refs = dict((path, dict((format_arc(arc), get_target_set_index(indexes, set_indexes)) for arc, indexes in data.items())) for path, data in arcs.items())
@ -47,13 +48,13 @@ def make_report(target_indexes: TargetIndexes, arcs: Arcs, lines: Lines) -> t.Di
return report
def load_report(report: t.Dict[str, t.Any]) -> t.Tuple[t.List[str], Arcs, Lines]:
def load_report(report: dict[str, t.Any]) -> tuple[list[str], Arcs, Lines]:
"""Extract target indexes, arcs and lines from an existing report."""
try:
target_indexes: t.List[str] = report['targets']
target_sets: t.List[t.List[int]] = report['target_sets']
arc_data: t.Dict[str, t.Dict[str, int]] = report['arcs']
line_data: t.Dict[str, t.Dict[int, int]] = report['lines']
target_indexes: list[str] = report['targets']
target_sets: list[list[int]] = report['target_sets']
arc_data: dict[str, dict[str, int]] = report['arcs']
line_data: dict[str, dict[int, int]] = report['lines']
except KeyError as ex:
raise ApplicationError('Document is missing key "%s".' % ex.args)
except TypeError:
@ -65,7 +66,7 @@ def load_report(report: t.Dict[str, t.Any]) -> t.Tuple[t.List[str], Arcs, Lines]
return target_indexes, arcs, lines
def read_report(path: str) -> t.Tuple[t.List[str], Arcs, Lines]:
def read_report(path: str) -> tuple[list[str], Arcs, Lines]:
"""Read a JSON report from disk."""
try:
report = read_json_file(path)
@ -78,7 +79,7 @@ def read_report(path: str) -> t.Tuple[t.List[str], Arcs, Lines]:
raise ApplicationError('File "%s" is not an aggregated coverage data file. %s' % (path, ex))
def write_report(args: CoverageAnalyzeTargetsConfig, report: t.Dict[str, t.Any], path: str) -> None:
def write_report(args: CoverageAnalyzeTargetsConfig, report: dict[str, t.Any], path: str) -> None:
"""Write a JSON report to disk."""
if args.explain:
return
@ -95,18 +96,18 @@ def format_line(value: int) -> str:
return str(value) # putting this in a function keeps both pylint and mypy happy
def format_arc(value: t.Tuple[int, int]) -> str:
def format_arc(value: tuple[int, int]) -> str:
"""Format an arc tuple as a string."""
return '%d:%d' % value
def parse_arc(value: str) -> t.Tuple[int, int]:
def parse_arc(value: str) -> tuple[int, int]:
"""Parse an arc string into a tuple."""
first, last = tuple(map(int, value.split(':')))
return first, last
def get_target_set_index(data: t.Set[int], target_set_indexes: TargetSetIndexes) -> int:
def get_target_set_index(data: set[int], target_set_indexes: TargetSetIndexes) -> int:
"""Find or add the target set in the result set and return the target set index."""
return target_set_indexes.setdefault(frozenset(data), len(target_set_indexes))
@ -118,11 +119,11 @@ def get_target_index(name: str, target_indexes: TargetIndexes) -> int:
def expand_indexes(
source_data: IndexedPoints,
source_index: t.List[str],
format_func: t.Callable[[TargetKey], str],
source_index: list[str],
format_func: c.Callable[[TargetKey], str],
) -> NamedPoints:
"""Expand indexes from the source into target names for easier processing of the data (arcs or lines)."""
combined_data: t.Dict[str, t.Dict[t.Any, t.Set[str]]] = {}
combined_data: dict[str, dict[t.Any, set[str]]] = {}
for covered_path, covered_points in source_data.items():
combined_points = combined_data.setdefault(covered_path, {})

@ -31,7 +31,7 @@ class CoverageAnalyzeTargetsCombineConfig(CoverageAnalyzeTargetsConfig):
def __init__(self, args: t.Any) -> None:
super().__init__(args)
self.input_files: t.List[str] = args.input_file
self.input_files: list[str] = args.input_file
self.output_file: str = args.output_file
@ -59,7 +59,7 @@ def command_coverage_analyze_targets_combine(args: CoverageAnalyzeTargetsCombine
def merge_indexes(
source_data: IndexedPoints,
source_index: t.List[str],
source_index: list[str],
combined_data: IndexedPoints,
combined_index: TargetIndexes,
) -> None:

@ -1,6 +1,7 @@
"""Filter an aggregated coverage file, keeping only the specified targets."""
from __future__ import annotations
import collections.abc as c
import re
import typing as t
@ -34,8 +35,8 @@ class CoverageAnalyzeTargetsFilterConfig(CoverageAnalyzeTargetsConfig):
self.input_file: str = args.input_file
self.output_file: str = args.output_file
self.include_targets: t.List[str] = args.include_targets
self.exclude_targets: t.List[str] = args.exclude_targets
self.include_targets: list[str] = args.include_targets
self.exclude_targets: list[str] = args.exclude_targets
self.include_path: t.Optional[str] = args.include_path
self.exclude_path: t.Optional[str] = args.exclude_path
@ -92,8 +93,8 @@ def command_coverage_analyze_targets_filter(args: CoverageAnalyzeTargetsFilterCo
def filter_data(
data: NamedPoints,
path_filter_func: t.Callable[[str], bool],
target_filter_func: t.Callable[[t.Set[str]], t.Set[str]],
path_filter_func: c.Callable[[str], bool],
target_filter_func: c.Callable[[set[str]], set[str]],
) -> NamedPoints:
"""Filter the data set using the specified filter function."""
result: NamedPoints = {}

@ -137,7 +137,7 @@ def analyze_powershell_coverage(
def prune_invalid_filenames(
args: CoverageAnalyzeTargetsGenerateConfig,
results: t.Dict[str, t.Any],
results: dict[str, t.Any],
collection_search_re: t.Optional[t.Pattern] = None,
) -> None:
"""Remove invalid filenames from the given result set."""

@ -67,7 +67,7 @@ def command_coverage_analyze_targets_missing(args: CoverageAnalyzeTargetsMissing
def find_gaps(
from_data: IndexedPoints,
from_index: t.List[str],
from_index: list[str],
to_data: IndexedPoints,
target_indexes: TargetIndexes,
only_exists: bool,
@ -92,9 +92,9 @@ def find_gaps(
def find_missing(
from_data: IndexedPoints,
from_index: t.List[str],
from_index: list[str],
to_data: IndexedPoints,
to_index: t.List[str],
to_index: list[str],
target_indexes: TargetIndexes,
only_exists: bool,
) -> IndexedPoints:

@ -1,6 +1,7 @@
"""Combine code coverage files."""
from __future__ import annotations
import collections.abc as c
import os
import json
import typing as t
@ -69,7 +70,7 @@ def command_coverage_combine(args: CoverageCombineConfig) -> None:
combine_coverage_files(args, host_state)
def combine_coverage_files(args: CoverageCombineConfig, host_state: HostState) -> t.List[str]:
def combine_coverage_files(args: CoverageCombineConfig, host_state: HostState) -> list[str]:
"""Combine coverage and return a list of the resulting files."""
if args.delegate:
if isinstance(args.controller, (DockerConfig, RemoteConfig)):
@ -81,7 +82,7 @@ def combine_coverage_files(args: CoverageCombineConfig, host_state: HostState) -
pairs = [(path, os.path.relpath(path, data_context().content.root)) for path in exported_paths]
def coverage_callback(files: t.List[t.Tuple[str, str]]) -> None:
def coverage_callback(files: list[tuple[str, str]]) -> None:
"""Add the coverage files to the payload file list."""
display.info('Including %d exported coverage file(s) in payload.' % len(pairs), verbosity=1)
files.extend(pairs)
@ -107,7 +108,7 @@ class ExportedCoverageDataNotFound(ApplicationError):
'The exported files must be in the directory: %s/' % ResultType.COVERAGE.relative_path)
def _command_coverage_combine_python(args: CoverageCombineConfig, host_state: HostState) -> t.List[str]:
def _command_coverage_combine_python(args: CoverageCombineConfig, host_state: HostState) -> list[str]:
"""Combine Python coverage files and return a list of the output files."""
coverage = initialize_coverage(args, host_state)
@ -188,7 +189,7 @@ def _command_coverage_combine_python(args: CoverageCombineConfig, host_state: Ho
return sorted(output_files)
def _command_coverage_combine_powershell(args: CoverageCombineConfig) -> t.List[str]:
def _command_coverage_combine_powershell(args: CoverageCombineConfig) -> list[str]:
"""Combine PowerShell coverage files and return a list of the output files."""
coverage_files = get_powershell_coverage_files()
@ -262,7 +263,7 @@ def _command_coverage_combine_powershell(args: CoverageCombineConfig) -> t.List[
return sorted(output_files)
def _get_coverage_targets(args: CoverageCombineConfig, walk_func: t.Callable) -> t.List[t.Tuple[str, int]]:
def _get_coverage_targets(args: CoverageCombineConfig, walk_func: c.Callable) -> list[tuple[str, int]]:
"""Return a list of files to cover and the number of lines in each file, using the given function as the source of the files."""
sources = []
@ -284,7 +285,7 @@ def _get_coverage_targets(args: CoverageCombineConfig, walk_func: t.Callable) ->
def _build_stub_groups(
args: CoverageCombineConfig,
sources: list[tuple[str, int]],
default_stub_value: t.Callable[[list[str]], dict[str, TValue]],
default_stub_value: c.Callable[[list[str]], dict[str, TValue]],
) -> dict[str, dict[str, TValue]]:
"""
Split the given list of sources with line counts into groups, maintaining a maximum line count for each group.
@ -353,7 +354,7 @@ class CoverageCombineConfig(CoverageConfig):
def __init__(self, args: t.Any) -> None:
super().__init__(args)
self.group_by: t.FrozenSet[str] = frozenset(args.group_by) if args.group_by else frozenset()
self.group_by: frozenset[str] = frozenset(args.group_by) if args.group_by else frozenset()
self.all: bool = args.all
self.stub: bool = args.stub

@ -81,7 +81,7 @@ def _generate_powershell_output_report(args: CoverageReportConfig, coverage_file
continue
stmts = len(hit_info)
miss = len([c for c in hit_info.values() if c == 0])
miss = len([hit for hit in hit_info.values() if hit == 0])
name_padding = max(name_padding, len(filename) + 3)

@ -3,7 +3,6 @@ from __future__ import annotations
import os
import time
import typing as t
from xml.etree.ElementTree import (
Comment,
@ -76,7 +75,7 @@ def _generate_powershell_xml(coverage_file: str) -> Element:
content_root = data_context().content.root
is_ansible = data_context().content.is_ansible
packages: t.Dict[str, t.Dict[str, t.Dict[str, int]]] = {}
packages: dict[str, dict[str, dict[str, int]]] = {}
for path, results in coverage_info.items():
filename = os.path.splitext(os.path.basename(path))[0]
@ -131,7 +130,7 @@ def _generate_powershell_xml(coverage_file: str) -> Element:
return elem_coverage
def _add_cobertura_package(packages: Element, package_name: str, package_data: t.Dict[str, t.Dict[str, int]]) -> t.Tuple[int, int]:
def _add_cobertura_package(packages: Element, package_name: str, package_data: dict[str, dict[str, int]]) -> tuple[int, int]:
"""Add a package element to the given packages element."""
elem_package = SubElement(packages, 'package')
elem_classes = SubElement(elem_package, 'classes')

@ -144,7 +144,7 @@ def set_timeout(args: EnvConfig) -> None:
os.remove(TIMEOUT_PATH)
def show_dict(data: t.Dict[str, t.Any], verbose: t.Dict[str, int], root_verbosity: int = 0, path: t.Optional[t.List[str]] = None) -> None:
def show_dict(data: dict[str, t.Any], verbose: dict[str, int], root_verbosity: int = 0, path: t.Optional[list[str]] = None) -> None:
"""Show a dict with varying levels of verbosity."""
path = path if path else []
@ -166,7 +166,7 @@ def show_dict(data: t.Dict[str, t.Any], verbose: t.Dict[str, int], root_verbosit
display.info(indent + '%s: %s' % (key, value), verbosity=verbosity)
def get_docker_details(args: EnvConfig) -> t.Dict[str, t.Any]:
def get_docker_details(args: EnvConfig) -> dict[str, t.Any]:
"""Return details about docker."""
docker = get_docker_command()

@ -1,6 +1,7 @@
"""Ansible integration test infrastructure."""
from __future__ import annotations
import collections.abc as c
import contextlib
import datetime
import json
@ -130,11 +131,11 @@ from .coverage import (
THostProfile = t.TypeVar('THostProfile', bound=HostProfile)
def generate_dependency_map(integration_targets: t.List[IntegrationTarget]) -> t.Dict[str, t.Set[IntegrationTarget]]:
def generate_dependency_map(integration_targets: list[IntegrationTarget]) -> dict[str, set[IntegrationTarget]]:
"""Analyze the given list of integration test targets and return a dictionary expressing target names and the targets on which they depend."""
targets_dict = dict((target.name, target) for target in integration_targets)
target_dependencies = analyze_integration_target_dependencies(integration_targets)
dependency_map: t.Dict[str, t.Set[IntegrationTarget]] = {}
dependency_map: dict[str, set[IntegrationTarget]] = {}
invalid_targets = set()
@ -157,9 +158,9 @@ def generate_dependency_map(integration_targets: t.List[IntegrationTarget]) -> t
return dependency_map
def get_files_needed(target_dependencies: t.List[IntegrationTarget]) -> t.List[str]:
def get_files_needed(target_dependencies: list[IntegrationTarget]) -> list[str]:
"""Return a list of files needed by the given list of target dependencies."""
files_needed: t.List[str] = []
files_needed: list[str] = []
for target_dependency in target_dependencies:
files_needed += target_dependency.needs_file
@ -198,7 +199,7 @@ def get_inventory_absolute_path(args: IntegrationConfig, target: InventoryConfig
def get_inventory_relative_path(args: IntegrationConfig) -> str:
"""Return the inventory path used for the given integration configuration relative to the content root."""
inventory_names: t.Dict[t.Type[IntegrationConfig], str] = {
inventory_names: dict[t.Type[IntegrationConfig], str] = {
PosixIntegrationConfig: 'inventory',
WindowsIntegrationConfig: 'inventory.winrm',
NetworkIntegrationConfig: 'inventory.networking',
@ -212,7 +213,7 @@ def delegate_inventory(args: IntegrationConfig, inventory_path_src: str) -> None
if isinstance(args, PosixIntegrationConfig):
return
def inventory_callback(files: t.List[t.Tuple[str, str]]) -> None:
def inventory_callback(files: list[tuple[str, str]]) -> None:
"""
Add the inventory file to the payload file list.
This will preserve the file during delegation even if it is ignored or is outside the content and install roots.
@ -241,7 +242,7 @@ def integration_test_environment(
args: IntegrationConfig,
target: IntegrationTarget,
inventory_path_src: str,
) -> t.Iterator[IntegrationEnvironment]:
) -> c.Iterator[IntegrationEnvironment]:
"""Context manager that prepares the integration test environment and cleans it up."""
ansible_config_src = args.get_ansible_config()
ansible_config_relative = os.path.join(data_context().content.integration_path, '%s.cfg' % args.command)
@ -344,7 +345,7 @@ def integration_test_config_file(
args: IntegrationConfig,
env_config: CloudEnvironmentConfig,
integration_dir: str,
) -> t.Iterator[t.Optional[str]]:
) -> c.Iterator[t.Optional[str]]:
"""Context manager that provides a config file for integration tests, if needed."""
if not env_config:
yield None
@ -398,11 +399,11 @@ def create_inventory(
def command_integration_filtered(
args: IntegrationConfig,
host_state: HostState,
targets: t.Tuple[IntegrationTarget, ...],
all_targets: t.Tuple[IntegrationTarget, ...],
targets: tuple[IntegrationTarget, ...],
all_targets: tuple[IntegrationTarget, ...],
inventory_path: str,
pre_target: t.Optional[t.Callable[[IntegrationTarget], None]] = None,
post_target: t.Optional[t.Callable[[IntegrationTarget], None]] = None,
pre_target: t.Optional[c.Callable[[IntegrationTarget], None]] = None,
post_target: t.Optional[c.Callable[[IntegrationTarget], None]] = None,
):
"""Run integration tests for the specified targets."""
found = False
@ -413,7 +414,7 @@ def command_integration_filtered(
all_targets_dict = dict((target.name, target) for target in all_targets)
setup_errors = []
setup_targets_executed: t.Set[str] = set()
setup_targets_executed: set[str] = set()
for target in all_targets:
for setup_target in target.setup_once + target.setup_always:
@ -745,9 +746,9 @@ def run_setup_targets(
args: IntegrationConfig,
host_state: HostState,
test_dir: str,
target_names: t.Sequence[str],
targets_dict: t.Dict[str, IntegrationTarget],
targets_executed: t.Set[str],
target_names: c.Sequence[str],
targets_dict: dict[str, IntegrationTarget],
targets_executed: set[str],
inventory_path: str,
coverage_manager: CoverageManager,
always: bool,
@ -780,7 +781,7 @@ def integration_environment(
ansible_config: t.Optional[str],
env_config: t.Optional[CloudEnvironmentConfig],
test_env: IntegrationEnvironment,
) -> t.Dict[str, str]:
) -> dict[str, str]:
"""Return a dictionary of environment variables to use when running the given integration test target."""
env = ansible_environment(args, ansible_config=ansible_config)
@ -839,7 +840,7 @@ class IntegrationCache(CommonCache):
return self.get('dependency_map', lambda: generate_dependency_map(self.integration_targets))
def filter_profiles_for_target(args: IntegrationConfig, profiles: t.List[THostProfile], target: IntegrationTarget) -> t.List[THostProfile]:
def filter_profiles_for_target(args: IntegrationConfig, profiles: list[THostProfile], target: IntegrationTarget) -> list[THostProfile]:
"""Return a list of profiles after applying target filters."""
if target.target_type == IntegrationTargetType.CONTROLLER:
profile_filter = get_target_filter(args, [args.controller], True)
@ -853,7 +854,7 @@ def filter_profiles_for_target(args: IntegrationConfig, profiles: t.List[THostPr
return profiles
def get_integration_filter(args: IntegrationConfig, targets: t.List[IntegrationTarget]) -> t.Set[str]:
def get_integration_filter(args: IntegrationConfig, targets: list[IntegrationTarget]) -> set[str]:
"""Return a list of test targets to skip based on the host(s) that will be used to run the specified test targets."""
invalid_targets = sorted(target.name for target in targets if target.target_type not in (IntegrationTargetType.CONTROLLER, IntegrationTargetType.TARGET))
@ -881,7 +882,7 @@ If necessary, context can be controlled by adding entries to the "aliases" file
else:
display.warning(f'Unable to determine context for the following test targets, they will be run on the target host: {", ".join(invalid_targets)}')
exclude: t.Set[str] = set()
exclude: set[str] = set()
controller_targets = [target for target in targets if target.target_type == IntegrationTargetType.CONTROLLER]
target_targets = [target for target in targets if target.target_type == IntegrationTargetType.TARGET]
@ -896,8 +897,8 @@ If necessary, context can be controlled by adding entries to the "aliases" file
def command_integration_filter(args: TIntegrationConfig,
targets: t.Iterable[TIntegrationTarget],
) -> t.Tuple[HostState, t.Tuple[TIntegrationTarget, ...]]:
targets: c.Iterable[TIntegrationTarget],
) -> tuple[HostState, tuple[TIntegrationTarget, ...]]:
"""Filter the given integration test targets."""
targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
changes = get_changes_filter(args)
@ -935,7 +936,7 @@ def command_integration_filter(args: TIntegrationConfig,
vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
if os.path.exists(vars_file_src):
def integration_config_callback(files: t.List[t.Tuple[str, str]]) -> None:
def integration_config_callback(files: list[tuple[str, str]]) -> None:
"""
Add the integration config vars file to the payload file list.
This will preserve the file during delegation even if the file is ignored by source control.

@ -55,12 +55,12 @@ from ....docker_util import (
@cache
def get_cloud_plugins() -> t.Tuple[t.Dict[str, t.Type[CloudProvider]], t.Dict[str, t.Type[CloudEnvironment]]]:
def get_cloud_plugins() -> tuple[dict[str, t.Type[CloudProvider]], dict[str, t.Type[CloudEnvironment]]]:
"""Import cloud plugins and load them into the plugin dictionaries."""
import_plugins('commands/integration/cloud')
providers: t.Dict[str, t.Type[CloudProvider]] = {}
environments: t.Dict[str, t.Type[CloudEnvironment]] = {}
providers: dict[str, t.Type[CloudProvider]] = {}
environments: dict[str, t.Type[CloudEnvironment]] = {}
load_plugins(CloudProvider, providers)
load_plugins(CloudEnvironment, environments)
@ -69,18 +69,18 @@ def get_cloud_plugins() -> t.Tuple[t.Dict[str, t.Type[CloudProvider]], t.Dict[st
@cache
def get_provider_plugins() -> t.Dict[str, t.Type[CloudProvider]]:
def get_provider_plugins() -> dict[str, t.Type[CloudProvider]]:
"""Return a dictionary of the available cloud provider plugins."""
return get_cloud_plugins()[0]
@cache
def get_environment_plugins() -> t.Dict[str, t.Type[CloudEnvironment]]:
def get_environment_plugins() -> dict[str, t.Type[CloudEnvironment]]:
"""Return a dictionary of the available cloud environment plugins."""
return get_cloud_plugins()[1]
def get_cloud_platforms(args: TestConfig, targets: t.Optional[t.Tuple[IntegrationTarget, ...]] = None) -> t.List[str]:
def get_cloud_platforms(args: TestConfig, targets: t.Optional[tuple[IntegrationTarget, ...]] = None) -> list[str]:
"""Return cloud platform names for the specified targets."""
if isinstance(args, IntegrationConfig):
if args.list_targets:
@ -114,7 +114,7 @@ def get_cloud_platform(target: IntegrationTarget) -> t.Optional[str]:
raise ApplicationError('Target %s aliases contains multiple cloud platforms: %s' % (target.name, ', '.join(sorted(cloud_platforms))))
def get_cloud_providers(args: IntegrationConfig, targets: t.Optional[t.Tuple[IntegrationTarget, ...]] = None) -> t.List[CloudProvider]:
def get_cloud_providers(args: IntegrationConfig, targets: t.Optional[tuple[IntegrationTarget, ...]] = None) -> list[CloudProvider]:
"""Return a list of cloud providers for the given targets."""
return [get_provider_plugins()[p](args) for p in get_cloud_platforms(args, targets)]
@ -129,12 +129,12 @@ def get_cloud_environment(args: IntegrationConfig, target: IntegrationTarget) ->
return get_environment_plugins()[cloud_platform](args)
def cloud_filter(args: IntegrationConfig, targets: t.Tuple[IntegrationTarget, ...]) -> t.List[str]:
def cloud_filter(args: IntegrationConfig, targets: tuple[IntegrationTarget, ...]) -> list[str]:
"""Return a list of target names to exclude based on the given targets."""
if args.metadata.cloud_config is not None:
return [] # cloud filter already performed prior to delegation
exclude: t.List[str] = []
exclude: list[str] = []
for provider in get_cloud_providers(args, targets):
provider.filter(targets, exclude)
@ -142,7 +142,7 @@ def cloud_filter(args: IntegrationConfig, targets: t.Tuple[IntegrationTarget, ..
return exclude
def cloud_init(args: IntegrationConfig, targets: t.Tuple[IntegrationTarget, ...]) -> None:
def cloud_init(args: IntegrationConfig, targets: tuple[IntegrationTarget, ...]) -> None:
"""Initialize cloud plugins for the given targets."""
if args.metadata.cloud_config is not None:
return # cloud configuration already established prior to delegation
@ -189,7 +189,7 @@ class CloudBase(metaclass=abc.ABCMeta):
self.args = args
self.platform = self.__module__.rsplit('.', 1)[-1]
def config_callback(files: t.List[t.Tuple[str, str]]) -> None:
def config_callback(files: list[tuple[str, str]]) -> None:
"""Add the config file to the payload file list."""
if self.platform not in self.args.metadata.cloud_config:
return # platform was initialized, but not used -- such as being skipped due to all tests being disabled
@ -270,7 +270,7 @@ class CloudProvider(CloudBase):
self.uses_config = False
self.uses_docker = False
def filter(self, targets: t.Tuple[IntegrationTarget, ...], exclude: t.List[str]) -> None:
def filter(self, targets: tuple[IntegrationTarget, ...], exclude: list[str]) -> None:
"""Filter out the cloud tests when the necessary config and resources are not available."""
if not self.uses_docker and not self.uses_config:
return
@ -345,7 +345,7 @@ class CloudProvider(CloudBase):
return config
@staticmethod
def _populate_config_template(template: str, values: t.Dict[str, str]) -> str:
def _populate_config_template(template: str, values: dict[str, str]) -> str:
"""Populate and return the given template with the provided values."""
for key in sorted(values):
value = values[key]
@ -378,10 +378,10 @@ class CloudEnvironment(CloudBase):
class CloudEnvironmentConfig:
"""Configuration for the environment."""
def __init__(self,
env_vars: t.Optional[t.Dict[str, str]] = None,
ansible_vars: t.Optional[t.Dict[str, t.Any]] = None,
module_defaults: t.Optional[t.Dict[str, t.Dict[str, t.Any]]] = None,
callback_plugins: t.Optional[t.List[str]] = None,
env_vars: t.Optional[dict[str, str]] = None,
ansible_vars: t.Optional[dict[str, t.Any]] = None,
module_defaults: t.Optional[dict[str, dict[str, t.Any]]] = None,
callback_plugins: t.Optional[list[str]] = None,
):
self.env_vars = env_vars
self.ansible_vars = ansible_vars

@ -42,7 +42,7 @@ class AwsCloudProvider(CloudProvider):
self.uses_config = True
def filter(self, targets: t.Tuple[IntegrationTarget, ...], exclude: t.List[str]) -> None:
def filter(self, targets: tuple[IntegrationTarget, ...], exclude: list[str]) -> None:
"""Filter out the cloud tests when the necessary config and resources are not available."""
aci = self._create_ansible_core_ci()
@ -102,7 +102,7 @@ class AwsCloudEnvironment(CloudEnvironment):
parser = configparser.ConfigParser()
parser.read(self.config_path)
ansible_vars: t.Dict[str, t.Any] = dict(
ansible_vars: dict[str, t.Any] = dict(
resource_prefix=self.resource_prefix,
tiny_prefix=uuid.uuid4().hex[0:12]
)

@ -38,7 +38,7 @@ class AzureCloudProvider(CloudProvider):
self.uses_config = True
def filter(self, targets: t.Tuple[IntegrationTarget, ...], exclude: t.List[str]) -> None:
def filter(self, targets: tuple[IntegrationTarget, ...], exclude: list[str]) -> None:
"""Filter out the cloud tests when the necessary config and resources are not available."""
aci = self._create_ansible_core_ci()
@ -127,7 +127,7 @@ class AzureCloudEnvironment(CloudEnvironment):
display.notice('If %s failed due to permissions, the test policy may need to be updated.' % target.name)
def get_config(config_path: str) -> t.Dict[str, str]:
def get_config(config_path: str) -> dict[str, str]:
"""Return a configuration dictionary parsed from the given configuration path."""
parser = configparser.ConfigParser()
parser.read(config_path)

@ -129,7 +129,7 @@ class CsCloudProvider(CloudProvider):
self._write_config(config)
def _get_credentials(self, container_name: str) -> t.Dict[str, t.Any]:
def _get_credentials(self, container_name: str) -> dict[str, t.Any]:
"""Wait for the CloudStack simulator to return credentials."""
def check(value):
"""Return True if the given configuration is valid JSON, otherwise return False."""

@ -2,7 +2,6 @@
from __future__ import annotations
import configparser
import typing as t
from ....util import (
display,
@ -35,7 +34,7 @@ class HcloudCloudProvider(CloudProvider):
self.uses_config = True
def filter(self, targets: t.Tuple[IntegrationTarget, ...], exclude: t.List[str]) -> None:
def filter(self, targets: tuple[IntegrationTarget, ...], exclude: list[str]) -> None:
"""Filter out the cloud tests when the necessary config and resources are not available."""
aci = self._create_ansible_core_ci()

@ -87,7 +87,7 @@ class CoverageHandler(t.Generic[THostConfig], metaclass=abc.ABCMeta):
self.inventory_path = inventory_path
self.profiles = self.get_profiles()
def get_profiles(self) -> t.List[HostProfile]:
def get_profiles(self) -> list[HostProfile]:
"""Return a list of profiles relevant for this handler."""
profile_type = get_generic_type(type(self), HostConfig)
profiles = [profile for profile in self.host_state.target_profiles if isinstance(profile.config, profile_type)]
@ -112,10 +112,10 @@ class CoverageHandler(t.Generic[THostConfig], metaclass=abc.ABCMeta):
"""Create inventory, if needed."""
@abc.abstractmethod
def get_environment(self, target_name: str, aliases: t.Tuple[str, ...]) -> t.Dict[str, str]:
def get_environment(self, target_name: str, aliases: tuple[str, ...]) -> dict[str, str]:
"""Return a dictionary of environment variables for running tests with code coverage."""
def run_playbook(self, playbook: str, variables: t.Dict[str, str]) -> None:
def run_playbook(self, playbook: str, variables: dict[str, str]) -> None:
"""Run the specified playbook using the current inventory."""
self.create_inventory()
run_playbook(self.args, self.inventory_path, playbook, capture=False, variables=variables)
@ -129,7 +129,7 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]):
# Common temporary directory used on all POSIX hosts that will be created world writeable.
self.common_temp_path = f'/tmp/ansible-test-{generate_name()}'
def get_profiles(self) -> t.List[HostProfile]:
def get_profiles(self) -> list[HostProfile]:
"""Return a list of profiles relevant for this handler."""
profiles = super().get_profiles()
profiles = [profile for profile in profiles if not isinstance(profile, ControllerProfile) or
@ -221,7 +221,7 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]):
self.run_playbook('posix_coverage_teardown.yml', self.get_playbook_variables())
def get_environment(self, target_name: str, aliases: t.Tuple[str, ...]) -> t.Dict[str, str]:
def get_environment(self, target_name: str, aliases: tuple[str, ...]) -> dict[str, str]:
"""Return a dictionary of environment variables for running tests with code coverage."""
# Enable code coverage collection on Ansible modules (both local and remote).
@ -247,7 +247,7 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]):
"""Create inventory."""
create_posix_inventory(self.args, self.inventory_path, self.host_state.target_profiles)
def get_playbook_variables(self) -> t.Dict[str, str]:
def get_playbook_variables(self) -> dict[str, str]:
"""Return a dictionary of variables for setup and teardown of POSIX coverage."""
return dict(
common_temp_dir=self.common_temp_path,
@ -306,7 +306,7 @@ class WindowsCoverageHandler(CoverageHandler[WindowsConfig]):
coverage_zip.extract(item, ResultType.COVERAGE.path)
def get_environment(self, target_name: str, aliases: t.Tuple[str, ...]) -> t.Dict[str, str]:
def get_environment(self, target_name: str, aliases: tuple[str, ...]) -> dict[str, str]:
"""Return a dictionary of environment variables for running tests with code coverage."""
# Include the command, target and platform marker so the remote host can create a filename with that info.
@ -324,7 +324,7 @@ class WindowsCoverageHandler(CoverageHandler[WindowsConfig]):
"""Create inventory."""
create_windows_inventory(self.args, self.inventory_path, self.host_state.target_profiles)
def get_playbook_variables(self) -> t.Dict[str, str]:
def get_playbook_variables(self) -> dict[str, str]:
"""Return a dictionary of variables for setup and teardown of Windows coverage."""
return dict(
remote_temp_path=self.remote_temp_path,
@ -364,7 +364,7 @@ class CoverageManager:
for handler in self.handlers:
handler.teardown()
def get_environment(self, target_name: str, aliases: t.Tuple[str, ...]) -> t.Dict[str, str]:
def get_environment(self, target_name: str, aliases: tuple[str, ...]) -> dict[str, str]:
"""Return a dictionary of environment variables for running tests with code coverage."""
if not self.args.coverage or 'non_local/' in aliases:
return {}
@ -378,7 +378,7 @@ class CoverageManager:
@cache
def get_config_handler_type_map() -> t.Dict[t.Type[HostConfig], t.Type[CoverageHandler]]:
def get_config_handler_type_map() -> dict[t.Type[HostConfig], t.Type[CoverageHandler]]:
"""Create and return a mapping of HostConfig types to CoverageHandler types."""
return get_type_map(CoverageHandler, HostConfig)

@ -47,7 +47,7 @@ THostProfile = t.TypeVar('THostProfile', bound=HostProfile)
class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
"""Base class for target filters."""
def __init__(self, args: IntegrationConfig, configs: t.List[THostConfig], controller: bool) -> None:
def __init__(self, args: IntegrationConfig, configs: list[THostConfig], controller: bool) -> None:
self.args = args
self.configs = configs
self.controller = controller
@ -70,9 +70,9 @@ class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
self,
skip: str,
reason: str,
targets: t.List[IntegrationTarget],
exclude: t.Set[str],
override: t.Optional[t.List[str]] = None,
targets: list[IntegrationTarget],
exclude: set[str],
override: t.Optional[list[str]] = None,
) -> None:
"""Apply the specified skip rule to the given targets by updating the provided exclude list."""
if skip.startswith('skip/'):
@ -82,7 +82,7 @@ class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
self.apply_skip(f'"{skip}"', reason, skipped, exclude)
def apply_skip(self, marked: str, reason: str, skipped: t.List[str], exclude: t.Set[str]) -> None:
def apply_skip(self, marked: str, reason: str, skipped: list[str], exclude: set[str]) -> None:
"""Apply the provided skips to the given exclude list."""
if not skipped:
return
@ -90,12 +90,12 @@ class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
exclude.update(skipped)
display.warning(f'Excluding {self.host_type} tests marked {marked} {reason}: {", ".join(skipped)}')
def filter_profiles(self, profiles: t.List[THostProfile], target: IntegrationTarget) -> t.List[THostProfile]:
def filter_profiles(self, profiles: list[THostProfile], target: IntegrationTarget) -> list[THostProfile]:
"""Filter the list of profiles, returning only those which are not skipped for the given target."""
del target
return profiles
def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None:
def filter_targets(self, targets: list[IntegrationTarget], exclude: set[str]) -> None:
"""Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
if self.controller and self.args.host_settings.controller_fallback and targets:
affected_targets = [target.name for target in targets]
@ -138,7 +138,7 @@ class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
class PosixTargetFilter(TargetFilter[TPosixConfig]):
"""Target filter for POSIX hosts."""
def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None:
def filter_targets(self, targets: list[IntegrationTarget], exclude: set[str]) -> None:
"""Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
super().filter_targets(targets, exclude)
@ -151,7 +151,7 @@ class PosixTargetFilter(TargetFilter[TPosixConfig]):
class DockerTargetFilter(PosixTargetFilter[DockerConfig]):
"""Target filter for docker hosts."""
def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None:
def filter_targets(self, targets: list[IntegrationTarget], exclude: set[str]) -> None:
"""Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
super().filter_targets(targets, exclude)
@ -167,21 +167,21 @@ class PosixSshTargetFilter(PosixTargetFilter[PosixSshConfig]):
class RemoteTargetFilter(TargetFilter[TRemoteConfig]):
"""Target filter for remote Ansible Core CI managed hosts."""
def filter_profiles(self, profiles: t.List[THostProfile], target: IntegrationTarget) -> t.List[THostProfile]:
def filter_profiles(self, profiles: list[THostProfile], target: IntegrationTarget) -> list[THostProfile]:
"""Filter the list of profiles, returning only those which are not skipped for the given target."""
profiles = super().filter_profiles(profiles, target)
skipped_profiles = [profile for profile in profiles if any(skip in target.skips for skip in get_remote_skip_aliases(profile.config))]
if skipped_profiles:
configs: t.List[TRemoteConfig] = [profile.config for profile in skipped_profiles]
configs: list[TRemoteConfig] = [profile.config for profile in skipped_profiles]
display.warning(f'Excluding skipped hosts from inventory: {", ".join(config.name for config in configs)}')
profiles = [profile for profile in profiles if profile not in skipped_profiles]
return profiles
def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None:
def filter_targets(self, targets: list[IntegrationTarget], exclude: set[str]) -> None:
"""Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
super().filter_targets(targets, exclude)
@ -224,7 +224,7 @@ class NetworkInventoryTargetFilter(TargetFilter[NetworkInventoryConfig]):
class OriginTargetFilter(PosixTargetFilter[OriginConfig]):
"""Target filter for localhost."""
def filter_targets(self, targets: t.List[IntegrationTarget], exclude: t.Set[str]) -> None:
def filter_targets(self, targets: list[IntegrationTarget], exclude: set[str]) -> None:
"""Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
super().filter_targets(targets, exclude)
@ -235,12 +235,12 @@ class OriginTargetFilter(PosixTargetFilter[OriginConfig]):
@cache
def get_host_target_type_map() -> t.Dict[t.Type[HostConfig], t.Type[TargetFilter]]:
def get_host_target_type_map() -> dict[t.Type[HostConfig], t.Type[TargetFilter]]:
"""Create and return a mapping of HostConfig types to TargetFilter types."""
return get_type_map(TargetFilter, HostConfig)
def get_target_filter(args: IntegrationConfig, configs: t.List[HostConfig], controller: bool) -> TargetFilter:
def get_target_filter(args: IntegrationConfig, configs: list[HostConfig], controller: bool) -> TargetFilter:
"""Return an integration test target filter instance for the provided host configurations."""
target_type = type(configs[0])
@ -254,12 +254,12 @@ def get_target_filter(args: IntegrationConfig, configs: t.List[HostConfig], cont
return filter_instance
def get_remote_skip_aliases(config: RemoteConfig) -> t.Dict[str, str]:
def get_remote_skip_aliases(config: RemoteConfig) -> dict[str, str]:
"""Return a dictionary of skip aliases and the reason why they apply."""
return get_platform_skip_aliases(config.platform, config.version, config.arch)
def get_platform_skip_aliases(platform: str, version: str, arch: t.Optional[str]) -> t.Dict[str, str]:
def get_platform_skip_aliases(platform: str, version: str, arch: t.Optional[str]) -> dict[str, str]:
"""Return a dictionary of skip aliases and the reason why they apply."""
skips = {
f'skip/{platform}': platform,

@ -9,6 +9,7 @@ import os
import pathlib
import re
import collections
import collections.abc as c
import typing as t
from ...constants import (
@ -128,15 +129,15 @@ DOCUMENTABLE_PLUGINS = (
'become', 'cache', 'callback', 'cliconf', 'connection', 'httpapi', 'inventory', 'lookup', 'netconf', 'modules', 'shell', 'strategy', 'vars'
)
created_venvs: t.List[str] = []
created_venvs: list[str] = []
def command_sanity(args: SanityConfig) -> None:
"""Run sanity tests."""
create_result_directories(args)
target_configs = t.cast(t.List[PosixConfig], args.targets)
target_versions: t.Dict[str, PosixConfig] = {target.python.version: target for target in target_configs}
target_configs = t.cast(list[PosixConfig], args.targets)
target_versions: dict[str, PosixConfig] = {target.python.version: target for target in target_configs}
handle_layout_messages(data_context().content.sanity_messages)
@ -172,7 +173,7 @@ def command_sanity(args: SanityConfig) -> None:
if disabled:
display.warning('Skipping tests disabled by default without --allow-disabled: %s' % ', '.join(sorted(disabled)))
target_profiles: t.Dict[str, PosixProfile] = {profile.config.python.version: profile for profile in host_state.targets(PosixProfile)}
target_profiles: dict[str, PosixProfile] = {profile.config.python.version: profile for profile in host_state.targets(PosixProfile)}
total = 0
failed = []
@ -307,7 +308,7 @@ def command_sanity(args: SanityConfig) -> None:
@cache
def collect_code_smell_tests() -> t.Tuple[SanityTest, ...]:
def collect_code_smell_tests() -> tuple[SanityTest, ...]:
"""Return a tuple of available code smell sanity tests."""
paths = glob.glob(os.path.join(SANITY_ROOT, 'code-smell', '*.py'))
@ -339,19 +340,19 @@ class SanityIgnoreParser:
self.args = args
self.relative_path = os.path.join(data_context().content.sanity_path, file_name)
self.path = os.path.join(data_context().content.root, self.relative_path)
self.ignores: t.Dict[str, t.Dict[str, t.Dict[str, int]]] = collections.defaultdict(lambda: collections.defaultdict(dict))
self.skips: t.Dict[str, t.Dict[str, int]] = collections.defaultdict(lambda: collections.defaultdict(int))
self.parse_errors: t.List[t.Tuple[int, int, str]] = []
self.file_not_found_errors: t.List[t.Tuple[int, str]] = []
self.ignores: dict[str, dict[str, dict[str, int]]] = collections.defaultdict(lambda: collections.defaultdict(dict))
self.skips: dict[str, dict[str, int]] = collections.defaultdict(lambda: collections.defaultdict(int))
self.parse_errors: list[tuple[int, int, str]] = []
self.file_not_found_errors: list[tuple[int, str]] = []
lines = read_lines_without_comments(self.path, optional=True)
targets = SanityTargets.get_targets()
paths = set(target.path for target in targets)
tests_by_name: t.Dict[str, SanityTest] = {}
versioned_test_names: t.Set[str] = set()
unversioned_test_names: t.Dict[str, str] = {}
tests_by_name: dict[str, SanityTest] = {}
versioned_test_names: set[str] = set()
unversioned_test_names: dict[str, str] = {}
directories = paths_to_dirs(list(paths))
paths_by_test: t.Dict[str, t.Set[str]] = {}
paths_by_test: dict[str, set[str]] = {}
display.info('Read %d sanity test ignore line(s) for %s from: %s' % (len(lines), ansible_label, self.relative_path), verbosity=1)
@ -544,13 +545,13 @@ class SanityIgnoreProcessor:
self.parser = SanityIgnoreParser.load(args)
self.ignore_entries = self.parser.ignores.get(full_name, {})
self.skip_entries = self.parser.skips.get(full_name, {})
self.used_line_numbers: t.Set[int] = set()
self.used_line_numbers: set[int] = set()
def filter_skipped_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
def filter_skipped_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given targets, with any skipped paths filtered out."""
return sorted(target for target in targets if target.path not in self.skip_entries)
def process_errors(self, errors: t.List[SanityMessage], paths: t.List[str]) -> t.List[SanityMessage]:
def process_errors(self, errors: list[SanityMessage], paths: list[str]) -> list[SanityMessage]:
"""Return the given errors filtered for ignores and with any settings related errors included."""
errors = self.filter_messages(errors)
errors.extend(self.get_errors(paths))
@ -559,7 +560,7 @@ class SanityIgnoreProcessor:
return errors
def filter_messages(self, messages: t.List[SanityMessage]) -> t.List[SanityMessage]:
def filter_messages(self, messages: list[SanityMessage]) -> list[SanityMessage]:
"""Return a filtered list of the given messages using the entries that have been loaded."""
filtered = []
@ -581,13 +582,13 @@ class SanityIgnoreProcessor:
return filtered
def get_errors(self, paths: t.List[str]) -> t.List[SanityMessage]:
def get_errors(self, paths: list[str]) -> list[SanityMessage]:
"""Return error messages related to issues with the file."""
messages: t.List[SanityMessage] = []
messages: list[SanityMessage] = []
# unused errors
unused: t.List[t.Tuple[int, str, str]] = []
unused: list[tuple[int, str, str]] = []
if self.test.no_targets or self.test.all_targets:
# tests which do not accept a target list, or which use all targets, always return all possible errors, so all ignores can be checked
@ -633,7 +634,7 @@ class SanityFailure(TestFailure):
self,
test: str,
python_version: t.Optional[str] = None,
messages: t.Optional[t.Sequence[SanityMessage]] = None,
messages: t.Optional[c.Sequence[SanityMessage]] = None,
summary: t.Optional[str] = None,
) -> None:
super().__init__(COMMAND, test, python_version, messages, summary)
@ -645,19 +646,19 @@ class SanityMessage(TestMessage):
class SanityTargets:
"""Sanity test target information."""
def __init__(self, targets: t.Tuple[TestTarget, ...], include: t.Tuple[TestTarget, ...]) -> None:
def __init__(self, targets: tuple[TestTarget, ...], include: tuple[TestTarget, ...]) -> None:
self.targets = targets
self.include = include
@staticmethod
def create(include: t.List[str], exclude: t.List[str], require: t.List[str]) -> SanityTargets:
def create(include: list[str], exclude: list[str], require: list[str]) -> SanityTargets:
"""Create a SanityTargets instance from the given include, exclude and require lists."""
_targets = SanityTargets.get_targets()
_include = walk_internal_targets(_targets, include, exclude, require)
return SanityTargets(_targets, _include)
@staticmethod
def filter_and_inject_targets(test: SanityTest, targets: t.Iterable[TestTarget]) -> t.List[TestTarget]:
def filter_and_inject_targets(test: SanityTest, targets: c.Iterable[TestTarget]) -> list[TestTarget]:
"""Filter and inject targets based on test requirements and the given target list."""
test_targets = list(targets)
@ -680,7 +681,7 @@ class SanityTargets:
return test_targets
@staticmethod
def get_targets() -> t.Tuple[TestTarget, ...]:
def get_targets() -> tuple[TestTarget, ...]:
"""Return a tuple of sanity test targets. Uses a cached version when available."""
try:
return SanityTargets.get_targets.targets # type: ignore[attr-defined]
@ -709,7 +710,7 @@ class SanityTest(metaclass=abc.ABCMeta):
# Because these errors can be unpredictable they behave differently than normal error codes:
# * They are not reported by default. The `--enable-optional-errors` option must be used to display these errors.
# * They cannot be ignored. This is done to maintain the integrity of the ignore system.
self.optional_error_codes: t.Set[str] = set()
self.optional_error_codes: set[str] = set()
@property
def error_code(self) -> t.Optional[str]:
@ -752,18 +753,18 @@ class SanityTest(metaclass=abc.ABCMeta):
return False
@property
def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]:
def supported_python_versions(self) -> t.Optional[tuple[str, ...]]:
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return CONTROLLER_PYTHON_VERSIONS
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]: # pylint: disable=unused-argument
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]: # pylint: disable=unused-argument
"""Return the given list of test targets, filtered to include only those relevant for the test."""
if self.no_targets:
return []
raise NotImplementedError('Sanity test "%s" must implement "filter_targets" or set "no_targets" to True.' % self.name)
def filter_targets_by_version(self, args: SanityConfig, targets: t.List[TestTarget], python_version: str) -> t.List[TestTarget]:
def filter_targets_by_version(self, args: SanityConfig, targets: list[TestTarget], python_version: str) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test, taking into account the Python version."""
del python_version # python_version is not used here, but derived classes may make use of it
@ -785,7 +786,7 @@ class SanityTest(metaclass=abc.ABCMeta):
return targets
@staticmethod
def filter_remote_targets(targets: t.List[TestTarget]) -> t.List[TestTarget]:
def filter_remote_targets(targets: list[TestTarget]) -> list[TestTarget]:
"""Return a filtered list of the given targets, including only those that require support for remote-only Python versions."""
targets = [target for target in targets if (
is_subdir(target.path, data_context().content.module_path) or
@ -843,9 +844,9 @@ class SanityCodeSmellTest(SanitySingleVersion):
self.enabled = not self.config.get('disabled')
self.output: t.Optional[str] = self.config.get('output')
self.extensions: t.List[str] = self.config.get('extensions')
self.prefixes: t.List[str] = self.config.get('prefixes')
self.files: t.List[str] = self.config.get('files')
self.extensions: list[str] = self.config.get('extensions')
self.prefixes: list[str] = self.config.get('prefixes')
self.files: list[str] = self.config.get('files')
self.text: t.Optional[bool] = self.config.get('text')
self.ignore_self: bool = self.config.get('ignore_self')
self.minimum_python_version: t.Optional[str] = self.config.get('minimum_python_version')
@ -915,7 +916,7 @@ class SanityCodeSmellTest(SanitySingleVersion):
return self.__py2_compat
@property
def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]:
def supported_python_versions(self) -> t.Optional[tuple[str, ...]]:
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
versions = super().supported_python_versions
@ -927,7 +928,7 @@ class SanityCodeSmellTest(SanitySingleVersion):
return versions
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
if self.no_targets:
return []
@ -1038,7 +1039,7 @@ class SanityVersionNeutral(SanityTest, metaclass=abc.ABCMeta):
return SanityIgnoreProcessor(args, self, None)
@property
def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]:
def supported_python_versions(self) -> t.Optional[tuple[str, ...]]:
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return None
@ -1059,11 +1060,11 @@ class SanityMultipleVersion(SanityTest, metaclass=abc.ABCMeta):
return False
@property
def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]:
def supported_python_versions(self) -> t.Optional[tuple[str, ...]]:
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return SUPPORTED_PYTHON_VERSIONS
def filter_targets_by_version(self, args: SanityConfig, targets: t.List[TestTarget], python_version: str) -> t.List[TestTarget]:
def filter_targets_by_version(self, args: SanityConfig, targets: list[TestTarget], python_version: str) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test, taking into account the Python version."""
if not python_version:
raise Exception('python_version is required to filter multi-version tests')
@ -1084,10 +1085,10 @@ class SanityMultipleVersion(SanityTest, metaclass=abc.ABCMeta):
@cache
def sanity_get_tests() -> t.Tuple[SanityTest, ...]:
def sanity_get_tests() -> tuple[SanityTest, ...]:
"""Return a tuple of the available sanity tests."""
import_plugins('commands/sanity')
sanity_plugins: t.Dict[str, t.Type[SanityTest]] = {}
sanity_plugins: dict[str, t.Type[SanityTest]] = {}
load_plugins(SanityTest, sanity_plugins)
sanity_plugins.pop('sanity') # SanityCodeSmellTest
sanity_tests = tuple(plugin() for plugin in sanity_plugins.values() if data_context().content.is_ansible or not plugin.ansible_only)

@ -4,7 +4,6 @@ from __future__ import annotations
import collections
import os
import re
import typing as t
from . import (
DOCUMENTABLE_PLUGINS,
@ -49,7 +48,7 @@ from ...host_configs import (
class AnsibleDocTest(SanitySingleVersion):
"""Sanity test for ansible-doc."""
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
plugin_paths = [plugin_path for plugin_type, plugin_path in data_context().content.plugin_paths.items() if plugin_type in DOCUMENTABLE_PLUGINS]
@ -64,8 +63,8 @@ class AnsibleDocTest(SanitySingleVersion):
paths = [target.path for target in targets.include]
doc_targets: t.Dict[str, t.List[str]] = collections.defaultdict(list)
target_paths: t.Dict[str, t.Dict[str, str]] = collections.defaultdict(dict)
doc_targets: dict[str, list[str]] = collections.defaultdict(list)
target_paths: dict[str, dict[str, str]] = collections.defaultdict(dict)
remap_types = dict(
modules='module',
@ -84,7 +83,7 @@ class AnsibleDocTest(SanitySingleVersion):
target_paths[plugin_type][data_context().content.prefix + plugin_name] = plugin_file_path
env = ansible_environment(args, color=False)
error_messages: t.List[SanityMessage] = []
error_messages: list[SanityMessage] = []
for doc_type in sorted(doc_targets):
for format_option in [None, '--json']:

@ -2,7 +2,6 @@
from __future__ import annotations
import os
import typing as t
from . import (
SanityVersionNeutral,
@ -56,7 +55,7 @@ class BinSymlinksTest(SanityVersionNeutral):
bin_names = os.listdir(bin_root)
bin_paths = sorted(os.path.join(bin_root, path) for path in bin_names)
errors: t.List[t.Tuple[str, str]] = []
errors: list[tuple[str, str]] = []
symlink_map_path = os.path.relpath(symlink_map_full_path, data_context().content.root)

@ -2,7 +2,6 @@
from __future__ import annotations
import os
import typing as t
from . import (
SanityMultipleVersion,
@ -44,7 +43,7 @@ from ...host_configs import (
class CompileTest(SanityMultipleVersion):
"""Sanity test for proper python syntax."""
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]

@ -2,7 +2,6 @@
from __future__ import annotations
import os
import typing as t
from . import (
SanityFailure,
@ -39,7 +38,7 @@ class IgnoresTest(SanityVersionNeutral):
def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult:
sanity_ignore = SanityIgnoreParser.load(args)
messages: t.List[SanityMessage] = []
messages: list[SanityMessage] = []
# parse errors

@ -1,8 +1,8 @@
"""Sanity test for proper import exception handling."""
from __future__ import annotations
import collections.abc as c
import os
import typing as t
from . import (
SanityMultipleVersion,
@ -73,7 +73,7 @@ from ...venv import (
)
def _get_module_test(module_restrictions: bool) -> t.Callable[[str], bool]:
def _get_module_test(module_restrictions: bool) -> c.Callable[[str], bool]:
"""Create a predicate which tests whether a path can be used by modules or not."""
module_path = data_context().content.module_path
module_utils_path = data_context().content.module_utils_path
@ -84,7 +84,7 @@ def _get_module_test(module_restrictions: bool) -> t.Callable[[str], bool]:
class ImportTest(SanityMultipleVersion):
"""Sanity test for proper import exception handling."""
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
if data_context().content.is_ansible:
# all of ansible-core must pass the import test, not just plugins/modules

@ -106,8 +106,8 @@ class IntegrationAliasesTest(SanitySingleVersion):
def __init__(self):
super().__init__()
self._ci_config: t.Dict[str, t.Any] = {}
self._ci_test_groups: t.Dict[str, t.List[int]] = {}
self._ci_config: dict[str, t.Any] = {}
self._ci_test_groups: dict[str, list[int]] = {}
@property
def can_ignore(self) -> bool:
@ -119,7 +119,7 @@ class IntegrationAliasesTest(SanitySingleVersion):
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return True
def load_ci_config(self, python: PythonConfig) -> t.Dict[str, t.Any]:
def load_ci_config(self, python: PythonConfig) -> dict[str, t.Any]:
"""Load and return the CI YAML configuration."""
if not self._ci_config:
self._ci_config = self.load_yaml(python, self.CI_YML)
@ -127,10 +127,10 @@ class IntegrationAliasesTest(SanitySingleVersion):
return self._ci_config
@property
def ci_test_groups(self) -> t.Dict[str, t.List[int]]:
def ci_test_groups(self) -> dict[str, list[int]]:
"""Return a dictionary of CI test names and their group(s)."""
if not self._ci_test_groups:
test_groups: t.Dict[str, t.Set[int]] = {}
test_groups: dict[str, set[int]] = {}
for stage in self._ci_config['stages']:
for job in stage['jobs']:
@ -197,7 +197,7 @@ class IntegrationAliasesTest(SanitySingleVersion):
return alias
def load_yaml(self, python: PythonConfig, path: str) -> t.Dict[str, t.Any]:
def load_yaml(self, python: PythonConfig, path: str) -> dict[str, t.Any]:
"""Load the specified YAML file and return the contents."""
yaml_to_json_path = os.path.join(SANITY_ROOT, self.name, 'yaml_to_json.py')
return json.loads(raw_command([python.path, yaml_to_json_path], data=read_text_file(path), capture=True)[0])
@ -232,7 +232,7 @@ class IntegrationAliasesTest(SanitySingleVersion):
return SanitySuccess(self.name)
def check_posix_targets(self, args: SanityConfig) -> t.List[SanityMessage]:
def check_posix_targets(self, args: SanityConfig) -> list[SanityMessage]:
"""Check POSIX integration test targets and return messages with any issues found."""
posix_targets = tuple(walk_posix_integration_targets())
@ -321,10 +321,10 @@ class IntegrationAliasesTest(SanitySingleVersion):
def check_ci_group(
self,
targets: t.Tuple[CompletionTarget, ...],
targets: tuple[CompletionTarget, ...],
find: str,
find_incidental: t.Optional[t.List[str]] = None,
) -> t.List[SanityMessage]:
find_incidental: t.Optional[list[str]] = None,
) -> list[SanityMessage]:
"""Check the CI groups set in the provided targets and return a list of messages with any issues found."""
all_paths = set(target.path for target in targets)
supported_paths = set(target.path for target in filter_targets(targets, [find], directories=False, errors=False))
@ -399,7 +399,7 @@ class IntegrationAliasesTest(SanitySingleVersion):
results.comments += comments
results.labels.update(labels)
def format_comment(self, template: str, targets: t.List[str]) -> t.Optional[str]:
def format_comment(self, template: str, targets: list[str]) -> t.Optional[str]:
"""Format and return a comment based on the given template and targets, or None if there are no targets."""
if not targets:
return None
@ -419,5 +419,5 @@ class IntegrationAliasesTest(SanitySingleVersion):
@dataclasses.dataclass
class Results:
"""Check results."""
comments: t.List[str]
labels: t.Dict[str, bool]
comments: list[str]
labels: dict[str, bool]

@ -67,7 +67,7 @@ class MypyTest(SanityMultipleVersion):
'lib/ansible/module_utils/compat/_selectors2.py',
)
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and target.path not in self.vendored_paths and (
target.path.startswith('lib/ansible/') or target.path.startswith('test/lib/ansible_test/_internal/')
@ -111,7 +111,7 @@ class MypyTest(SanityMultipleVersion):
MyPyContext('modules', ['lib/ansible/modules/', 'lib/ansible/module_utils/'], remote_only_python_versions),
)
unfiltered_messages: t.List[SanityMessage] = []
unfiltered_messages: list[SanityMessage] = []
for context in contexts:
if python.version not in context.python_versions:
@ -174,8 +174,8 @@ class MypyTest(SanityMultipleVersion):
virtualenv_python: VirtualPythonConfig,
python: PythonConfig,
context: MyPyContext,
paths: t.List[str],
) -> t.List[SanityMessage]:
paths: list[str],
) -> list[SanityMessage]:
"""Run mypy tests for the specified context."""
context_paths = [path for path in paths if any(is_subdir(path, match_path) for match_path in context.paths)]
@ -260,5 +260,5 @@ class MypyTest(SanityMultipleVersion):
class MyPyContext:
"""Context details for a single run of mypy."""
name: str
paths: t.List[str]
python_versions: t.Tuple[str, ...]
paths: list[str]
python_versions: tuple[str, ...]

@ -48,7 +48,7 @@ class Pep8Test(SanitySingleVersion):
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'A100'
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]

@ -50,7 +50,7 @@ class PslintTest(SanityVersionNeutral):
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'AnsibleTest'
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] in ('.ps1', '.psm1', '.psd1')]

@ -1,6 +1,7 @@
"""Sanity test using pylint."""
from __future__ import annotations
import collections.abc as c
import itertools
import json
import os
@ -70,7 +71,7 @@ class PylintTest(SanitySingleVersion):
])
@property
def supported_python_versions(self) -> t.Optional[t.Tuple[str, ...]]:
def supported_python_versions(self) -> t.Optional[tuple[str, ...]]:
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return tuple(version for version in CONTROLLER_PYTHON_VERSIONS if str_to_version(version) < (3, 11))
@ -79,7 +80,7 @@ class PylintTest(SanitySingleVersion):
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'ansible-test'
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]
@ -107,13 +108,13 @@ class PylintTest(SanitySingleVersion):
contexts = []
remaining_paths = set(paths)
def add_context(available_paths: t.Set[str], context_name: str, context_filter: t.Callable[[str], bool]) -> None:
def add_context(available_paths: set[str], context_name: str, context_filter: c.Callable[[str], bool]) -> None:
"""Add the specified context to the context list, consuming available paths that match the given context filter."""
filtered_paths = set(p for p in available_paths if context_filter(p))
contexts.append((context_name, sorted(filtered_paths)))
available_paths -= filtered_paths
def filter_path(path_filter: str = None) -> t.Callable[[str], bool]:
def filter_path(path_filter: str = None) -> c.Callable[[str], bool]:
"""Return a function that filters out paths which are not a subdirectory of the given path."""
def context_filter(path_to_filter: str) -> bool:
"""Return true if the given path matches, otherwise return False."""
@ -200,12 +201,12 @@ class PylintTest(SanitySingleVersion):
def pylint(
args: SanityConfig,
context: str,
paths: t.List[str],
paths: list[str],
plugin_dir: str,
plugin_names: t.List[str],
plugin_names: list[str],
python: PythonConfig,
collection_detail: CollectionDetail,
) -> t.List[t.Dict[str, str]]:
) -> list[dict[str, str]]:
"""Run pylint using the config specified by the context on the specified paths."""
rcfile = os.path.join(SANITY_ROOT, 'pylint', 'config', context.split('/')[0] + '.cfg')

@ -49,7 +49,7 @@ class ShellcheckTest(SanityVersionNeutral):
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'AT1000'
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.sh']

@ -96,7 +96,7 @@ class ValidateModulesTest(SanitySingleVersion):
return None
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if self.get_plugin_type(target) is not None]

@ -57,7 +57,7 @@ class YamllintTest(SanitySingleVersion):
"""True if the test requires PyYAML to have libyaml support."""
return True
def filter_targets(self, targets: t.List[TestTarget]) -> t.List[TestTarget]:
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
yaml_targets = [target for target in targets if os.path.splitext(target.path)[1] in ('.yml', '.yaml')]
@ -86,7 +86,7 @@ class YamllintTest(SanitySingleVersion):
return SanitySuccess(self.name)
@staticmethod
def test_paths(args: SanityConfig, paths: t.List[str], python: PythonConfig) -> t.List[SanityMessage]:
def test_paths(args: SanityConfig, paths: list[str], python: PythonConfig) -> list[SanityMessage]:
"""Test the specified paths using the given Python and return the results."""
cmd = [
python.path,

@ -90,7 +90,7 @@ def command_shell(args: ShellConfig) -> None:
return
if isinstance(con, SshConnection) and args.raw:
cmd: t.List[str] = []
cmd: list[str] = []
elif isinstance(target_profile, PosixProfile):
cmd = []

@ -128,8 +128,8 @@ def command_units(args: UnitsConfig) -> None:
if not paths:
raise AllTargetsSkipped()
targets = t.cast(t.List[PosixConfig], args.targets)
target_versions: t.Dict[str, PosixConfig] = {target.python.version: target for target in targets}
targets = t.cast(list[PosixConfig], args.targets)
target_versions: dict[str, PosixConfig] = {target.python.version: target for target in targets}
skipped_versions = args.host_settings.skipped_python_versions
warn_versions = []
@ -221,7 +221,7 @@ def command_units(args: UnitsConfig) -> None:
display.warning("Skipping unit tests on Python %s because it could not be found." % version)
continue
target_profiles: t.Dict[str, PosixProfile] = {profile.config.python.version: profile for profile in host_state.targets(PosixProfile)}
target_profiles: dict[str, PosixProfile] = {profile.config.python.version: profile for profile in host_state.targets(PosixProfile)}
target_profile = target_profiles[version]
final_candidates = [(test_context, target_profile.python, paths, env) for test_context, paths, env in test_candidates]

@ -42,7 +42,7 @@ class PosixCompletionConfig(CompletionConfig, metaclass=abc.ABCMeta):
"""Base class for completion configuration of POSIX environments."""
@property
@abc.abstractmethod
def supported_pythons(self) -> t.List[str]:
def supported_pythons(self) -> list[str]:
"""Return a list of the supported Python versions."""
@abc.abstractmethod
@ -68,7 +68,7 @@ class PythonCompletionConfig(PosixCompletionConfig, metaclass=abc.ABCMeta):
python_dir: str = '/usr/bin'
@property
def supported_pythons(self) -> t.List[str]:
def supported_pythons(self) -> list[str]:
"""Return a list of the supported Python versions."""
versions = self.python.split(',') if self.python else []
versions = [version for version in versions if version in SUPPORTED_PYTHON_VERSIONS]
@ -196,7 +196,7 @@ class WindowsRemoteCompletionConfig(RemoteCompletionConfig):
TCompletionConfig = t.TypeVar('TCompletionConfig', bound=CompletionConfig)
def load_completion(name: str, completion_type: t.Type[TCompletionConfig]) -> t.Dict[str, TCompletionConfig]:
def load_completion(name: str, completion_type: t.Type[TCompletionConfig]) -> dict[str, TCompletionConfig]:
"""Load the named completion entries, returning them in dictionary form using the specified completion type."""
lines = read_lines_without_comments(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', '%s.txt' % name), remove_blank_lines=True)
@ -216,7 +216,7 @@ def load_completion(name: str, completion_type: t.Type[TCompletionConfig]) -> t.
return completion
def parse_completion_entry(value: str) -> t.Tuple[str, t.Dict[str, str]]:
def parse_completion_entry(value: str) -> tuple[str, dict[str, str]]:
"""Parse the given completion entry, returning the entry name and a dictionary of key/value settings."""
values = value.split()
@ -227,10 +227,10 @@ def parse_completion_entry(value: str) -> t.Tuple[str, t.Dict[str, str]]:
def filter_completion(
completion: t.Dict[str, TCompletionConfig],
completion: dict[str, TCompletionConfig],
controller_only: bool = False,
include_defaults: bool = False,
) -> t.Dict[str, TCompletionConfig]:
) -> dict[str, TCompletionConfig]:
"""Return the given completion dictionary, filtering out configs which do not support the controller if controller_only is specified."""
if controller_only:
completion = {name: config for name, config in completion.items() if isinstance(config, PosixCompletionConfig) and config.controller_supported}
@ -242,24 +242,24 @@ def filter_completion(
@cache
def docker_completion() -> t.Dict[str, DockerCompletionConfig]:
def docker_completion() -> dict[str, DockerCompletionConfig]:
"""Return docker completion entries."""
return load_completion('docker', DockerCompletionConfig)
@cache
def remote_completion() -> t.Dict[str, PosixRemoteCompletionConfig]:
def remote_completion() -> dict[str, PosixRemoteCompletionConfig]:
"""Return remote completion entries."""
return load_completion('remote', PosixRemoteCompletionConfig)
@cache
def windows_completion() -> t.Dict[str, WindowsRemoteCompletionConfig]:
def windows_completion() -> dict[str, WindowsRemoteCompletionConfig]:
"""Return windows completion entries."""
return load_completion('windows', WindowsRemoteCompletionConfig)
@cache
def network_completion() -> t.Dict[str, NetworkRemoteCompletionConfig]:
def network_completion() -> dict[str, NetworkRemoteCompletionConfig]:
"""Return network completion entries."""
return load_completion('network', NetworkRemoteCompletionConfig)

@ -109,9 +109,9 @@ class EnvironmentConfig(CommonConfig):
self.requirements: bool = args.requirements
self.delegate_args: t.List[str] = []
self.delegate_args: list[str] = []
def host_callback(files: t.List[t.Tuple[str, str]]) -> None:
def host_callback(files: list[tuple[str, str]]) -> None:
"""Add the host files to the payload file list."""
config = self
@ -138,7 +138,7 @@ class EnvironmentConfig(CommonConfig):
return self.host_settings.controller
@property
def targets(self) -> t.List[HostConfig]:
def targets(self) -> list[HostConfig]:
"""Host configuration for the targets."""
return self.host_settings.targets
@ -159,7 +159,7 @@ class EnvironmentConfig(CommonConfig):
return target
def only_targets(self, target_type: t.Type[THostConfig]) -> t.List[THostConfig]:
def only_targets(self, target_type: t.Type[THostConfig]) -> list[THostConfig]:
"""
Return a list of target host configurations.
Requires that there are one or more targets, all the specified type.
@ -169,7 +169,7 @@ class EnvironmentConfig(CommonConfig):
assert type_guard(self.targets, target_type)
return t.cast(t.List[THostConfig], self.targets)
return t.cast(list[THostConfig], self.targets)
@property
def target_type(self) -> t.Type[HostConfig]:
@ -198,9 +198,9 @@ class TestConfig(EnvironmentConfig):
self.coverage: bool = args.coverage
self.coverage_check: bool = args.coverage_check
self.include: t.List[str] = args.include or []
self.exclude: t.List[str] = args.exclude or []
self.require: t.List[str] = args.require or []
self.include: list[str] = args.include or []
self.exclude: list[str] = args.exclude or []
self.require: list[str] = args.require or []
self.changed: bool = args.changed
self.tracked: bool = args.tracked
@ -209,7 +209,7 @@ class TestConfig(EnvironmentConfig):
self.staged: bool = args.staged
self.unstaged: bool = args.unstaged
self.changed_from: str = args.changed_from
self.changed_path: t.List[str] = args.changed_path
self.changed_path: list[str] = args.changed_path
self.base_branch: str = args.base_branch
self.lint: bool = getattr(args, 'lint', False)
@ -222,7 +222,7 @@ class TestConfig(EnvironmentConfig):
if self.coverage_check:
self.coverage = True
def metadata_callback(files: t.List[t.Tuple[str, str]]) -> None:
def metadata_callback(files: list[tuple[str, str]]) -> None:
"""Add the metadata file to the payload file list."""
config = self
@ -237,7 +237,7 @@ class ShellConfig(EnvironmentConfig):
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'shell')
self.cmd: t.List[str] = args.cmd
self.cmd: list[str] = args.cmd
self.raw: bool = args.raw
self.check_layout = self.delegate # allow shell to be used without a valid layout as long as no delegation is required
self.interactive = sys.stdin.isatty() and not args.cmd # delegation should only be interactive when stdin is a TTY and no command was given
@ -250,8 +250,8 @@ class SanityConfig(TestConfig):
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'sanity')
self.test: t.List[str] = args.test
self.skip_test: t.List[str] = args.skip_test
self.test: list[str] = args.test
self.skip_test: list[str] = args.skip_test
self.list_tests: bool = args.list_tests
self.allow_disabled: bool = args.allow_disabled
self.enable_optional_errors: bool = args.enable_optional_errors
@ -261,7 +261,7 @@ class SanityConfig(TestConfig):
self.display_stderr = self.lint or self.list_tests
if self.keep_git:
def git_callback(files: t.List[t.Tuple[str, str]]) -> None:
def git_callback(files: list[tuple[str, str]]) -> None:
"""Add files from the content root .git directory to the payload file list."""
for dirpath, _dirnames, filenames in os.walk(os.path.join(data_context().content.root, '.git')):
paths = [os.path.join(dirpath, filename) for filename in filenames]

@ -45,14 +45,14 @@ class Connection(metaclass=abc.ABCMeta):
"""Base class for connecting to a host."""
@abc.abstractmethod
def run(self,
command: t.List[str],
command: list[str],
capture: bool,
interactive: bool = False,
data: t.Optional[str] = None,
stdin: t.Optional[t.IO[bytes]] = None,
stdout: t.Optional[t.IO[bytes]] = None,
output_stream: t.Optional[OutputStream] = None,
) -> t.Tuple[t.Optional[str], t.Optional[str]]:
) -> tuple[t.Optional[str], t.Optional[str]]:
"""Run the specified command and return the result."""
def extract_archive(self,
@ -93,14 +93,14 @@ class LocalConnection(Connection):
self.args = args
def run(self,
command: t.List[str],
command: list[str],
capture: bool,
interactive: bool = False,
data: t.Optional[str] = None,
stdin: t.Optional[t.IO[bytes]] = None,
stdout: t.Optional[t.IO[bytes]] = None,
output_stream: t.Optional[OutputStream] = None,
) -> t.Tuple[t.Optional[str], t.Optional[str]]:
) -> tuple[t.Optional[str], t.Optional[str]]:
"""Run the specified command and return the result."""
return run_command(
args=self.args,
@ -135,14 +135,14 @@ class SshConnection(Connection):
self.options.extend(['-o', f'{ssh_option}={ssh_options[ssh_option]}'])
def run(self,
command: t.List[str],
command: list[str],
capture: bool,
interactive: bool = False,
data: t.Optional[str] = None,
stdin: t.Optional[t.IO[bytes]] = None,
stdout: t.Optional[t.IO[bytes]] = None,
output_stream: t.Optional[OutputStream] = None,
) -> t.Tuple[t.Optional[str], t.Optional[str]]:
) -> tuple[t.Optional[str], t.Optional[str]]:
"""Run the specified command and return the result."""
options = list(self.options)
@ -217,14 +217,14 @@ class DockerConnection(Connection):
self.user: t.Optional[str] = user
def run(self,
command: t.List[str],
command: list[str],
capture: bool,
interactive: bool = False,
data: t.Optional[str] = None,
stdin: t.Optional[t.IO[bytes]] = None,
stdout: t.Optional[t.IO[bytes]] = None,
output_stream: t.Optional[OutputStream] = None,
) -> t.Tuple[t.Optional[str], t.Optional[str]]:
) -> tuple[t.Optional[str], t.Optional[str]]:
"""Run the specified command and return the result."""
options = []

@ -2,6 +2,7 @@
from __future__ import annotations
import atexit
import collections.abc as c
import contextlib
import enum
import json
@ -81,7 +82,7 @@ from .connections import (
)
# information about support containers provisioned by the current ansible-test instance
support_containers: t.Dict[str, ContainerDescriptor] = {}
support_containers: dict[str, ContainerDescriptor] = {}
support_containers_mutex = threading.Lock()
@ -104,14 +105,14 @@ def run_support_container(
context: str,
image: str,
name: str,
ports: t.List[int],
aliases: t.Optional[t.List[str]] = None,
ports: list[int],
aliases: t.Optional[list[str]] = None,
start: bool = True,
allow_existing: bool = False,
cleanup: t.Optional[CleanupMode] = None,
cmd: t.Optional[t.List[str]] = None,
env: t.Optional[t.Dict[str, str]] = None,
options: t.Optional[t.List[str]] = None,
cmd: t.Optional[list[str]] = None,
env: t.Optional[dict[str, str]] = None,
options: t.Optional[list[str]] = None,
publish_ports: bool = True,
) -> t.Optional[ContainerDescriptor]:
"""
@ -243,7 +244,7 @@ def get_container_database(args: EnvironmentConfig) -> ContainerDatabase:
class ContainerAccess:
"""Information needed for one test host to access a single container supporting tests."""
def __init__(self, host_ip: str, names: t.List[str], ports: t.Optional[t.List[int]], forwards: t.Optional[t.Dict[int, int]]) -> None:
def __init__(self, host_ip: str, names: list[str], ports: t.Optional[list[int]], forwards: t.Optional[dict[int, int]]) -> None:
# if forwards is set
# this is where forwards are sent (it is the host that provides an indirect connection to the containers on alternate ports)
# /etc/hosts uses 127.0.0.1 (since port redirection will be used)
@ -260,7 +261,7 @@ class ContainerAccess:
# port redirections to create through host_ip -- if not set, no port redirections will be used
self.forwards = forwards
def port_map(self) -> t.List[t.Tuple[int, int]]:
def port_map(self) -> list[tuple[int, int]]:
"""Return a port map for accessing this container."""
if self.forwards:
ports = list(self.forwards.items())
@ -270,7 +271,7 @@ class ContainerAccess:
return ports
@staticmethod
def from_dict(data: t.Dict[str, t.Any]) -> ContainerAccess:
def from_dict(data: dict[str, t.Any]) -> ContainerAccess:
"""Return a ContainerAccess instance from the given dict."""
forwards = data.get('forwards')
@ -284,9 +285,9 @@ class ContainerAccess:
forwards=forwards,
)
def to_dict(self) -> t.Dict[str, t.Any]:
def to_dict(self) -> dict[str, t.Any]:
"""Return a dict of the current instance."""
value: t.Dict[str, t.Any] = dict(
value: dict[str, t.Any] = dict(
host_ip=self.host_ip,
names=self.names,
)
@ -302,11 +303,11 @@ class ContainerAccess:
class ContainerDatabase:
"""Database of running containers used to support tests."""
def __init__(self, data: t.Dict[str, t.Dict[str, t.Dict[str, ContainerAccess]]]) -> None:
def __init__(self, data: dict[str, dict[str, dict[str, ContainerAccess]]]) -> None:
self.data = data
@staticmethod
def from_dict(data: t.Dict[str, t.Any]) -> ContainerDatabase:
def from_dict(data: dict[str, t.Any]) -> ContainerDatabase:
"""Return a ContainerDatabase instance from the given dict."""
return ContainerDatabase(dict((access_name,
dict((context_name,
@ -315,7 +316,7 @@ class ContainerDatabase:
for context_name, containers in contexts.items()))
for access_name, contexts in data.items()))
def to_dict(self) -> t.Dict[str, t.Any]:
def to_dict(self) -> dict[str, t.Any]:
"""Return a dict of the current instance."""
return dict((access_name,
dict((context_name,
@ -344,9 +345,9 @@ def root_ssh(ssh: SshConnection) -> SshConnectionDetail:
def create_container_database(args: EnvironmentConfig) -> ContainerDatabase:
"""Create and return a container database with information necessary for all test hosts to make use of relevant support containers."""
origin: t.Dict[str, t.Dict[str, ContainerAccess]] = {}
control: t.Dict[str, t.Dict[str, ContainerAccess]] = {}
managed: t.Dict[str, t.Dict[str, ContainerAccess]] = {}
origin: dict[str, dict[str, ContainerAccess]] = {}
control: dict[str, dict[str, ContainerAccess]] = {}
managed: dict[str, dict[str, ContainerAccess]] = {}
for name, container in support_containers.items():
if container.details.published_ports:
@ -461,7 +462,7 @@ class SupportContainerContext:
def support_container_context(
args: EnvironmentConfig,
ssh: t.Optional[SshConnectionDetail],
) -> t.Iterator[t.Optional[ContainerDatabase]]:
) -> c.Iterator[t.Optional[ContainerDatabase]]:
"""Create a context manager for integration tests that use support containers."""
if not isinstance(args, (IntegrationConfig, UnitsConfig, SanityConfig, ShellConfig)):
yield None # containers are only needed for commands that have targets (hosts or pythons)
@ -492,7 +493,7 @@ def create_support_container_context(
revised = ContainerDatabase(containers.data.copy())
source = revised.data.pop(HostType.origin, None)
container_map: t.Dict[t.Tuple[str, int], t.Tuple[str, str, int]] = {}
container_map: dict[tuple[str, int], tuple[str, str, int]] = {}
if host_type not in revised.data:
if not source:
@ -518,7 +519,7 @@ def create_support_container_context(
try:
port_forwards = process.collect_port_forwards()
contexts: t.Dict[str, t.Dict[str, ContainerAccess]] = {}
contexts: dict[str, dict[str, ContainerAccess]] = {}
for forward, forwarded_port in port_forwards.items():
access_host, access_port = forward
@ -548,13 +549,13 @@ class ContainerDescriptor:
context: str,
name: str,
container_id: str,
ports: t.List[int],
aliases: t.List[str],
ports: list[int],
aliases: list[str],
publish_ports: bool,
running: bool,
existing: bool,
cleanup: CleanupMode,
env: t.Optional[t.Dict[str, str]],
env: t.Optional[dict[str, str]],
) -> None:
self.image = image
self.context = context
@ -625,7 +626,7 @@ class SupportContainer:
def __init__(self,
container: DockerInspect,
container_ip: str,
published_ports: t.Dict[int, int],
published_ports: dict[int, int],
) -> None:
self.container = container
self.container_ip = container_ip
@ -637,7 +638,7 @@ def wait_for_file(args: EnvironmentConfig,
path: str,
sleep: int,
tries: int,
check: t.Optional[t.Callable[[str], bool]] = None,
check: t.Optional[c.Callable[[str], bool]] = None,
) -> str:
"""Wait for the specified file to become available in the requested container and return its contents."""
display.info('Waiting for container "%s" to provide file: %s' % (container_name, path))
@ -666,7 +667,7 @@ def cleanup_containers(args: EnvironmentConfig) -> None:
display.notice('Remember to run `docker rm -f %s` when finished testing.' % container.name)
def create_hosts_entries(context: t.Dict[str, ContainerAccess]) -> t.List[str]:
def create_hosts_entries(context: dict[str, ContainerAccess]) -> list[str]:
"""Return hosts entries for the specified context."""
entries = []
unique_id = uuid.uuid4()
@ -685,9 +686,9 @@ def create_hosts_entries(context: t.Dict[str, ContainerAccess]) -> t.List[str]:
def create_container_hooks(
args: IntegrationConfig,
control_connections: t.List[SshConnectionDetail],
managed_connections: t.Optional[t.List[SshConnectionDetail]],
) -> t.Tuple[t.Optional[t.Callable[[IntegrationTarget], None]], t.Optional[t.Callable[[IntegrationTarget], None]]]:
control_connections: list[SshConnectionDetail],
managed_connections: t.Optional[list[SshConnectionDetail]],
) -> tuple[t.Optional[c.Callable[[IntegrationTarget], None]], t.Optional[c.Callable[[IntegrationTarget], None]]]:
"""Return pre and post target callbacks for enabling and disabling container access for each test target."""
containers = get_container_database(args)
@ -706,8 +707,8 @@ def create_container_hooks(
else:
managed_type = 'posix'
control_state: t.Dict[str, t.Tuple[t.List[str], t.List[SshProcess]]] = {}
managed_state: t.Dict[str, t.Tuple[t.List[str], t.List[SshProcess]]] = {}
control_state: dict[str, tuple[list[str], list[SshProcess]]] = {}
managed_state: dict[str, tuple[list[str], list[SshProcess]]] = {}
def pre_target(target):
"""Configure hosts for SSH port forwarding required by the specified target."""
@ -724,9 +725,9 @@ def create_container_hooks(
return pre_target, post_target
def create_managed_contexts(control_contexts: t.Dict[str, t.Dict[str, ContainerAccess]]) -> t.Dict[str, t.Dict[str, ContainerAccess]]:
def create_managed_contexts(control_contexts: dict[str, dict[str, ContainerAccess]]) -> dict[str, dict[str, ContainerAccess]]:
"""Create managed contexts from the given control contexts."""
managed_contexts: t.Dict[str, t.Dict[str, ContainerAccess]] = {}
managed_contexts: dict[str, dict[str, ContainerAccess]] = {}
for context_name, control_context in control_contexts.items():
managed_context = managed_contexts[context_name] = {}
@ -739,12 +740,12 @@ def create_managed_contexts(control_contexts: t.Dict[str, t.Dict[str, ContainerA
def forward_ssh_ports(
args: IntegrationConfig,
ssh_connections: t.Optional[t.List[SshConnectionDetail]],
ssh_connections: t.Optional[list[SshConnectionDetail]],
playbook: str,
target_state: t.Dict[str, t.Tuple[t.List[str], t.List[SshProcess]]],
target_state: dict[str, tuple[list[str], list[SshProcess]]],
target: IntegrationTarget,
host_type: str,
contexts: t.Dict[str, t.Dict[str, ContainerAccess]],
contexts: dict[str, dict[str, ContainerAccess]],
) -> None:
"""Configure port forwarding using SSH and write hosts file entries."""
if ssh_connections is None:
@ -768,7 +769,7 @@ def forward_ssh_ports(
raise Exception('The %s host was not pre-configured for container access and SSH forwarding is not available.' % host_type)
redirects: t.List[t.Tuple[int, str, int]] = []
redirects: list[tuple[int, str, int]] = []
messages = []
for container_name, container in test_context.items():
@ -796,7 +797,7 @@ def forward_ssh_ports(
with named_temporary_file(args, 'ssh-inventory-', '.json', None, inventory) as inventory_path: # type: str
run_playbook(args, inventory_path, playbook, capture=False, variables=dict(hosts_entries=hosts_entries))
ssh_processes: t.List[SshProcess] = []
ssh_processes: list[SshProcess] = []
if redirects:
for ssh in ssh_connections:
@ -810,9 +811,9 @@ def forward_ssh_ports(
def cleanup_ssh_ports(
args: IntegrationConfig,
ssh_connections: t.List[SshConnectionDetail],
ssh_connections: list[SshConnectionDetail],
playbook: str,
target_state: t.Dict[str, t.Tuple[t.List[str], t.List[SshProcess]]],
target_state: dict[str, tuple[list[str], list[SshProcess]]],
target: IntegrationTarget,
host_type: str,
) -> None:

@ -54,7 +54,7 @@ from .data import (
class Resource(metaclass=abc.ABCMeta):
"""Base class for Ansible Core CI resources."""
@abc.abstractmethod
def as_tuple(self) -> t.Tuple[str, str, str, str]:
def as_tuple(self) -> tuple[str, str, str, str]:
"""Return the resource as a tuple of platform, version, architecture and provider."""
@abc.abstractmethod
@ -76,7 +76,7 @@ class VmResource(Resource):
provider: str
tag: str
def as_tuple(self) -> t.Tuple[str, str, str, str]:
def as_tuple(self) -> tuple[str, str, str, str]:
"""Return the resource as a tuple of platform, version, architecture and provider."""
return self.platform, self.version, self.architecture, self.provider
@ -95,7 +95,7 @@ class CloudResource(Resource):
"""Details needed to request cloud credentials from Ansible Core CI."""
platform: str
def as_tuple(self) -> t.Tuple[str, str, str, str]:
def as_tuple(self) -> tuple[str, str, str, str]:
"""Return the resource as a tuple of platform, version, architecture and provider."""
return self.platform, '', '', self.platform
@ -206,7 +206,7 @@ class AnsibleCoreCI:
raise self._create_http_error(response)
def get(self, tries: int = 3, sleep: int = 15, always_raise_on: t.Optional[t.List[int]] = None) -> t.Optional[InstanceConnection]:
def get(self, tries: int = 3, sleep: int = 15, always_raise_on: t.Optional[list[int]] = None) -> t.Optional[InstanceConnection]:
"""Get instance connection information."""
if not self.started:
display.info(f'Skipping invalid {self.label} instance.', verbosity=1)
@ -320,7 +320,7 @@ class AnsibleCoreCI:
return response.json()
def _start_endpoint(self, data: t.Dict[str, t.Any], headers: t.Dict[str, str]) -> HttpResponse:
def _start_endpoint(self, data: dict[str, t.Any], headers: dict[str, str]) -> HttpResponse:
tries = self.retries
sleep = 15
@ -368,7 +368,7 @@ class AnsibleCoreCI:
return self.load(config)
def load(self, config: t.Dict[str, str]) -> bool:
def load(self, config: dict[str, str]) -> bool:
"""Load the instance from the provided dictionary."""
self.instance_id = str(config['instance_id'])
self.endpoint = config['endpoint']
@ -387,7 +387,7 @@ class AnsibleCoreCI:
write_json_file(self.path, config, create_directories=True)
def save(self) -> t.Dict[str, str]:
def save(self) -> dict[str, str]:
"""Save instance details and return as a dictionary."""
return dict(
label=self.resource.get_label(),
@ -446,7 +446,7 @@ class SshKey:
key, pub = key_pair
key_dst, pub_dst = self.get_in_tree_key_pair_paths()
def ssh_key_callback(files: t.List[t.Tuple[str, str]]) -> None:
def ssh_key_callback(files: list[tuple[str, str]]) -> None:
"""
Add the SSH keys to the payload file list.
They are either outside the source tree or in the cache dir which is ignored by default.
@ -474,7 +474,7 @@ class SshKey:
return key
def get_in_tree_key_pair_paths(self) -> t.Optional[t.Tuple[str, str]]:
def get_in_tree_key_pair_paths(self) -> t.Optional[tuple[str, str]]:
"""Return the ansible-test SSH key pair paths from the content tree."""
temp_dir = ResultType.TMP.path
@ -483,7 +483,7 @@ class SshKey:
return key, pub
def get_source_key_pair_paths(self) -> t.Optional[t.Tuple[str, str]]:
def get_source_key_pair_paths(self) -> t.Optional[tuple[str, str]]:
"""Return the ansible-test SSH key pair paths for the current user."""
base_dir = os.path.expanduser('~/.ansible/test/')
@ -492,7 +492,7 @@ class SshKey:
return key, pub
def get_key_pair(self) -> t.Optional[t.Tuple[str, str]]:
def get_key_pair(self) -> t.Optional[tuple[str, str]]:
"""Return the ansible-test SSH key pair paths if present, otherwise return None."""
key, pub = self.get_in_tree_key_pair_paths()
@ -506,7 +506,7 @@ class SshKey:
return None
def generate_key_pair(self, args: EnvironmentConfig) -> t.Tuple[str, str]:
def generate_key_pair(self, args: EnvironmentConfig) -> tuple[str, str]:
"""Generate an SSH key pair for use by all ansible-test invocations for the current user."""
key, pub = self.get_source_key_pair_paths()
@ -536,7 +536,7 @@ class InstanceConnection:
port: t.Optional[int] = None,
username: t.Optional[str] = None,
password: t.Optional[str] = None,
response_json: t.Optional[t.Dict[str, t.Any]] = None,
response_json: t.Optional[dict[str, t.Any]] = None,
) -> None:
self.running = running
self.hostname = hostname

@ -141,13 +141,13 @@ def get_sqlite_schema_version(path: str) -> int:
def cover_python(
args: TestConfig,
python: PythonConfig,
cmd: t.List[str],
cmd: list[str],
target_name: str,
env: t.Dict[str, str],
env: dict[str, str],
capture: bool,
data: t.Optional[str] = None,
cwd: t.Optional[str] = None,
) -> t.Tuple[t.Optional[str], t.Optional[str]]:
) -> tuple[t.Optional[str], t.Optional[str]]:
"""Run a command while collecting Python code coverage."""
if args.coverage:
env.update(get_coverage_environment(args, target_name, python.version))
@ -175,7 +175,7 @@ def get_coverage_environment(
args: TestConfig,
target_name: str,
version: str,
) -> t.Dict[str, str]:
) -> dict[str, str]:
"""Return environment variables needed to collect code coverage."""
# unit tests, sanity tests and other special cases (localhost only)
# config is in a temporary directory

@ -1,6 +1,7 @@
"""Context information for the current invocation of ansible-test."""
from __future__ import annotations
import collections.abc as c
import dataclasses
import os
import typing as t
@ -60,9 +61,9 @@ class DataContext:
self.__layout_providers = layout_providers
self.__source_providers = source_providers
self.__ansible_source: t.Optional[t.Tuple[t.Tuple[str, str], ...]] = None
self.__ansible_source: t.Optional[tuple[tuple[str, str], ...]] = None
self.payload_callbacks: t.List[t.Callable[[t.List[t.Tuple[str, str]]], None]] = []
self.payload_callbacks: list[c.Callable[[list[tuple[str, str]]], None]] = []
if content_path:
content = self.__create_content_layout(layout_providers, source_providers, content_path, False)
@ -73,7 +74,7 @@ class DataContext:
self.content: ContentLayout = content
def create_collection_layouts(self) -> t.List[ContentLayout]:
def create_collection_layouts(self) -> list[ContentLayout]:
"""
Return a list of collection layouts, one for each collection in the same collection root as the current collection layout.
An empty list is returned if the current content layout is not a collection layout.
@ -112,8 +113,8 @@ class DataContext:
return collections
@staticmethod
def __create_content_layout(layout_providers: t.List[t.Type[LayoutProvider]],
source_providers: t.List[t.Type[SourceProvider]],
def __create_content_layout(layout_providers: list[t.Type[LayoutProvider]],
source_providers: list[t.Type[SourceProvider]],
root: str,
walk: bool,
) -> ContentLayout:
@ -165,14 +166,14 @@ class DataContext:
return tuple((os.path.join(source_provider.root, path), path) for path in source_provider.get_paths(source_provider.root))
@property
def ansible_source(self) -> t.Tuple[t.Tuple[str, str], ...]:
def ansible_source(self) -> tuple[tuple[str, str], ...]:
"""Return a tuple of Ansible source files with both absolute and relative paths."""
if not self.__ansible_source:
self.__ansible_source = self.__create_ansible_source()
return self.__ansible_source
def register_payload_callback(self, callback: t.Callable[[t.List[t.Tuple[str, str]]], None]) -> None:
def register_payload_callback(self, callback: c.Callable[[list[tuple[str, str]]], None]) -> None:
"""Register the given payload callback."""
self.payload_callbacks.append(callback)
@ -240,7 +241,7 @@ class PluginInfo:
"""Information about an Ansible plugin."""
plugin_type: str
name: str
paths: t.List[str]
paths: list[str]
@cache
@ -249,7 +250,7 @@ def content_plugins():
Analyze content.
The primary purpose of this analysis is to facilitate mapping of integration tests to the plugin(s) they are intended to test.
"""
plugins: t.Dict[str, t.Dict[str, PluginInfo]] = {}
plugins: dict[str, dict[str, PluginInfo]] = {}
for plugin_type, plugin_directory in data_context().content.plugin_paths.items():
plugin_paths = sorted(data_context().content.walk_files(plugin_directory))

@ -1,6 +1,7 @@
"""Delegate test execution to another environment."""
from __future__ import annotations
import collections.abc as c
import contextlib
import json
import os
@ -78,7 +79,7 @@ from .content_config import (
@contextlib.contextmanager
def delegation_context(args: EnvironmentConfig, host_state: HostState) -> t.Iterator[None]:
def delegation_context(args: EnvironmentConfig, host_state: HostState) -> c.Iterator[None]:
"""Context manager for serialized host state during delegation."""
make_dirs(ResultType.TMP.path)
@ -99,7 +100,7 @@ def delegation_context(args: EnvironmentConfig, host_state: HostState) -> t.Iter
args.host_path = None
def delegate(args: CommonConfig, host_state: HostState, exclude: t.List[str], require: t.List[str]) -> None:
def delegate(args: CommonConfig, host_state: HostState, exclude: list[str], require: list[str]) -> None:
"""Delegate execution of ansible-test to another environment."""
assert isinstance(args, EnvironmentConfig)
@ -121,7 +122,7 @@ def delegate(args: CommonConfig, host_state: HostState, exclude: t.List[str], re
delegate_command(args, host_state, exclude, require)
def delegate_command(args: EnvironmentConfig, host_state: HostState, exclude: t.List[str], require: t.List[str]) -> None:
def delegate_command(args: EnvironmentConfig, host_state: HostState, exclude: list[str], require: list[str]) -> None:
"""Delegate execution based on the provided host state."""
con = host_state.controller_profile.get_origin_controller_connection()
working_directory = host_state.controller_profile.get_working_directory()
@ -258,9 +259,9 @@ def generate_command(
python: PythonConfig,
ansible_bin_path: str,
content_root: str,
exclude: t.List[str],
require: t.List[str],
) -> t.List[str]:
exclude: list[str],
require: list[str],
) -> list[str]:
"""Generate the command necessary to delegate ansible-test."""
cmd = [os.path.join(ansible_bin_path, 'ansible-test')]
cmd = [python.path] + cmd
@ -307,10 +308,10 @@ def generate_command(
def filter_options(
args: EnvironmentConfig,
argv: t.List[str],
exclude: t.List[str],
require: t.List[str],
) -> t.Iterable[str]:
argv: list[str],
exclude: list[str],
require: list[str],
) -> c.Iterable[str]:
"""Return an iterable that filters out unwanted CLI options and injects new ones as requested."""
replace: list[tuple[str, int, t.Optional[t.Union[bool, str, list[str]]]]] = [
('--docker-no-pull', 0, False),

@ -11,7 +11,7 @@ from .util import (
)
def parse_diff(lines: list[str]) -> list[FileDiff]:
    """Parse the given diff lines and return a list of FileDiff objects representing the changes of each file."""
    return DiffParser(lines).files
@ -21,7 +21,7 @@ class FileDiff:
def __init__(self, old_path: str, new_path: str) -> None:
    """Initialize the diff for one file, tracking both the old and new sides."""
    self.old = DiffSide(old_path, new=False)
    self.new = DiffSide(new_path, new=True)
    self.headers: list[str] = []  # raw diff header lines for this file
    self.binary = False  # True once a binary-file marker is seen
def append_header(self, line: str) -> None:
@ -43,9 +43,9 @@ class DiffSide:
self.eof_newline = True
self.exists = True
self.lines: t.List[t.Tuple[int, str]] = []
self.lines_and_context: t.List[t.Tuple[int, str]] = []
self.ranges: t.List[t.Tuple[int, int]] = []
self.lines: list[tuple[int, str]] = []
self.lines_and_context: list[tuple[int, str]] = []
self.ranges: list[tuple[int, int]] = []
self._next_line_number = 0
self._lines_remaining = 0
@ -97,7 +97,7 @@ class DiffSide:
"""True if the diff is complete, otherwise False."""
return self._lines_remaining == 0
def format_lines(self, context: bool = True) -> t.List[str]:
def format_lines(self, context: bool = True) -> list[str]:
"""Format the diff and return a list of lines, optionally including context."""
if context:
lines = self.lines_and_context
@ -109,9 +109,9 @@ class DiffSide:
class DiffParser:
"""Parse diff lines."""
def __init__(self, lines: list[str]) -> None:
    """Initialize the parser state for the given raw diff lines."""
    self.lines = lines
    self.files: list[FileDiff] = []  # completed per-file diffs
    # The parser is a small state machine; `action` points at the handler for the current state.
    self.action = self.process_start
    self.line_number = 0

@ -287,8 +287,8 @@ def docker_cp_to(args: EnvironmentConfig, container_id: str, src: str, dst: str)
def docker_run(
args: EnvironmentConfig,
image: str,
options: t.Optional[t.List[str]],
cmd: t.Optional[t.List[str]] = None,
options: t.Optional[list[str]],
cmd: t.Optional[list[str]] = None,
create_only: bool = False,
) -> str:
"""Run a container using the given docker image."""
@ -327,7 +327,7 @@ def docker_run(
raise ApplicationError('Failed to run docker image "%s".' % image)
def docker_start(args: EnvironmentConfig, container_id: str, options: t.Optional[t.List[str]] = None) -> t.Tuple[t.Optional[str], t.Optional[str]]:
def docker_start(args: EnvironmentConfig, container_id: str, options: t.Optional[list[str]] = None) -> tuple[t.Optional[str], t.Optional[str]]:
"""
Start a docker container by name or ID
"""
@ -370,7 +370,7 @@ class ContainerNotFoundError(DockerError):
class DockerInspect:
"""The results of `docker inspect` for a single container."""
def __init__(self, args: EnvironmentConfig, inspection: dict[str, t.Any]) -> None:
    """Store the environment config and the parsed `docker inspect` output for one container."""
    self.args = args
    self.inspection = inspection
@ -382,29 +382,29 @@ class DockerInspect:
return self.inspection['Id']
@property
def network_settings(self) -> dict[str, t.Any]:
    """Return a dictionary of the container network settings."""
    return self.inspection['NetworkSettings']
@property
def state(self) -> dict[str, t.Any]:
    """Return a dictionary of the container state."""
    return self.inspection['State']
@property
def config(self) -> dict[str, t.Any]:
    """Return a dictionary of the container configuration."""
    return self.inspection['Config']
# nested properties
@property
def ports(self) -> dict[str, list[dict[str, str]]]:
    """Return a dictionary of ports the container has published, keyed by 'port/protocol'."""
    return self.network_settings['Ports']
@property
def networks(self) -> t.Optional[dict[str, dict[str, t.Any]]]:
    """Return a dictionary of the networks the container is attached to, or None if running under podman, which does not support networks."""
    # .get() because the 'Networks' key is absent under podman.
    return self.network_settings.get('Networks')
@ -414,7 +414,7 @@ class DockerInspect:
return self.state['Running']
@property
def env(self) -> list[str]:
    """Return a list of the environment variables used to create the container, as 'NAME=value' strings."""
    return self.config['Env']
@ -425,15 +425,15 @@ class DockerInspect:
# functions
def env_dict(self) -> dict[str, str]:
    """Return a dictionary of the environment variables used to create the container."""
    # Each entry is 'NAME=value'; split on the first '=' only so values containing '=' survive.
    return dict(e.split('=', 1) for e in self.env)
def get_tcp_port(self, port: int) -> t.Optional[list[dict[str, str]]]:
    """Return a list of the endpoints published by the container for the specified TCP port, or None if it is not published."""
    return self.ports.get('%d/tcp' % port)
def get_network_names(self) -> t.Optional[t.List[str]]:
def get_network_names(self) -> t.Optional[list[str]]:
"""Return a list of the network names the container is attached to."""
if self.networks is None:
return None
@ -511,15 +511,15 @@ def docker_image_exists(args: EnvironmentConfig, image: str) -> bool:
def docker_exec(
args: EnvironmentConfig,
container_id: str,
cmd: t.List[str],
cmd: list[str],
capture: bool,
options: t.Optional[t.List[str]] = None,
options: t.Optional[list[str]] = None,
stdin: t.Optional[t.IO[bytes]] = None,
stdout: t.Optional[t.IO[bytes]] = None,
interactive: bool = False,
output_stream: t.Optional[OutputStream] = None,
data: t.Optional[str] = None,
) -> t.Tuple[t.Optional[str], t.Optional[str]]:
) -> tuple[t.Optional[str], t.Optional[str]]:
"""Execute the given command in the specified container."""
if not options:
options = []
@ -531,13 +531,13 @@ def docker_exec(
output_stream=output_stream, data=data)
def docker_info(args: CommonConfig) -> dict[str, t.Any]:
    """Return a dictionary containing details from the `docker info` command."""
    # --format '{{json .}}' makes the CLI emit machine-readable JSON instead of the human-readable table.
    stdout, _dummy = docker_command(args, ['info', '--format', '{{json .}}'], capture=True, always=True)
    return json.loads(stdout)
def docker_version(args: CommonConfig) -> dict[str, t.Any]:
    """Return a dictionary containing details from the `docker version` command."""
    stdout, _dummy = docker_command(args, ['version', '--format', '{{json .}}'], capture=True, always=True)
    return json.loads(stdout)
@ -545,7 +545,7 @@ def docker_version(args: CommonConfig) -> t.Dict[str, t.Any]:
def docker_command(
args: CommonConfig,
cmd: t.List[str],
cmd: list[str],
capture: bool,
stdin: t.Optional[t.IO[bytes]] = None,
stdout: t.Optional[t.IO[bytes]] = None,
@ -553,7 +553,7 @@ def docker_command(
output_stream: t.Optional[OutputStream] = None,
always: bool = False,
data: t.Optional[str] = None,
) -> t.Tuple[t.Optional[str], t.Optional[str]]:
) -> tuple[t.Optional[str], t.Optional[str]]:
"""Run the specified docker command."""
env = docker_environment()
command = [require_docker().command]
@ -565,7 +565,7 @@ def docker_command(
output_stream=output_stream, data=data)
def docker_environment() -> t.Dict[str, str]:
def docker_environment() -> dict[str, str]:
"""Return a dictionary of docker related environment variables found in the current environment."""
env = common_environment()
env.update(dict((key, os.environ[key]) for key in os.environ if key.startswith('DOCKER_') or key.startswith('CONTAINER_')))

@ -33,7 +33,7 @@ from .provisioning import (
)
def get_changes_filter(args: TestConfig) -> t.List[str]:
def get_changes_filter(args: TestConfig) -> list[str]:
"""Return a list of targets which should be tested based on the changes made."""
paths = detect_changes(args)
@ -57,7 +57,7 @@ def get_changes_filter(args: TestConfig) -> t.List[str]:
return args.metadata.change_description.targets
def detect_changes(args: TestConfig) -> t.Optional[t.List[str]]:
def detect_changes(args: TestConfig) -> t.Optional[list[str]]:
"""Return a list of changed paths."""
if args.changed:
paths = get_ci_provider().detect_changes(args)
@ -93,7 +93,7 @@ class NoTestsForChanges(ApplicationWarning):
class Delegate(Exception):
"""Trigger command delegation."""
def __init__(self, host_state: HostState, exclude: t.List[str] = None, require: t.List[str] = None) -> None:
def __init__(self, host_state: HostState, exclude: list[str] = None, require: list[str] = None) -> None:
super().__init__()
self.host_state = host_state
@ -103,7 +103,7 @@ class Delegate(Exception):
class ListTargets(Exception):
    """List integration test targets instead of executing them."""
    def __init__(self, target_names: list[str]) -> None:
        """Capture the names of the targets to be listed."""
        super().__init__()
        self.target_names = target_names

@ -16,19 +16,19 @@ class Git:
self.git = 'git'
self.root = root
def get_diff(self, args: list[str], git_options: t.Optional[list[str]] = None) -> list[str]:
    """Run `git diff` and return the result as a list."""
    cmd = ['diff'] + args
    if git_options is None:
        # Disable path quoting so non-ASCII paths are emitted verbatim instead of escaped.
        git_options = ['-c', 'core.quotePath=']
    return self.run_git_split(git_options + cmd, '\n', str_errors='replace')
def get_diff_names(self, args: list[str]) -> list[str]:
    """Return a list of file names from the `git diff` command."""
    # -z uses NUL separators so file names containing newlines are handled correctly.
    cmd = ['diff', '--name-only', '--no-renames', '-z'] + args
    return self.run_git_split(cmd, '\0')
def get_submodule_paths(self) -> t.List[str]:
def get_submodule_paths(self) -> list[str]:
"""Return a list of submodule paths recursively."""
cmd = ['submodule', 'status', '--recursive']
output = self.run_git_split(cmd, '\n')
@ -45,12 +45,12 @@ class Git:
return submodule_paths
def get_file_names(self, args: list[str]) -> list[str]:
    """Return a list of file names from the `git ls-files` command."""
    # -z uses NUL separators so file names containing newlines are handled correctly.
    cmd = ['ls-files', '-z'] + args
    return self.run_git_split(cmd, '\0')
def get_branches(self) -> list[str]:
    """Return the list of branches."""
    cmd = ['for-each-ref', 'refs/heads/', '--format', '%(refname:strip=2)']
    return self.run_git_split(cmd)
@ -60,7 +60,7 @@ class Git:
cmd = ['symbolic-ref', '--short', 'HEAD']
return self.run_git(cmd).strip()
def get_rev_list(self, commits: t.Optional[t.List[str]] = None, max_count: t.Optional[int] = None) -> t.List[str]:
def get_rev_list(self, commits: t.Optional[list[str]] = None, max_count: t.Optional[int] = None) -> list[str]:
"""Return the list of results from the `git rev-list` command."""
cmd = ['rev-list']
@ -88,7 +88,7 @@ class Git:
except SubprocessError:
return False
def run_git_split(self, cmd: t.List[str], separator: t.Optional[str] = None, str_errors: str = 'strict') -> t.List[str]:
def run_git_split(self, cmd: list[str], separator: t.Optional[str] = None, str_errors: str = 'strict') -> list[str]:
"""Run the given `git` command and return the results as a list."""
output = self.run_git(cmd, str_errors=str_errors).strip(separator)
@ -97,6 +97,6 @@ class Git:
return output.split(separator)
def run_git(self, cmd: list[str], str_errors: str = 'strict') -> str:
    """Run the given `git` command and return the results as a string."""
    return raw_command([self.git] + cmd, cwd=self.root, capture=True, str_errors=str_errors)[0]

@ -50,7 +50,7 @@ class OriginCompletionConfig(PosixCompletionConfig):
super().__init__(name='origin')
@property
def supported_pythons(self) -> t.List[str]:
def supported_pythons(self) -> list[str]:
"""Return a list of the supported Python versions."""
current_version = version_to_str(sys.version_info[:2])
versions = [version for version in SUPPORTED_PYTHON_VERSIONS if version == current_version] + \
@ -106,7 +106,7 @@ class PythonConfig(metaclass=abc.ABCMeta):
path: t.Optional[str] = None
@property
def tuple(self) -> tuple[int, ...]:
    """Return the Python version as a tuple."""
    return str_to_version(self.version)
@ -198,7 +198,7 @@ class PosixConfig(HostConfig, metaclass=abc.ABCMeta):
class ControllerHostConfig(PosixConfig, metaclass=abc.ABCMeta):
"""Base class for host configurations which support the controller."""
@abc.abstractmethod
def get_default_targets(self, context: HostContext) -> list[ControllerConfig]:
    """Return the default targets for this host config."""
@ -291,7 +291,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig):
placeholder=True,
)
def get_default_targets(self, context: HostContext) -> t.List[ControllerConfig]:
def get_default_targets(self, context: HostContext) -> list[ControllerConfig]:
"""Return the default targets for this host config."""
if self.name in filter_completion(docker_completion()):
defaults = self.get_defaults(context)
@ -342,7 +342,7 @@ class PosixRemoteConfig(RemoteConfig, ControllerHostConfig, PosixConfig):
placeholder=True,
)
def get_default_targets(self, context: HostContext) -> t.List[ControllerConfig]:
def get_default_targets(self, context: HostContext) -> list[ControllerConfig]:
"""Return the default targets for this host config."""
if self.name in filter_completion(remote_completion()):
defaults = self.get_defaults(context)
@ -424,7 +424,7 @@ class OriginConfig(ControllerHostConfig, PosixConfig):
"""Return the default settings."""
return OriginCompletionConfig()
def get_default_targets(self, context: HostContext) -> list[ControllerConfig]:
    """Return the default targets for this host config."""
    # The origin host can target every locally available Python interpreter.
    return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in get_available_python_versions().items()]
@ -486,9 +486,9 @@ class FallbackDetail:
class HostSettings:
    """Host settings for the controller and targets."""
    controller: ControllerHostConfig
    targets: list[HostConfig]
    skipped_python_versions: list[str]
    filtered_args: list[str]
    controller_fallback: t.Optional[FallbackDetail]
def serialize(self, path: str) -> None:

@ -112,11 +112,11 @@ TRemoteConfig = t.TypeVar('TRemoteConfig', bound=RemoteConfig)
@dataclasses.dataclass(frozen=True)
class Inventory:
    """Simple representation of an Ansible inventory."""
    # group name -> host name -> variable name -> value
    host_groups: dict[str, dict[str, dict[str, t.Union[str, int]]]]
    # optional group name -> list of member host names
    extra_groups: t.Optional[dict[str, list[str]]] = None
@staticmethod
def create_single_host(name: str, variables: dict[str, t.Union[str, int]]) -> Inventory:
    """Return an inventory instance created from the given hostname and variables."""
    return Inventory(host_groups=dict(all={name: variables}))
@ -161,16 +161,16 @@ class HostProfile(t.Generic[THostConfig], metaclass=abc.ABCMeta):
*,
args: EnvironmentConfig,
config: THostConfig,
targets: t.Optional[t.List[HostConfig]],
targets: t.Optional[list[HostConfig]],
) -> None:
self.args = args
self.config = config
self.controller = bool(targets)
self.targets = targets or []
self.state: t.Dict[str, t.Any] = {}
self.state: dict[str, t.Any] = {}
"""State that must be persisted across delegation."""
self.cache: t.Dict[str, t.Any] = {}
self.cache: dict[str, t.Any] = {}
"""Cache that must not be persisted across delegation."""
def provision(self) -> None:
@ -233,19 +233,19 @@ class ControllerHostProfile(PosixProfile[TControllerHostConfig], metaclass=abc.A
class SshTargetHostProfile(HostProfile[THostConfig], metaclass=abc.ABCMeta):
"""Base class for profiles offering SSH connectivity."""
@abc.abstractmethod
def get_controller_target_connections(self) -> list[SshConnection]:
    """Return SSH connection(s) for accessing the host as a target from the controller."""
class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta):
"""Base class for remote instance profiles."""
@property
def core_ci_state(self) -> t.Optional[dict[str, str]]:
    """The saved Ansible Core CI state."""
    return self.state.get('core_ci')
@core_ci_state.setter
def core_ci_state(self, value: dict[str, str]) -> None:
    """The saved Ansible Core CI state."""
    self.state['core_ci'] = value
@ -315,7 +315,7 @@ class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta):
class ControllerProfile(SshTargetHostProfile[ControllerConfig], PosixProfile[ControllerConfig]):
"""Host profile for the controller as a target."""
def get_controller_target_connections(self) -> t.List[SshConnection]:
def get_controller_target_connections(self) -> list[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
settings = SshConnectionDetail(
name='localhost',
@ -396,7 +396,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
else:
return
def get_controller_target_connections(self) -> t.List[SshConnection]:
def get_controller_target_connections(self) -> list[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
containers = get_container_database(self.args)
access = containers.data[HostType.control]['__test_hosts__'][self.container_name]
@ -423,7 +423,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
"""Return the working directory for the host."""
return '/root'
def get_docker_run_options(self) -> t.List[str]:
def get_docker_run_options(self) -> list[str]:
"""Return a list of options needed to run the container."""
options = [
'--volume', '/sys/fs/cgroup:/sys/fs/cgroup:ro',
@ -457,12 +457,12 @@ class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
"""Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
self.wait_until_ready()
def get_inventory_variables(self) -> t.Dict[str, t.Optional[t.Union[str, int]]]:
def get_inventory_variables(self) -> dict[str, t.Optional[t.Union[str, int]]]:
"""Return inventory variables for accessing this host."""
core_ci = self.wait_for_instance()
connection = core_ci.connection
variables: t.Dict[str, t.Optional[t.Union[str, int]]] = dict(
variables: dict[str, t.Optional[t.Union[str, int]]] = dict(
ansible_connection=self.config.connection,
ansible_pipelining='yes',
ansible_host=connection.hostname,
@ -501,7 +501,7 @@ class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.')
def get_controller_target_connections(self) -> t.List[SshConnection]:
def get_controller_target_connections(self) -> list[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
core_ci = self.wait_for_instance()
@ -596,7 +596,7 @@ class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile
raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.')
def get_controller_target_connections(self) -> list[SshConnection]:
    """Return SSH connection(s) for accessing the host as a target from the controller."""
    return [self.get_ssh_connection()]
@ -635,7 +635,7 @@ class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile
class PosixSshProfile(SshTargetHostProfile[PosixSshConfig], PosixProfile[PosixSshConfig]):
"""Host profile for a POSIX SSH instance."""
def get_controller_target_connections(self) -> t.List[SshConnection]:
def get_controller_target_connections(self) -> list[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
settings = SshConnectionDetail(
name='target',
@ -651,7 +651,7 @@ class PosixSshProfile(SshTargetHostProfile[PosixSshConfig], PosixProfile[PosixSs
class WindowsInventoryProfile(SshTargetHostProfile[WindowsInventoryConfig]):
"""Host profile for a Windows inventory."""
def get_controller_target_connections(self) -> t.List[SshConnection]:
def get_controller_target_connections(self) -> list[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
inventory = parse_inventory(self.args, self.config.path)
hosts = get_hosts(inventory, 'windows')
@ -679,12 +679,12 @@ class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
"""Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
self.wait_until_ready()
def get_inventory_variables(self) -> t.Dict[str, t.Optional[t.Union[str, int]]]:
def get_inventory_variables(self) -> dict[str, t.Optional[t.Union[str, int]]]:
"""Return inventory variables for accessing this host."""
core_ci = self.wait_for_instance()
connection = core_ci.connection
variables: t.Dict[str, t.Optional[t.Union[str, int]]] = dict(
variables: dict[str, t.Optional[t.Union[str, int]]] = dict(
ansible_connection='winrm',
ansible_pipelining='yes',
ansible_winrm_server_cert_validation='ignore',
@ -732,7 +732,7 @@ class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.')
def get_controller_target_connections(self) -> t.List[SshConnection]:
def get_controller_target_connections(self) -> list[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
core_ci = self.wait_for_instance()
@ -749,7 +749,7 @@ class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
@cache
def get_config_profile_type_map() -> dict[t.Type[HostConfig], t.Type[HostProfile]]:
    """Create and return a mapping of HostConfig types to HostProfile types."""
    return get_type_map(HostProfile, HostConfig)

@ -39,11 +39,11 @@ class HttpClient:
"""Perform an HTTP DELETE and return the response."""
return self.request('DELETE', url)
def put(self, url: str, data: t.Optional[str] = None, headers: t.Optional[dict[str, str]] = None) -> HttpResponse:
    """Perform an HTTP PUT and return the response."""
    return self.request('PUT', url, data, headers)
def request(self, method: str, url: str, data: t.Optional[str] = None, headers: t.Optional[t.Dict[str, str]] = None) -> HttpResponse:
def request(self, method: str, url: str, data: t.Optional[str] = None, headers: t.Optional[dict[str, str]] = None) -> HttpResponse:
"""Perform an HTTP request and return the response."""
cmd = ['curl', '-s', '-S', '-i', '-X', method]

@ -43,7 +43,7 @@ def create_controller_inventory(args: EnvironmentConfig, path: str, controller_h
inventory.write(args, path)
def create_windows_inventory(args: EnvironmentConfig, path: str, target_hosts: t.List[HostProfile]) -> None:
def create_windows_inventory(args: EnvironmentConfig, path: str, target_hosts: list[HostProfile]) -> None:
"""Create and return inventory for use in target Windows integration tests."""
first = target_hosts[0]
@ -58,7 +58,7 @@ def create_windows_inventory(args: EnvironmentConfig, path: str, target_hosts: t
return
target_hosts = t.cast(t.List[WindowsRemoteProfile], target_hosts)
target_hosts = t.cast(list[WindowsRemoteProfile], target_hosts)
hosts = [(target_host, target_host.wait_for_instance().connection) for target_host in target_hosts]
windows_hosts = {sanitize_host_name(host.config.name): host.get_inventory_variables() for host, connection in hosts}
@ -78,7 +78,7 @@ def create_windows_inventory(args: EnvironmentConfig, path: str, target_hosts: t
inventory.write(args, path)
def create_network_inventory(args: EnvironmentConfig, path: str, target_hosts: t.List[HostProfile]) -> None:
def create_network_inventory(args: EnvironmentConfig, path: str, target_hosts: list[HostProfile]) -> None:
"""Create and return inventory for use in target network integration tests."""
first = target_hosts[0]
@ -93,8 +93,8 @@ def create_network_inventory(args: EnvironmentConfig, path: str, target_hosts: t
return
target_hosts = t.cast(t.List[NetworkRemoteProfile], target_hosts)
host_groups: t.Dict[str, t.Dict[str, t.Dict[str, t.Union[str, int]]]] = {target_host.config.platform: {} for target_host in target_hosts}
target_hosts = t.cast(list[NetworkRemoteProfile], target_hosts)
host_groups: dict[str, dict[str, dict[str, t.Union[str, int]]]] = {target_host.config.platform: {} for target_host in target_hosts}
for target_host in target_hosts:
host_groups[target_host.config.platform][sanitize_host_name(target_host.config.name)] = target_host.get_inventory_variables()
@ -112,9 +112,9 @@ def create_network_inventory(args: EnvironmentConfig, path: str, target_hosts: t
inventory.write(args, path)
def create_posix_inventory(args: EnvironmentConfig, path: str, target_hosts: t.List[HostProfile], needs_ssh: bool = False) -> None:
def create_posix_inventory(args: EnvironmentConfig, path: str, target_hosts: list[HostProfile], needs_ssh: bool = False) -> None:
"""Create and return inventory for use in POSIX integration tests."""
target_hosts = t.cast(t.List[SshTargetHostProfile], target_hosts)
target_hosts = t.cast(list[SshTargetHostProfile], target_hosts)
if len(target_hosts) != 1:
raise Exception()
@ -141,7 +141,7 @@ def create_posix_inventory(args: EnvironmentConfig, path: str, target_hosts: t.L
ssh = connections[0]
testhost: t.Dict[str, t.Optional[t.Union[str, int]]] = dict(
testhost: dict[str, t.Optional[t.Union[str, int]]] = dict(
ansible_connection='ssh',
ansible_pipelining='yes',
ansible_python_interpreter=ssh.settings.python_interpreter,

@ -25,7 +25,7 @@ class LocaleError(SystemExit):
super().__init__(f'ERROR: {message}')
def configure_locale() -> t.Tuple[str, t.Optional[str]]:
def configure_locale() -> tuple[str, t.Optional[str]]:
"""Configure the locale, returning the selected locale and an optional warning."""
if (fs_encoding := sys.getfilesystemencoding()).lower() != 'utf-8':

@ -21,15 +21,15 @@ class Metadata:
"""Metadata object for passing data to delegated tests."""
def __init__(self):
    """Initialize metadata."""
    # changed file path -> tuple of (start, end) changed line ranges
    self.changes: dict[str, tuple[tuple[int, int], ...]] = {}
    self.cloud_config: t.Optional[dict[str, dict[str, t.Union[int, str, bool]]]] = None
    self.change_description: t.Optional[ChangeDescription] = None
    self.ci_provider: t.Optional[str] = None
def populate_changes(self, diff: t.Optional[t.List[str]]) -> None:
def populate_changes(self, diff: t.Optional[list[str]]) -> None:
"""Populate the changeset using the given diff."""
patches = parse_diff(diff)
patches: t.List[FileDiff] = sorted(patches, key=lambda k: k.new.path)
patches: list[FileDiff] = sorted(patches, key=lambda k: k.new.path)
self.changes = dict((patch.new.path, tuple(patch.new.ranges)) for patch in patches)
@ -45,7 +45,7 @@ class Metadata:
# failed tests involving deleted files should be using line 0 since there is no content remaining
self.changes[path] = ((0, 0),)
def to_dict(self) -> t.Dict[str, t.Any]:
def to_dict(self) -> dict[str, t.Any]:
"""Return a dictionary representation of the metadata."""
return dict(
changes=self.changes,
@ -69,7 +69,7 @@ class Metadata:
return Metadata.from_dict(data)
@staticmethod
def from_dict(data: t.Dict[str, t.Any]) -> Metadata:
def from_dict(data: dict[str, t.Any]) -> Metadata:
"""Return metadata loaded from the specified dictionary."""
metadata = Metadata()
metadata.changes = data['changes']
@ -84,23 +84,23 @@ class ChangeDescription:
"""Description of changes."""
def __init__(self):
    """Initialize an empty change description."""
    self.command: str = ''
    self.changed_paths: list[str] = []
    self.deleted_paths: list[str] = []
    # command name -> list of target names
    self.regular_command_targets: dict[str, list[str]] = {}
    self.focused_command_targets: dict[str, list[str]] = {}
    self.no_integration_paths: list[str] = []
@property
def targets(self) -> t.Optional[list[str]]:
    """Optional list of target names."""
    return self.regular_command_targets.get(self.command)
@property
def focused_targets(self) -> t.Optional[list[str]]:
    """Optional list of focused target names."""
    return self.focused_command_targets.get(self.command)
def to_dict(self) -> t.Dict[str, t.Any]:
def to_dict(self) -> dict[str, t.Any]:
"""Return a dictionary representation of the change description."""
return dict(
command=self.command,
@ -112,7 +112,7 @@ class ChangeDescription:
)
@staticmethod
def from_dict(data: t.Dict[str, t.Any]) -> ChangeDescription:
def from_dict(data: dict[str, t.Any]) -> ChangeDescription:
"""Return a change description loaded from the given dictionary."""
changes = ChangeDescription()
changes.command = data['command']

@ -69,8 +69,8 @@ def create_payload(args: CommonConfig, dst_path: str) -> None:
collection_layouts = data_context().create_collection_layouts()
content_files: t.List[t.Tuple[str, str]] = []
extra_files: t.List[t.Tuple[str, str]] = []
content_files: list[tuple[str, str]] = []
extra_files: list[tuple[str, str]] = []
for layout in collection_layouts:
if layout == data_context().content:
@ -117,7 +117,7 @@ def create_payload(args: CommonConfig, dst_path: str) -> None:
display.info('Created a %d byte payload archive containing %d files in %d seconds.' % (payload_size_bytes, len(files), duration), verbosity=1)
def create_temporary_bin_files(args: CommonConfig) -> t.Tuple[t.Tuple[str, str], ...]:
def create_temporary_bin_files(args: CommonConfig) -> tuple[tuple[str, str], ...]:
"""Create a temporary ansible bin directory populated using the symlink map."""
if args.explain:
temp_path = '/tmp/ansible-tmp-bin'

@ -11,13 +11,13 @@ from ..util import (
)
def get_path_provider_classes(provider_type: t.Type[TPathProvider]) -> t.List[t.Type[TPathProvider]]:
def get_path_provider_classes(provider_type: t.Type[TPathProvider]) -> list[t.Type[TPathProvider]]:
"""Return a list of path provider classes of the given type."""
return sorted(get_subclasses(provider_type), key=lambda c: (c.priority, c.__name__))
return sorted(get_subclasses(provider_type), key=lambda subclass: (subclass.priority, subclass.__name__))
def find_path_provider(provider_type: t.Type[TPathProvider],
provider_classes: t.List[t.Type[TPathProvider]],
provider_classes: list[t.Type[TPathProvider]],
path: str,
walk: bool,
) -> TPathProvider:

@ -19,7 +19,7 @@ class Layout:
"""Description of content locations and helper methods to access content."""
def __init__(self,
root: str,
paths: t.List[str],
paths: list[str],
) -> None:
self.root = root
@ -28,14 +28,14 @@ class Layout:
self.__paths_tree = paths_to_tree(self.__paths)
self.__files_tree = paths_to_tree(self.__files)
def all_files(self, include_symlinked_directories: bool = False) -> t.List[str]:
def all_files(self, include_symlinked_directories: bool = False) -> list[str]:
"""Return a list of all file paths."""
if include_symlinked_directories:
return self.__paths
return self.__files
def walk_files(self, directory: str, include_symlinked_directories: bool = False) -> t.List[str]:
def walk_files(self, directory: str, include_symlinked_directories: bool = False) -> list[str]:
"""Return a list of file paths found recursively under the given directory."""
if include_symlinked_directories:
tree = self.__paths_tree
@ -59,13 +59,13 @@ class Layout:
return files
def get_dirs(self, directory: str) -> t.List[str]:
def get_dirs(self, directory: str) -> list[str]:
        """Return a list of directory paths found directly under the given directory."""
parts = directory.rstrip(os.path.sep).split(os.path.sep)
item = get_tree_item(self.__files_tree, parts)
return [os.path.join(directory, key) for key in item[0].keys()] if item else []
def get_files(self, directory: str) -> t.List[str]:
def get_files(self, directory: str) -> list[str]:
"""Return a list of file paths found directly under the given directory."""
parts = directory.rstrip(os.path.sep).split(os.path.sep)
item = get_tree_item(self.__files_tree, parts)
@ -76,8 +76,8 @@ class ContentLayout(Layout):
"""Information about the current Ansible content being tested."""
def __init__(self,
root: str,
paths: t.List[str],
plugin_paths: t.Dict[str, str],
paths: list[str],
plugin_paths: dict[str, str],
collection: t.Optional[CollectionDetail],
test_path: str,
results_path: str,
@ -151,9 +151,9 @@ class ContentLayout(Layout):
class LayoutMessages:
"""Messages generated during layout creation that should be deferred for later display."""
def __init__(self):
self.info: t.List[str] = []
self.warning: t.List[str] = []
self.error: t.List[str] = []
self.info: list[str] = []
self.warning: list[str] = []
self.error: list[str] = []
class CollectionDetail:
@ -200,13 +200,13 @@ class LayoutProvider(PathProvider):
)
@abc.abstractmethod
def create(self, root: str, paths: t.List[str]) -> ContentLayout:
def create(self, root: str, paths: list[str]) -> ContentLayout:
"""Create a layout using the given root and paths."""
def paths_to_tree(paths: t.List[str]) -> t.Tuple[t.Dict[str, t.Any], t.List[str]]:
def paths_to_tree(paths: list[str]) -> tuple[dict[str, t.Any], list[str]]:
"""Return a filesystem tree from the given list of paths."""
tree: t.Tuple[t.Dict[str, t.Any], t.List[str]] = {}, []
tree: tuple[dict[str, t.Any], list[str]] = {}, []
for path in paths:
parts = path.split(os.path.sep)
@ -223,7 +223,7 @@ def paths_to_tree(paths: t.List[str]) -> t.Tuple[t.Dict[str, t.Any], t.List[str]
return tree
def get_tree_item(tree: t.Tuple[t.Dict[str, t.Any], t.List[str]], parts: t.List[str]) -> t.Optional[t.Tuple[t.Dict[str, t.Any], t.List[str]]]:
def get_tree_item(tree: tuple[dict[str, t.Any], list[str]], parts: list[str]) -> t.Optional[tuple[dict[str, t.Any], list[str]]]:
"""Return the portion of the tree found under the path given by parts, or None if it does not exist."""
root = tree

@ -2,7 +2,6 @@
from __future__ import annotations
import os
import typing as t
from . import (
ContentLayout,
@ -17,7 +16,7 @@ class AnsibleLayout(LayoutProvider):
"""Return True if the given path is a content root for this provider."""
return os.path.exists(os.path.join(path, 'setup.py')) and os.path.exists(os.path.join(path, 'bin/ansible-test'))
def create(self, root: str, paths: t.List[str]) -> ContentLayout:
def create(self, root: str, paths: list[str]) -> ContentLayout:
"""Create a Layout using the given root and paths."""
plugin_paths = dict((p, os.path.join('lib/ansible/plugins', p)) for p in self.PLUGIN_TYPES)

@ -2,7 +2,6 @@
from __future__ import annotations
import os
import typing as t
from . import (
ContentLayout,
@ -26,7 +25,7 @@ class CollectionLayout(LayoutProvider):
return False
def create(self, root: str, paths: t.List[str]) -> ContentLayout:
def create(self, root: str, paths: list[str]) -> ContentLayout:
"""Create a Layout using the given root and paths."""
plugin_paths = dict((p, os.path.join('plugins', p)) for p in self.PLUGIN_TYPES)
@ -77,7 +76,7 @@ class CollectionLayout(LayoutProvider):
)
@staticmethod
def __check_test_path(paths: t.List[str], messages: LayoutMessages) -> None:
def __check_test_path(paths: list[str], messages: LayoutMessages) -> None:
modern_test_path = 'tests/'
modern_test_path_found = any(path.startswith(modern_test_path) for path in paths)
legacy_test_path = 'test/'
@ -89,7 +88,7 @@ class CollectionLayout(LayoutProvider):
messages.warning.append('Ignoring tests in "%s" that should be in "%s".' % (legacy_test_path, modern_test_path))
@staticmethod
def __check_integration_path(paths: t.List[str], messages: LayoutMessages) -> str:
def __check_integration_path(paths: list[str], messages: LayoutMessages) -> str:
modern_integration_path = 'roles/test/'
modern_integration_path_found = any(path.startswith(modern_integration_path) for path in paths)
legacy_integration_path = 'tests/integration/targets/'
@ -111,7 +110,7 @@ class CollectionLayout(LayoutProvider):
return integration_targets_path
@staticmethod
def __check_unit_path(paths: t.List[str], messages: LayoutMessages) -> None:
def __check_unit_path(paths: list[str], messages: LayoutMessages) -> None:
modern_unit_path = 'tests/unit/'
modern_unit_path_found = any(path.startswith(modern_unit_path) for path in paths)
legacy_unit_path = 'tests/units/' # test/units/ will be covered by the warnings for test/ vs tests/

@ -1,8 +1,6 @@
"""Layout provider for an unsupported directory layout."""
from __future__ import annotations
import typing as t
from . import (
ContentLayout,
LayoutProvider,
@ -18,7 +16,7 @@ class UnsupportedLayout(LayoutProvider):
"""Return True if the given path is a content root for this provider."""
return False
def create(self, root: str, paths: t.List[str]) -> ContentLayout:
def create(self, root: str, paths: list[str]) -> ContentLayout:
"""Create a Layout using the given root and paths."""
plugin_paths = dict((p, p) for p in self.PLUGIN_TYPES)

@ -2,7 +2,6 @@
from __future__ import annotations
import abc
import typing as t
from .. import (
PathProvider,
@ -12,5 +11,5 @@ from .. import (
class SourceProvider(PathProvider):
"""Base class for source providers."""
@abc.abstractmethod
def get_paths(self, path: str) -> t.List[str]:
def get_paths(self, path: str) -> list[str]:
"""Return the list of available content paths under the given path."""

@ -2,7 +2,6 @@
from __future__ import annotations
import os
import typing as t
from ...git import (
Git,
@ -28,7 +27,7 @@ class GitSource(SourceProvider):
"""Return True if the given path is a content root for this provider."""
return os.path.exists(os.path.join(path, '.git'))
def get_paths(self, path: str) -> t.List[str]:
def get_paths(self, path: str) -> list[str]:
"""Return the list of available content paths under the given path."""
paths = self.__get_paths(path)
@ -57,7 +56,7 @@ class GitSource(SourceProvider):
return paths
@staticmethod
def __get_paths(path: str) -> t.List[str]:
def __get_paths(path: str) -> list[str]:
"""Return the list of available content paths under the given path."""
git = Git(path)
paths = git.get_file_names(['--cached', '--others', '--exclude-standard'])

@ -2,7 +2,6 @@
from __future__ import annotations
import os
import typing as t
from . import (
SourceProvider,
@ -18,7 +17,7 @@ class InstalledSource(SourceProvider):
"""Return True if the given path is a content root for this provider."""
return False
def get_paths(self, path: str) -> t.List[str]:
def get_paths(self, path: str) -> list[str]:
"""Return the list of available content paths under the given path."""
paths = []

@ -1,8 +1,6 @@
"""Source provider to use when the layout is unsupported."""
from __future__ import annotations
import typing as t
from . import (
SourceProvider,
)
@ -17,6 +15,6 @@ class UnsupportedSource(SourceProvider):
"""Return True if the given path is a content root for this provider."""
return False
def get_paths(self, path: str) -> t.List[str]:
def get_paths(self, path: str) -> list[str]:
"""Return the list of available content paths under the given path."""
return []

@ -2,7 +2,6 @@
from __future__ import annotations
import os
import typing as t
from ...constants import (
TIMEOUT_PATH,
@ -26,7 +25,7 @@ class UnversionedSource(SourceProvider):
"""Return True if the given path is a content root for this provider."""
return False
def get_paths(self, path: str) -> t.List[str]:
def get_paths(self, path: str) -> list[str]:
"""Return the list of available content paths under the given path."""
paths = []

@ -2,6 +2,7 @@
from __future__ import annotations
import atexit
import collections.abc as c
import dataclasses
import functools
import itertools
@ -54,10 +55,10 @@ class PrimeContainers(ApplicationError):
class HostState:
"""State of hosts and profiles to be passed to ansible-test during delegation."""
controller_profile: ControllerHostProfile
target_profiles: t.List[HostProfile]
target_profiles: list[HostProfile]
@property
def profiles(self) -> t.List[HostProfile]:
def profiles(self) -> list[HostProfile]:
"""Return all the profiles as a list."""
return [t.cast(HostProfile, self.controller_profile)] + self.target_profiles
@ -79,26 +80,26 @@ class HostState:
return host_state
def get_controller_target_connections(self) -> t.List[SshConnection]:
def get_controller_target_connections(self) -> list[SshConnection]:
"""Return SSH connection(s) for accessing all target hosts from the controller."""
return list(itertools.chain.from_iterable([target.get_controller_target_connections() for
target in self.target_profiles if isinstance(target, SshTargetHostProfile)]))
def targets(self, profile_type: t.Type[THostProfile]) -> t.List[THostProfile]:
def targets(self, profile_type: t.Type[THostProfile]) -> list[THostProfile]:
"""The list of target(s), verified to be of the specified type."""
if not self.target_profiles:
raise Exception('No target profiles found.')
assert type_guard(self.target_profiles, profile_type)
return t.cast(t.List[THostProfile], self.target_profiles)
return t.cast(list[THostProfile], self.target_profiles)
def prepare_profiles(
args: TEnvironmentConfig,
targets_use_pypi: bool = False,
skip_setup: bool = False,
requirements: t.Optional[t.Callable[[TEnvironmentConfig, HostState], None]] = None,
requirements: t.Optional[c.Callable[[TEnvironmentConfig, HostState], None]] = None,
) -> HostState:
"""
Create new profiles, or load existing ones, and return them.
@ -174,7 +175,7 @@ def cleanup_profiles(host_state: HostState) -> None:
profile.deprovision()
def dispatch_jobs(jobs: t.List[t.Tuple[HostProfile, WrappedThread]]) -> None:
def dispatch_jobs(jobs: list[tuple[HostProfile, WrappedThread]]) -> None:
"""Run the given profile job threads and wait for them to complete."""
for profile, thread in jobs:
thread.daemon = True

@ -3,7 +3,6 @@ from __future__ import annotations
import atexit
import os
import typing as t
import urllib.parse
from .io import (
@ -55,7 +54,7 @@ def run_pypi_proxy(args: EnvironmentConfig, targets_use_pypi: bool) -> None:
if args.pypi_endpoint:
return # user has overridden the proxy endpoint, there is nothing to provision
versions_needing_proxy: t.Tuple[str, ...] = tuple() # preserved for future use, no versions currently require this
versions_needing_proxy: tuple[str, ...] = tuple() # preserved for future use, no versions currently require this
posix_targets = [target for target in args.targets if isinstance(target, PosixConfig)]
need_proxy = targets_use_pypi and any(target.python.version in versions_needing_proxy for target in posix_targets)
use_proxy = args.pypi_proxy or need_proxy

@ -78,7 +78,7 @@ class PipUnavailableError(ApplicationError):
class PipCommand:
    """Base class for pip commands."""
def serialize(self) -> t.Tuple[str, t.Dict[str, t.Any]]:
def serialize(self) -> tuple[str, dict[str, t.Any]]:
"""Return a serialized representation of this command."""
name = type(self).__name__[3:].lower()
return name, self.__dict__
@ -87,9 +87,9 @@ class PipCommand:
@dataclasses.dataclass(frozen=True)
class PipInstall(PipCommand):
"""Details required to perform a pip install."""
requirements: t.List[t.Tuple[str, str]]
constraints: t.List[t.Tuple[str, str]]
packages: t.List[str]
requirements: list[tuple[str, str]]
constraints: list[tuple[str, str]]
packages: list[str]
def has_package(self, name: str) -> bool:
"""Return True if the specified package will be installed, otherwise False."""
@ -102,7 +102,7 @@ class PipInstall(PipCommand):
@dataclasses.dataclass(frozen=True)
class PipUninstall(PipCommand):
"""Details required to perform a pip uninstall."""
packages: t.List[str]
packages: list[str]
ignore_errors: bool
@ -115,7 +115,7 @@ class PipVersion(PipCommand):
class PipBootstrap(PipCommand):
"""Details required to bootstrap pip."""
pip_version: str
packages: t.List[str]
packages: list[str]
# Entry Points
@ -182,7 +182,7 @@ def install_requirements(
check_pyyaml(python)
def collect_bootstrap(python: PythonConfig) -> t.List[PipCommand]:
def collect_bootstrap(python: PythonConfig) -> list[PipCommand]:
"""Return the details necessary to bootstrap pip into an empty virtual environment."""
infrastructure_packages = get_venv_packages(python)
pip_version = infrastructure_packages['pip']
@ -206,9 +206,9 @@ def collect_requirements(
minimize: bool,
command: t.Optional[str],
sanity: t.Optional[str],
) -> t.List[PipCommand]:
) -> list[PipCommand]:
"""Collect requirements for the given Python using the specified arguments."""
commands: t.List[PipCommand] = []
commands: list[PipCommand] = []
if virtualenv:
# sanity tests on Python 2.x install virtualenv when it is too old or is not already installed and the `--requirements` option is given
@ -254,7 +254,7 @@ def collect_requirements(
def run_pip(
args: EnvironmentConfig,
python: PythonConfig,
commands: t.List[PipCommand],
commands: list[PipCommand],
connection: t.Optional[Connection],
) -> None:
"""Run the specified pip commands for the given Python, and optionally the specified host."""
@ -282,10 +282,10 @@ def run_pip(
def collect_general_install(
command: t.Optional[str] = None,
ansible: bool = False,
) -> t.List[PipInstall]:
) -> list[PipInstall]:
"""Return details necessary for the specified general-purpose pip install(s)."""
requirements_paths: t.List[t.Tuple[str, str]] = []
constraints_paths: t.List[t.Tuple[str, str]] = []
requirements_paths: list[tuple[str, str]] = []
constraints_paths: list[tuple[str, str]] = []
if ansible:
path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'ansible.txt')
@ -298,15 +298,15 @@ def collect_general_install(
return collect_install(requirements_paths, constraints_paths)
def collect_package_install(packages: t.List[str], constraints: bool = True) -> t.List[PipInstall]:
def collect_package_install(packages: list[str], constraints: bool = True) -> list[PipInstall]:
"""Return the details necessary to install the specified packages."""
return collect_install([], [], packages, constraints=constraints)
def collect_sanity_install(sanity: str) -> t.List[PipInstall]:
def collect_sanity_install(sanity: str) -> list[PipInstall]:
"""Return the details necessary for the specified sanity pip install(s)."""
requirements_paths: t.List[t.Tuple[str, str]] = []
constraints_paths: t.List[t.Tuple[str, str]] = []
requirements_paths: list[tuple[str, str]] = []
constraints_paths: list[tuple[str, str]] = []
path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', f'sanity.{sanity}.txt')
requirements_paths.append((ANSIBLE_TEST_DATA_ROOT, path))
@ -318,10 +318,10 @@ def collect_sanity_install(sanity: str) -> t.List[PipInstall]:
return collect_install(requirements_paths, constraints_paths, constraints=False)
def collect_units_install() -> t.List[PipInstall]:
def collect_units_install() -> list[PipInstall]:
"""Return details necessary for the specified units pip install(s)."""
requirements_paths: t.List[t.Tuple[str, str]] = []
constraints_paths: t.List[t.Tuple[str, str]] = []
requirements_paths: list[tuple[str, str]] = []
constraints_paths: list[tuple[str, str]] = []
path = os.path.join(data_context().content.unit_path, 'requirements.txt')
requirements_paths.append((data_context().content.root, path))
@ -332,10 +332,10 @@ def collect_units_install() -> t.List[PipInstall]:
return collect_install(requirements_paths, constraints_paths)
def collect_integration_install(command: str, controller: bool) -> t.List[PipInstall]:
def collect_integration_install(command: str, controller: bool) -> list[PipInstall]:
"""Return details necessary for the specified integration pip install(s)."""
requirements_paths: t.List[t.Tuple[str, str]] = []
constraints_paths: t.List[t.Tuple[str, str]] = []
requirements_paths: list[tuple[str, str]] = []
constraints_paths: list[tuple[str, str]] = []
# Support for prefixed files was added to ansible-test in ansible-core 2.12 when split controller/target testing was implemented.
# Previous versions of ansible-test only recognize non-prefixed files.
@ -367,11 +367,11 @@ def collect_integration_install(command: str, controller: bool) -> t.List[PipIns
def collect_install(
requirements_paths: t.List[t.Tuple[str, str]],
constraints_paths: t.List[t.Tuple[str, str]],
packages: t.Optional[t.List[str]] = None,
requirements_paths: list[tuple[str, str]],
constraints_paths: list[tuple[str, str]],
packages: t.Optional[list[str]] = None,
constraints: bool = True,
) -> t.List[PipInstall]:
) -> list[PipInstall]:
"""Build a pip install list from the given requirements, constraints and packages."""
# listing content constraints first gives them priority over constraints provided by ansible-test
constraints_paths = list(constraints_paths)
@ -395,7 +395,7 @@ def collect_install(
return installs
def collect_uninstall(packages: t.List[str], ignore_errors: bool = False) -> t.List[PipUninstall]:
def collect_uninstall(packages: list[str], ignore_errors: bool = False) -> list[PipUninstall]:
"""Return the details necessary for the specified pip uninstall."""
uninstall = PipUninstall(
packages=packages,
@ -408,7 +408,7 @@ def collect_uninstall(packages: t.List[str], ignore_errors: bool = False) -> t.L
# Support
def get_venv_packages(python: PythonConfig) -> t.Dict[str, str]:
def get_venv_packages(python: PythonConfig) -> dict[str, str]:
"""Return a dictionary of Python packages needed for a consistent virtual environment specific to the given Python version."""
# NOTE: This same information is needed for building the base-test-container image.
@ -464,7 +464,7 @@ def requirements_allowed(args: EnvironmentConfig, controller: bool) -> bool:
return target.is_managed or target.python.is_managed
def prepare_pip_script(commands: t.List[PipCommand]) -> str:
def prepare_pip_script(commands: list[PipCommand]) -> str:
"""Generate a Python script to perform the requested pip commands."""
data = [command.serialize() for command in commands]
@ -504,7 +504,7 @@ def is_cryptography_available(python: str) -> bool:
return True
def get_cryptography_requirements(python: PythonConfig) -> t.List[str]:
def get_cryptography_requirements(python: PythonConfig) -> list[str]:
"""
Return the correct cryptography and pyopenssl requirements for the given python version.
The version of cryptography installed depends on the python version and openssl version.
@ -534,7 +534,7 @@ def get_cryptography_requirements(python: PythonConfig) -> t.List[str]:
return requirements
def get_openssl_version(python: PythonConfig) -> t.Optional[t.Tuple[int, ...]]:
def get_openssl_version(python: PythonConfig) -> t.Optional[tuple[int, ...]]:
"""Return the openssl version."""
if not python.version.startswith('2.'):
# OpenSSL version checking only works on Python 3.x.

@ -47,9 +47,9 @@ class SshProcess:
"""Wrapper around an SSH process."""
def __init__(self, process: t.Optional[subprocess.Popen]) -> None:
self._process = process
self.pending_forwards: t.Optional[t.List[t.Tuple[str, int]]] = None
self.pending_forwards: t.Optional[list[tuple[str, int]]] = None
self.forwards: t.Dict[t.Tuple[str, int], int] = {}
self.forwards: dict[tuple[str, int], int] = {}
def terminate(self) -> None:
"""Terminate the SSH process."""
@ -69,9 +69,9 @@ class SshProcess:
self._process.wait()
def collect_port_forwards(self) -> t.Dict[t.Tuple[str, int], int]:
def collect_port_forwards(self) -> dict[tuple[str, int], int]:
"""Collect port assignments for dynamic SSH port forwards."""
errors: t.List[str] = []
errors: list[str] = []
display.info('Collecting %d SSH port forward(s).' % len(self.pending_forwards), verbosity=2)
@ -121,10 +121,10 @@ class SshProcess:
def create_ssh_command(
ssh: SshConnectionDetail,
options: t.Optional[t.Dict[str, t.Union[str, int]]] = None,
cli_args: t.List[str] = None,
options: t.Optional[dict[str, t.Union[str, int]]] = None,
cli_args: list[str] = None,
command: t.Optional[str] = None,
) -> t.List[str]:
) -> list[str]:
"""Create an SSH command using the specified options."""
cmd = [
'ssh',
@ -168,8 +168,8 @@ def create_ssh_command(
def run_ssh_command(
args: EnvironmentConfig,
ssh: SshConnectionDetail,
options: t.Optional[t.Dict[str, t.Union[str, int]]] = None,
cli_args: t.List[str] = None,
options: t.Optional[dict[str, t.Union[str, int]]] = None,
cli_args: list[str] = None,
command: t.Optional[str] = None,
) -> SshProcess:
"""Run the specified SSH command, returning the created SshProcess instance created."""
@ -179,7 +179,7 @@ def run_ssh_command(
cmd_show = shlex.join(cmd)
display.info('Run background command: %s' % cmd_show, verbosity=1, truncate=True)
cmd_bytes = [to_bytes(c) for c in cmd]
cmd_bytes = [to_bytes(arg) for arg in cmd]
env_bytes = dict((to_bytes(k), to_bytes(v)) for k, v in env.items())
if args.explain:
@ -194,13 +194,13 @@ def run_ssh_command(
def create_ssh_port_forwards(
args: EnvironmentConfig,
ssh: SshConnectionDetail,
forwards: t.List[t.Tuple[str, int]],
forwards: list[tuple[str, int]],
) -> SshProcess:
"""
Create SSH port forwards using the provided list of tuples (target_host, target_port).
Port bindings will be automatically assigned by SSH and must be collected with a subsequent call to collect_port_forwards.
"""
options: t.Dict[str, t.Union[str, int]] = dict(
options: dict[str, t.Union[str, int]] = dict(
LogLevel='INFO', # info level required to get messages on stderr indicating the ports assigned to each forward
)
@ -218,10 +218,10 @@ def create_ssh_port_forwards(
def create_ssh_port_redirects(
args: EnvironmentConfig,
ssh: SshConnectionDetail,
redirects: t.List[t.Tuple[int, str, int]],
redirects: list[tuple[int, str, int]],
) -> SshProcess:
"""Create SSH port redirections using the provided list of tuples (bind_port, target_host, target_port)."""
options: t.Dict[str, t.Union[str, int]] = {}
options: dict[str, t.Union[str, int]] = {}
cli_args = []
for bind_port, target_host, target_port in redirects:
@ -232,7 +232,7 @@ def create_ssh_port_redirects(
return process
def generate_ssh_inventory(ssh_connections: t.List[SshConnectionDetail]) -> str:
def generate_ssh_inventory(ssh_connections: list[SshConnectionDetail]) -> str:
"""Return an inventory file in JSON format, created from the provided SSH connection details."""
inventory = dict(
all=dict(

@ -2,6 +2,7 @@
from __future__ import annotations
import collections
import collections.abc as c
import enum
import os
import re
@ -33,7 +34,7 @@ from .data import (
MODULE_EXTENSIONS = '.py', '.ps1'
def find_target_completion(target_func: t.Callable[[], t.Iterable[CompletionTarget]], prefix: str, short: bool) -> t.List[str]:
def find_target_completion(target_func: c.Callable[[], c.Iterable[CompletionTarget]], prefix: str, short: bool) -> list[str]:
"""Return a list of targets from the given target function which match the given prefix."""
try:
targets = target_func()
@ -43,7 +44,7 @@ def find_target_completion(target_func: t.Callable[[], t.Iterable[CompletionTarg
return ['%s' % ex]
def walk_completion_targets(targets: t.Iterable[CompletionTarget], prefix: str, short: bool = False) -> t.Tuple[str, ...]:
def walk_completion_targets(targets: c.Iterable[CompletionTarget], prefix: str, short: bool = False) -> tuple[str, ...]:
"""Return a tuple of targets from the given target iterable which match the given prefix."""
aliases = set(alias for target in targets for alias in target.aliases)
@ -64,11 +65,11 @@ def walk_completion_targets(targets: t.Iterable[CompletionTarget], prefix: str,
def walk_internal_targets(
targets: t.Iterable[TCompletionTarget],
includes: t.Optional[t.List[str]] = None,
excludes: t.Optional[t.List[str]] = None,
requires: t.Optional[t.List[str]] = None,
) -> t.Tuple[TCompletionTarget, ...]:
targets: c.Iterable[TCompletionTarget],
includes: t.Optional[list[str]] = None,
excludes: t.Optional[list[str]] = None,
requires: t.Optional[list[str]] = None,
) -> tuple[TCompletionTarget, ...]:
"""Return a tuple of matching completion targets."""
targets = tuple(targets)
@ -85,12 +86,12 @@ def walk_internal_targets(
return tuple(sorted(internal_targets, key=lambda sort_target: sort_target.name))
def filter_targets(targets: t.Iterable[TCompletionTarget],
patterns: t.List[str],
def filter_targets(targets: c.Iterable[TCompletionTarget],
patterns: list[str],
include: bool = True,
directories: bool = True,
errors: bool = True,
) -> t.Iterable[TCompletionTarget]:
) -> c.Iterable[TCompletionTarget]:
"""Iterate over the given targets and filter them based on the supplied arguments."""
unmatched = set(patterns or ())
compiled_patterns = dict((p, re.compile('^%s$' % p)) for p in patterns) if patterns else None
@ -150,48 +151,48 @@ def walk_module_targets():
yield target
def walk_units_targets() -> t.Iterable[TestTarget]:
def walk_units_targets() -> c.Iterable[TestTarget]:
"""Return an iterable of units targets."""
return walk_test_targets(path=data_context().content.unit_path, module_path=data_context().content.unit_module_path, extensions=('.py',), prefix='test_')
def walk_compile_targets(include_symlinks: bool = True) -> t.Iterable[TestTarget]:
def walk_compile_targets(include_symlinks: bool = True) -> c.Iterable[TestTarget]:
"""Return an iterable of compile targets."""
return walk_test_targets(module_path=data_context().content.module_path, extensions=('.py',), extra_dirs=('bin',), include_symlinks=include_symlinks)
def walk_powershell_targets(include_symlinks: bool = True) -> t.Iterable[TestTarget]:
def walk_powershell_targets(include_symlinks: bool = True) -> c.Iterable[TestTarget]:
"""Return an iterable of PowerShell targets."""
return walk_test_targets(module_path=data_context().content.module_path, extensions=('.ps1', '.psm1'), include_symlinks=include_symlinks)
def walk_sanity_targets() -> t.Iterable[TestTarget]:
def walk_sanity_targets() -> c.Iterable[TestTarget]:
"""Return an iterable of sanity targets."""
return walk_test_targets(module_path=data_context().content.module_path, include_symlinks=True, include_symlinked_directories=True)
def walk_posix_integration_targets(include_hidden: bool = False) -> t.Iterable[IntegrationTarget]:
def walk_posix_integration_targets(include_hidden: bool = False) -> c.Iterable[IntegrationTarget]:
"""Return an iterable of POSIX integration targets."""
for target in walk_integration_targets():
if 'posix/' in target.aliases or (include_hidden and 'hidden/posix/' in target.aliases):
yield target
def walk_network_integration_targets(include_hidden: bool = False) -> t.Iterable[IntegrationTarget]:
def walk_network_integration_targets(include_hidden: bool = False) -> c.Iterable[IntegrationTarget]:
"""Return an iterable of network integration targets."""
for target in walk_integration_targets():
if 'network/' in target.aliases or (include_hidden and 'hidden/network/' in target.aliases):
yield target
def walk_windows_integration_targets(include_hidden: bool = False) -> t.Iterable[IntegrationTarget]:
def walk_windows_integration_targets(include_hidden: bool = False) -> c.Iterable[IntegrationTarget]:
"""Return an iterable of windows integration targets."""
for target in walk_integration_targets():
if 'windows/' in target.aliases or (include_hidden and 'hidden/windows/' in target.aliases):
yield target
def walk_integration_targets() -> t.Iterable[IntegrationTarget]:
def walk_integration_targets() -> c.Iterable[IntegrationTarget]:
"""Return an iterable of integration targets."""
path = data_context().content.integration_targets_path
modules = frozenset(target.module for target in walk_module_targets())
@ -265,12 +266,12 @@ def load_integration_prefixes():
def walk_test_targets(
path: t.Optional[str] = None,
module_path: t.Optional[str] = None,
extensions: t.Optional[t.Tuple[str, ...]] = None,
extensions: t.Optional[tuple[str, ...]] = None,
prefix: t.Optional[str] = None,
extra_dirs: t.Optional[t.Tuple[str, ...]] = None,
extra_dirs: t.Optional[tuple[str, ...]] = None,
include_symlinks: bool = False,
include_symlinked_directories: bool = False,
) -> t.Iterable[TestTarget]:
) -> c.Iterable[TestTarget]:
"""Iterate over available test targets."""
if path:
file_paths = data_context().content.walk_files(path, include_symlinked_directories=include_symlinked_directories)
@ -309,7 +310,7 @@ def walk_test_targets(
yield TestTarget(file_path, module_path, prefix, path, symlink)
def analyze_integration_target_dependencies(integration_targets: t.List[IntegrationTarget]) -> t.Dict[str, t.Set[str]]:
def analyze_integration_target_dependencies(integration_targets: list[IntegrationTarget]) -> dict[str, set[str]]:
"""Analyze the given list of integration test targets and return a dictionary expressing target names and the target names which depend on them."""
real_target_root = os.path.realpath(data_context().content.integration_targets_path) + '/'
@ -442,7 +443,7 @@ class CompletionTarget(metaclass=abc.ABCMeta):
class DirectoryTarget(CompletionTarget):
"""Directory target."""
def __init__(self, path: str, modules: t.Tuple[str, ...]) -> None:
def __init__(self, path: str, modules: tuple[str, ...]) -> None:
super().__init__()
self.name = path
@ -499,10 +500,10 @@ class IntegrationTargetType(enum.Enum):
CONFLICT = enum.auto()
def extract_plugin_references(name: str, aliases: t.List[str]) -> t.List[t.Tuple[str, str]]:
def extract_plugin_references(name: str, aliases: list[str]) -> list[tuple[str, str]]:
"""Return a list of plugin references found in the given integration test target name and aliases."""
plugins = content_plugins()
found: t.List[t.Tuple[str, str]] = []
found: list[tuple[str, str]] = []
for alias in [name] + aliases:
plugin_type = 'modules'
@ -524,7 +525,7 @@ def extract_plugin_references(name: str, aliases: t.List[str]) -> t.List[t.Tuple
return found
def categorize_integration_test(name: str, aliases: t.List[str], force_target: bool) -> t.Tuple[IntegrationTargetType, IntegrationTargetType]:
def categorize_integration_test(name: str, aliases: list[str], force_target: bool) -> tuple[IntegrationTargetType, IntegrationTargetType]:
"""Return the integration test target types (used and actual) based on the given target name and aliases."""
context_controller = f'context/{IntegrationTargetType.CONTROLLER.name.lower()}' in aliases
context_target = f'context/{IntegrationTargetType.TARGET.name.lower()}' in aliases or force_target
@ -566,7 +567,7 @@ class IntegrationTarget(CompletionTarget):
'skip',
)))
def __init__(self, path: str, modules: t.FrozenSet[str], prefixes: t.Dict[str, str]) -> None:
def __init__(self, path: str, modules: frozenset[str], prefixes: dict[str, str]) -> None:
super().__init__()
self.relative_path = os.path.relpath(path, data_context().content.integration_targets_path)
@ -710,7 +711,7 @@ class IntegrationTarget(CompletionTarget):
class TargetPatternsNotMatched(ApplicationError):
"""One or more targets were not matched when a match was required."""
def __init__(self, patterns: t.Set[str]) -> None:
def __init__(self, patterns: set[str]) -> None:
self.patterns = sorted(patterns)
if len(patterns) > 1:

@ -1,6 +1,7 @@
"""Classes for storing and processing test results."""
from __future__ import annotations
import collections.abc as c
import datetime
import re
import typing as t
@ -27,7 +28,7 @@ from .config import (
from . import junit_xml
def calculate_best_confidence(choices: t.Tuple[t.Tuple[str, int], ...], metadata: Metadata) -> int:
def calculate_best_confidence(choices: tuple[tuple[str, int], ...], metadata: Metadata) -> int:
"""Return the best confidence value available from the given choices and metadata."""
best_confidence = 0
@ -219,7 +220,7 @@ class TestFailure(TestResult):
command: str,
test: str,
python_version: t.Optional[str] = None,
messages: t.Optional[t.Sequence[TestMessage]] = None,
messages: t.Optional[c.Sequence[TestMessage]] = None,
summary: t.Optional[str] = None,
):
super().__init__(command, test, python_version)
@ -441,7 +442,7 @@ class TestMessage:
return self.__message
@property
def tuple(self) -> t.Tuple[str, int, int, str, t.Optional[str], str]:
def tuple(self) -> tuple[str, int, int, str, t.Optional[str], str]:
"""Return a tuple with all the immutable values of this test message."""
return self.__path, self.__line, self.__column, self.__level, self.__code, self.__message

@ -1,6 +1,7 @@
"""Python threading tools."""
from __future__ import annotations
import collections.abc as c
import functools
import sys
import threading
@ -13,7 +14,7 @@ TCallable = t.TypeVar('TCallable', bound=t.Callable[..., t.Any])
class WrappedThread(threading.Thread):
"""Wrapper around Thread which captures results and exceptions."""
def __init__(self, action: t.Callable[[], t.Any]) -> None:
def __init__(self, action: c.Callable[[], t.Any]) -> None:
super().__init__()
self._result: queue.Queue[t.Any] = queue.Queue()
self.action = action

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save