ansible-test - More type hint updates. (#78455)

* Simple regex replace of multi-line function arg annotations on the first line.
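A minimal sketch of the kind of substitution this refers to (a hypothetical helper; the actual command used is not part of this commit). It rewrites a first line such as `def write_json_file(path,  # type: str` into `def write_json_file(path: str,`, leaving the remaining arguments, which already carry their own annotations, untouched:

    import re

    # Hypothetical sketch only; the real conversion command is not shown in this commit.
    # Matches the first line of a multi-line def whose first argument carries a
    # trailing `# type:` comment.
    TYPE_COMMENT_ARG = re.compile(r'^(?P<indent>\s*)def (?P<name>\w+)\((?P<arg>\w+),\s*#\s*type:\s*(?P<hint>.+?)\s*$')

    def convert_first_line(line: str) -> str:
        """Rewrite `def name(arg,  # type: Hint` as `def name(arg: Hint,` when the line matches."""
        return TYPE_COMMENT_ARG.sub(r'\g<indent>def \g<name>(\g<arg>: \g<hint>,', line)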

* Manually fix up ArgumentParser type annotations.
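The manual fix-up follows the same shape everywhere; the `units` command parser from the hunks below is representative:

    # Before: type comment on the closing parenthesis.
    parser = subparsers.add_parser(
        'units',
        parents=[parent],
        help='unit tests',
    )  # type: argparse.ArgumentParser

    # After: inline variable annotation on the assignment.
    parser: argparse.ArgumentParser = subparsers.add_parser(
        'units',
        parents=[parent],
        help='unit tests',
    )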

* Manual type hint conversions.
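These are mostly assignments whose type comment moves onto the assignment target, as in `categorize_changes` below (assuming the module's usual `import typing as t` alias, which the `t.` prefix suggests):

    # Before
    commands = {
        'sanity': set(),
        'units': set(),
        'integration': set(),
        'windows-integration': set(),
        'network-integration': set(),
    }  # type: t.Dict[str, t.Set[str]]

    # After
    commands: t.Dict[str, t.Set[str]] = {
        'sanity': set(),
        'units': set(),
        'integration': set(),
        'windows-integration': set(),
        'network-integration': set(),
    }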

* Manual conversion of function type hints.
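Signatures move from Python 2 style type comments to inline annotations; `get_hosts` from the first hunk below is representative:

    # Before
    def get_hosts(inventory, group_name):  # type: (t.Dict[str, t.Any], str) -> t.Dict[str, t.Dict[str, t.Any]]
        ...

    # After
    def get_hosts(inventory: t.Dict[str, t.Any], group_name: str) -> t.Dict[str, t.Dict[str, t.Any]]:
        ...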

* Remove unnecessary type hints on for statements.
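Loop-variable type comments are dropped where the element type is already inferred from the iterable, as in `purge_namespace` below:

    # Before
    for field in dataclasses.fields(LegacyHostOptions):  # type: dataclasses.Field
        ...

    # After: dataclasses.fields() already yields Field objects, so no hint is needed.
    for field in dataclasses.fields(LegacyHostOptions):
        ...
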
pull/78456/head
Matt Clay authored 2 years ago, committed by GitHub
parent b993b5cd49
commit 5bee66fc5d

@ -60,7 +60,7 @@ def parse_inventory(args: EnvironmentConfig, inventory_path: str) -> t.Dict[str,
return inventory
def get_hosts(inventory, group_name): # type: (t.Dict[str, t.Any], str) -> t.Dict[str, t.Dict[str, t.Any]]
def get_hosts(inventory: t.Dict[str, t.Any], group_name: str) -> t.Dict[str, t.Dict[str, t.Any]]:
"""Return a dict of hosts from the specified group in the given inventory."""
hostvars = inventory.get('_meta', {}).get('hostvars', {})
group = inventory.get(group_name, {})
@ -281,7 +281,7 @@ def get_collection_detail(python: PythonConfig) -> CollectionDetail:
def run_playbook(
args: EnvironmentConfig,
inventory_path: str,
playbook, # type: str
playbook: str,
capture: bool,
variables: t.Optional[t.Dict[str, t.Any]] = None,
) -> None:

@ -15,14 +15,14 @@ class CommonCache:
def __init__(self, args: CommonConfig) -> None:
self.args = args
def get(self, key, factory): # type: (str, t.Callable[[], TValue]) -> TValue
def get(self, key: str, factory: t.Callable[[], TValue]) -> TValue:
"""Return the value from the cache identified by the given key, using the specified factory method if it is not found."""
if key not in self.args.cache:
self.args.cache[key] = factory()
return self.args.cache[key]
def get_with_args(self, key, factory): # type: (str, t.Callable[[CommonConfig], TValue]) -> TValue
def get_with_args(self, key: str, factory: t.Callable[[CommonConfig], TValue]) -> TValue:
"""Return the value from the cache identified by the given key, using the specified factory method (which accepts args) if it is not found."""
if key not in self.args.cache:
self.args.cache[key] = factory(self.args)

@ -103,7 +103,7 @@ def get_ci_provider() -> CIProvider:
class AuthHelper(metaclass=abc.ABCMeta):
"""Public key based authentication helper for Ansible Core CI."""
def sign_request(self, request): # type: (t.Dict[str, t.Any]) -> None
def sign_request(self, request: t.Dict[str, t.Any]) -> None:
"""Sign the given auth request and make the public key available."""
payload_bytes = to_bytes(json.dumps(request, sort_keys=True))
signature_raw_bytes = self.sign_bytes(payload_bytes)

@ -254,7 +254,7 @@ def vso_add_attachment(file_type: str, file_name: str, path: str) -> None:
vso('task.addattachment', dict(type=file_type, name=file_name), path)
def vso(name, data, message): # type: (str, t.Dict[str, str], str) -> None
def vso(name: str, data: t.Dict[str, str], message: str) -> None:
"""
Write a logging command for the Azure Pipelines agent to process.
See: https://docs.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash

@ -58,13 +58,13 @@ def categorize_changes(args: TestConfig, paths: t.List[str], verbose_command: t.
"""Categorize the given list of changed paths and return a description of the changes."""
mapper = PathMapper(args)
commands = {
commands: t.Dict[str, t.Set[str]] = {
'sanity': set(),
'units': set(),
'integration': set(),
'windows-integration': set(),
'network-integration': set(),
} # type: t.Dict[str, t.Set[str]]
}
focused_commands = collections.defaultdict(set)
@ -206,9 +206,9 @@ class PathMapper:
self.prefixes = load_integration_prefixes()
self.integration_dependencies = analyze_integration_target_dependencies(self.integration_targets)
self.python_module_utils_imports = {} # type: t.Dict[str, t.Set[str]] # populated on first use to reduce overhead when not needed
self.powershell_module_utils_imports = {} # type: t.Dict[str, t.Set[str]] # populated on first use to reduce overhead when not needed
self.csharp_module_utils_imports = {} # type: t.Dict[str, t.Set[str]] # populated on first use to reduce overhead when not needed
self.python_module_utils_imports: t.Dict[str, t.Set[str]] = {} # populated on first use to reduce overhead when not needed
self.powershell_module_utils_imports: t.Dict[str, t.Set[str]] = {} # populated on first use to reduce overhead when not needed
self.csharp_module_utils_imports: t.Dict[str, t.Set[str]] = {} # populated on first use to reduce overhead when not needed
self.paths_to_dependent_targets: t.Dict[str, t.Set[IntegrationTarget]] = {}

@ -18,10 +18,10 @@ def do_analyze(
completer: CompositeActionCompletionFinder,
) -> None:
"""Command line parsing for all `coverage analyze` commands."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'analyze',
help='analyze collected coverage data',
) # type: argparse.ArgumentParser
)
analyze_subparsers = parser.add_subparsers(metavar='COMMAND', required=True)

@ -22,11 +22,11 @@ def do_combine(
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `coverage analyze targets combine` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'combine',
parents=[parent],
help='combine multiple aggregated coverage files',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_coverage_analyze_targets_combine,

@ -22,11 +22,11 @@ def do_expand(
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `coverage analyze targets expand` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'expand',
parents=[parent],
help='expand target names from integers in aggregated coverage',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_coverage_analyze_targets_expand,

@ -22,11 +22,11 @@ def do_filter(
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `coverage analyze targets filter` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'filter',
parents=[parent],
help='filter aggregated coverage data',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_coverage_analyze_targets_filter,

@ -22,11 +22,11 @@ def do_generate(
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `coverage analyze targets generate` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'generate',
parents=[parent],
help='aggregate coverage by integration test target',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_coverage_analyze_targets_generate,

@ -22,11 +22,11 @@ def do_missing(
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `coverage analyze targets missing` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'missing',
parents=[parent],
help='identify coverage in one file missing in another',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_coverage_analyze_targets_missing,

@ -24,11 +24,11 @@ def do_combine(
completer: CompositeActionCompletionFinder,
) -> None:
"""Command line parsing for the `coverage combine` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'combine',
parents=[parent],
help='combine coverage data and rewrite remote paths',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_coverage_combine,

@ -22,11 +22,11 @@ def do_erase(
completer: CompositeActionCompletionFinder,
) -> None:
"""Command line parsing for the `coverage erase` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'erase',
parents=[parent],
help='erase coverage data files',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_coverage_erase,

@ -24,11 +24,11 @@ def do_html(
completer: CompositeActionCompletionFinder,
) -> None:
"""Command line parsing for the `coverage html` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'html',
parents=[parent],
help='generate html coverage report',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_coverage_html,

@ -24,11 +24,11 @@ def do_report(
completer: CompositeActionCompletionFinder,
) -> None:
"""Command line parsing for the `coverage report` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'report',
parents=[parent],
help='generate console coverage report',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_coverage_report,

@ -24,11 +24,11 @@ def do_xml(
completer: CompositeActionCompletionFinder,
) -> None:
"""Command line parsing for the `coverage xml` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'xml',
parents=[parent],
help='generate xml coverage report',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_coverage_xml,

@ -22,11 +22,11 @@ def do_env(
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `env` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'env',
parents=[parent],
help='show information about the test environment',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_env,

@ -34,7 +34,7 @@ def do_integration(
parser = argparse.ArgumentParser(
add_help=False,
parents=[parent],
) # type: argparse.ArgumentParser
)
do_posix_integration(subparsers, parser, add_integration_common, completer)
do_network_integration(subparsers, parser, add_integration_common, completer)

@ -40,11 +40,11 @@ def do_network_integration(
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `network-integration` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'network-integration',
parents=[parent],
help='network integration tests',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_network_integration,

@ -31,11 +31,11 @@ def do_posix_integration(
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `integration` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'integration',
parents=[parent],
help='posix integration tests',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_posix_integration,

@ -31,11 +31,11 @@ def do_windows_integration(
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `windows-integration` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'windows-integration',
parents=[parent],
help='windows integration tests',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_windows_integration,

@ -34,11 +34,11 @@ def do_sanity(
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `sanity` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'sanity',
parents=[parent],
help='sanity tests',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_sanity,

@ -25,11 +25,11 @@ def do_shell(
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `shell` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'shell',
parents=[parent],
help='open an interactive shell',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_shell,

@ -29,11 +29,11 @@ def do_units(
completer: CompositeActionCompletionFinder,
):
"""Command line parsing for the `units` command."""
parser = subparsers.add_parser(
parser: argparse.ArgumentParser = subparsers.add_parser(
'units',
parents=[parent],
help='unit tests',
) # type: argparse.ArgumentParser
)
parser.set_defaults(
func=command_units,

@ -127,7 +127,7 @@ class LegacyHostOptions:
inventory: t.Optional[str] = None
@staticmethod
def create(namespace): # type: (t.Union[argparse.Namespace, types.SimpleNamespace]) -> LegacyHostOptions
def create(namespace: t.Union[argparse.Namespace, types.SimpleNamespace]) -> LegacyHostOptions:
"""Create legacy host options from the given namespace."""
kwargs = {field.name: getattr(namespace, field.name, None) for field in dataclasses.fields(LegacyHostOptions)}
@ -137,9 +137,9 @@ class LegacyHostOptions:
return LegacyHostOptions(**kwargs)
@staticmethod
def purge_namespace(namespace): # type: (t.Union[argparse.Namespace, types.SimpleNamespace]) -> None
def purge_namespace(namespace: t.Union[argparse.Namespace, types.SimpleNamespace]) -> None:
"""Purge legacy host options fields from the given namespace."""
for field in dataclasses.fields(LegacyHostOptions): # type: dataclasses.Field
for field in dataclasses.fields(LegacyHostOptions):
if hasattr(namespace, field.name):
delattr(namespace, field.name)

@ -287,7 +287,7 @@ def add_environments_python(
def add_environments_host(
environments_parser: argparse.ArgumentParser,
controller_mode: ControllerMode,
target_mode # type: TargetMode
target_mode: TargetMode,
) -> None:
"""Add environment arguments for the given host and argument modes."""
environments_exclusive_group: argparse.ArgumentParser = environments_parser.add_mutually_exclusive_group() # type: ignore[assignment] # real type private

@ -47,7 +47,7 @@ def make_report(target_indexes: TargetIndexes, arcs: Arcs, lines: Lines) -> t.Di
return report
def load_report(report): # type: (t.Dict[str, t.Any]) -> t.Tuple[t.List[str], Arcs, Lines]
def load_report(report: t.Dict[str, t.Any]) -> t.Tuple[t.List[str], Arcs, Lines]:
"""Extract target indexes, arcs and lines from an existing report."""
try:
target_indexes: t.List[str] = report['targets']
@ -78,7 +78,7 @@ def read_report(path: str) -> t.Tuple[t.List[str], Arcs, Lines]:
raise ApplicationError('File "%s" is not an aggregated coverage data file. %s' % (path, ex))
def write_report(args, report, path): # type: (CoverageAnalyzeTargetsConfig, t.Dict[str, t.Any], str) -> None
def write_report(args: CoverageAnalyzeTargetsConfig, report: t.Dict[str, t.Any], path: str) -> None:
"""Write a JSON report to disk."""
if args.explain:
return
@ -95,7 +95,7 @@ def format_line(value: int) -> str:
return str(value) # putting this in a function keeps both pylint and mypy happy
def format_arc(value): # type: (t.Tuple[int, int]) -> str
def format_arc(value: t.Tuple[int, int]) -> str:
"""Format an arc tuple as a string."""
return '%d:%d' % value

@ -81,7 +81,7 @@ def combine_coverage_files(args: CoverageCombineConfig, host_state: HostState) -
pairs = [(path, os.path.relpath(path, data_context().content.root)) for path in exported_paths]
def coverage_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
def coverage_callback(files: t.List[t.Tuple[str, str]]) -> None:
"""Add the coverage files to the payload file list."""
display.info('Including %d exported coverage file(s) in payload.' % len(pairs), verbosity=1)
files.extend(pairs)

@ -131,7 +131,7 @@ def _generate_powershell_xml(coverage_file: str) -> Element:
return elem_coverage
def _add_cobertura_package(packages, package_name, package_data): # type: (Element, str, t.Dict[str, t.Dict[str, int]]) -> t.Tuple[int, int]
def _add_cobertura_package(packages: Element, package_name: str, package_data: t.Dict[str, t.Dict[str, int]]) -> t.Tuple[int, int]:
"""Add a package element to the given packages element."""
elem_package = SubElement(packages, 'package')
elem_classes = SubElement(elem_package, 'classes')

@ -144,7 +144,7 @@ def set_timeout(args: EnvConfig) -> None:
os.remove(TIMEOUT_PATH)
def show_dict(data, verbose, root_verbosity=0, path=None): # type: (t.Dict[str, t.Any], t.Dict[str, int], int, t.Optional[t.List[str]]) -> None
def show_dict(data: t.Dict[str, t.Any], verbose: t.Dict[str, int], root_verbosity: int = 0, path: t.Optional[t.List[str]] = None) -> None:
"""Show a dict with varying levels of verbosity."""
path = path if path else []

@ -198,11 +198,11 @@ def get_inventory_absolute_path(args: IntegrationConfig, target: InventoryConfig
def get_inventory_relative_path(args: IntegrationConfig) -> str:
"""Return the inventory path used for the given integration configuration relative to the content root."""
inventory_names = {
inventory_names: t.Dict[t.Type[IntegrationConfig], str] = {
PosixIntegrationConfig: 'inventory',
WindowsIntegrationConfig: 'inventory.winrm',
NetworkIntegrationConfig: 'inventory.networking',
} # type: t.Dict[t.Type[IntegrationConfig], str]
}
return os.path.join(data_context().content.integration_path, inventory_names[type(args)])
@ -212,7 +212,7 @@ def delegate_inventory(args: IntegrationConfig, inventory_path_src: str) -> None
if isinstance(args, PosixIntegrationConfig):
return
def inventory_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
def inventory_callback(files: t.List[t.Tuple[str, str]]) -> None:
"""
Add the inventory file to the payload file list.
This will preserve the file during delegation even if it is ignored or is outside the content and install roots.
@ -895,7 +895,7 @@ If necessary, context can be controlled by adding entries to the "aliases" file
return exclude
def command_integration_filter(args, # type: TIntegrationConfig
def command_integration_filter(args: TIntegrationConfig,
targets: t.Iterable[TIntegrationTarget],
) -> t.Tuple[HostState, t.Tuple[TIntegrationTarget, ...]]:
"""Filter the given integration test targets."""
@ -935,7 +935,7 @@ def command_integration_filter(args, # type: TIntegrationConfig
vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
if os.path.exists(vars_file_src):
def integration_config_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
def integration_config_callback(files: t.List[t.Tuple[str, str]]) -> None:
"""
Add the integration config vars file to the payload file list.
This will preserve the file during delegation even if the file is ignored by source control.

@ -80,7 +80,7 @@ def get_environment_plugins() -> t.Dict[str, t.Type[CloudEnvironment]]:
return get_cloud_plugins()[1]
def get_cloud_platforms(args, targets=None): # type: (TestConfig, t.Optional[t.Tuple[IntegrationTarget, ...]]) -> t.List[str]
def get_cloud_platforms(args: TestConfig, targets: t.Optional[t.Tuple[IntegrationTarget, ...]] = None) -> t.List[str]:
"""Return cloud platform names for the specified targets."""
if isinstance(args, IntegrationConfig):
if args.list_targets:
@ -114,7 +114,7 @@ def get_cloud_platform(target: IntegrationTarget) -> t.Optional[str]:
raise ApplicationError('Target %s aliases contains multiple cloud platforms: %s' % (target.name, ', '.join(sorted(cloud_platforms))))
def get_cloud_providers(args, targets=None): # type: (IntegrationConfig, t.Optional[t.Tuple[IntegrationTarget, ...]]) -> t.List[CloudProvider]
def get_cloud_providers(args: IntegrationConfig, targets: t.Optional[t.Tuple[IntegrationTarget, ...]] = None) -> t.List[CloudProvider]:
"""Return a list of cloud providers for the given targets."""
return [get_provider_plugins()[p](args) for p in get_cloud_platforms(args, targets)]
@ -129,7 +129,7 @@ def get_cloud_environment(args: IntegrationConfig, target: IntegrationTarget) ->
return get_environment_plugins()[cloud_platform](args)
def cloud_filter(args, targets): # type: (IntegrationConfig, t.Tuple[IntegrationTarget, ...]) -> t.List[str]
def cloud_filter(args: IntegrationConfig, targets: t.Tuple[IntegrationTarget, ...]) -> t.List[str]:
"""Return a list of target names to exclude based on the given targets."""
if args.metadata.cloud_config is not None:
return [] # cloud filter already performed prior to delegation
@ -142,7 +142,7 @@ def cloud_filter(args, targets): # type: (IntegrationConfig, t.Tuple[Integratio
return exclude
def cloud_init(args, targets): # type: (IntegrationConfig, t.Tuple[IntegrationTarget, ...]) -> None
def cloud_init(args: IntegrationConfig, targets: t.Tuple[IntegrationTarget, ...]) -> None:
"""Initialize cloud plugins for the given targets."""
if args.metadata.cloud_config is not None:
return # cloud configuration already established prior to delegation
@ -151,7 +151,7 @@ def cloud_init(args, targets): # type: (IntegrationConfig, t.Tuple[IntegrationT
results = {}
for provider in get_cloud_providers(args, targets): # type: CloudProvider
for provider in get_cloud_providers(args, targets):
if args.prime_containers and not provider.uses_docker:
continue
@ -189,7 +189,7 @@ class CloudBase(metaclass=abc.ABCMeta):
self.args = args
self.platform = self.__module__.rsplit('.', 1)[-1]
def config_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
def config_callback(files: t.List[t.Tuple[str, str]]) -> None:
"""Add the config file to the payload file list."""
if self.platform not in self.args.metadata.cloud_config:
return # platform was initialized, but not used -- such as being skipped due to all tests being disabled
@ -243,14 +243,14 @@ class CloudBase(metaclass=abc.ABCMeta):
"""True if resources are managed by ansible-test, otherwise False."""
self._set_cloud_config(self._MANAGED, value)
def _get_cloud_config(self, key, default=None): # type: (str, t.Optional[t.Union[str, int, bool]]) -> t.Union[str, int, bool]
def _get_cloud_config(self, key: str, default: t.Optional[t.Union[str, int, bool]] = None) -> t.Union[str, int, bool]:
"""Return the specified value from the internal configuration."""
if default is not None:
return self.args.metadata.cloud_config[self.platform].get(key, default)
return self.args.metadata.cloud_config[self.platform][key]
def _set_cloud_config(self, key, value): # type: (str, t.Union[str, int, bool]) -> None
def _set_cloud_config(self, key: str, value: t.Union[str, int, bool]) -> None:
"""Set the specified key and value in the internal configuration."""
self.args.metadata.cloud_config[self.platform][key] = value
@ -270,7 +270,7 @@ class CloudProvider(CloudBase):
self.uses_config = False
self.uses_docker = False
def filter(self, targets, exclude): # type: (t.Tuple[IntegrationTarget, ...], t.List[str]) -> None
def filter(self, targets: t.Tuple[IntegrationTarget, ...], exclude: t.List[str]) -> None:
"""Filter out the cloud tests when the necessary config and resources are not available."""
if not self.uses_docker and not self.uses_config:
return
@ -345,7 +345,7 @@ class CloudProvider(CloudBase):
return config
@staticmethod
def _populate_config_template(template, values): # type: (str, t.Dict[str, str]) -> str
def _populate_config_template(template: str, values: t.Dict[str, str]) -> str:
"""Populate and return the given template with the provided values."""
for key in sorted(values):
value = values[key]

@ -42,7 +42,7 @@ class AwsCloudProvider(CloudProvider):
self.uses_config = True
def filter(self, targets, exclude): # type: (t.Tuple[IntegrationTarget, ...], t.List[str]) -> None
def filter(self, targets: t.Tuple[IntegrationTarget, ...], exclude: t.List[str]) -> None:
"""Filter out the cloud tests when the necessary config and resources are not available."""
aci = self._create_ansible_core_ci()
@ -102,10 +102,10 @@ class AwsCloudEnvironment(CloudEnvironment):
parser = configparser.ConfigParser()
parser.read(self.config_path)
ansible_vars = dict(
ansible_vars: t.Dict[str, t.Any] = dict(
resource_prefix=self.resource_prefix,
tiny_prefix=uuid.uuid4().hex[0:12]
) # type: t.Dict[str, t.Any]
)
ansible_vars.update(dict(parser.items('default')))

@ -38,7 +38,7 @@ class AzureCloudProvider(CloudProvider):
self.uses_config = True
def filter(self, targets, exclude): # type: (t.Tuple[IntegrationTarget, ...], t.List[str]) -> None
def filter(self, targets: t.Tuple[IntegrationTarget, ...], exclude: t.List[str]) -> None:
"""Filter out the cloud tests when the necessary config and resources are not available."""
aci = self._create_ansible_core_ci()
@ -127,7 +127,7 @@ class AzureCloudEnvironment(CloudEnvironment):
display.notice('If %s failed due to permissions, the test policy may need to be updated.' % target.name)
def get_config(config_path): # type: (str) -> t.Dict[str, str]
def get_config(config_path: str) -> t.Dict[str, str]:
"""Return a configuration dictionary parsed from the given configuration path."""
parser = configparser.ConfigParser()
parser.read(config_path)

@ -35,7 +35,7 @@ class HcloudCloudProvider(CloudProvider):
self.uses_config = True
def filter(self, targets, exclude): # type: (t.Tuple[IntegrationTarget, ...], t.List[str]) -> None
def filter(self, targets: t.Tuple[IntegrationTarget, ...], exclude: t.List[str]) -> None:
"""Filter out the cloud tests when the necessary config and resources are not available."""
aci = self._create_ansible_core_ci()

@ -112,10 +112,10 @@ class CoverageHandler(t.Generic[THostConfig], metaclass=abc.ABCMeta):
"""Create inventory, if needed."""
@abc.abstractmethod
def get_environment(self, target_name, aliases): # type: (str, t.Tuple[str, ...]) -> t.Dict[str, str]
def get_environment(self, target_name: str, aliases: t.Tuple[str, ...]) -> t.Dict[str, str]:
"""Return a dictionary of environment variables for running tests with code coverage."""
def run_playbook(self, playbook, variables): # type: (str, t.Dict[str, str]) -> None
def run_playbook(self, playbook: str, variables: t.Dict[str, str]) -> None:
"""Run the specified playbook using the current inventory."""
self.create_inventory()
run_playbook(self.args, self.inventory_path, playbook, capture=False, variables=variables)
@ -221,7 +221,7 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]):
self.run_playbook('posix_coverage_teardown.yml', self.get_playbook_variables())
def get_environment(self, target_name, aliases): # type: (str, t.Tuple[str, ...]) -> t.Dict[str, str]
def get_environment(self, target_name: str, aliases: t.Tuple[str, ...]) -> t.Dict[str, str]:
"""Return a dictionary of environment variables for running tests with code coverage."""
# Enable code coverage collection on Ansible modules (both local and remote).
@ -306,7 +306,7 @@ class WindowsCoverageHandler(CoverageHandler[WindowsConfig]):
coverage_zip.extract(item, ResultType.COVERAGE.path)
def get_environment(self, target_name, aliases): # type: (str, t.Tuple[str, ...]) -> t.Dict[str, str]
def get_environment(self, target_name: str, aliases: t.Tuple[str, ...]) -> t.Dict[str, str]:
"""Return a dictionary of environment variables for running tests with code coverage."""
# Include the command, target and platform marker so the remote host can create a filename with that info.
@ -364,7 +364,7 @@ class CoverageManager:
for handler in self.handlers:
handler.teardown()
def get_environment(self, target_name, aliases): # type: (str, t.Tuple[str, ...]) -> t.Dict[str, str]
def get_environment(self, target_name: str, aliases: t.Tuple[str, ...]) -> t.Dict[str, str]:
"""Return a dictionary of environment variables for running tests with code coverage."""
if not self.args.coverage or 'non_local/' in aliases:
return {}

@ -645,7 +645,7 @@ class SanityMessage(TestMessage):
class SanityTargets:
"""Sanity test target information."""
def __init__(self, targets, include): # type: (t.Tuple[TestTarget, ...], t.Tuple[TestTarget, ...]) -> None
def __init__(self, targets: t.Tuple[TestTarget, ...], include: t.Tuple[TestTarget, ...]) -> None:
self.targets = targets
self.include = include
@ -855,7 +855,7 @@ class SanityCodeSmellTest(SanitySingleVersion):
self.__no_targets: bool = self.config.get('no_targets')
self.__include_directories: bool = self.config.get('include_directories')
self.__include_symlinks: bool = self.config.get('include_symlinks')
self.__py2_compat = self.config.get('py2_compat', False) # type: bool
self.__py2_compat: bool = self.config.get('py2_compat', False)
else:
self.output = None
self.extensions = []

@ -107,7 +107,7 @@ class PylintTest(SanitySingleVersion):
contexts = []
remaining_paths = set(paths)
def add_context(available_paths, context_name, context_filter): # type: (t.Set[str], str, t.Callable[[str], bool]) -> None
def add_context(available_paths: t.Set[str], context_name: str, context_filter: t.Callable[[str], bool]) -> None:
"""Add the specified context to the context list, consuming available paths that match the given context filter."""
filtered_paths = set(p for p in available_paths if context_filter(p))
contexts.append((context_name, sorted(filtered_paths)))

@ -89,8 +89,8 @@ class ShellcheckTest(SanityVersionNeutral):
results = []
for item in root: # type: Element
for entry in item: # type: Element
for item in root:
for entry in item:
results.append(SanityMessage(
message=entry.attrib['message'],
path=item.attrib['name'],

@ -111,7 +111,7 @@ class EnvironmentConfig(CommonConfig):
self.delegate_args: t.List[str] = []
def host_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
def host_callback(files: t.List[t.Tuple[str, str]]) -> None:
"""Add the host files to the payload file list."""
config = self
@ -222,7 +222,7 @@ class TestConfig(EnvironmentConfig):
if self.coverage_check:
self.coverage = True
def metadata_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
def metadata_callback(files: t.List[t.Tuple[str, str]]) -> None:
"""Add the metadata file to the payload file list."""
config = self
@ -261,7 +261,7 @@ class SanityConfig(TestConfig):
self.display_stderr = self.lint or self.list_tests
if self.keep_git:
def git_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
def git_callback(files: t.List[t.Tuple[str, str]]) -> None:
"""Add files from the content root .git directory to the payload file list."""
for dirpath, _dirnames, filenames in os.walk(os.path.join(data_context().content.root, '.git')):
paths = [os.path.join(dirpath, filename) for filename in filenames]

@ -243,7 +243,7 @@ def get_container_database(args: EnvironmentConfig) -> ContainerDatabase:
class ContainerAccess:
"""Information needed for one test host to access a single container supporting tests."""
def __init__(self, host_ip, names, ports, forwards): # type: (str, t.List[str], t.Optional[t.List[int]], t.Optional[t.Dict[int, int]]) -> None
def __init__(self, host_ip: str, names: t.List[str], ports: t.Optional[t.List[int]], forwards: t.Optional[t.Dict[int, int]]) -> None:
# if forwards is set
# this is where forwards are sent (it is the host that provides an indirect connection to the containers on alternate ports)
# /etc/hosts uses 127.0.0.1 (since port redirection will be used)
@ -270,7 +270,7 @@ class ContainerAccess:
return ports
@staticmethod
def from_dict(data): # type: (t.Dict[str, t.Any]) -> ContainerAccess
def from_dict(data: t.Dict[str, t.Any]) -> ContainerAccess:
"""Return a ContainerAccess instance from the given dict."""
forwards = data.get('forwards')
@ -302,11 +302,11 @@ class ContainerAccess:
class ContainerDatabase:
"""Database of running containers used to support tests."""
def __init__(self, data): # type: (t.Dict[str, t.Dict[str, t.Dict[str, ContainerAccess]]]) -> None
def __init__(self, data: t.Dict[str, t.Dict[str, t.Dict[str, ContainerAccess]]]) -> None:
self.data = data
@staticmethod
def from_dict(data): # type: (t.Dict[str, t.Any]) -> ContainerDatabase
def from_dict(data: t.Dict[str, t.Any]) -> ContainerDatabase:
"""Return a ContainerDatabase instance from the given dict."""
return ContainerDatabase(dict((access_name,
dict((context_name,
@ -632,7 +632,7 @@ class SupportContainer:
self.published_ports = published_ports
def wait_for_file(args, # type: EnvironmentConfig
def wait_for_file(args: EnvironmentConfig,
container_name: str,
path: str,
sleep: int,
@ -666,7 +666,7 @@ def cleanup_containers(args: EnvironmentConfig) -> None:
display.notice('Remember to run `docker rm -f %s` when finished testing.' % container.name)
def create_hosts_entries(context): # type: (t.Dict[str, ContainerAccess]) -> t.List[str]
def create_hosts_entries(context: t.Dict[str, ContainerAccess]) -> t.List[str]:
"""Return hosts entries for the specified context."""
entries = []
unique_id = uuid.uuid4()
@ -724,7 +724,7 @@ def create_container_hooks(
return pre_target, post_target
def create_managed_contexts(control_contexts): # type: (t.Dict[str, t.Dict[str, ContainerAccess]]) -> t.Dict[str, t.Dict[str, ContainerAccess]]
def create_managed_contexts(control_contexts: t.Dict[str, t.Dict[str, ContainerAccess]]) -> t.Dict[str, t.Dict[str, ContainerAccess]]:
"""Create managed contexts from the given control contexts."""
managed_contexts: t.Dict[str, t.Dict[str, ContainerAccess]] = {}

@ -320,7 +320,7 @@ class AnsibleCoreCI:
return response.json()
def _start_endpoint(self, data, headers): # type: (t.Dict[str, t.Any], t.Dict[str, str]) -> HttpResponse
def _start_endpoint(self, data: t.Dict[str, t.Any], headers: t.Dict[str, str]) -> HttpResponse:
tries = self.retries
sleep = 15
@ -368,7 +368,7 @@ class AnsibleCoreCI:
return self.load(config)
def load(self, config): # type: (t.Dict[str, str]) -> bool
def load(self, config: t.Dict[str, str]) -> bool:
"""Load the instance from the provided dictionary."""
self.instance_id = str(config['instance_id'])
self.endpoint = config['endpoint']
@ -446,7 +446,7 @@ class SshKey:
key, pub = key_pair
key_dst, pub_dst = self.get_in_tree_key_pair_paths()
def ssh_key_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
def ssh_key_callback(files: t.List[t.Tuple[str, str]]) -> None:
"""
Add the SSH keys to the payload file list.
They are either outside the source tree or in the cache dir which is ignored by default.

@ -112,7 +112,7 @@ class DataContext:
return collections
@staticmethod
def __create_content_layout(layout_providers, # type: t.List[t.Type[LayoutProvider]]
def __create_content_layout(layout_providers: t.List[t.Type[LayoutProvider]],
source_providers: t.List[t.Type[SourceProvider]],
root: str,
walk: bool,
@ -172,7 +172,7 @@ class DataContext:
return self.__ansible_source
def register_payload_callback(self, callback): # type: (t.Callable[[t.List[t.Tuple[str, str]]], None]) -> None
def register_payload_callback(self, callback: t.Callable[[t.List[t.Tuple[str, str]]], None]) -> None:
"""Register the given payload callback."""
self.payload_callbacks.append(callback)

@ -370,7 +370,7 @@ class ContainerNotFoundError(DockerError):
class DockerInspect:
"""The results of `docker inspect` for a single container."""
def __init__(self, args, inspection): # type: (EnvironmentConfig, t.Dict[str, t.Any]) -> None
def __init__(self, args: EnvironmentConfig, inspection: t.Dict[str, t.Any]) -> None:
self.args = args
self.inspection = inspection

@ -116,7 +116,7 @@ class Inventory:
extra_groups: t.Optional[t.Dict[str, t.List[str]]] = None
@staticmethod
def create_single_host(name, variables): # type: (str, t.Dict[str, t.Union[str, int]]) -> Inventory
def create_single_host(name: str, variables: t.Dict[str, t.Union[str, int]]) -> Inventory:
"""Return an inventory instance created from the given hostname and variables."""
return Inventory(host_groups=dict(all={name: variables}))
@ -245,7 +245,7 @@ class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta):
return self.state.get('core_ci')
@core_ci_state.setter
def core_ci_state(self, value): # type: (t.Dict[str, str]) -> None
def core_ci_state(self, value: t.Dict[str, str]) -> None:
"""The saved Ansible Core CI state."""
self.state['core_ci'] = value
@ -462,7 +462,7 @@ class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
core_ci = self.wait_for_instance()
connection = core_ci.connection
variables = dict(
variables: t.Dict[str, t.Optional[t.Union[str, int]]] = dict(
ansible_connection=self.config.connection,
ansible_pipelining='yes',
ansible_host=connection.hostname,
@ -470,7 +470,7 @@ class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
ansible_user=connection.username,
ansible_ssh_private_key_file=core_ci.ssh_key.key,
ansible_network_os=f'{self.config.collection}.{self.config.platform}' if self.config.collection else self.config.platform,
) # type: t.Dict[str, t.Optional[t.Union[str, int]]]
)
return variables
@ -684,7 +684,7 @@ class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
core_ci = self.wait_for_instance()
connection = core_ci.connection
variables = dict(
variables: t.Dict[str, t.Optional[t.Union[str, int]]] = dict(
ansible_connection='winrm',
ansible_pipelining='yes',
ansible_winrm_server_cert_validation='ignore',
@ -693,7 +693,7 @@ class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
ansible_user=connection.username,
ansible_password=connection.password,
ansible_ssh_private_key_file=core_ci.ssh_key.key,
) # type: t.Dict[str, t.Optional[t.Union[str, int]]]
)
# HACK: force 2016 to use NTLM + HTTP message encryption
if self.config.version == '2016':

@ -39,11 +39,11 @@ class HttpClient:
"""Perform an HTTP DELETE and return the response."""
return self.request('DELETE', url)
def put(self, url, data=None, headers=None): # type: (str, t.Optional[str], t.Optional[t.Dict[str, str]]) -> HttpResponse
def put(self, url: str, data: t.Optional[str] = None, headers: t.Optional[t.Dict[str, str]] = None) -> HttpResponse:
"""Perform an HTTP PUT and return the response."""
return self.request('PUT', url, data, headers)
def request(self, method, url, data=None, headers=None): # type: (str, str, t.Optional[str], t.Optional[t.Dict[str, str]]) -> HttpResponse
def request(self, method: str, url: str, data: t.Optional[str] = None, headers: t.Optional[t.Dict[str, str]] = None) -> HttpResponse:
"""Perform an HTTP request and return the response."""
cmd = ['curl', '-s', '-S', '-i', '-X', method]

@ -141,7 +141,7 @@ def create_posix_inventory(args: EnvironmentConfig, path: str, target_hosts: t.L
ssh = connections[0]
testhost = dict(
testhost: t.Dict[str, t.Optional[t.Union[str, int]]] = dict(
ansible_connection='ssh',
ansible_pipelining='yes',
ansible_python_interpreter=ssh.settings.python_interpreter,
@ -149,7 +149,7 @@ def create_posix_inventory(args: EnvironmentConfig, path: str, target_hosts: t.L
ansible_port=ssh.settings.port,
ansible_user=ssh.settings.user,
ansible_ssh_private_key_file=ssh.settings.identity_file,
) # type: t.Dict[str, t.Optional[t.Union[str, int]]]
)
if ssh.become:
testhost.update(

@ -39,7 +39,7 @@ def make_dirs(path: str) -> None:
raise
def write_json_file(path, # type: str
def write_json_file(path: str,
content: t.Any,
create_directories: bool = False,
formatted: bool = True,

@ -69,7 +69,7 @@ class Metadata:
return Metadata.from_dict(data)
@staticmethod
def from_dict(data): # type: (t.Dict[str, t.Any]) -> Metadata
def from_dict(data: t.Dict[str, t.Any]) -> Metadata:
"""Return metadata loaded from the specified dictionary."""
metadata = Metadata()
metadata.changes = data['changes']
@ -112,7 +112,7 @@ class ChangeDescription:
)
@staticmethod
def from_dict(data): # type: (t.Dict[str, t.Any]) -> ChangeDescription
def from_dict(data: t.Dict[str, t.Any]) -> ChangeDescription:
"""Return a change description loaded from the given dictionary."""
changes = ChangeDescription()
changes.command = data['command']

@ -16,7 +16,7 @@ def get_path_provider_classes(provider_type: t.Type[TPathProvider]) -> t.List[t.
return sorted(get_subclasses(provider_type), key=lambda c: (c.priority, c.__name__))
def find_path_provider(provider_type, # type: t.Type[TPathProvider]
def find_path_provider(provider_type: t.Type[TPathProvider],
provider_classes: t.List[t.Type[TPathProvider]],
path: str,
walk: bool,

@ -223,7 +223,7 @@ def paths_to_tree(paths: t.List[str]) -> t.Tuple[t.Dict[str, t.Any], t.List[str]
return tree
def get_tree_item(tree, parts): # type: (t.Tuple[t.Dict[str, t.Any], t.List[str]], t.List[str]) -> t.Optional[t.Tuple[t.Dict[str, t.Any], t.List[str]]]
def get_tree_item(tree: t.Tuple[t.Dict[str, t.Any], t.List[str]], parts: t.List[str]) -> t.Optional[t.Tuple[t.Dict[str, t.Any], t.List[str]]]:
"""Return the portion of the tree found under the path given by parts, or None if it does not exist."""
root = tree

@ -174,7 +174,7 @@ def cleanup_profiles(host_state: HostState) -> None:
profile.deprovision()
def dispatch_jobs(jobs): # type: (t.List[t.Tuple[HostProfile, WrappedThread]]) -> None
def dispatch_jobs(jobs: t.List[t.Tuple[HostProfile, WrappedThread]]) -> None:
"""Run the given profile job threads and wait for them to complete."""
for profile, thread in jobs:
thread.daemon = True

@ -55,7 +55,7 @@ def run_pypi_proxy(args: EnvironmentConfig, targets_use_pypi: bool) -> None:
if args.pypi_endpoint:
return # user has overridden the proxy endpoint, there is nothing to provision
versions_needing_proxy = tuple() # type: t.Tuple[str, ...] # preserved for future use, no versions currently require this
versions_needing_proxy: t.Tuple[str, ...] = tuple() # preserved for future use, no versions currently require this
posix_targets = [target for target in args.targets if isinstance(target, PosixConfig)]
need_proxy = targets_use_pypi and any(target.python.version in versions_needing_proxy for target in posix_targets)
use_proxy = args.pypi_proxy or need_proxy

@ -69,7 +69,7 @@ class SshProcess:
self._process.wait()
def collect_port_forwards(self): # type: (SshProcess) -> t.Dict[t.Tuple[str, int], int]
def collect_port_forwards(self) -> t.Dict[t.Tuple[str, int], int]:
"""Collect port assignments for dynamic SSH port forwards."""
errors: t.List[str] = []
@ -200,9 +200,9 @@ def create_ssh_port_forwards(
Create SSH port forwards using the provided list of tuples (target_host, target_port).
Port bindings will be automatically assigned by SSH and must be collected with a subsequent call to collect_port_forwards.
"""
options = dict(
options: t.Dict[str, t.Union[str, int]] = dict(
LogLevel='INFO', # info level required to get messages on stderr indicating the ports assigned to each forward
) # type: t.Dict[str, t.Union[str, int]]
)
cli_args = []

@ -33,7 +33,7 @@ from .data import (
MODULE_EXTENSIONS = '.py', '.ps1'
def find_target_completion(target_func, prefix, short): # type: (t.Callable[[], t.Iterable[CompletionTarget]], str, bool) -> t.List[str]
def find_target_completion(target_func: t.Callable[[], t.Iterable[CompletionTarget]], prefix: str, short: bool) -> t.List[str]:
"""Return a list of targets from the given target function which match the given prefix."""
try:
targets = target_func()
@ -85,7 +85,7 @@ def walk_internal_targets(
return tuple(sorted(internal_targets, key=lambda sort_target: sort_target.name))
def filter_targets(targets, # type: t.Iterable[TCompletionTarget]
def filter_targets(targets: t.Iterable[TCompletionTarget],
patterns: t.List[str],
include: bool = True,
directories: bool = True,
@ -442,7 +442,7 @@ class CompletionTarget(metaclass=abc.ABCMeta):
class DirectoryTarget(CompletionTarget):
"""Directory target."""
def __init__(self, path, modules): # type: (str, t.Tuple[str, ...]) -> None
def __init__(self, path: str, modules: t.Tuple[str, ...]) -> None:
super().__init__()
self.name = path
@ -566,7 +566,7 @@ class IntegrationTarget(CompletionTarget):
'skip',
)))
def __init__(self, path, modules, prefixes): # type: (str, t.FrozenSet[str], t.Dict[str, str]) -> None
def __init__(self, path: str, modules: t.FrozenSet[str], prefixes: t.Dict[str, str]) -> None:
super().__init__()
self.relative_path = os.path.relpath(path, data_context().content.integration_targets_path)

@ -27,7 +27,7 @@ from .config import (
from . import junit_xml
def calculate_best_confidence(choices, metadata): # type: (t.Tuple[t.Tuple[str, int], ...], Metadata) -> int
def calculate_best_confidence(choices: t.Tuple[t.Tuple[str, int], ...], metadata: Metadata) -> int:
"""Return the best confidence value available from the given choices and metadata."""
best_confidence = 0

@ -13,7 +13,7 @@ TCallable = t.TypeVar('TCallable', bound=t.Callable[..., t.Any])
class WrappedThread(threading.Thread):
"""Wrapper around Thread which captures results and exceptions."""
def __init__(self, action): # type: (t.Callable[[], t.Any]) -> None
def __init__(self, action: t.Callable[[], t.Any]) -> None:
super().__init__()
self._result: queue.Queue[t.Any] = queue.Queue()
self.action = action

@ -141,7 +141,7 @@ def is_valid_identifier(value: str) -> bool:
return value.isidentifier() and not keyword.iskeyword(value)
def cache(func): # type: (t.Callable[[], TValue]) -> t.Callable[[], TValue]
def cache(func: t.Callable[[], TValue]) -> t.Callable[[], TValue]:
"""Enforce exclusive access on a decorated function and cache the result."""
storage: t.Dict[None, TValue] = {}
sentinel = object()
@ -211,7 +211,7 @@ def detect_architecture(python: str) -> t.Optional[str]:
return architecture
def filter_args(args, filters): # type: (t.List[str], t.Dict[str, int]) -> t.List[str]
def filter_args(args: t.List[str], filters: t.Dict[str, int]) -> t.List[str]:
"""Return a filtered version of the given command line arguments."""
remaining = 0
result = []
@ -254,12 +254,12 @@ def read_lines_without_comments(path: str, remove_blank_lines: bool = False, opt
return lines
def exclude_none_values(data): # type: (t.Dict[TKey, t.Optional[TValue]]) -> t.Dict[TKey, TValue]
def exclude_none_values(data: t.Dict[TKey, t.Optional[TValue]]) -> t.Dict[TKey, TValue]:
"""Return the provided dictionary with any None values excluded."""
return dict((key, value) for key, value in data.items() if value is not None)
def find_executable(executable, cwd=None, path=None, required=True): # type: (str, t.Optional[str], t.Optional[str], t.Union[bool, str]) -> t.Optional[str]
def find_executable(executable: str, cwd: t.Optional[str] = None, path: t.Optional[str] = None, required: t.Union[bool, str] = True) -> t.Optional[str]:
"""
Find the specified executable and return the full path, or None if it could not be found.
If required is True an exception will be raised if the executable is not found.
@ -1017,7 +1017,7 @@ def str_to_version(version: str) -> t.Tuple[int, ...]:
return tuple(int(n) for n in version.split('.'))
def version_to_str(version): # type: (t.Tuple[int, ...]) -> str
def version_to_str(version: t.Tuple[int, ...]) -> str:
"""Return a version string from a version tuple."""
return '.'.join(str(n) for n in version)
@ -1044,7 +1044,7 @@ def import_plugins(directory: str, root: t.Optional[str] = None) -> None:
load_module(module_path, name)
def load_plugins(base_type, database): # type: (t.Type[C], t.Dict[str, t.Type[C]]) -> None
def load_plugins(base_type: t.Type[C], database: t.Dict[str, t.Type[C]]) -> None:
"""
Load plugins of the specified type and track them in the specified database.
Only plugins which have already been imported will be loaded.

@ -74,7 +74,7 @@ class ShellScriptTemplate:
return value
@staticmethod
def quote(value): # type: (t.Union[str, t.List[str]]) -> str
def quote(value: t.Union[str, t.List[str]]) -> str:
"""Return a shell quoted version of the given value."""
if isinstance(value, list):
return shlex.quote(' '.join(value))
@ -209,7 +209,7 @@ def named_temporary_file(args: CommonConfig, prefix: str, suffix: str, directory
yield tempfile_fd.name
def write_json_test_results(category, # type: ResultType
def write_json_test_results(category: ResultType,
name: str,
content: t.Union[t.List[t.Any], t.Dict[str, t.Any]],
formatted: bool = True,

@ -77,7 +77,7 @@ def get_virtual_python(
return virtual_environment_python
def create_virtual_environment(args, # type: EnvironmentConfig
def create_virtual_environment(args: EnvironmentConfig,
python: PythonConfig,
path: str,
system_site_packages: bool = False,
@ -179,7 +179,7 @@ def get_python_real_prefix(python_path: str) -> t.Optional[str]:
return real_prefix
def run_venv(args, # type: EnvironmentConfig
def run_venv(args: EnvironmentConfig,
run_python: str,
system_site_packages: bool,
pip: bool,
@ -209,7 +209,7 @@ def run_venv(args, # type: EnvironmentConfig
return True
def run_virtualenv(args, # type: EnvironmentConfig
def run_virtualenv(args: EnvironmentConfig,
run_python: str,
env_python: str,
system_site_packages: bool,
