ansible-test - Update git diff handling (#80202)

This change allows ansible-test to work with newer versions of git on AZP.
Matt Clay 1 year ago committed by GitHub
parent 1c156c0d16
commit bad8843124
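At a high level, this commit replaces shipping the whole ``.git`` directory (the old ``--keep-git`` flow) with a snapshot taken on the origin: ``git archive`` packs the base commit's plugin trees into a plain tar, the tar travels with the test payload, and the delegated run extracts it for comparison. A minimal standalone sketch of that round-trip, using placeholder paths and ``origin/devel`` as a stand-in base commit:

    # Sketch only; real values come from the CI provider and ansible-test's
    # temp handling. 'origin/devel' and the temp paths are placeholders.
    import os
    import subprocess
    import tarfile
    import tempfile

    archive = os.path.join(tempfile.mkdtemp(), 'original-plugins.tar')

    # Origin side: snapshot the base commit's plugin content, so no .git
    # directory is needed later.
    subprocess.run(
        ['git', 'archive', '--output', archive, 'origin/devel',
         'lib/ansible/modules/', 'lib/ansible/plugins/'],
        check=True,
    )

    # Delegated side: unpack the snapshot for validate-modules to compare against.
    original_plugins = tempfile.mkdtemp()
    with tarfile.open(archive) as file:
        file.extractall(original_plugins)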

@@ -7,12 +7,6 @@ IFS='/:' read -ra args <<< "$1"
group="${args[1]}"
if [ "${BASE_BRANCH:-}" ]; then
base_branch="origin/${BASE_BRANCH}"
else
base_branch=""
fi
case "${group}" in
1) options=(--skip-test pylint --skip-test ansible-doc --skip-test docs-build --skip-test package-data --skip-test changelog --skip-test validate-modules) ;;
2) options=( --test ansible-doc --test docs-build --test package-data --test changelog) ;;
@@ -23,5 +17,5 @@ esac
# shellcheck disable=SC2086
ansible-test sanity --color -v --junit ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
--docker --keep-git --base-branch "${base_branch}" \
--docker \
"${options[@]}" --allow-disabled

@@ -0,0 +1,4 @@
minor_changes:
- ansible-test - Updated the Azure Pipelines CI plugin to work with newer versions of git.
- ansible-test - Moved git handling out of the validate-modules sanity test and into ansible-test.
- ansible-test - Removed the ``--keep-git`` sanity test option, which was limited to testing ansible-core itself.

@@ -22,10 +22,6 @@ How to run
To run sanity tests using docker, always use the default docker image
by passing the ``--docker`` or ``--docker default`` argument.
.. note::
When using docker and the ``--base-branch`` argument,
also use the ``--keep-git`` argument to avoid git related errors.
.. code:: shell
source hacking/env-setup

@@ -62,8 +62,8 @@ class CIProvider(metaclass=abc.ABCMeta):
"""Return a resource prefix specific to this CI provider."""
@abc.abstractmethod
def get_base_branch(self) -> str:
"""Return the base branch or an empty string."""
def get_base_commit(self, args: CommonConfig) -> str:
"""Return the base commit or an empty string."""
@abc.abstractmethod
def detect_changes(self, args: TestConfig) -> t.Optional[list[str]]:

@@ -44,6 +44,8 @@ class AzurePipelines(CIProvider):
def __init__(self) -> None:
self.auth = AzurePipelinesAuthHelper()
self._changes: AzurePipelinesChanges | None = None
@staticmethod
def is_supported() -> bool:
"""Return True if this provider is supported in the current running environment."""
@@ -72,18 +74,20 @@ class AzurePipelines(CIProvider):
return prefix
def get_base_branch(self) -> str:
"""Return the base branch or an empty string."""
base_branch = os.environ.get('SYSTEM_PULLREQUEST_TARGETBRANCH') or os.environ.get('BUILD_SOURCEBRANCHNAME')
def get_base_commit(self, args: CommonConfig) -> str:
"""Return the base commit or an empty string."""
return self._get_changes(args).base_commit or ''
if base_branch:
base_branch = 'origin/%s' % base_branch
def _get_changes(self, args: CommonConfig) -> AzurePipelinesChanges:
"""Return an AzurePipelinesChanges instance, which will be created on first use."""
if not self._changes:
self._changes = AzurePipelinesChanges(args)
return base_branch or ''
return self._changes
def detect_changes(self, args: TestConfig) -> t.Optional[list[str]]:
"""Initialize change detection."""
result = AzurePipelinesChanges(args)
result = self._get_changes(args)
if result.is_pr:
job_type = 'pull request'
@@ -129,7 +133,7 @@ class AzurePipelines(CIProvider):
def get_git_details(self, args: CommonConfig) -> t.Optional[dict[str, t.Any]]:
"""Return details about git in the current environment."""
changes = AzurePipelinesChanges(args)
changes = self._get_changes(args)
details = dict(
base_commit=changes.base_commit,

@@ -63,8 +63,8 @@ class Local(CIProvider):
return prefix
def get_base_branch(self) -> str:
"""Return the base branch or an empty string."""
def get_base_commit(self, args: CommonConfig) -> str:
"""Return the base commit or an empty string."""
return ''
def detect_changes(self, args: TestConfig) -> t.Optional[list[str]]:

@@ -16,10 +16,6 @@ from ...target import (
walk_sanity_targets,
)
from ...data import (
data_context,
)
from ..environments import (
CompositeActionCompletionFinder,
ControllerMode,
@@ -82,17 +78,6 @@ def do_sanity(
help='enable optional errors',
)
if data_context().content.is_ansible:
sanity.add_argument(
'--keep-git',
action='store_true',
help='transfer git related files to the remote host/container',
)
else:
sanity.set_defaults(
keep_git=False,
)
sanity.add_argument(
'--lint',
action='store_true',

@@ -164,6 +164,10 @@ def command_sanity(args: SanityConfig) -> None:
if args.skip_test:
tests = [target for target in tests if target.name not in args.skip_test]
if not args.host_path:
for test in tests:
test.origin_hook(args)
targets_use_pypi = any(isinstance(test, SanityMultipleVersion) and test.needs_pypi for test in tests) and not args.list_tests
host_state = prepare_profiles(args, targets_use_pypi=targets_use_pypi) # sanity
@@ -769,6 +773,9 @@ class SanityTest(metaclass=abc.ABCMeta):
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return CONTROLLER_PYTHON_VERSIONS
def origin_hook(self, args: SanityConfig) -> None:
"""This method is called on the origin, before the test runs or delegation occurs."""
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]: # pylint: disable=unused-argument
"""Return the given list of test targets, filtered to include only those relevant for the test."""
if self.no_targets:
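The new ``origin_hook`` is deliberately a no-op on the base class: it runs once on the origin, before delegation, so only tests that need origin-side setup override it (``ValidateModulesTest`` below is the first such test). A hypothetical override, showing only the contract:

    class ExampleSanityTest(SanityTest):
        """Hypothetical test used only to illustrate the hook."""
        def origin_hook(self, args: SanityConfig) -> None:
            """Runs on the origin before delegation, e.g. to capture state
            the delegated host or container will not be able to see."""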

@@ -1,9 +1,12 @@
"""Sanity test using validate-modules."""
from __future__ import annotations
import atexit
import collections
import contextlib
import json
import os
import tarfile
import typing as t
from . import (
@@ -17,6 +20,10 @@ from . import (
SANITY_ROOT,
)
from ...io import (
make_dirs,
)
from ...test import (
TestResult,
)
@@ -31,7 +38,9 @@ from ...util import (
)
from ...util_common import (
process_scoped_temporary_directory,
run_command,
ResultType,
)
from ...ansible_util import (
@@ -50,12 +59,21 @@ from ...ci import (
from ...data import (
data_context,
PayloadConfig,
)
from ...host_configs import (
PythonConfig,
)
from ...git import (
Git,
)
from ...provider.source.git import (
Git as GitSourceProvider,
)
class ValidateModulesTest(SanitySingleVersion):
"""Sanity test using validate-modules."""
@@ -135,14 +153,17 @@ class ValidateModulesTest(SanitySingleVersion):
except CollectionDetailError as ex:
display.warning('Skipping validate-modules collection version checks since collection detail loading failed: %s' % ex.reason)
else:
base_branch = args.base_branch or get_ci_provider().get_base_branch()
path = self.get_archive_path(args)
if os.path.exists(path):
temp_dir = process_scoped_temporary_directory(args)
with tarfile.open(path) as file:
file.extractall(temp_dir)
if base_branch:
cmd.extend([
'--base-branch', base_branch,
'--original-plugins', temp_dir,
])
else:
display.warning('Cannot perform module comparison against the base branch because the base branch was not detected.')
errors = []
@@ -193,3 +214,43 @@ class ValidateModulesTest(SanitySingleVersion):
return SanityFailure(self.name, messages=all_errors)
return SanitySuccess(self.name)
def origin_hook(self, args: SanityConfig) -> None:
"""This method is called on the origin, before the test runs or delegation occurs."""
if not data_context().content.is_ansible:
return
if not isinstance(data_context().source_provider, GitSourceProvider):
display.warning('The validate-modules sanity test cannot compare against the base commit because git is not being used.')
return
base_commit = args.base_branch or get_ci_provider().get_base_commit(args)
if not base_commit:
display.warning('The validate-modules sanity test cannot compare against the base commit because it was not detected.')
return
path = self.get_archive_path(args)
def cleanup() -> None:
"""Cleanup callback called when the process exits."""
with contextlib.suppress(FileNotFoundError):
os.unlink(path)
def git_callback(payload_config: PayloadConfig) -> None:
"""Include the previous plugin content archive in the payload."""
files = payload_config.files
files.append((path, os.path.relpath(path, data_context().content.root)))
atexit.register(cleanup)
data_context().register_payload_callback(git_callback)
make_dirs(os.path.dirname(path))
git = Git()
git.run_git(['archive', '--output', path, base_commit, 'lib/ansible/modules/', 'lib/ansible/plugins/'])
@staticmethod
def get_archive_path(args: SanityConfig) -> str:
"""Return the path to the original plugin content archive."""
return os.path.join(ResultType.TMP.path, f'validate-modules-{args.metadata.session_id}.tar')
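Keying the archive name on the metadata session id is what lets the two halves of the test find the same file: the origin-side hook writes it, and after delegation the same path is recomputed from the deserialized metadata. A sketch with a made-up session id:

    import os

    # Stand-ins for ResultType.TMP.path and args.metadata.session_id.
    tmp_path = 'test/results/.tmp'
    session_id = 'example-name'

    archive = os.path.join(tmp_path, f'validate-modules-{session_id}.tar')
    # Both the origin and the delegated run derive the same path.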

@@ -269,23 +269,10 @@ class SanityConfig(TestConfig):
self.list_tests: bool = args.list_tests
self.allow_disabled: bool = args.allow_disabled
self.enable_optional_errors: bool = args.enable_optional_errors
self.keep_git: bool = args.keep_git
self.prime_venvs: bool = args.prime_venvs
self.display_stderr = self.lint or self.list_tests
if self.keep_git:
def git_callback(payload_config: PayloadConfig) -> None:
"""Add files from the content root .git directory to the payload file list."""
files = payload_config.files
for dirpath, _dirnames, filenames in os.walk(os.path.join(data_context().content.root, '.git')):
paths = [os.path.join(dirpath, filename) for filename in filenames]
files.extend((path, os.path.relpath(path, data_context().content.root)) for path in paths)
data_context().register_payload_callback(git_callback)
class IntegrationConfig(TestConfig):
"""Configuration for the integration command."""

@@ -75,13 +75,14 @@ class DataContext:
self.payload_callbacks: list[c.Callable[[PayloadConfig], None]] = []
if content_path:
content = self.__create_content_layout(layout_providers, source_providers, content_path, False)
content, source_provider = self.__create_content_layout(layout_providers, source_providers, content_path, False)
elif ANSIBLE_SOURCE_ROOT and is_subdir(current_path, ANSIBLE_SOURCE_ROOT):
content = self.__create_content_layout(layout_providers, source_providers, ANSIBLE_SOURCE_ROOT, False)
content, source_provider = self.__create_content_layout(layout_providers, source_providers, ANSIBLE_SOURCE_ROOT, False)
else:
content = self.__create_content_layout(layout_providers, source_providers, current_path, True)
content, source_provider = self.__create_content_layout(layout_providers, source_providers, current_path, True)
self.content: ContentLayout = content
self.source_provider = source_provider
def create_collection_layouts(self) -> list[ContentLayout]:
"""
@@ -109,7 +110,7 @@ class DataContext:
if collection_path == os.path.join(collection.root, collection.directory):
collection_layout = layout
else:
collection_layout = self.__create_content_layout(self.__layout_providers, self.__source_providers, collection_path, False)
collection_layout = self.__create_content_layout(self.__layout_providers, self.__source_providers, collection_path, False)[0]
file_count = len(collection_layout.all_files())
@@ -127,7 +128,7 @@ class DataContext:
source_providers: list[t.Type[SourceProvider]],
root: str,
walk: bool,
) -> ContentLayout:
) -> t.Tuple[ContentLayout, SourceProvider]:
"""Create a content layout using the given providers and root path."""
try:
layout_provider = find_path_provider(LayoutProvider, layout_providers, root, walk)
@@ -148,7 +149,7 @@ class DataContext:
layout = layout_provider.create(layout_provider.root, source_provider.get_paths(layout_provider.root))
return layout
return layout, source_provider
def __create_ansible_source(self):
"""Return a tuple of Ansible source files with both absolute and relative paths."""

@@ -346,7 +346,7 @@ def filter_options(
('--metadata', 1, args.metadata_path),
('--exclude', 1, exclude),
('--require', 1, require),
('--base-branch', 1, args.base_branch or get_ci_provider().get_base_branch()),
('--base-branch', 1, False),
])
pass_through_args: list[str] = []

@@ -4,6 +4,7 @@ import typing as t
from .util import (
display,
generate_name,
)
from .io import (
@@ -26,6 +27,7 @@ class Metadata:
self.cloud_config: t.Optional[dict[str, dict[str, t.Union[int, str, bool]]]] = None
self.change_description: t.Optional[ChangeDescription] = None
self.ci_provider: t.Optional[str] = None
self.session_id = generate_name()
def populate_changes(self, diff: t.Optional[list[str]]) -> None:
"""Populate the changeset using the given diff."""
@@ -53,6 +55,7 @@ class Metadata:
cloud_config=self.cloud_config,
ci_provider=self.ci_provider,
change_description=self.change_description.to_dict(),
session_id=self.session_id,
)
def to_file(self, path: str) -> None:
@@ -77,6 +80,7 @@ class Metadata:
metadata.cloud_config = data['cloud_config']
metadata.ci_provider = data['ci_provider']
metadata.change_description = ChangeDescription.from_dict(data['change_description'])
metadata.session_id = data['session_id']
return metadata
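Serializing ``session_id`` is what keeps it stable across delegation; a fresh ``Metadata()`` on the delegated side would otherwise generate a new random name and the archive lookup would miss. A sketch of the round-trip:

    origin = Metadata()                 # session_id generated here
    data = origin.to_dict()             # session_id rides along

    remote = Metadata.from_dict(data)   # delegated side restores it
    assert remote.session_id == origin.session_id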

@@ -24,9 +24,7 @@ import datetime
import json
import os
import re
import subprocess
import sys
import tempfile
import traceback
import warnings
@@ -299,8 +297,8 @@ class ModuleValidator(Validator):
# win_dsc is a dynamic arg spec, the docs won't ever match
PS_ARG_VALIDATE_REJECTLIST = frozenset(('win_dsc.ps1', ))
def __init__(self, path, analyze_arg_spec=False, collection=None, collection_version=None,
base_branch=None, git_cache=None, reporter=None, routing=None, plugin_type='module'):
def __init__(self, path, git_cache: GitCache, analyze_arg_spec=False, collection=None, collection_version=None,
reporter=None, routing=None, plugin_type='module'):
super(ModuleValidator, self).__init__(reporter=reporter or Reporter())
self.path = path
@@ -326,8 +324,8 @@ class ModuleValidator(Validator):
self.collection_version_str = collection_version
self.collection_version = SemanticVersion(collection_version)
self.base_branch = base_branch
self.git_cache = git_cache or GitCache()
self.git_cache = git_cache
self.base_module = self.git_cache.get_original_path(self.path)
self._python_module_override = False
@@ -339,11 +337,6 @@ class ModuleValidator(Validator):
except Exception:
self.ast = None
if base_branch:
self.base_module = self._get_base_file()
else:
self.base_module = None
def _create_version(self, v, collection_name=None):
if not v:
raise ValueError('Empty string is not a valid version')
@@ -366,13 +359,7 @@ class ModuleValidator(Validator):
return self
def __exit__(self, exc_type, exc_value, traceback):
if not self.base_module:
return
try:
os.remove(self.base_module)
except Exception:
pass
pass
@property
def object_name(self):
@@ -421,36 +408,9 @@ class ModuleValidator(Validator):
except AttributeError:
return False
def _get_base_branch_module_path(self):
"""List all paths within lib/ansible/modules to try and match a moved module"""
return self.git_cache.base_module_paths.get(self.object_name)
def _has_alias(self):
"""Return true if the module has any aliases."""
return self.object_name in self.git_cache.head_aliased_modules
def _get_base_file(self):
# In case of module moves, look for the original location
base_path = self._get_base_branch_module_path()
ext = os.path.splitext(base_path or self.path)[1]
command = ['git', 'show', '%s:%s' % (self.base_branch, base_path or self.path)]
p = subprocess.run(command, stdin=subprocess.DEVNULL, capture_output=True, check=False)
if int(p.returncode) != 0:
return None
t = tempfile.NamedTemporaryFile(delete=False, suffix=ext)
t.write(p.stdout)
t.close()
return t.name
def _is_new_module(self):
if self._has_alias():
return False
return not self.object_name.startswith('_') and bool(self.base_branch) and not bool(self.base_module)
def _is_new_module(self) -> bool | None:
"""Return True if the content is new, False if it is not and None if the information is not available."""
return self.git_cache.is_new(self.path)
def _check_interpreter(self, powershell=False):
if powershell:
@@ -2037,7 +1997,7 @@ class ModuleValidator(Validator):
)
def _check_for_new_args(self, doc):
if not self.base_branch or self._is_new_module():
if not self.base_module:
return
with CaptureStd():
@@ -2271,7 +2231,7 @@ class ModuleValidator(Validator):
# We can only validate PowerShell arg spec if it is using the new Ansible.Basic.AnsibleModule util
pattern = r'(?im)^#\s*ansiblerequires\s+\-csharputil\s*Ansible\.Basic'
if re.search(pattern, self.text) and self.object_name not in self.PS_ARG_VALIDATE_REJECTLIST:
with ModuleValidator(docs_path, base_branch=self.base_branch, git_cache=self.git_cache) as docs_mv:
with ModuleValidator(docs_path, git_cache=self.git_cache) as docs_mv:
docs = docs_mv._validate_docs()[1]
self._validate_ansible_module_call(docs)
@@ -2316,6 +2276,84 @@ class PythonPackageValidator(Validator):
)
class GitCache(metaclass=abc.ABCMeta):
"""Base class for access to original files."""
@abc.abstractmethod
def get_original_path(self, path: str) -> str | None:
"""Return the path to the original version of the specified file, or None if there isn't one."""
@abc.abstractmethod
def is_new(self, path: str) -> bool | None:
"""Return True if the content is new, False if it is not and None if the information is not available."""
@staticmethod
def create(original_plugins: str | None, plugin_type: str) -> GitCache:
return CoreGitCache(original_plugins, plugin_type) if original_plugins else NoOpGitCache()
class CoreGitCache(GitCache):
"""Provides access to original files when testing core."""
def __init__(self, original_plugins: str | None, plugin_type: str) -> None:
super().__init__()
self.original_plugins = original_plugins
rel_path = 'lib/ansible/modules/' if plugin_type == 'module' else f'lib/ansible/plugins/{plugin_type}/'
head_tree = self._find_files(rel_path)
head_aliased_modules = set()
for path in head_tree:
filename = os.path.basename(path)
if filename.startswith('_') and filename != '__init__.py':
if os.path.islink(path):
head_aliased_modules.add(os.path.basename(os.path.realpath(path)))
self._head_aliased_modules = head_aliased_modules
def get_original_path(self, path: str) -> str | None:
"""Return the path to the original version of the specified file, or None if there isn't one."""
path = os.path.join(self.original_plugins, path)
if not os.path.exists(path):
path = None
return path
def is_new(self, path: str) -> bool | None:
"""Return True if the content is new, False if it is not and None if the information is not available."""
if os.path.basename(path).startswith('_'):
return False
if os.path.basename(path) in self._head_aliased_modules:
return False
return not self.get_original_path(path)
@staticmethod
def _find_files(path: str) -> list[str]:
"""Return a list of files found in the specified directory."""
paths = []
for (dir_path, dir_names, file_names) in os.walk(path):
for file_name in file_names:
paths.append(os.path.join(dir_path, file_name))
return sorted(paths)
class NoOpGitCache(GitCache):
"""Provides a no-op interface for access to original files."""
def get_original_path(self, path: str) -> str | None:
"""Return the path to the original version of the specified file, or None if there isn't one."""
return None
def is_new(self, path: str) -> bool | None:
"""Return True if the content is new, False if it is not and None if the information is not available."""
return None
def re_compile(value):
"""
Argparse expects things to raise TypeError, re.compile raises an re.error
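The ``GitCache.create`` factory picks the implementation from a single argument, and the tri-state ``is_new`` lets callers distinguish "definitely new" from "no baseline to judge against". A usage sketch with stand-in paths:

    # With an extracted snapshot, answers are definitive.
    core_cache = GitCache.create('/tmp/original-plugins', 'module')
    core_cache.is_new('lib/ansible/modules/ping.py')    # True or False

    # Without one (e.g. no archive was produced), everything is unknown.
    no_op_cache = GitCache.create(None, 'module')
    no_op_cache.is_new('lib/ansible/modules/ping.py')   # None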
@@ -2341,8 +2379,6 @@ def run():
type=re_compile)
parser.add_argument('--arg-spec', help='Analyze module argument spec',
action='store_true', default=False)
parser.add_argument('--base-branch', default=None,
help='Used in determining if new options were added')
parser.add_argument('--format', choices=['json', 'plain'], default='plain',
help='Output format. Default: "%(default)s"')
parser.add_argument('--output', default='-',
@@ -2359,13 +2395,14 @@ def run():
parser.add_argument('--plugin-type',
default='module',
help='The plugin type to validate. Defaults to %(default)s')
parser.add_argument('--original-plugins')
args = parser.parse_args()
args.plugins = [m.rstrip('/') for m in args.plugins]
reporter = Reporter()
git_cache = GitCache(args.base_branch, args.plugin_type)
git_cache = GitCache.create(args.original_plugins, args.plugin_type)
check_dirs = set()
@@ -2390,7 +2427,7 @@ def run():
if ModuleValidator.is_on_rejectlist(path):
continue
with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version,
analyze_arg_spec=args.arg_spec, base_branch=args.base_branch,
analyze_arg_spec=args.arg_spec,
git_cache=git_cache, reporter=reporter, routing=routing,
plugin_type=args.plugin_type) as mv1:
mv1.validate()
@@ -2415,7 +2452,7 @@ def run():
if ModuleValidator.is_on_rejectlist(path):
continue
with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version,
analyze_arg_spec=args.arg_spec, base_branch=args.base_branch,
analyze_arg_spec=args.arg_spec,
git_cache=git_cache, reporter=reporter, routing=routing,
plugin_type=args.plugin_type) as mv2:
mv2.validate()
@@ -2431,75 +2468,6 @@ def run():
sys.exit(reporter.json(warnings=args.warnings, output=args.output))
class GitCache:
def __init__(self, base_branch, plugin_type):
self.base_branch = base_branch
self.plugin_type = plugin_type
self.rel_path = 'lib/ansible/modules/'
if plugin_type != 'module':
self.rel_path = 'lib/ansible/plugins/%s/' % plugin_type
if self.base_branch:
self.base_tree = self._git(['ls-tree', '-r', '--name-only', self.base_branch, self.rel_path])
else:
self.base_tree = []
try:
self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', self.rel_path])
except GitError as ex:
if ex.status == 128:
# fallback when there is no .git directory
self.head_tree = self._get_module_files()
else:
raise
except FileNotFoundError:
# fallback when git is not installed
self.head_tree = self._get_module_files()
allowed_exts = ('.py', '.ps1')
if plugin_type != 'module':
allowed_exts = ('.py', )
self.base_module_paths = dict((os.path.basename(p), p) for p in self.base_tree if os.path.splitext(p)[1] in allowed_exts)
self.base_module_paths.pop('__init__.py', None)
self.head_aliased_modules = set()
for path in self.head_tree:
filename = os.path.basename(path)
if filename.startswith('_') and filename != '__init__.py':
if os.path.islink(path):
self.head_aliased_modules.add(os.path.basename(os.path.realpath(path)))
def _get_module_files(self):
module_files = []
for (dir_path, dir_names, file_names) in os.walk(self.rel_path):
for file_name in file_names:
module_files.append(os.path.join(dir_path, file_name))
return module_files
@staticmethod
def _git(args):
cmd = ['git'] + args
p = subprocess.run(cmd, stdin=subprocess.DEVNULL, capture_output=True, text=True, check=False)
if p.returncode != 0:
raise GitError(p.stderr, p.returncode)
return p.stdout.splitlines()
class GitError(Exception):
def __init__(self, message, status):
super(GitError, self).__init__(message)
self.status = status
def main():
try:
run()
