Update ansible-test support for CI providers.

Refactored the CI provider code to simplify support for multiple providers and make it easier to add new ones.
pull/69525/head
Matt Clay 4 years ago
parent ca6da5ad63
commit d8e0aadc0d
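The refactor makes adding a CI provider a matter of dropping a module into the new `ci/` package that subclasses `CIProvider`; `import_plugins('ci')` then discovers it automatically. Below is a minimal illustrative sketch of such a plugin, not part of this commit: the provider name, module path, and environment variables are all hypothetical.

# Hypothetical plugin module, e.g. test/lib/ansible_test/_internal/ci/exampleci.py (illustrative only).
"""Support code for working with a hypothetical ExampleCI service."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os

from .. import types as t

from ..config import (
    CommonConfig,
    TestConfig,
)

from . import (
    AuthContext,
    CIProvider,
)

CODE = 'exampleci'  # hypothetical provider code


class ExampleCI(CIProvider):
    """CI provider implementation for a hypothetical ExampleCI service."""
    @staticmethod
    def is_supported():  # type: () -> bool
        """Return True if this provider is supported in the current running environment."""
        return os.environ.get('EXAMPLE_CI') == 'true'  # hypothetical detection variable

    @property
    def code(self):  # type: () -> str
        """Return a unique code representing this provider."""
        return CODE

    @property
    def name(self):  # type: () -> str
        """Return a descriptive name for this provider."""
        return 'ExampleCI'

    def generate_resource_prefix(self):  # type: () -> str
        """Return a resource prefix specific to this CI provider."""
        return 'exampleci-%s' % os.environ.get('EXAMPLE_CI_BUILD_NUMBER', '0')  # hypothetical variable

    def get_base_branch(self):  # type: () -> str
        """Return the base branch or an empty string."""
        return os.environ.get('EXAMPLE_CI_BASE_BRANCH', '')  # hypothetical variable

    def detect_changes(self, args):  # type: (TestConfig) -> t.Optional[t.List[str]]
        """Initialize change detection."""
        return None  # act as though change detection is not enabled

    def supports_core_ci_auth(self, context):  # type: (AuthContext) -> bool
        """Return True if Ansible Core CI is supported."""
        return False

    def prepare_core_ci_auth(self, context):  # type: (AuthContext) -> t.Dict[str, t.Any]
        """Return authentication details for Ansible Core CI."""
        raise NotImplementedError()

    def get_git_details(self, args):  # type: (CommonConfig) -> t.Optional[t.Dict[str, t.Any]]
        """Return details about git in the current environment."""
        return None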

@ -0,0 +1,2 @@
minor_changes:
- ansible-test - Refactored CI-related logic into a basic provider abstraction.

@ -1,177 +0,0 @@
"""Detect changes in Ansible code."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import os
from .util import (
ApplicationError,
SubprocessError,
MissingEnvironmentVariable,
display,
)
from .util_common import (
CommonConfig,
)
from .http import (
HttpClient,
urlencode,
)
from .git import (
Git,
)
class InvalidBranch(ApplicationError):
"""Exception for invalid branch specification."""
def __init__(self, branch, reason):
"""
:type branch: str
:type reason: str
"""
message = 'Invalid branch: %s\n%s' % (branch, reason)
super(InvalidBranch, self).__init__(message)
self.branch = branch
class ChangeDetectionNotSupported(ApplicationError):
"""Exception for cases where change detection is not supported."""
class ShippableChanges:
"""Change information for Shippable build."""
def __init__(self, args, git):
"""
:type args: CommonConfig
:type git: Git
"""
self.args = args
try:
self.branch = os.environ['BRANCH']
self.is_pr = os.environ['IS_PULL_REQUEST'] == 'true'
self.is_tag = os.environ['IS_GIT_TAG'] == 'true'
self.commit = os.environ['COMMIT']
self.project_id = os.environ['PROJECT_ID']
self.commit_range = os.environ['SHIPPABLE_COMMIT_RANGE']
except KeyError as ex:
raise MissingEnvironmentVariable(name=ex.args[0])
if self.is_tag:
raise ChangeDetectionNotSupported('Change detection is not supported for tags.')
if self.is_pr:
self.paths = sorted(git.get_diff_names([self.commit_range]))
self.diff = git.get_diff([self.commit_range])
else:
merge_runs = self.get_merge_runs(self.project_id, self.branch)
last_successful_commit = self.get_last_successful_commit(git, merge_runs)
if last_successful_commit:
self.paths = sorted(git.get_diff_names([last_successful_commit, self.commit]))
self.diff = git.get_diff([last_successful_commit, self.commit])
else:
# first run for branch
self.paths = None # act as though change detection not enabled, do not filter targets
self.diff = []
def get_merge_runs(self, project_id, branch):
"""
:type project_id: str
:type branch: str
:rtype: list[dict]
"""
params = dict(
isPullRequest='false',
projectIds=project_id,
branch=branch,
)
client = HttpClient(self.args, always=True)
response = client.get('https://api.shippable.com/runs?%s' % urlencode(params))
return response.json()
@staticmethod
def get_last_successful_commit(git, merge_runs):
"""
:type git: Git
:type merge_runs: dict | list[dict]
:rtype: str
"""
if 'id' in merge_runs and merge_runs['id'] == 4004:
display.warning('Unable to find project. Cannot determine changes. All tests will be executed.')
return None
successful_commits = set(run['commitSha'] for run in merge_runs if run['statusCode'] == 30)
commit_history = git.get_rev_list(max_count=100)
ordered_successful_commits = [commit for commit in commit_history if commit in successful_commits]
last_successful_commit = ordered_successful_commits[0] if ordered_successful_commits else None
if last_successful_commit is None:
display.warning('No successful commit found. All tests will be executed.')
return last_successful_commit
class LocalChanges:
"""Change information for local work."""
def __init__(self, args, git):
"""
:type args: CommonConfig
:type git: Git
"""
self.args = args
self.current_branch = git.get_branch()
if self.is_official_branch(self.current_branch):
raise InvalidBranch(branch=self.current_branch,
reason='Current branch is not a feature branch.')
self.fork_branch = None
self.fork_point = None
self.local_branches = sorted(git.get_branches())
self.official_branches = sorted([b for b in self.local_branches if self.is_official_branch(b)])
for self.fork_branch in self.official_branches:
try:
self.fork_point = git.get_branch_fork_point(self.fork_branch)
break
except SubprocessError:
pass
if self.fork_point is None:
raise ApplicationError('Unable to auto-detect fork branch and fork point.')
# tracked files (including unchanged)
self.tracked = sorted(git.get_file_names(['--cached']))
# untracked files (except ignored)
self.untracked = sorted(git.get_file_names(['--others', '--exclude-standard']))
# tracked changes (including deletions) committed since the branch was forked
self.committed = sorted(git.get_diff_names([self.fork_point, 'HEAD']))
# tracked changes (including deletions) which are staged
self.staged = sorted(git.get_diff_names(['--cached']))
# tracked changes (including deletions) which are not staged
self.unstaged = sorted(git.get_diff_names([]))
# diff of all tracked files from fork point to working copy
self.diff = git.get_diff([self.fork_point])
@staticmethod
def is_official_branch(name):
"""
:type name: str
:rtype: bool
"""
if name == 'devel':
return True
if re.match(r'^stable-[0-9]+\.[0-9]+$', name):
return True
return False

@ -0,0 +1,101 @@
"""Support code for CI environments."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import abc
from .. import types as t
from ..config import (
CommonConfig,
TestConfig,
)
from ..util import (
ABC,
ApplicationError,
display,
get_subclasses,
import_plugins,
)
class ChangeDetectionNotSupported(ApplicationError):
"""Exception for cases where change detection is not supported."""
class AuthContext:
"""Context information required for Ansible Core CI authentication."""
def __init__(self): # type: () -> None
self.region = None # type: t.Optional[str]
class CIProvider(ABC):
"""Base class for CI provider plugins."""
priority = 500
@staticmethod
@abc.abstractmethod
def is_supported(): # type: () -> bool
"""Return True if this provider is supported in the current running environment."""
@property
@abc.abstractmethod
def code(self): # type: () -> str
"""Return a unique code representing this provider."""
@property
@abc.abstractmethod
def name(self): # type: () -> str
"""Return descriptive name for this provider."""
@abc.abstractmethod
def generate_resource_prefix(self): # type: () -> str
"""Return a resource prefix specific to this CI provider."""
@abc.abstractmethod
def get_base_branch(self): # type: () -> str
"""Return the base branch or an empty string."""
@abc.abstractmethod
def detect_changes(self, args): # type: (TestConfig) -> t.Optional[t.List[str]]
"""Initialize change detection."""
@abc.abstractmethod
def supports_core_ci_auth(self, context): # type: (AuthContext) -> bool
"""Return True if Ansible Core CI is supported."""
@abc.abstractmethod
def prepare_core_ci_auth(self, context): # type: (AuthContext) -> t.Dict[str, t.Any]
"""Return authentication details for Ansible Core CI."""
@abc.abstractmethod
def get_git_details(self, args): # type: (CommonConfig) -> t.Optional[t.Dict[str, t.Any]]
"""Return details about git in the current environment."""
def get_ci_provider(): # type: () -> CIProvider
"""Return a CI provider instance for the current environment."""
try:
return get_ci_provider.provider
except AttributeError:
pass
provider = None
import_plugins('ci')
candidates = sorted(get_subclasses(CIProvider), key=lambda c: (c.priority, c.__name__))
for candidate in candidates:
if candidate.is_supported():
provider = candidate()
break
if provider.code:
display.info('Detected CI provider: %s' % provider.name)
get_ci_provider.provider = provider
return provider
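Provider selection walks the discovered subclasses in (priority, class name) order and instantiates the first one whose is_supported() returns True; Local always reports itself as supported and carries the highest priority number (1000), so it acts as the fallback when no real CI is detected, and the "Detected CI provider" message is only printed when the provider code is non-empty. A short sketch of the expected behavior follows; the import path is an assumption for this example and would normally be exercised from within ansible-test itself:

# Illustrative only: expected provider selection under this abstraction.
import os

from ansible_test._internal.ci import get_ci_provider  # assumed import path for this sketch

provider = get_ci_provider()  # result is cached on the function for subsequent calls

if os.environ.get('SHIPPABLE') == 'true':
    assert provider.code == 'shippable'  # Shippable (priority 500) is checked before Local
else:
    assert provider.code == ''  # Local (priority 1000) is the fallback; its code is the empty string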

@ -0,0 +1,215 @@
"""Support code for working without a supported CI provider."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import platform
import random
import re
from .. import types as t
from ..config import (
CommonConfig,
TestConfig,
)
from ..io import (
read_text_file,
)
from ..git import (
Git,
)
from ..util import (
ApplicationError,
display,
is_binary_file,
SubprocessError,
)
from . import (
AuthContext,
CIProvider,
)
CODE = '' # not really a CI provider, so use an empty string for the code
class Local(CIProvider):
"""CI provider implementation when not using CI."""
priority = 1000
@staticmethod
def is_supported(): # type: () -> bool
"""Return True if this provider is supported in the current running environment."""
return True
@property
def code(self): # type: () -> str
"""Return a unique code representing this provider."""
return CODE
@property
def name(self): # type: () -> str
"""Return descriptive name for this provider."""
return 'Local'
def generate_resource_prefix(self): # type: () -> str
"""Return a resource prefix specific to this CI provider."""
node = re.sub(r'[^a-zA-Z0-9]+', '-', platform.node().split('.')[0]).lower()
prefix = 'ansible-test-%s-%d' % (node, random.randint(10000000, 99999999))
return prefix
def get_base_branch(self): # type: () -> str
"""Return the base branch or an empty string."""
return ''
def detect_changes(self, args): # type: (TestConfig) -> t.Optional[t.List[str]]
"""Initialize change detection."""
result = LocalChanges(args)
display.info('Detected branch %s forked from %s at commit %s' % (
result.current_branch, result.fork_branch, result.fork_point))
if result.untracked and not args.untracked:
display.warning('Ignored %s untracked file(s). Use --untracked to include them.' %
len(result.untracked))
if result.committed and not args.committed:
display.warning('Ignored %s committed change(s). Omit --ignore-committed to include them.' %
len(result.committed))
if result.staged and not args.staged:
display.warning('Ignored %s staged change(s). Omit --ignore-staged to include them.' %
len(result.staged))
if result.unstaged and not args.unstaged:
display.warning('Ignored %s unstaged change(s). Omit --ignore-unstaged to include them.' %
len(result.unstaged))
names = set()
if args.tracked:
names |= set(result.tracked)
if args.untracked:
names |= set(result.untracked)
if args.committed:
names |= set(result.committed)
if args.staged:
names |= set(result.staged)
if args.unstaged:
names |= set(result.unstaged)
if not args.metadata.changes:
args.metadata.populate_changes(result.diff)
for path in result.untracked:
if is_binary_file(path):
args.metadata.changes[path] = ((0, 0),)
continue
line_count = len(read_text_file(path).splitlines())
args.metadata.changes[path] = ((1, line_count),)
return sorted(names)
def supports_core_ci_auth(self, context): # type: (AuthContext) -> bool
"""Return True if Ansible Core CI is supported."""
path = self._get_aci_key_path(context)
return os.path.exists(path)
def prepare_core_ci_auth(self, context): # type: (AuthContext) -> t.Dict[str, t.Any]
"""Return authentication details for Ansible Core CI."""
path = self._get_aci_key_path(context)
auth_key = read_text_file(path).strip()
request = dict(
key=auth_key,
nonce=None,
)
auth = dict(
remote=request,
)
return auth
def get_git_details(self, args): # type: (CommonConfig) -> t.Optional[t.Dict[str, t.Any]]
"""Return details about git in the current environment."""
return None # not yet implemented for local
def _get_aci_key_path(self, context): # type: (AuthContext) -> str
path = os.path.expanduser('~/.ansible-core-ci.key')
if context.region:
path += '.%s' % context.region
return path
class InvalidBranch(ApplicationError):
"""Exception for invalid branch specification."""
def __init__(self, branch, reason): # type: (str, str) -> None
message = 'Invalid branch: %s\n%s' % (branch, reason)
super(InvalidBranch, self).__init__(message)
self.branch = branch
class LocalChanges:
"""Change information for local work."""
def __init__(self, args): # type: (CommonConfig) -> None
self.args = args
self.git = Git()
self.current_branch = self.git.get_branch()
if self.is_official_branch(self.current_branch):
raise InvalidBranch(branch=self.current_branch,
reason='Current branch is not a feature branch.')
self.fork_branch = None
self.fork_point = None
self.local_branches = sorted(self.git.get_branches())
self.official_branches = sorted([b for b in self.local_branches if self.is_official_branch(b)])
for self.fork_branch in self.official_branches:
try:
self.fork_point = self.git.get_branch_fork_point(self.fork_branch)
break
except SubprocessError:
pass
if self.fork_point is None:
raise ApplicationError('Unable to auto-detect fork branch and fork point.')
# tracked files (including unchanged)
self.tracked = sorted(self.git.get_file_names(['--cached']))
# untracked files (except ignored)
self.untracked = sorted(self.git.get_file_names(['--others', '--exclude-standard']))
# tracked changes (including deletions) committed since the branch was forked
self.committed = sorted(self.git.get_diff_names([self.fork_point, 'HEAD']))
# tracked changes (including deletions) which are staged
self.staged = sorted(self.git.get_diff_names(['--cached']))
# tracked changes (including deletions) which are not staged
self.unstaged = sorted(self.git.get_diff_names([]))
# diff of all tracked files from fork point to working copy
self.diff = self.git.get_diff([self.fork_point])
@staticmethod
def is_official_branch(name): # type: (str) -> bool
"""Return True if the given branch name an official branch for development or releases."""
if name == 'devel':
return True
if re.match(r'^stable-[0-9]+\.[0-9]+$', name):
return True
return False
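One detail worth calling out in the Local provider: Core CI support is simply the presence of a key file, and the path gains a region suffix when AuthContext.region is set, mirroring the `self.ci_key += '.%s' % args.remote_aws_region` logic removed from core_ci.py later in this commit. A minimal sketch of that resolution, with an example region value:

# Sketch of the path resolution performed by Local._get_aci_key_path(); the region value is illustrative.
import os

def aci_key_path(region=None):
    """Return the Ansible Core CI key path, optionally suffixed with a region."""
    path = os.path.expanduser('~/.ansible-core-ci.key')
    if region:
        path += '.%s' % region
    return path

print(aci_key_path())             # ~/.ansible-core-ci.key
print(aci_key_path('us-east-1'))  # ~/.ansible-core-ci.key.us-east-1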

@ -0,0 +1,249 @@
"""Support code for working with Shippable."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
from .. import types as t
from ..config import (
CommonConfig,
TestConfig,
)
from ..git import (
Git,
)
from ..http import (
HttpClient,
urlencode,
)
from ..util import (
ApplicationError,
display,
MissingEnvironmentVariable,
SubprocessError,
)
from . import (
AuthContext,
ChangeDetectionNotSupported,
CIProvider,
)
CODE = 'shippable'
class Shippable(CIProvider):
"""CI provider implementation for Shippable."""
@staticmethod
def is_supported(): # type: () -> bool
"""Return True if this provider is supported in the current running environment."""
return os.environ.get('SHIPPABLE') == 'true'
@property
def code(self): # type: () -> str
"""Return a unique code representing this provider."""
return CODE
@property
def name(self): # type: () -> str
"""Return descriptive name for this provider."""
return 'Shippable'
def generate_resource_prefix(self): # type: () -> str
"""Return a resource prefix specific to this CI provider."""
try:
prefix = 'shippable-%s-%s' % (
os.environ['SHIPPABLE_BUILD_NUMBER'],
os.environ['SHIPPABLE_JOB_NUMBER'],
)
except KeyError as ex:
raise MissingEnvironmentVariable(name=ex.args[0])
return prefix
def get_base_branch(self): # type: () -> str
"""Return the base branch or an empty string."""
base_branch = os.environ.get('BASE_BRANCH')
if base_branch:
base_branch = 'origin/%s' % base_branch
return base_branch or ''
def detect_changes(self, args): # type: (TestConfig) -> t.Optional[t.List[str]]
"""Initialize change detection."""
result = ShippableChanges(args)
if result.is_pr:
job_type = 'pull request'
elif result.is_tag:
job_type = 'tag'
else:
job_type = 'merge commit'
display.info('Processing %s for branch %s commit %s' % (job_type, result.branch, result.commit))
if not args.metadata.changes:
args.metadata.populate_changes(result.diff)
if result.paths is None:
# There are several likely causes of this:
# - First run on a new branch.
# - Too many pull requests passed since the last merge run passed.
display.warning('No successful commit found. All tests will be executed.')
return result.paths
def supports_core_ci_auth(self, context): # type: (AuthContext) -> bool
"""Return True if Ansible Core CI is supported."""
return True
def prepare_core_ci_auth(self, context): # type: (AuthContext) -> t.Dict[str, t.Any]
"""Return authentication details for Ansible Core CI."""
try:
request = dict(
run_id=os.environ['SHIPPABLE_BUILD_ID'],
job_number=int(os.environ['SHIPPABLE_JOB_NUMBER']),
)
except KeyError as ex:
raise MissingEnvironmentVariable(name=ex.args[0])
auth = dict(
shippable=request,
)
return auth
def get_git_details(self, args): # type: (CommonConfig) -> t.Optional[t.Dict[str, t.Any]]
"""Return details about git in the current environment."""
commit = os.environ.get('COMMIT')
base_commit = os.environ.get('BASE_COMMIT')
details = dict(
base_commit=base_commit,
commit=commit,
merged_commit=self._get_merged_commit(args, commit),
)
return details
# noinspection PyUnusedLocal
def _get_merged_commit(self, args, commit): # type: (CommonConfig, str) -> t.Optional[str] # pylint: disable=unused-argument
"""Find the merged commit that should be present."""
if not commit:
return None
git = Git()
try:
show_commit = git.run_git(['show', '--no-patch', '--no-abbrev', commit])
except SubprocessError as ex:
# This should only fail for pull requests where the commit does not exist.
# Merge runs would fail much earlier when attempting to checkout the commit.
raise ApplicationError('Commit %s was not found:\n\n%s\n\n'
'GitHub may not have fully replicated the commit across their infrastructure.\n'
'It is also possible the commit was removed by a force push between job creation and execution.\n'
'Find the latest run for the pull request and restart failed jobs as needed.'
% (commit, ex.stderr.strip()))
head_commit = git.run_git(['show', '--no-patch', '--no-abbrev', 'HEAD'])
if show_commit == head_commit:
# Commit is HEAD, so this is not a pull request or the base branch for the pull request is up-to-date.
return None
match_merge = re.search(r'^Merge: (?P<parents>[0-9a-f]{40} [0-9a-f]{40})$', head_commit, flags=re.MULTILINE)
if not match_merge:
# The most likely scenarios resulting in a failure here are:
# A new run should or does supersede this job, but it wasn't cancelled in time.
# A job was superseded and then later restarted.
raise ApplicationError('HEAD is not commit %s or a merge commit:\n\n%s\n\n'
'This job has likely been superseded by another run due to additional commits being pushed.\n'
'Find the latest run for the pull request and restart failed jobs as needed.'
% (commit, head_commit.strip()))
parents = set(match_merge.group('parents').split(' '))
if len(parents) != 2:
raise ApplicationError('HEAD is a %d-way octopus merge.' % len(parents))
if commit not in parents:
raise ApplicationError('Commit %s is not a parent of HEAD.' % commit)
parents.remove(commit)
last_commit = parents.pop()
return last_commit
class ShippableChanges:
"""Change information for Shippable build."""
def __init__(self, args): # type: (CommonConfig) -> None
self.args = args
self.git = Git()
try:
self.branch = os.environ['BRANCH']
self.is_pr = os.environ['IS_PULL_REQUEST'] == 'true'
self.is_tag = os.environ['IS_GIT_TAG'] == 'true'
self.commit = os.environ['COMMIT']
self.project_id = os.environ['PROJECT_ID']
self.commit_range = os.environ['SHIPPABLE_COMMIT_RANGE']
except KeyError as ex:
raise MissingEnvironmentVariable(name=ex.args[0])
if self.is_tag:
raise ChangeDetectionNotSupported('Change detection is not supported for tags.')
if self.is_pr:
self.paths = sorted(self.git.get_diff_names([self.commit_range]))
self.diff = self.git.get_diff([self.commit_range])
else:
commits = self.get_successful_merge_run_commits(self.project_id, self.branch)
last_successful_commit = self.get_last_successful_commit(commits)
if last_successful_commit:
self.paths = sorted(self.git.get_diff_names([last_successful_commit, self.commit]))
self.diff = self.git.get_diff([last_successful_commit, self.commit])
else:
# first run for branch
self.paths = None # act as though change detection not enabled, do not filter targets
self.diff = []
def get_successful_merge_run_commits(self, project_id, branch): # type: (str, str) -> t.Set[str]
"""Return a set of recent successsful merge commits from Shippable for the given project and branch."""
parameters = dict(
isPullRequest='false',
projectIds=project_id,
branch=branch,
)
url = 'https://api.shippable.com/runs?%s' % urlencode(parameters)
http = HttpClient(self.args, always=True)
response = http.get(url)
result = response.json()
if 'id' in result and result['id'] == 4004:
# most likely due to a private project, which returns an HTTP 200 response with JSON
display.warning('Unable to find project. Cannot determine changes. All tests will be executed.')
return set()
commits = set(run['commitSha'] for run in result if run['statusCode'] == 30)
return commits
def get_last_successful_commit(self, successful_commits): # type: (t.Set[str]) -> t.Optional[str]
"""Return the last successful commit from git history that is found in the given commit list, or None."""
commit_history = self.git.get_rev_list(max_count=100)
ordered_successful_commits = [commit for commit in commit_history if commit in successful_commits]
last_successful_commit = ordered_successful_commits[0] if ordered_successful_commits else None
return last_successful_commit
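For reference, the two providers return differently keyed auth payloads, both of which AnsibleCoreCI now passes straight to `self._start(self.ci_provider.prepare_core_ci_auth(self.auth_context))` in the core_ci.py hunk below. An illustrative comparison with placeholder values:

# Illustrative only: the payload shapes produced by prepare_core_ci_auth() for each provider.
local_auth = dict(
    remote=dict(
        key='<contents of ~/.ansible-core-ci.key, or the region-suffixed variant>',
        nonce=None,
    ),
)

shippable_auth = dict(
    shippable=dict(
        run_id='<SHIPPABLE_BUILD_ID>',
        job_number=42,  # int(os.environ['SHIPPABLE_JOB_NUMBER']); placeholder value
    ),
)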

@ -7,8 +7,6 @@ import atexit
import datetime
import time
import os
import platform
import random
import re
import tempfile
@ -25,7 +23,6 @@ from ..io import (
from ..util import (
ApplicationError,
display,
is_shippable,
import_plugins,
load_plugins,
ABC,
@ -45,6 +42,10 @@ from ..config import (
IntegrationConfig,
)
from ..ci import (
get_ci_provider,
)
from ..data import (
data_context,
)
@ -289,6 +290,7 @@ class CloudProvider(CloudBase):
"""
super(CloudProvider, self).__init__(args)
self.ci_provider = get_ci_provider()
self.remove_config = False
self.config_static_name = 'cloud-config-%s%s' % (self.platform, config_extension)
self.config_static_path = os.path.join(data_context().content.integration_path, self.config_static_name)
@ -310,7 +312,7 @@ class CloudProvider(CloudBase):
def setup(self):
"""Setup the cloud resource before delegation and register a cleanup callback."""
self.resource_prefix = self._generate_resource_prefix()
self.resource_prefix = self.ci_provider.generate_resource_prefix()
atexit.register(self.cleanup)
@ -385,21 +387,6 @@ class CloudProvider(CloudBase):
return template
@staticmethod
def _generate_resource_prefix():
"""
:rtype: str
"""
if is_shippable():
return 'shippable-%s-%s' % (
os.environ['SHIPPABLE_BUILD_NUMBER'],
os.environ['SHIPPABLE_JOB_NUMBER'],
)
node = re.sub(r'[^a-zA-Z0-9]+', '-', platform.node().split('.')[0]).lower()
return 'ansible-test-%s-%d' % (node, random.randint(10000000, 99999999))
class CloudEnvironment(CloudBase):
"""Base class for cloud environment plugins. Updates integration test environment after delegation."""

@ -7,7 +7,6 @@ import os
from ..util import (
ApplicationError,
display,
is_shippable,
ConfigParser,
)
@ -34,10 +33,7 @@ class AwsCloudProvider(CloudProvider):
aci = self._create_ansible_core_ci()
if os.path.isfile(aci.ci_key):
return
if is_shippable():
if aci.available:
return
super(AwsCloudProvider, self).filter(targets, exclude)

@ -11,7 +11,6 @@ from ..io import (
from ..util import (
ApplicationError,
display,
is_shippable,
ConfigParser,
)
@ -55,15 +54,12 @@ class AzureCloudProvider(CloudProvider):
aci = self._create_ansible_core_ci()
if os.path.isfile(aci.ci_key):
if aci.available:
return
if os.path.isfile(self.SHERLOCK_CONFIG_PATH):
return
if is_shippable():
return
super(AzureCloudProvider, self).filter(targets, exclude)
def setup(self):

@ -18,7 +18,6 @@ from ..util import (
ApplicationError,
display,
SubprocessError,
is_shippable,
ConfigParser,
)
@ -107,7 +106,7 @@ class CsCloudProvider(CloudProvider):
def cleanup(self):
"""Clean up the cloud resource and any temporary configuration files after tests complete."""
if self.container_name:
if is_shippable():
if self.ci_provider.code:
docker_rm(self.args, self.container_name)
elif not self.args.explain:
display.notice('Remember to run `docker rm -f %s` when finished testing.' % self.container_name)

@ -6,7 +6,6 @@ import os
from ..util import (
display,
is_shippable,
ConfigParser,
)
@ -42,10 +41,7 @@ class HcloudCloudProvider(CloudProvider):
aci = self._create_ansible_core_ci()
if os.path.isfile(aci.ci_key):
return
if is_shippable():
if aci.available:
return
super(HcloudCloudProvider, self).filter(targets, exclude)

@ -8,7 +8,6 @@ import time
from ..util import (
display,
ApplicationError,
is_shippable,
SubprocessError,
ConfigParser,
)
@ -49,10 +48,7 @@ class TowerCloudProvider(CloudProvider):
aci = get_tower_aci(self.args)
if os.path.isfile(aci.ci_key):
return
if is_shippable():
if aci.available:
return
super(TowerCloudProvider, self).filter(targets, exclude)

@ -8,7 +8,6 @@ import sys
from . import types as t
from .util import (
is_shippable,
docker_qualify_image,
find_python,
generate_pip_command,
@ -242,17 +241,7 @@ class SanityConfig(TestConfig):
self.list_tests = args.list_tests # type: bool
self.allow_disabled = args.allow_disabled # type: bool
self.enable_optional_errors = args.enable_optional_errors # type: bool
if args.base_branch:
self.base_branch = args.base_branch # str
elif is_shippable():
self.base_branch = os.environ.get('BASE_BRANCH', '') # str
if self.base_branch:
self.base_branch = 'origin/%s' % self.base_branch
else:
self.base_branch = ''
self.base_branch = args.base_branch # type: str
self.info_stderr = self.lint

@ -28,7 +28,6 @@ from .io import (
from .util import (
ApplicationError,
display,
is_shippable,
ANSIBLE_TEST_DATA_ROOT,
)
@ -41,6 +40,11 @@ from .config import (
EnvironmentConfig,
)
from .ci import (
AuthContext,
get_ci_provider,
)
from .data import (
data_context,
)
@ -75,13 +79,14 @@ class AnsibleCoreCI:
self.endpoint = None
self.max_threshold = 1
self.retries = 3
self.ci_provider = get_ci_provider()
self.auth_context = AuthContext()
if self.arch:
self.name = '%s-%s-%s' % (self.arch, self.platform, self.version)
else:
self.name = '%s-%s' % (self.platform, self.version)
self.ci_key = os.path.expanduser('~/.ansible-core-ci.key')
self.resource = 'jobs'
# Assign each supported platform to one provider.
@ -159,7 +164,7 @@ class AnsibleCoreCI:
# permit command-line override of region selection
region = args.remote_aws_region
# use a dedicated CI key when overriding the region selection
self.ci_key += '.%s' % args.remote_aws_region
self.auth_context.region = args.remote_aws_region
else:
region = 'us-east-1'
@ -243,6 +248,11 @@ class AnsibleCoreCI:
raise ApplicationError('Unable to get available endpoints.')
@property
def available(self):
"""Return True if Ansible Core CI is supported."""
return self.ci_provider.supports_core_ci_auth(self.auth_context)
def start(self):
"""Start instance."""
if self.started:
@ -250,30 +260,7 @@ class AnsibleCoreCI:
verbosity=1)
return None
if is_shippable():
return self.start_shippable()
return self.start_remote()
def start_remote(self):
"""Start instance for remote development/testing."""
auth_key = read_text_file(self.ci_key).strip()
return self._start(dict(
remote=dict(
key=auth_key,
nonce=None,
),
))
def start_shippable(self):
"""Start instance on Shippable."""
return self._start(dict(
shippable=dict(
run_id=os.environ['SHIPPABLE_BUILD_ID'],
job_number=int(os.environ['SHIPPABLE_JOB_NUMBER']),
),
))
return self._start(self.ci_provider.prepare_core_ci_auth(self.auth_context))
def stop(self):
"""Stop instance."""

@ -94,6 +94,10 @@ from .venv import (
create_virtual_environment,
)
from .ci import (
get_ci_provider,
)
def check_delegation_args(args):
"""
@ -117,6 +121,8 @@ def delegate(args, exclude, require, integration_targets):
:rtype: bool
"""
if isinstance(args, TestConfig):
args.metadata.ci_provider = get_ci_provider().code
make_dirs(ResultType.TMP.path)
with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=ResultType.TMP.path) as metadata_fd:
@ -550,8 +556,10 @@ def generate_command(args, python_interpreter, ansible_bin_path, content_root, o
if isinstance(args, ShellConfig):
cmd = create_shell_command(cmd)
elif isinstance(args, SanityConfig):
if args.base_branch:
cmd += ['--base-branch', args.base_branch]
base_branch = args.base_branch or get_ci_provider().get_base_branch()
if base_branch:
cmd += ['--base-branch', base_branch]
return cmd

@ -6,7 +6,6 @@ import datetime
import functools
import os
import platform
import re
import signal
import sys
import time
@ -36,10 +35,6 @@ from .util_common import (
ResultType,
)
from .git import (
Git,
)
from .docker_util import (
docker_info,
docker_version
@ -61,6 +56,10 @@ from .executor import (
SUPPORTED_PYTHON_VERSIONS,
)
from .ci import (
get_ci_provider,
)
class EnvConfig(CommonConfig):
"""Configuration for the tools command."""
@ -106,7 +105,7 @@ def show_dump_env(args):
pwd=os.environ.get('PWD', None),
cwd=os.getcwd(),
),
git=get_git_details(args),
git=get_ci_provider().get_git_details(args),
platform=dict(
datetime=datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
platform=platform.platform(),
@ -292,75 +291,3 @@ def get_docker_details(args):
)
return docker_details
def get_git_details(args):
"""
:type args: CommonConfig
:rtype: dict[str, any]
"""
commit = os.environ.get('COMMIT')
base_commit = os.environ.get('BASE_COMMIT')
git_details = dict(
base_commit=base_commit,
commit=commit,
merged_commit=get_merged_commit(args, commit),
)
return git_details
# noinspection PyUnusedLocal
def get_merged_commit(args, commit): # pylint: disable=unused-argument
"""
:type args: CommonConfig
:type commit: str
:rtype: str | None
"""
if not commit:
return None
git = Git()
try:
show_commit = git.run_git(['show', '--no-patch', '--no-abbrev', commit])
except SubprocessError as ex:
# This should only fail for pull requests where the commit does not exist.
# Merge runs would fail much earlier when attempting to checkout the commit.
raise ApplicationError('Commit %s was not found:\n\n%s\n\n'
'GitHub may not have fully replicated the commit across their infrastructure.\n'
'It is also possible the commit was removed by a force push between job creation and execution.\n'
'Find the latest run for the pull request and restart failed jobs as needed.'
% (commit, ex.stderr.strip()))
head_commit = git.run_git(['show', '--no-patch', '--no-abbrev', 'HEAD'])
if show_commit == head_commit:
# Commit is HEAD, so this is not a pull request or the base branch for the pull request is up-to-date.
return None
match_merge = re.search(r'^Merge: (?P<parents>[0-9a-f]{40} [0-9a-f]{40})$', head_commit, flags=re.MULTILINE)
if not match_merge:
# The most likely scenarios resulting in a failure here are:
# A new run should or does supersede this job, but it wasn't cancelled in time.
# A job was superseded and then later restarted.
raise ApplicationError('HEAD is not commit %s or a merge commit:\n\n%s\n\n'
'This job has likely been superseded by another run due to additional commits being pushed.\n'
'Find the latest run for the pull request and restart failed jobs as needed.'
% (commit, head_commit.strip()))
parents = set(match_merge.group('parents').split(' '))
if len(parents) != 2:
raise ApplicationError('HEAD is a %d-way octopus merge.' % len(parents))
if commit not in parents:
raise ApplicationError('Commit %s is not a parent of HEAD.' % commit)
parents.remove(commit)
last_commit = parents.pop()
return last_commit

@ -54,8 +54,6 @@ from .util import (
SubprocessError,
display,
remove_tree,
is_shippable,
is_binary_file,
find_executable,
raw_command,
get_available_port,
@ -108,13 +106,8 @@ from .target import (
TIntegrationTarget,
)
from .changes import (
ShippableChanges,
LocalChanges,
)
from .git import (
Git,
from .ci import (
get_ci_provider,
)
from .classification import (
@ -1364,7 +1357,7 @@ def integration_environment(args, target, test_dir, inventory_path, ansible_conf
integration = dict(
JUNIT_OUTPUT_DIR=ResultType.JUNIT.path,
ANSIBLE_CALLBACK_WHITELIST=','.join(sorted(set(callback_plugins))),
ANSIBLE_TEST_CI=args.metadata.ci_provider,
ANSIBLE_TEST_CI=args.metadata.ci_provider or get_ci_provider().code,
ANSIBLE_TEST_COVERAGE='check' if args.coverage_check else ('yes' if args.coverage else ''),
OUTPUT_DIR=test_dir,
INVENTORY_PATH=os.path.abspath(inventory_path),
@ -1564,15 +1557,12 @@ def detect_changes(args):
:type args: TestConfig
:rtype: list[str] | None
"""
if args.changed and is_shippable():
display.info('Shippable detected, collecting parameters from environment.')
paths = detect_changes_shippable(args)
if args.changed:
paths = get_ci_provider().detect_changes(args)
elif args.changed_from or args.changed_path:
paths = args.changed_path or []
if args.changed_from:
paths += read_text_file(args.changed_from).splitlines()
elif args.changed:
paths = detect_changes_local(args)
else:
return None # change detection not enabled
@ -1587,84 +1577,6 @@ def detect_changes(args):
return paths
def detect_changes_shippable(args):
"""Initialize change detection on Shippable.
:type args: TestConfig
:rtype: list[str] | None
"""
git = Git()
result = ShippableChanges(args, git)
if result.is_pr:
job_type = 'pull request'
elif result.is_tag:
job_type = 'tag'
else:
job_type = 'merge commit'
display.info('Processing %s for branch %s commit %s' % (job_type, result.branch, result.commit))
if not args.metadata.changes:
args.metadata.populate_changes(result.diff)
return result.paths
def detect_changes_local(args):
"""
:type args: TestConfig
:rtype: list[str]
"""
git = Git()
result = LocalChanges(args, git)
display.info('Detected branch %s forked from %s at commit %s' % (
result.current_branch, result.fork_branch, result.fork_point))
if result.untracked and not args.untracked:
display.warning('Ignored %s untracked file(s). Use --untracked to include them.' %
len(result.untracked))
if result.committed and not args.committed:
display.warning('Ignored %s committed change(s). Omit --ignore-committed to include them.' %
len(result.committed))
if result.staged and not args.staged:
display.warning('Ignored %s staged change(s). Omit --ignore-staged to include them.' %
len(result.staged))
if result.unstaged and not args.unstaged:
display.warning('Ignored %s unstaged change(s). Omit --ignore-unstaged to include them.' %
len(result.unstaged))
names = set()
if args.tracked:
names |= set(result.tracked)
if args.untracked:
names |= set(result.untracked)
if args.committed:
names |= set(result.committed)
if args.staged:
names |= set(result.staged)
if args.unstaged:
names |= set(result.unstaged)
if not args.metadata.changes:
args.metadata.populate_changes(result.diff)
for path in result.untracked:
if is_binary_file(path):
args.metadata.changes[path] = ((0, 0),)
continue
line_count = len(read_text_file(path).splitlines())
args.metadata.changes[path] = ((1, line_count),)
return sorted(names)
def get_integration_filter(args, targets):
"""
:type args: IntegrationConfig

@ -6,7 +6,6 @@ from . import types as t
from .util import (
display,
is_shippable,
)
from .io import (
@ -28,11 +27,7 @@ class Metadata:
self.cloud_config = None # type: t.Optional[t.Dict[str, str]]
self.instance_config = None # type: t.Optional[t.List[t.Dict[str, str]]]
self.change_description = None # type: t.Optional[ChangeDescription]
if is_shippable():
self.ci_provider = 'shippable'
else:
self.ci_provider = ''
self.ci_provider = None # type: t.Optional[str]
def populate_changes(self, diff):
"""

@ -39,6 +39,10 @@ from ..config import (
SanityConfig,
)
from ..ci import (
get_ci_provider,
)
from ..data import (
data_context,
)
@ -90,12 +94,14 @@ class ValidateModulesTest(SanitySingleVersion):
except CollectionDetailError as ex:
display.warning('Skipping validate-modules collection version checks since collection detail loading failed: %s' % ex.reason)
else:
if args.base_branch:
base_branch = args.base_branch or get_ci_provider().get_base_branch()
if base_branch:
cmd.extend([
'--base-branch', args.base_branch,
'--base-branch', base_branch,
])
else:
display.warning('Cannot perform module comparison against the base branch. Base branch not detected when running locally.')
display.warning('Cannot perform module comparison against the base branch because the base branch was not detected.')
try:
stdout, stderr = run_command(args, cmd, env=env, capture=True)

@ -191,7 +191,7 @@ One or more of the following situations may be responsible:
timestamp = datetime.datetime.utcnow().replace(microsecond=0).isoformat()
# hack to avoid requiring junit-xml, which isn't pre-installed on Shippable outside our test containers
# hack to avoid requiring junit-xml, which may not be pre-installed outside our test containers
xml = '''
<?xml version="1.0" encoding="utf-8"?>
<testsuites disabled="0" errors="1" failures="0" tests="1" time="0.0">

@ -173,13 +173,6 @@ def parse_parameterized_completion(value):
return name, data
def is_shippable():
"""
:rtype: bool
"""
return os.environ.get('SHIPPABLE') == 'true'
def remove_file(path):
"""
:type path: str
