pull/81764/merge
Sviatoslav Sydorenko (Святослав Сидоренко) 1 month ago committed by GitHub
commit a5c80b3c5a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

@ -0,0 +1,21 @@
---
bugfixes:
- >-
ansible-galaxy - started allowing the use of pre-releases
for dependencies on any level of the dependency tree that
specifically demand exact pre-release versions of
collections and not version ranges.
(https://github.com/ansible/ansible/pull/81606)
- >-
ansible-galaxy - started allowing the use of pre-releases
for collections that do not have any stable versions
published.
(https://github.com/ansible/ansible/pull/81606)
- >-
ansible-galaxy - started choosing collection
pre-releases when they are the only way to satisfy
the requirements.
(https://github.com/ansible/ansible/pull/81764)
...

@ -340,6 +340,19 @@ class GalaxyCLI(CLI):
help='A file containing a list of collections to be downloaded.')
download_parser.add_argument('--pre', dest='allow_pre_release', action='store_true',
help='Include pre-release versions. Semantic versioning pre-releases are ignored by default')
download_parser.add_argument('--forbid-inevitable-pre-releases',
dest='forbid_inevitable_pre_releases',
action='store_true',
help='Prevent any pre-release '
'collection versions from being '
'considered. Even if it is the last '
'hope. By default, pre-releases that '
'are the only way to satisfy the '
'requirement can be selected when '
'there are no stable candidates '
'available that could satisfy the '
'requirements.',
)
def add_init_options(self, parser, parents=None):
galaxy_type = 'collection' if parser.metavar == 'COLLECTION_ACTION' else 'role'
@ -524,6 +537,19 @@ class GalaxyCLI(CLI):
help='A file containing a list of collections to be installed.')
install_parser.add_argument('--pre', dest='allow_pre_release', action='store_true',
help='Include pre-release versions. Semantic versioning pre-releases are ignored by default')
install_parser.add_argument('--forbid-inevitable-pre-releases',
dest='forbid_inevitable_pre_releases',
action='store_true',
help='Prevent any pre-release '
'collection versions from being '
'considered. Even if it is the last '
'hope. By default, pre-releases that '
'are the only way to satisfy the '
'requirement can be selected when '
'there are no stable candidates '
'available that could satisfy the '
'requirements.',
)
install_parser.add_argument('-U', '--upgrade', dest='upgrade', action='store_true', default=False,
help='Upgrade installed collection artifacts. This will also update dependencies unless --no-deps is provided')
install_parser.add_argument('--keyring', dest='keyring', default=C.GALAXY_GPG_KEYRING,
@ -1078,6 +1104,7 @@ class GalaxyCLI(CLI):
download_collections(
requirements, download_path, self.api_servers, no_deps,
context.CLIARGS['allow_pre_release'],
forbid_inevitable_pre_releases=context.CLIARGS['forbid_inevitable_pre_releases'],
artifacts_manager=artifacts_manager,
)
@ -1436,6 +1463,10 @@ class GalaxyCLI(CLI):
# If `ansible-galaxy install` is used, collection-only options aren't available to the user and won't be in context.CLIARGS
allow_pre_release = context.CLIARGS.get('allow_pre_release', False)
forbid_inevitable_pre_releases = context.CLIARGS.get(
'forbid_inevitable_pre_releases',
False,
)
upgrade = context.CLIARGS.get('upgrade', False)
collections_path = C.COLLECTIONS_PATHS
@ -1465,6 +1496,7 @@ class GalaxyCLI(CLI):
requirements, output_path, self.api_servers, ignore_errors,
no_deps, force, force_with_deps, upgrade,
allow_pre_release=allow_pre_release,
forbid_inevitable_pre_releases=forbid_inevitable_pre_releases,
artifacts_manager=artifacts_manager,
disable_gpg_verify=disable_gpg_verify,
offline=context.CLIARGS.get('offline', False),

@ -516,6 +516,7 @@ def download_collections(
apis, # type: t.Iterable[GalaxyAPI]
no_deps, # type: bool
allow_pre_release, # type: bool
forbid_inevitable_pre_releases, # type: bool
artifacts_manager, # type: ConcreteArtifactsManager
): # type: (...) -> None
"""Download Ansible collections as their tarball from a Galaxy server to the path specified and creates a requirements
@ -527,6 +528,7 @@ def download_collections(
:param validate_certs: Whether to validate the certificate if downloading a tarball from a non-Galaxy host.
:param no_deps: Ignore any collection dependencies and only download the base requirements.
:param allow_pre_release: Do not ignore pre-release versions when selecting the latest.
:param forbid_inevitable_pre_releases: Error out if the dependency tree requires pre-releases.
"""
with _display_progress("Process download dependency map"):
dep_map = _resolve_depenency_map(
@ -536,6 +538,7 @@ def download_collections(
concrete_artifacts_manager=artifacts_manager,
no_deps=no_deps,
allow_pre_release=allow_pre_release,
forbid_inevitable_pre_releases=forbid_inevitable_pre_releases,
upgrade=False,
# Avoid overhead getting signatures since they are not currently applicable to downloaded collections
include_signatures=False,
@ -659,6 +662,7 @@ def install_collections(
force_deps, # type: bool
upgrade, # type: bool
allow_pre_release, # type: bool
forbid_inevitable_pre_releases, # type: bool
artifacts_manager, # type: ConcreteArtifactsManager
disable_gpg_verify, # type: bool
offline, # type: bool
@ -674,6 +678,7 @@ def install_collections(
:param no_deps: Ignore any collection dependencies and only install the base requirements.
:param force: Re-install a collection if it has already been installed.
:param force_deps: Re-install a collection as well as its dependencies if they have already been installed.
:param forbid_inevitable_pre_releases: Error out if the dependency tree requires pre-releases.
"""
existing_collections = {
Requirement(coll.fqcn, coll.ver, coll.src, coll.type, None)
@ -739,6 +744,7 @@ def install_collections(
concrete_artifacts_manager=artifacts_manager,
no_deps=no_deps,
allow_pre_release=allow_pre_release,
forbid_inevitable_pre_releases=forbid_inevitable_pre_releases,
upgrade=upgrade,
include_signatures=not disable_gpg_verify,
offline=offline,
@ -1803,6 +1809,7 @@ def _resolve_depenency_map(
preferred_candidates, # type: t.Iterable[Candidate] | None
no_deps, # type: bool
allow_pre_release, # type: bool
forbid_inevitable_pre_releases, # type: bool
upgrade, # type: bool
include_signatures, # type: bool
offline, # type: bool
@ -1849,7 +1856,7 @@ def _resolve_depenency_map(
offline=offline,
)
try:
return collection_dep_resolver.resolve(
dependency_map = collection_dep_resolver.resolve(
requested_requirements,
max_rounds=2000000, # NOTE: same constant pip uses
).mapping
@ -1903,3 +1910,27 @@ def _resolve_depenency_map(
raise AnsibleError('\n'.join(error_msg_lines)) from dep_exc
except ValueError as exc:
raise AnsibleError(to_native(exc)) from exc
else:
if not allow_pre_release:
pre_release_candidates = tuple(
concrete_coll_pin
for concrete_coll_pin in dependency_map.values()
if concrete_coll_pin.is_pre_release
)
if pre_release_candidates:
log_msg_lines = [
'The pre-release collections versions have been selected, '
'being the only way to satisfy the dependency tree '
'constraints, despite the user not requesting them '
'globally:',
]
for collection_candidate in pre_release_candidates:
log_msg_lines.append(f'* {collection_candidate!s}')
log_msg_lines.append(pre_release_hint)
log_msg = '\n'.join(log_msg_lines)
if forbid_inevitable_pre_releases:
raise AnsibleError(log_msg) from None
else:
display.warning(log_msg, formatted=True)
return dependency_map

@ -29,6 +29,7 @@ if t.TYPE_CHECKING:
from ansible.errors import AnsibleError, AnsibleAssertionError
from ansible.galaxy.api import GalaxyAPI
from ansible.galaxy.collection import HAS_PACKAGING, PkgReq
from ansible.galaxy.dependency_resolution.versioning import is_pre_release
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.arg_spec import ArgumentSpecValidator
from ansible.utils.collection_loader import AnsibleCollectionRef
@ -584,6 +585,11 @@ class _ComputedReqKindsMixin:
version_string.startswith(('<', '>', '!='))
)
@property
def is_pre_release(self) -> bool:
    """Whether this object's version string denotes a pre-release."""
    version_string = self.ver
    return is_pre_release(version_string)
@property
def source_info(self):
    """Return the ``_source_info`` value attached to this instance."""
    # Read-only accessor; the value is set elsewhere (constructor not
    # visible in this hunk — confirm against the full class definition).
    return self._source_info

@ -295,26 +295,6 @@ class CollectionDependencyProviderBase(AbstractProvider):
signatures = []
extra_signature_sources = [] # type: list[str]
discarding_pre_releases_acceptable = any(
not is_pre_release(candidate_version)
for candidate_version, _src_server in coll_versions
)
# NOTE: The optimization of conditionally looping over the requirements
# NOTE: is used to skip having to compute the pinned status of all
# NOTE: requirements and apply version normalization to the found ones.
all_pinned_requirement_version_numbers = {
# NOTE: Pinned versions can start with a number, but also with an
# NOTE: equals sign. Stripping it at the beginning should be
# NOTE: enough. If there's a space after equals, the second strip
# NOTE: will take care of it.
# NOTE: Without this conversion, requirements versions like
# NOTE: '1.2.3-alpha.4' work, but '=1.2.3-alpha.4' don't.
requirement.ver.lstrip('=').strip()
for requirement in requirements
if requirement.is_pinned
} if discarding_pre_releases_acceptable else set()
for version, src_server in coll_versions:
tmp_candidate = Candidate(fqcn, version, src_server, 'galaxy', None)
@ -325,34 +305,6 @@ class CollectionDependencyProviderBase(AbstractProvider):
if not candidate_satisfies_requirement:
break
should_disregard_pre_release_candidate = (
# NOTE: Do not discard pre-release candidates in the
# NOTE: following cases:
# NOTE: * the end-user requested pre-releases explicitly;
# NOTE: * the candidate is a concrete artifact (e.g. a
# NOTE: Git repository, subdirs, a tarball URL, or a
# NOTE: local dir or file etc.);
# NOTE: * the candidate's pre-release version exactly
# NOTE: matches a version specifically requested by one
# NOTE: of the requirements in the current match
# NOTE: discovery round (i.e. matching a requirement
# NOTE: that is not a range but an explicit specific
# NOTE: version pin). This works when some requirements
# NOTE: request version ranges but others (possibly on
# NOTE: different dependency tree level depths) demand
# NOTE: pre-release dependency versions, even if those
# NOTE: dependencies are transitive.
is_pre_release(tmp_candidate.ver)
and discarding_pre_releases_acceptable
and not (
self._with_pre_releases
or tmp_candidate.is_concrete_artifact
or version in all_pinned_requirement_version_numbers
)
)
if should_disregard_pre_release_candidate:
break
# FIXME
# candidate_is_from_requested_source = (
# requirement.src is None # if this is true for some candidates but not all it will break key param - Nonetype can't be compared to str
@ -396,7 +348,70 @@ class CollectionDependencyProviderBase(AbstractProvider):
)
}
return list(preinstalled_candidates) + latest_matches
# NOTE: Following is the stage of postponed pre-release candidate
# NOTE: evaluation. It is necessary to allow the dependency resolver
# NOTE: select pre-release candidates if none of the candidate versions
# NOTE: that match all the requirements on this round, are stable
# NOTE: releases. Those pre-releases are only taken into account, if
# NOTE: there are no preinstalled candidates available that satisfy
# NOTE: said requirements.
prefer_avoiding_pre_releases = not self._with_pre_releases
preinstalled_candidates_satisfy_requirements = bool(
preinstalled_candidates,
)
discarding_pre_releases_acceptable = prefer_avoiding_pre_releases and (
preinstalled_candidates_satisfy_requirements or
any(
not is_pre_release(candidate.ver)
for candidate in latest_matches
)
)
# NOTE: The optimization of conditionally looping over the requirements
# NOTE: is used to skip having to compute the pinned status of all
# NOTE: requirements and apply version normalization to the found ones.
all_pinned_requirement_version_numbers = {
# NOTE: Pinned versions can start with a number, but also with an
# NOTE: equals sign. Stripping it at the beginning should be
# NOTE: enough. If there's a space after equals, the second strip
# NOTE: will take care of it.
# NOTE: Without this conversion, requirements versions like
# NOTE: '1.2.3-alpha.4' work, but '=1.2.3-alpha.4' don't.
requirement.ver.lstrip('=').strip()
for requirement in requirements
if requirement.is_pinned
} if discarding_pre_releases_acceptable else set()
def should_disregard_as_pre_release(candidate) -> bool:
    """Decide whether *candidate* must be filtered out as a pre-release.

    A candidate is only dropped when all of the following hold; the
    remaining cases keep it in the match list:

    * it actually carries a pre-release version (stable versions are
      never filtered here);
    * it is not a concrete artifact (a Git repository, subdirs, a
      tarball URL, or a local dir or file etc. is exactly what the
      user pointed at);
    * its pre-release version does not exactly match a version pin
      made by one of the requirements in the current match discovery
      round (i.e. a requirement that is an explicit specific version,
      not a range). This works when some requirements request version
      ranges but others (possibly on different dependency tree level
      depths) demand pre-release dependency versions, even if those
      dependencies are transitive.

    The "end-user requested pre-releases explicitly" case is handled
    by the caller, which only applies this filter conditionally.
    """
    if not is_pre_release(candidate.ver):
        return False
    if candidate.is_concrete_artifact:
        return False
    return candidate.ver not in all_pinned_requirement_version_numbers
matching_non_preinstalled_candidates = [
candidate
for candidate in latest_matches
if not should_disregard_as_pre_release(candidate)
] if discarding_pre_releases_acceptable else latest_matches
return list(preinstalled_candidates) + matching_non_preinstalled_candidates
def is_satisfied_by(self, requirement, candidate):
# type: (Requirement, Candidate) -> bool

@ -5,6 +5,8 @@
environment:
ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
ANSIBLE_CONFIG: '{{ galaxy_dir }}/ansible.cfg'
ANSIBLE_NOCOLOR: 1
ANSIBLE_FORCE_COLOR: 0
block:
- name: reset installation directory
file:
@ -67,6 +69,73 @@
- >-
"dev_and_stables_ns.dev_and_stables_name:1.2.3-dev0 was installed successfully"
in select_pre_release_if_no_stable.stdout
- name: cleanup
file:
state: "{{ item }}"
path: "{{ galaxy_dir }}/ansible_collections"
loop:
- absent
- directory
- name: >-
request installing collection from range when only pre-releases
match the user request
command: >-
ansible-galaxy collection install
'dev_and_stables_ns.dev_and_stables_name:>=1.0.0,<1.2.4'
{{ galaxy_verbosity }}
register: select_pre_release_from_range_if_no_stable
- assert:
that:
- >-
"dev_and_stables_ns.dev_and_stables_name:1.2.3-dev0 was installed successfully"
in select_pre_release_from_range_if_no_stable.stdout_lines
- >-
'[WARNING]: '
in select_pre_release_from_range_if_no_stable.stderr_lines
- >-
'The pre-release collections versions have been selected, ' ~
'being the only way to satisfy the dependency tree ' ~
'constraints, despite the user not requesting them globally:'
in select_pre_release_from_range_if_no_stable.stderr_lines
- >-
"* dev_and_stables_ns.dev_and_stables_name:1.2.3-dev0"
in select_pre_release_from_range_if_no_stable.stderr_lines
- name: cleanup
file:
state: "{{ item }}"
path: "{{ galaxy_dir }}/ansible_collections"
loop:
- absent
- directory
- name: >-
request installing collection from range when only pre-releases
match the user request but inevitable pre-releases are forbidden
command: >-
ansible-galaxy collection install
--forbid-inevitable-pre-releases
'dev_and_stables_ns.dev_and_stables_name:>=1.0.0,<1.2.4'
{{ galaxy_verbosity }}
ignore_errors: yes
register: forbid_selected_pre_release_from_range_if_no_stable
- assert:
that:
- forbid_selected_pre_release_from_range_if_no_stable is failed
- >-
"was installed successfully"
not in forbid_selected_pre_release_from_range_if_no_stable.stdout_lines
- >-
'ERROR! ' ~
'The pre-release collections versions have been selected, ' ~
'being the only way to satisfy the dependency tree ' ~
'constraints, despite the user not requesting them globally:'
in forbid_selected_pre_release_from_range_if_no_stable.stderr_lines
- >-
"* dev_and_stables_ns.dev_and_stables_name:1.2.3-dev0"
in forbid_selected_pre_release_from_range_if_no_stable.stderr_lines
always:
- name: cleanup
file:

@ -105,6 +105,9 @@
that:
- '"Installing ''namespace1.name1:1.1.0-beta.1'' to" in install_prerelease.stdout'
- (install_prerelease_actual.content | b64decode | from_json).collection_info.version == '1.1.0-beta.1'
- >-
'The pre-release collections versions have been selected'
in install_prerelease.stderr
- name: Remove beta
file:
@ -135,6 +138,12 @@
ANSIBLE_CONFIG: '{{ galaxy_dir }}/ansible.cfg'
register: install_multiple_with_dep
- assert:
that:
- >-
'The pre-release collections versions have been selected'
not in install_multiple_with_dep.stderr
- name: get result of install multiple collections with dependencies - {{ test_id }}
slurp:
path: '{{ galaxy_dir }}/ansible_collections/{{ collection.namespace }}/{{ collection.name }}/MANIFEST.json'

@ -114,6 +114,8 @@
apply:
environment:
ANSIBLE_CONFIG: '{{ galaxy_dir }}/ansible.cfg'
ANSIBLE_NOCOLOR: 1
ANSIBLE_FORCE_COLOR: 0
loop:
- name: galaxy_ng
server: '{{ galaxy_ng_server }}'
@ -136,10 +138,10 @@
- name: test choosing pinned pre-releases anywhere in the dependency tree
# This is a regression test for the case when the end-user does not
# explicitly allow installing pre-release collection versions, but their
# precise pins are still selected if met among the dependencies, either
# explicitly allow installing pre-release collection versions, but they
# are still selected if no stable releases satisfy the requirements, either
# direct or transitive.
include_tasks: pinned_pre_releases_in_deptree.yml
include_tasks: inevitable_pre_releases_in_deptree.yml
- name: test installing prereleases via scm direct requests
# In this test suite because the bug relies on the dep having versions on a Galaxy server

@ -420,7 +420,16 @@ def test_build_requirement_from_name(galaxy_server, monkeypatch, tmp_path_factor
collections, requirements_file, artifacts_manager=concrete_artifact_cm
)['collections']
actual = collection._resolve_depenency_map(
requirements, [galaxy_server], concrete_artifact_cm, None, True, False, False, False, False
requested_requirements=requirements,
galaxy_apis=[galaxy_server],
concrete_artifacts_manager=concrete_artifact_cm,
preferred_candidates=None,
no_deps=True,
allow_pre_release=False,
forbid_inevitable_pre_releases=False,
upgrade=False,
include_signatures=False,
offline=False,
)['namespace.collection']
assert actual.namespace == u'namespace'
@ -449,7 +458,16 @@ def test_build_requirement_from_name_with_prerelease(galaxy_server, monkeypatch,
['namespace.collection'], None, artifacts_manager=concrete_artifact_cm
)['collections']
actual = collection._resolve_depenency_map(
requirements, [galaxy_server], concrete_artifact_cm, None, True, False, False, False, False
requested_requirements=requirements,
galaxy_apis=[galaxy_server],
concrete_artifacts_manager=concrete_artifact_cm,
preferred_candidates=None,
no_deps=True,
allow_pre_release=False,
forbid_inevitable_pre_releases=False,
upgrade=False,
include_signatures=False,
offline=False,
)['namespace.collection']
assert actual.namespace == u'namespace'
@ -479,7 +497,16 @@ def test_build_requirment_from_name_with_prerelease_explicit(galaxy_server, monk
['namespace.collection:2.0.1-beta.1'], None, artifacts_manager=concrete_artifact_cm
)['collections']
actual = collection._resolve_depenency_map(
requirements, [galaxy_server], concrete_artifact_cm, None, True, False, False, False, False
requested_requirements=requirements,
galaxy_apis=[galaxy_server],
concrete_artifacts_manager=concrete_artifact_cm,
preferred_candidates=None,
no_deps=True,
allow_pre_release=False,
forbid_inevitable_pre_releases=False,
upgrade=False,
include_signatures=False,
offline=False,
)['namespace.collection']
assert actual.namespace == u'namespace'
@ -514,7 +541,16 @@ def test_build_requirement_from_name_second_server(galaxy_server, monkeypatch, t
['namespace.collection:>1.0.1'], None, artifacts_manager=concrete_artifact_cm
)['collections']
actual = collection._resolve_depenency_map(
requirements, [broken_server, galaxy_server], concrete_artifact_cm, None, True, False, False, False, False
requested_requirements=requirements,
galaxy_apis=[broken_server, galaxy_server],
concrete_artifacts_manager=concrete_artifact_cm,
preferred_candidates=None,
no_deps=True,
allow_pre_release=False,
forbid_inevitable_pre_releases=False,
upgrade=False,
include_signatures=False,
offline=False,
)['namespace.collection']
assert actual.namespace == u'namespace'
@ -545,7 +581,18 @@ def test_build_requirement_from_name_missing(galaxy_server, monkeypatch, tmp_pat
expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n* namespace.collection:* (direct request)"
with pytest.raises(AnsibleError, match=re.escape(expected)):
collection._resolve_depenency_map(requirements, [galaxy_server, galaxy_server], concrete_artifact_cm, None, False, True, False, False, False)
collection._resolve_depenency_map(
requested_requirements=requirements,
galaxy_apis=[galaxy_server, galaxy_server],
concrete_artifacts_manager=concrete_artifact_cm,
preferred_candidates=None,
no_deps=True,
allow_pre_release=False,
forbid_inevitable_pre_releases=False,
upgrade=False,
include_signatures=False,
offline=False,
)
def test_build_requirement_from_name_401_unauthorized(galaxy_server, monkeypatch, tmp_path_factory):
@ -565,7 +612,18 @@ def test_build_requirement_from_name_401_unauthorized(galaxy_server, monkeypatch
expected = "error (HTTP Code: 401, Message: msg)"
with pytest.raises(api.GalaxyError, match=re.escape(expected)):
collection._resolve_depenency_map(requirements, [galaxy_server, galaxy_server], concrete_artifact_cm, None, False, False, False, False, False)
collection._resolve_depenency_map(
requested_requirements=requirements,
galaxy_apis=[galaxy_server, galaxy_server],
concrete_artifacts_manager=concrete_artifact_cm,
preferred_candidates=None,
no_deps=False,
allow_pre_release=False,
forbid_inevitable_pre_releases=False,
upgrade=False,
include_signatures=False,
offline=False,
)
def test_build_requirement_from_name_single_version(galaxy_server, monkeypatch, tmp_path_factory):
@ -593,7 +651,17 @@ def test_build_requirement_from_name_single_version(galaxy_server, monkeypatch,
)['collections']
actual = collection._resolve_depenency_map(
requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False, False)['namespace.collection']
requested_requirements=requirements,
galaxy_apis=[galaxy_server],
concrete_artifacts_manager=concrete_artifact_cm,
preferred_candidates=None,
no_deps=False,
allow_pre_release=True,
forbid_inevitable_pre_releases=False,
upgrade=False,
include_signatures=False,
offline=False,
)['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
@ -630,7 +698,17 @@ def test_build_requirement_from_name_multiple_versions_one_match(galaxy_server,
)['collections']
actual = collection._resolve_depenency_map(
requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False, False)['namespace.collection']
requested_requirements=requirements,
galaxy_apis=[galaxy_server],
concrete_artifacts_manager=concrete_artifact_cm,
preferred_candidates=None,
no_deps=False,
allow_pre_release=True,
forbid_inevitable_pre_releases=False,
upgrade=False,
include_signatures=False,
offline=False,
)['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
@ -672,7 +750,17 @@ def test_build_requirement_from_name_multiple_version_results(galaxy_server, mon
)['collections']
actual = collection._resolve_depenency_map(
requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False, False)['namespace.collection']
requested_requirements=requirements,
galaxy_apis=[galaxy_server],
concrete_artifacts_manager=concrete_artifact_cm,
preferred_candidates=None,
no_deps=False,
allow_pre_release=True,
forbid_inevitable_pre_releases=False,
upgrade=False,
include_signatures=False,
offline=False,
)['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
@ -706,7 +794,18 @@ def test_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_server):
expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n"
expected += "* namespace.collection:!=2.0.5 (direct request)"
with pytest.raises(AnsibleError, match=re.escape(expected)):
collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False, False)
collection._resolve_depenency_map(
requested_requirements=requirements,
galaxy_apis=[galaxy_server],
concrete_artifacts_manager=concrete_artifact_cm,
preferred_candidates=None,
no_deps=False,
allow_pre_release=True,
forbid_inevitable_pre_releases=False,
upgrade=False,
include_signatures=False,
offline=False,
)
def test_dep_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_server):
@ -731,7 +830,18 @@ def test_dep_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_serve
expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n"
expected += "* namespace.collection:!=1.0.0 (dependency of parent.collection:2.0.5)"
with pytest.raises(AnsibleError, match=re.escape(expected)):
collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False, False)
collection._resolve_depenency_map(
requested_requirements=requirements,
galaxy_apis=[galaxy_server],
concrete_artifacts_manager=concrete_artifact_cm,
preferred_candidates=None,
no_deps=False,
allow_pre_release=True,
forbid_inevitable_pre_releases=False,
upgrade=False,
include_signatures=False,
offline=False,
)
def test_install_installed_collection(monkeypatch, tmp_path_factory, galaxy_server):
@ -843,7 +953,21 @@ def test_install_collections_from_tar(collection_artifact, monkeypatch):
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
collection.install_collections(
requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False, set())
collections=requirements,
output_path=to_text(temp_path),
apis=[],
ignore_errors=False,
no_deps=False,
force=False,
force_deps=False,
upgrade=False,
allow_pre_release=False,
forbid_inevitable_pre_releases=False,
artifacts_manager=concrete_artifact_cm,
disable_gpg_verify=True,
offline=False,
read_requirement_paths=set(),
)
assert os.path.isdir(collection_path)
@ -882,7 +1006,21 @@ def test_install_collection_with_circular_dependency(collection_artifact, monkey
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
collection.install_collections(
requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False, set())
collections=requirements,
output_path=to_text(temp_path),
apis=[],
ignore_errors=False,
no_deps=False,
force=False,
force_deps=False,
upgrade=False,
allow_pre_release=False,
forbid_inevitable_pre_releases=False,
artifacts_manager=concrete_artifact_cm,
disable_gpg_verify=True,
offline=False,
read_requirement_paths=set(),
)
assert os.path.isdir(collection_path)
@ -920,7 +1058,21 @@ def test_install_collection_with_no_dependency(collection_artifact, monkeypatch)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
collection.install_collections(
requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False, set())
collections=requirements,
output_path=to_text(temp_path),
apis=[],
ignore_errors=False,
no_deps=False,
force=False,
force_deps=False,
upgrade=False,
allow_pre_release=False,
forbid_inevitable_pre_releases=False,
artifacts_manager=concrete_artifact_cm,
disable_gpg_verify=True,
offline=False,
read_requirement_paths=set(),
)
assert os.path.isdir(collection_path)

Loading…
Cancel
Save