Add support for importlib.resources (#78915)

* Add support for importlib.resources

* Remove the importlib.resources imports

* return the correct data

* Some code comments, and re-order for consistency

* Disallow traversing packages below an individual collection

* Add a traversable class for namespaces

* Re-use variable

* Utilize itertools.chain.from_iterable

Co-authored-by: Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua>

* Simplify logic to check for packages from ansible loaders

Co-authored-by: Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua>

* Just a generator expression, instead of a generator

* docstrings

* Add comment about find_spec for our namespaces

* Add some initial unit tests for importlib.resources

* normalize

* Utilize importlib.resources for listing collections

* collections_path is already in config, just use config

* install uses a different default for collections_path

* Remove unused import

* Remove duplicate __truediv__

* Bring back TraversableResources

* Apply some small suggestions from code review

Co-authored-by: Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua>
Co-authored-by: Matt Davis <6775756+nitzmahone@users.noreply.github.com>

* Remove cross contamination between plugin loader code and CLI code

* Remove unused import

Co-authored-by: Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua>
Co-authored-by: Matt Davis <6775756+nitzmahone@users.noreply.github.com>
pull/79712/head
Matt Martz 2 years ago committed by GitHub
parent e41d2874a6
commit 56d142350d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -44,6 +44,7 @@ import ansible.utils.vars as utils_vars
from ansible.parsing.dataloader import DataLoader from ansible.parsing.dataloader import DataLoader
from ansible.parsing.utils.jsonify import jsonify from ansible.parsing.utils.jsonify import jsonify
from ansible.parsing.splitter import parse_kv from ansible.parsing.splitter import parse_kv
from ansible.plugins.loader import init_plugin_loader
from ansible.executor import module_common from ansible.executor import module_common
import ansible.constants as C import ansible.constants as C
from ansible.module_utils._text import to_native, to_text from ansible.module_utils._text import to_native, to_text
@ -266,6 +267,7 @@ def rundebug(debugger, modfile, argspath, modname, module_style, interpreters):
def main(): def main():
options, args = parse() options, args = parse()
init_plugin_loader()
interpreters = get_interpreters(options.interpreter) interpreters = get_interpreters(options.interpreter)
(modfile, modname, module_style) = boilerplate_module(options.module_path, options.module_args, interpreters, options.check, options.filename) (modfile, modname, module_style) = boilerplate_module(options.module_path, options.module_args, interpreters, options.check, options.filename)

@ -98,10 +98,11 @@ from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.inventory.manager import InventoryManager from ansible.inventory.manager import InventoryManager
from ansible.module_utils.six import string_types from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes, to_text from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.common.collections import is_sequence
from ansible.module_utils.common.file import is_executable from ansible.module_utils.common.file import is_executable
from ansible.parsing.dataloader import DataLoader from ansible.parsing.dataloader import DataLoader
from ansible.parsing.vault import PromptVaultSecret, get_file_vault_secret from ansible.parsing.vault import PromptVaultSecret, get_file_vault_secret
from ansible.plugins.loader import add_all_plugin_dirs from ansible.plugins.loader import add_all_plugin_dirs, init_plugin_loader
from ansible.release import __version__ from ansible.release import __version__
from ansible.utils.collection_loader import AnsibleCollectionConfig from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path
@ -154,6 +155,13 @@ class CLI(ABC):
""" """
self.parse() self.parse()
# Initialize plugin loader after parse, so that the init code can utilize parsed arguments
cli_collections_path = context.CLIARGS.get('collections_path') or []
if not is_sequence(cli_collections_path):
# In some contexts ``collections_path`` is singular
cli_collections_path = [cli_collections_path]
init_plugin_loader(cli_collections_path)
display.vv(to_text(opt_help.version(self.parser.prog))) display.vv(to_text(opt_help.version(self.parser.prog)))
if C.CONFIG_FILE: if C.CONFIG_FILE:
@ -522,6 +530,10 @@ class CLI(ABC):
@staticmethod @staticmethod
def _play_prereqs(): def _play_prereqs():
# TODO: evaluate moving all of the code that touches ``AnsibleCollectionConfig``
# into ``init_plugin_loader`` so that we can specifically remove
# ``AnsibleCollectionConfig.playbook_paths`` to make it immutable after instantiation
options = context.CLIARGS options = context.CLIARGS
# all needs loader # all needs loader

@ -12,6 +12,7 @@ from ansible.cli import CLI
import json import json
import os.path import os.path
import pathlib
import re import re
import shutil import shutil
import sys import sys
@ -97,7 +98,8 @@ def with_collection_artifacts_manager(wrapped_method):
return wrapped_method(*args, **kwargs) return wrapped_method(*args, **kwargs)
# FIXME: use validate_certs context from Galaxy servers when downloading collections # FIXME: use validate_certs context from Galaxy servers when downloading collections
artifacts_manager_kwargs = {'validate_certs': context.CLIARGS['resolved_validate_certs']} # .get used here for when this is used in a non-CLI context
artifacts_manager_kwargs = {'validate_certs': context.CLIARGS.get('resolved_validate_certs', True)}
keyring = context.CLIARGS.get('keyring', None) keyring = context.CLIARGS.get('keyring', None)
if keyring is not None: if keyring is not None:
@ -154,8 +156,8 @@ def _get_collection_widths(collections):
fqcn_set = {to_text(c.fqcn) for c in collections} fqcn_set = {to_text(c.fqcn) for c in collections}
version_set = {to_text(c.ver) for c in collections} version_set = {to_text(c.ver) for c in collections}
fqcn_length = len(max(fqcn_set, key=len)) fqcn_length = len(max(fqcn_set or [''], key=len))
version_length = len(max(version_set, key=len)) version_length = len(max(version_set or [''], key=len))
return fqcn_length, version_length return fqcn_length, version_length
@ -268,7 +270,6 @@ class GalaxyCLI(CLI):
collections_path = opt_help.argparse.ArgumentParser(add_help=False) collections_path = opt_help.argparse.ArgumentParser(add_help=False)
collections_path.add_argument('-p', '--collections-path', dest='collections_path', type=opt_help.unfrack_path(pathsep=True), collections_path.add_argument('-p', '--collections-path', dest='collections_path', type=opt_help.unfrack_path(pathsep=True),
default=AnsibleCollectionConfig.collection_paths,
action=opt_help.PrependListAction, action=opt_help.PrependListAction,
help="One or more directories to search for collections in addition " help="One or more directories to search for collections in addition "
"to the default COLLECTIONS_PATHS. Separate multiple paths " "to the default COLLECTIONS_PATHS. Separate multiple paths "
@ -1250,7 +1251,7 @@ class GalaxyCLI(CLI):
def execute_verify(self, artifacts_manager=None): def execute_verify(self, artifacts_manager=None):
collections = context.CLIARGS['args'] collections = context.CLIARGS['args']
search_paths = context.CLIARGS['collections_path'] search_paths = AnsibleCollectionConfig.collection_paths
ignore_errors = context.CLIARGS['ignore_errors'] ignore_errors = context.CLIARGS['ignore_errors']
local_verify_only = context.CLIARGS['offline'] local_verify_only = context.CLIARGS['offline']
requirements_file = context.CLIARGS['requirements'] requirements_file = context.CLIARGS['requirements']
@ -1577,7 +1578,9 @@ class GalaxyCLI(CLI):
display.warning(w) display.warning(w)
if not path_found: if not path_found:
raise AnsibleOptionsError("- None of the provided paths were usable. Please specify a valid path with --{0}s-path".format(context.CLIARGS['type'])) raise AnsibleOptionsError(
"- None of the provided paths were usable. Please specify a valid path with --{0}s-path".format(context.CLIARGS['type'])
)
return 0 return 0
@ -1592,100 +1595,66 @@ class GalaxyCLI(CLI):
artifacts_manager.require_build_metadata = False artifacts_manager.require_build_metadata = False
output_format = context.CLIARGS['output_format'] output_format = context.CLIARGS['output_format']
collections_search_paths = set(context.CLIARGS['collections_path'])
collection_name = context.CLIARGS['collection'] collection_name = context.CLIARGS['collection']
default_collections_path = AnsibleCollectionConfig.collection_paths default_collections_path = set(C.COLLECTIONS_PATHS)
collections_search_paths = (
set(context.CLIARGS['collections_path'] or []) | default_collections_path | set(AnsibleCollectionConfig.collection_paths)
)
collections_in_paths = {} collections_in_paths = {}
warnings = [] warnings = []
path_found = False path_found = False
collection_found = False collection_found = False
for path in collections_search_paths:
collection_path = GalaxyCLI._resolve_path(path)
if not os.path.exists(path):
if path in default_collections_path:
# don't warn for missing default paths
continue
warnings.append("- the configured path {0} does not exist.".format(collection_path))
continue
if not os.path.isdir(collection_path):
warnings.append("- the configured path {0}, exists, but it is not a directory.".format(collection_path))
continue
path_found = True
namespace_filter = None
collection_filter = None
if collection_name: if collection_name:
# list a specific collection # list a specific collection
validate_collection_name(collection_name) validate_collection_name(collection_name)
namespace, collection = collection_name.split('.') namespace_filter, collection_filter = collection_name.split('.')
collection_path = validate_collection_path(collection_path)
b_collection_path = to_bytes(os.path.join(collection_path, namespace, collection), errors='surrogate_or_strict')
if not os.path.exists(b_collection_path):
warnings.append("- unable to find {0} in collection paths".format(collection_name))
continue
if not os.path.isdir(collection_path):
warnings.append("- the configured path {0}, exists, but it is not a directory.".format(collection_path))
continue
collection_found = True
try:
collection = Requirement.from_dir_path_as_unknown(
b_collection_path,
artifacts_manager,
)
except ValueError as val_err:
six.raise_from(AnsibleError(val_err), val_err)
if output_format in {'yaml', 'json'}:
collections_in_paths[collection_path] = {
collection.fqcn: {'version': collection.ver}
}
continue
fqcn_width, version_width = _get_collection_widths([collection])
_display_header(collection_path, 'Collection', 'Version', fqcn_width, version_width)
_display_collection(collection, fqcn_width, version_width)
else:
# list all collections
collection_path = validate_collection_path(path)
if os.path.isdir(collection_path):
display.vvv("Searching {0} for collections".format(collection_path))
collections = list(find_existing_collections( collections = list(find_existing_collections(
collection_path, artifacts_manager, list(collections_search_paths),
artifacts_manager,
namespace_filter=namespace_filter,
collection_filter=collection_filter,
dedupe=False
)) ))
else:
# There was no 'ansible_collections/' directory in the path, so there
# are no collections here. # are no collections here.
display.vvv("No 'ansible_collections' directory found at {0}".format(collection_path))
continue
if not collections: seen = set()
display.vvv("No collections found at {0}".format(collection_path)) fqcn_width, version_width = _get_collection_widths(collections)
continue for collection in sorted(collections, key=lambda c: c.src):
collection_found = True
collection_path = pathlib.Path(to_text(collection.src)).parent.parent.as_posix()
if output_format in {'yaml', 'json'}: if output_format in {'yaml', 'json'}:
collections_in_paths[collection_path] = { collections_in_paths[collection_path] = {
collection.fqcn: {'version': collection.ver} for collection in collections collection.fqcn: {'version': collection.ver} for collection in collections
} }
else:
if collection_path not in seen:
_display_header(
collection_path,
'Collection',
'Version',
fqcn_width,
version_width
)
seen.add(collection_path)
_display_collection(collection, fqcn_width, version_width)
path_found = False
for path in collections_search_paths:
if not os.path.exists(path):
if path in default_collections_path:
# don't warn for missing default paths
continue continue
warnings.append("- the configured path {0} does not exist.".format(path))
# Display header elif os.path.exists(path) and not os.path.isdir(path):
fqcn_width, version_width = _get_collection_widths(collections) warnings.append("- the configured path {0}, exists, but it is not a directory.".format(path))
_display_header(collection_path, 'Collection', 'Version', fqcn_width, version_width) else:
path_found = True
# Sort collections by the namespace and name
for collection in sorted(collections, key=to_text):
_display_collection(collection, fqcn_width, version_width)
# Do not warn if the specific collection was found in any of the search paths # Do not warn if the specific collection was found in any of the search paths
if collection_found and collection_name: if collection_found and collection_name:
@ -1694,8 +1663,10 @@ class GalaxyCLI(CLI):
for w in warnings: for w in warnings:
display.warning(w) display.warning(w)
if not path_found: if not collections and not path_found:
raise AnsibleOptionsError("- None of the provided paths were usable. Please specify a valid path with --{0}s-path".format(context.CLIARGS['type'])) raise AnsibleOptionsError(
"- None of the provided paths were usable. Please specify a valid path with --{0}s-path".format(context.CLIARGS['type'])
)
if output_format == 'json': if output_format == 'json':
display.display(json.dumps(collections_in_paths)) display.display(json.dumps(collections_in_paths))

@ -29,7 +29,7 @@ from ansible.module_utils.connection import Connection, ConnectionError, send_da
from ansible.module_utils.service import fork_process from ansible.module_utils.service import fork_process
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
from ansible.playbook.play_context import PlayContext from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import connection_loader from ansible.plugins.loader import connection_loader, init_plugin_loader
from ansible.utils.path import unfrackpath, makedirs_safe from ansible.utils.path import unfrackpath, makedirs_safe
from ansible.utils.display import Display from ansible.utils.display import Display
from ansible.utils.jsonrpc import JsonRpcServer from ansible.utils.jsonrpc import JsonRpcServer
@ -230,6 +230,7 @@ def main(args=None):
parser.add_argument('playbook_pid') parser.add_argument('playbook_pid')
parser.add_argument('task_uuid') parser.add_argument('task_uuid')
args = parser.parse_args(args[1:] if args is not None else args) args = parser.parse_args(args[1:] if args is not None else args)
init_plugin_loader()
# initialize verbosity # initialize verbosity
display.verbosity = args.verbosity display.verbosity = args.verbosity

@ -9,7 +9,8 @@ import os
from collections import defaultdict from collections import defaultdict
from ansible.errors import AnsibleError from ansible.errors import AnsibleError
from ansible.collections import is_collection_path from ansible.cli.galaxy import with_collection_artifacts_manager
from ansible.galaxy.collection import find_existing_collections
from ansible.module_utils._text import to_bytes from ansible.module_utils._text import to_bytes
from ansible.utils.collection_loader import AnsibleCollectionConfig from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path
@ -18,13 +19,12 @@ from ansible.utils.display import Display
display = Display() display = Display()
def list_collections(coll_filter=None, search_paths=None, dedupe=False): @with_collection_artifacts_manager
def list_collections(coll_filter=None, search_paths=None, dedupe=True, artifacts_manager=None):
collections = {} collections = {}
for candidate in list_collection_dirs(search_paths=search_paths, coll_filter=coll_filter): for candidate in list_collection_dirs(search_paths=search_paths, coll_filter=coll_filter, artifacts_manager=artifacts_manager, dedupe=dedupe):
if os.path.exists(candidate):
collection = _get_collection_name_from_path(candidate) collection = _get_collection_name_from_path(candidate)
if collection not in collections or not dedupe:
collections[collection] = candidate collections[collection] = candidate
return collections return collections
@ -59,7 +59,8 @@ def list_valid_collection_paths(search_paths=None, warn=False):
yield path yield path
def list_collection_dirs(search_paths=None, coll_filter=None): @with_collection_artifacts_manager
def list_collection_dirs(search_paths=None, coll_filter=None, artifacts_manager=None, dedupe=True):
""" """
Return paths for the specific collections found in passed or configured search paths Return paths for the specific collections found in passed or configured search paths
:param search_paths: list of text-string paths, if none load default config :param search_paths: list of text-string paths, if none load default config
@ -67,48 +68,18 @@ def list_collection_dirs(search_paths=None, coll_filter=None):
:return: list of collection directory paths :return: list of collection directory paths
""" """
collection = None namespace_filter = None
namespace = None collection_filter = None
if coll_filter is not None: if coll_filter is not None:
if '.' in coll_filter: if '.' in coll_filter:
try: try:
(namespace, collection) = coll_filter.split('.') namespace_filter, collection_filter = coll_filter.split('.')
except ValueError: except ValueError:
raise AnsibleError("Invalid collection pattern supplied: %s" % coll_filter) raise AnsibleError("Invalid collection pattern supplied: %s" % coll_filter)
else: else:
namespace = coll_filter namespace_filter = coll_filter
collections = defaultdict(dict) for req in find_existing_collections(search_paths, artifacts_manager, namespace_filter=namespace_filter,
for path in list_valid_collection_paths(search_paths): collection_filter=collection_filter, dedupe=dedupe):
if os.path.basename(path) != 'ansible_collections': yield to_bytes(req.src)
path = os.path.join(path, 'ansible_collections')
b_coll_root = to_bytes(path, errors='surrogate_or_strict')
if os.path.exists(b_coll_root) and os.path.isdir(b_coll_root):
if namespace is None:
namespaces = os.listdir(b_coll_root)
else:
namespaces = [namespace]
for ns in namespaces:
b_namespace_dir = os.path.join(b_coll_root, to_bytes(ns))
if os.path.isdir(b_namespace_dir):
if collection is None:
colls = os.listdir(b_namespace_dir)
else:
colls = [collection]
for mycoll in colls:
# skip dupe collections as they will be masked in execution
if mycoll not in collections[ns]:
b_coll = to_bytes(mycoll)
b_coll_dir = os.path.join(b_namespace_dir, b_coll)
if is_collection_path(b_coll_dir):
collections[ns][mycoll] = b_coll_dir
yield b_coll_dir

@ -0,0 +1,20 @@
# Copyright: Contributors to the Ansible project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

# Compatibility shim: expose ``files`` from the stdlib ``importlib.resources``
# on Python 3.10+, falling back to the third-party ``importlib_resources``
# backport on older interpreters. ``HAS_IMPORTLIB_RESOURCES`` records whether
# a usable ``files`` implementation was found; when it is False, ``files``
# is bound to ``None``.

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import sys

HAS_IMPORTLIB_RESOURCES = False

if sys.version_info >= (3, 10):
    # The stdlib implementation is always present on 3.10 and newer.
    from importlib.resources import files
    HAS_IMPORTLIB_RESOURCES = True
else:
    # On older interpreters the backport may or may not be installed.
    try:
        from importlib_resources import files  # type: ignore[import]
    except ImportError:
        files = None  # type: ignore[assignment]
    else:
        HAS_IMPORTLIB_RESOURCES = True

@ -11,6 +11,7 @@ import fnmatch
import functools import functools
import json import json
import os import os
import pathlib
import queue import queue
import re import re
import shutil import shutil
@ -83,6 +84,7 @@ if t.TYPE_CHECKING:
FilesManifestType = t.Dict[t.Literal['files', 'format'], t.Union[t.List[FileManifestEntryType], int]] FilesManifestType = t.Dict[t.Literal['files', 'format'], t.Union[t.List[FileManifestEntryType], int]]
import ansible.constants as C import ansible.constants as C
from ansible.compat.importlib_resources import files
from ansible.errors import AnsibleError from ansible.errors import AnsibleError
from ansible.galaxy.api import GalaxyAPI from ansible.galaxy.api import GalaxyAPI
from ansible.galaxy.collection.concrete_artifact_manager import ( from ansible.galaxy.collection.concrete_artifact_manager import (
@ -1402,30 +1404,69 @@ def _build_collection_dir(b_collection_path, b_collection_output, collection_man
return collection_output return collection_output
def find_existing_collections(path, artifacts_manager): def _normalize_collection_path(path):
str_path = path.as_posix() if isinstance(path, pathlib.Path) else path
return pathlib.Path(
# This is annoying, but GalaxyCLI._resolve_path did it
os.path.expandvars(str_path)
).expanduser().absolute()
def find_existing_collections(path_filter, artifacts_manager, namespace_filter=None, collection_filter=None, dedupe=True):
"""Locate all collections under a given path. """Locate all collections under a given path.
:param path: Collection dirs layout search path. :param path: Collection dirs layout search path.
:param artifacts_manager: Artifacts manager. :param artifacts_manager: Artifacts manager.
""" """
b_path = to_bytes(path, errors='surrogate_or_strict') if files is None:
raise AnsibleError('importlib_resources is not installed and is required')
if path_filter and not is_sequence(path_filter):
path_filter = [path_filter]
paths = set()
for path in files('ansible_collections').glob('*/*/'):
path = _normalize_collection_path(path)
if not path.is_dir():
continue
if path_filter:
for pf in path_filter:
try:
path.relative_to(_normalize_collection_path(pf))
except ValueError:
continue
break
else:
continue
paths.add(path)
# FIXME: consider using `glob.glob()` to simplify looping seen = set()
for b_namespace in os.listdir(b_path): for path in paths:
b_namespace_path = os.path.join(b_path, b_namespace) namespace = path.parent.name
if os.path.isfile(b_namespace_path): name = path.name
if namespace_filter and namespace != namespace_filter:
continue
if collection_filter and name != collection_filter:
continue continue
# FIXME: consider feeding b_namespace_path to Candidate.from_dir_path to get subdirs automatically if dedupe:
for b_collection in os.listdir(b_namespace_path): try:
b_collection_path = os.path.join(b_namespace_path, b_collection) collection_path = files(f'ansible_collections.{namespace}.{name}')
if not os.path.isdir(b_collection_path): except ImportError:
continue continue
if collection_path in seen:
continue
seen.add(collection_path)
else:
collection_path = path
b_collection_path = to_bytes(collection_path.as_posix())
try: try:
req = Candidate.from_dir_path_as_unknown(b_collection_path, artifacts_manager) req = Candidate.from_dir_path_as_unknown(b_collection_path, artifacts_manager)
except ValueError as val_err: except ValueError as val_err:
raise_from(AnsibleError(val_err), val_err) display.warning(f'{val_err}')
continue
display.vvv( display.vvv(
u"Found installed collection {coll!s} at '{path!s}'". u"Found installed collection {coll!s} at '{path!s}'".

@ -44,6 +44,7 @@ def get_composite_name(collection, name, path, depth):
def _list_plugins_from_paths(ptype, dirs, collection, depth=0): def _list_plugins_from_paths(ptype, dirs, collection, depth=0):
# TODO: update to use importlib.resources
plugins = {} plugins = {}
@ -117,6 +118,7 @@ def _list_j2_plugins_from_file(collection, plugin_path, ptype, plugin_name):
def list_collection_plugins(ptype, collections, search_paths=None): def list_collection_plugins(ptype, collections, search_paths=None):
# TODO: update to use importlib.resources
# starts at {plugin_name: filepath, ...}, but changes at the end # starts at {plugin_name: filepath, ...}, but changes at the end
plugins = {} plugins = {}

@ -17,6 +17,7 @@ import warnings
from collections import defaultdict, namedtuple from collections import defaultdict, namedtuple
from traceback import format_exc from traceback import format_exc
import ansible.module_utils.compat.typing as t
from ansible import __version__ as ansible_version from ansible import __version__ as ansible_version
from ansible import constants as C from ansible import constants as C
from ansible.errors import AnsibleError, AnsiblePluginCircularRedirect, AnsiblePluginRemovedError, AnsibleCollectionUnsupportedVersionError from ansible.errors import AnsibleError, AnsiblePluginCircularRedirect, AnsiblePluginRemovedError, AnsibleCollectionUnsupportedVersionError
@ -42,6 +43,7 @@ except ImportError:
import importlib.util import importlib.util
_PLUGIN_FILTERS = defaultdict(frozenset) # type: t.DefaultDict[str, frozenset]
display = Display() display = Display()
get_with_context_result = namedtuple('get_with_context_result', ['object', 'plugin_load_context']) get_with_context_result = namedtuple('get_with_context_result', ['object', 'plugin_load_context'])
@ -1357,7 +1359,7 @@ def get_fqcr_and_name(resource, collection='ansible.builtin'):
def _load_plugin_filter(): def _load_plugin_filter():
filters = defaultdict(frozenset) filters = _PLUGIN_FILTERS
user_set = False user_set = False
if C.PLUGIN_FILTERS_CFG is None: if C.PLUGIN_FILTERS_CFG is None:
filter_cfg = '/etc/ansible/plugin_filters.yml' filter_cfg = '/etc/ansible/plugin_filters.yml'
@ -1455,25 +1457,38 @@ def _does_collection_support_ansible_version(requirement_string, ansible_version
return ss.contains(base_ansible_version) return ss.contains(base_ansible_version)
def _configure_collection_loader(): def _configure_collection_loader(prefix_collections_path=None):
if AnsibleCollectionConfig.collection_finder: if AnsibleCollectionConfig.collection_finder:
# this must be a Python warning so that it can be filtered out by the import sanity test # this must be a Python warning so that it can be filtered out by the import sanity test
warnings.warn('AnsibleCollectionFinder has already been configured') warnings.warn('AnsibleCollectionFinder has already been configured')
return return
finder = _AnsibleCollectionFinder(C.COLLECTIONS_PATHS, C.COLLECTIONS_SCAN_SYS_PATH) if prefix_collections_path is None:
prefix_collections_path = []
paths = list(prefix_collections_path) + C.COLLECTIONS_PATHS
finder = _AnsibleCollectionFinder(paths, C.COLLECTIONS_SCAN_SYS_PATH)
finder._install() finder._install()
# this should succeed now # this should succeed now
AnsibleCollectionConfig.on_collection_load += _on_collection_load_handler AnsibleCollectionConfig.on_collection_load += _on_collection_load_handler
# TODO: All of the following is initialization code It should be moved inside of an initialization def init_plugin_loader(prefix_collections_path=None):
# function which is called at some point early in the ansible and ansible-playbook CLI startup. """Initialize the plugin filters and the collection loaders
This method must be called to configure and insert the collection python loaders
into ``sys.meta_path`` and ``sys.path_hooks``.
This method is only called in ``CLI.run`` after CLI args have been parsed, so that
instantiation of the collection finder can utilize parsed CLI args, and to not cause
side effects.
"""
_load_plugin_filter()
_configure_collection_loader(prefix_collections_path)
_PLUGIN_FILTERS = _load_plugin_filter()
_configure_collection_loader() # TODO: Evaluate making these class instantiations lazy, but keep them in the global scope
# doc fragments first # doc fragments first
fragment_loader = PluginLoader( fragment_loader = PluginLoader(

@ -7,6 +7,7 @@
from __future__ import (absolute_import, division, print_function) from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
import itertools
import os import os
import os.path import os.path
import pkgutil import pkgutil
@ -39,7 +40,12 @@ except ImportError:
reload_module = reload # type: ignore[name-defined] # pylint:disable=undefined-variable reload_module = reload # type: ignore[name-defined] # pylint:disable=undefined-variable
try: try:
from importlib.util import spec_from_loader from importlib.abc import TraversableResources
except ImportError:
TraversableResources = object # type: ignore[assignment,misc]
try:
from importlib.util import find_spec, spec_from_loader
except ImportError: except ImportError:
pass pass
@ -50,6 +56,11 @@ except ImportError:
else: else:
HAS_FILE_FINDER = True HAS_FILE_FINDER = True
try:
import pathlib
except ImportError:
pass
# NB: this supports import sanity test providing a different impl # NB: this supports import sanity test providing a different impl
try: try:
from ._collection_meta import _meta_yml_to_dict from ._collection_meta import _meta_yml_to_dict
@ -78,6 +89,141 @@ except AttributeError: # Python 2
PB_EXTENSIONS = ('.yml', '.yaml') PB_EXTENSIONS = ('.yml', '.yaml')
SYNTHETIC_PACKAGE_NAME = '<ansible_synthetic_collection_package>'
class _AnsibleNSTraversable:
    """Implementation of the ``importlib.resources.abc.Traversable``
    interface for the following ``ansible_collections`` namespace packages::

        * ``ansible_collections``
        * ``ansible_collections.<namespace>``

    Unlike an ordinary Python namespace package, these namespaces may be
    spread across several filesystem roots yet still behave as a single
    logical namespace, for example::

        * ``/usr/share/ansible/collections/ansible_collections/ansible/posix/``
        * ``/home/user/.ansible/collections/ansible_collections/ansible/windows/``

    The ``pathlib.Path``-like methods below therefore merge the results
    from every configured root path into one combined stream.

    No de-duplication is performed: the same collection may show up under
    more than one root even though precedence rules will only ever use one
    of them, and traversal gives no hint as to which copy wins. When that
    matters, resolve the individual collection package via
    ``importlib.resources.files`` rather than continuing to traverse from
    the namespace package.

    Methods that make no sense for a multi-rooted namespace raise
    ``NotImplementedError``.
    """
    def __init__(self, *paths):
        # Materialize every root as a pathlib.Path up front.
        self._paths = [pathlib.Path(path) for path in paths]

    def __repr__(self):
        quoted_roots = "', '".join(map(to_text, self._paths))
        return "_AnsibleNSTraversable('%s')" % quoted_roots

    def iterdir(self):
        existing_roots = (root for root in self._paths if root.is_dir())
        return itertools.chain.from_iterable(root.iterdir() for root in existing_roots)

    def is_dir(self):
        # The namespace is a directory as soon as any root exists on disk.
        return any(root.is_dir() for root in self._paths)

    def is_file(self):
        # A namespace package can never be a regular file.
        return False

    def glob(self, pattern):
        existing_roots = (root for root in self._paths if root.is_dir())
        return itertools.chain.from_iterable(root.glob(pattern) for root in existing_roots)

    def _not_implemented(self, *args, **kwargs):
        raise NotImplementedError('not usable on namespaces')

    joinpath = __truediv__ = read_bytes = read_text = _not_implemented
class _AnsibleTraversableResources(TraversableResources):
    """Implements ``importlib.resources.abc.TraversableResources`` for the
    collection Python loaders.

    The result of ``files`` will depend on whether a particular collection, or
    a sub package of a collection was referenced, as opposed to
    ``ansible_collections`` or a particular namespace. For a collection and
    its subpackages, a ``pathlib.Path`` instance will be returned, whereas
    for the higher level namespace packages, ``_AnsibleNSTraversable``
    will be returned.
    """
    def __init__(self, package, loader):
        # ``package`` is either a dotted package name or an imported module;
        # ``loader`` is the collection loader that owns the package.
        self._package = package
        self._loader = loader

    def _get_name(self, package):
        """Return the dotted name from either a ``ModuleSpec`` or a module."""
        try:
            # spec
            return package.name
        except AttributeError:
            # module
            return package.__name__

    def _get_package(self, package):
        """Return the parent package name from either a spec or a module."""
        try:
            # spec
            return package.__parent__
        except AttributeError:
            # module
            return package.__package__

    def _get_path(self, package):
        """Return the filesystem origin from either a spec or a module."""
        try:
            # spec
            return package.origin
        except AttributeError:
            # module
            return package.__file__

    def _is_ansible_ns_package(self, package):
        """Report whether *package* appears to come from the ansible collection loaders."""
        origin = getattr(package, 'origin', None)
        if not origin:
            return False

        if origin == SYNTHETIC_PACKAGE_NAME:
            return True

        module_filename = os.path.basename(origin)
        return module_filename in {'__synthetic__', '__init__.py'}

    def _ensure_package(self, package):
        """Raise ``TypeError`` unless *package* is actually a package."""
        if self._is_ansible_ns_package(package):
            # Short circuit our loaders
            return
        if self._get_package(package) != package.__name__:
            raise TypeError('%r is not a package' % package.__name__)

    def files(self):
        """Return a ``Traversable`` for the package this reader was built for.

        Returns ``_AnsibleNSTraversable`` for ``ansible_collections`` and the
        per-namespace packages (which may span multiple filesystem roots),
        and a ``pathlib.Path`` for an individual collection or sub package.

        :raises TypeError: if the stored package is neither a string nor a
            module, or is not a package at all.
        """
        package = self._package

        # Normalize to a dotted name before splitting. The previous
        # implementation called ``package.split('.')`` unconditionally, which
        # raised AttributeError whenever a module object was supplied, even
        # though the isinstance checks below clearly intend to accept modules.
        if isinstance(package, string_types):
            name = package
        elif isinstance(package, ModuleType):
            name = package.__name__
        else:
            raise TypeError('Expected string or module, got %r' % package.__class__.__name__)

        parts = name.split('.')
        is_ns = parts[0] == 'ansible_collections' and len(parts) < 3

        if isinstance(package, string_types):
            if is_ns:
                # Don't use ``spec_from_loader`` here, because that will point
                # to exactly 1 location for a namespace. Use ``find_spec``
                # to get a list of all locations for the namespace
                package = find_spec(package)
            else:
                package = spec_from_loader(package, self._loader)

        self._ensure_package(package)
        if is_ns:
            # A spec exposes ``submodule_search_locations``; a module object
            # exposes the same namespace portions via ``__path__``.
            search_locations = getattr(package, 'submodule_search_locations', None)
            if search_locations is None:
                search_locations = package.__path__
            return _AnsibleNSTraversable(*search_locations)

        return pathlib.Path(self._get_path(package)).parent
class _AnsibleCollectionFinder: class _AnsibleCollectionFinder:
@ -423,6 +569,9 @@ class _AnsibleCollectionPkgLoaderBase:
return module_path, has_code, package_path return module_path, has_code, package_path
def get_resource_reader(self, fullname):
    # importlib.resources hook: expose the resources of package *fullname*
    # through a TraversableResources reader backed by this loader.
    return _AnsibleTraversableResources(fullname, self)
def exec_module(self, module): def exec_module(self, module):
# short-circuit redirect; avoid reinitializing existing modules # short-circuit redirect; avoid reinitializing existing modules
if self._redirect_module: if self._redirect_module:
@ -509,7 +658,7 @@ class _AnsibleCollectionPkgLoaderBase:
return None return None
def _synthetic_filename(self, fullname): def _synthetic_filename(self, fullname):
return '<ansible_synthetic_collection_package>' return SYNTHETIC_PACKAGE_NAME
def get_filename(self, fullname): def get_filename(self, fullname):
if fullname != self._fullname: if fullname != self._fullname:
@ -748,6 +897,9 @@ class _AnsibleInternalRedirectLoader:
if not self._redirect: if not self._redirect:
raise ImportError('not redirected, go ask path_hook') raise ImportError('not redirected, go ask path_hook')
def get_resource_reader(self, fullname):
    # importlib.resources hook: expose the resources of package *fullname*
    # through a TraversableResources reader backed by this loader.
    return _AnsibleTraversableResources(fullname, self)
def exec_module(self, module): def exec_module(self, module):
# should never see this # should never see this
if not self._redirect: if not self._redirect:

@ -7,6 +7,9 @@ jinja2 >= 3.0.0
PyYAML >= 5.1 # PyYAML 5.1 is required for Python 3.8+ support PyYAML >= 5.1 # PyYAML 5.1 is required for Python 3.8+ support
cryptography cryptography
packaging packaging
# importlib.resources in stdlib for py3.9 is lacking native hooks for
# importlib.resources.files
importlib_resources >= 5.0, < 5.1; python_version < '3.10'
# NOTE: resolvelib 0.x version bumps should be considered major/breaking # NOTE: resolvelib 0.x version bumps should be considered major/breaking
# NOTE: and we should update the upper cap with care, at least until 1.0 # NOTE: and we should update the upper cap with care, at least until 1.0
# NOTE: Ref: https://github.com/sarugaku/resolvelib/issues/69 # NOTE: Ref: https://github.com/sarugaku/resolvelib/issues/69

@ -17,7 +17,7 @@
"version": "0.1.1231", "version": "0.1.1231",
"readme": "README.md", "readme": "README.md",
"license_file": "COPYING", "license_file": "COPYING",
"homepage": "", "homepage": ""
}, },
"file_manifest_file": { "file_manifest_file": {
"format": 1, "format": 1,

@ -17,7 +17,7 @@
"version": "0.1.1231", "version": "0.1.1231",
"readme": "README.md", "readme": "README.md",
"license_file": "COPYING", "license_file": "COPYING",
"homepage": "", "homepage": ""
}, },
"file_manifest_file": { "file_manifest_file": {
"format": 1, "format": 1,

@ -17,7 +17,7 @@
"version": "1.2.0", "version": "1.2.0",
"readme": "README.md", "readme": "README.md",
"license_file": "COPYING", "license_file": "COPYING",
"homepage": "", "homepage": ""
}, },
"file_manifest_file": { "file_manifest_file": {
"format": 1, "format": 1,

@ -137,7 +137,7 @@
register: list_result_error register: list_result_error
ignore_errors: True ignore_errors: True
environment: environment:
ANSIBLE_COLLECTIONS_PATH: "" ANSIBLE_COLLECTIONS_PATH: "i_dont_exist"
- assert: - assert:
that: that:

@ -7,6 +7,9 @@ jinja2 >= 3.0.0
PyYAML >= 5.1 # PyYAML 5.1 is required for Python 3.8+ support PyYAML >= 5.1 # PyYAML 5.1 is required for Python 3.8+ support
cryptography cryptography
packaging packaging
# importlib.resources in stdlib for py3.9 is lacking native hooks for
# importlib.resources.files
importlib_resources >= 5.0, < 5.1; python_version < '3.10'
# NOTE: resolvelib 0.x version bumps should be considered major/breaking # NOTE: resolvelib 0.x version bumps should be considered major/breaking
# NOTE: and we should update the upper cap with care, at least until 1.0 # NOTE: and we should update the upper cap with care, at least until 1.0
# NOTE: Ref: https://github.com/sarugaku/resolvelib/issues/69 # NOTE: Ref: https://github.com/sarugaku/resolvelib/issues/69

@ -5,14 +5,18 @@
from __future__ import absolute_import, division, print_function from __future__ import absolute_import, division, print_function
__metaclass__ = type __metaclass__ = type
import pathlib
import pytest import pytest
from ansible import constants as C
from ansible import context from ansible import context
from ansible.cli.galaxy import GalaxyCLI from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError, AnsibleOptionsError from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.galaxy import collection from ansible.galaxy import collection
from ansible.galaxy.dependency_resolution.dataclasses import Requirement from ansible.galaxy.dependency_resolution.dataclasses import Requirement
from ansible.module_utils._text import to_native from ansible.module_utils._text import to_native
from ansible.plugins.loader import init_plugin_loader
def path_exists(path): def path_exists(path):
@ -22,20 +26,18 @@ def path_exists(path):
return False return False
elif to_native(path) == 'nope': elif to_native(path) == 'nope':
return False return False
else:
return True return True
def isdir(path): def isdir(path):
if to_native(path) == 'nope': if to_native(path) == 'nope':
return False return False
else:
return True return True
def cliargs(collections_paths=None, collection_name=None): def cliargs(collections_paths=None, collection_name=None):
if collections_paths is None: if collections_paths is None:
collections_paths = ['~/root/.ansible/collections', '/usr/share/ansible/collections'] collections_paths = ['/root/.ansible/collections', '/usr/share/ansible/collections']
context.CLIARGS._store = { context.CLIARGS._store = {
'collections_path': collections_paths, 'collections_path': collections_paths,
@ -46,95 +48,61 @@ def cliargs(collections_paths=None, collection_name=None):
@pytest.fixture @pytest.fixture
def mock_collection_objects(mocker): def mock_from_path(mocker, monkeypatch):
mocker.patch('ansible.cli.galaxy.GalaxyCLI._resolve_path', side_effect=['/root/.ansible/collections', '/usr/share/ansible/collections']) collection_args = {
mocker.patch('ansible.cli.galaxy.validate_collection_path', '/usr/share/ansible/collections/ansible_collections/sandwiches/pbj': (
side_effect=['/root/.ansible/collections/ansible_collections', '/usr/share/ansible/collections/ansible_collections'])
collection_args_1 = (
(
'sandwiches.pbj',
'1.5.0',
None,
'dir',
None,
),
(
'sandwiches.reuben',
'2.5.0',
None,
'dir',
None,
),
)
collection_args_2 = (
(
'sandwiches.pbj', 'sandwiches.pbj',
'1.0.0', '1.0.0',
None, '/usr/share/ansible/collections/ansible_collections/sandwiches/pbj',
'dir', 'dir',
None, None,
), ),
( '/usr/share/ansible/collections/ansible_collections/sandwiches/ham': (
'sandwiches.ham', 'sandwiches.ham',
'1.0.0', '1.0.0',
None, '/usr/share/ansible/collections/ansible_collections/sandwiches/ham',
'dir', 'dir',
None, None,
), ),
) '/root/.ansible/collections/ansible_collections/sandwiches/pbj': (
collections_path_1 = [Requirement(*cargs) for cargs in collection_args_1]
collections_path_2 = [Requirement(*cargs) for cargs in collection_args_2]
mocker.patch('ansible.cli.galaxy.find_existing_collections', side_effect=[collections_path_1, collections_path_2])
@pytest.fixture
def mock_from_path(mocker):
def _from_path(collection_name='pbj'):
collection_args = {
'sandwiches.pbj': (
(
'sandwiches.pbj', 'sandwiches.pbj',
'1.5.0', '1.5.0',
None, '/root/.ansible/collections/ansible_collections/sandwiches/pbj',
'dir', 'dir',
None, None,
), ),
( '/root/.ansible/collections/ansible_collections/sandwiches/reuben': (
'sandwiches.pbj', 'sandwiches.reuben',
'1.0.0', '2.5.0',
None, '/root/.ansible/collections/ansible_collections/sandwiches/reuben',
'dir',
None,
),
),
'sandwiches.ham': (
(
'sandwiches.ham',
'1.0.0',
None,
'dir', 'dir',
None, None,
), ),
),
} }
from_path_objects = [Requirement(*args) for args in collection_args[collection_name]] def dispatch_requirement(path, am):
mocker.patch('ansible.cli.galaxy.Requirement.from_dir_path_as_unknown', side_effect=from_path_objects) return Requirement(*collection_args[to_native(path)])
files_mock = mocker.MagicMock()
mocker.patch('ansible.galaxy.collection.files', return_value=files_mock)
files_mock.glob.return_value = []
mocker.patch.object(pathlib.Path, 'is_dir', return_value=True)
for path, args in collection_args.items():
files_mock.glob.return_value.append(pathlib.Path(args[2]))
return _from_path mocker.patch('ansible.galaxy.collection.Candidate.from_dir_path_as_unknown', side_effect=dispatch_requirement)
monkeypatch.setattr(C, 'COLLECTIONS_PATHS', ['/root/.ansible/collections', '/usr/share/ansible/collections'])
def test_execute_list_collection_all(mocker, capsys, mock_collection_objects, tmp_path_factory):
def test_execute_list_collection_all(mocker, capsys, mock_from_path, tmp_path_factory):
"""Test listing all collections from multiple paths""" """Test listing all collections from multiple paths"""
cliargs() cliargs()
init_plugin_loader()
mocker.patch('os.path.exists', return_value=True) mocker.patch('os.path.exists', return_value=True)
mocker.patch('os.path.isdir', return_value=True)
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list']) gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list'])
tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections') tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False) concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
@ -153,20 +121,21 @@ def test_execute_list_collection_all(mocker, capsys, mock_collection_objects, tm
assert out_lines[6] == '' assert out_lines[6] == ''
assert out_lines[7] == '# /usr/share/ansible/collections/ansible_collections' assert out_lines[7] == '# /usr/share/ansible/collections/ansible_collections'
assert out_lines[8] == 'Collection Version' assert out_lines[8] == 'Collection Version'
assert out_lines[9] == '-------------- -------' assert out_lines[9] == '----------------- -------'
assert out_lines[10] == 'sandwiches.ham 1.0.0 ' assert out_lines[10] == 'sandwiches.ham 1.0.0 '
assert out_lines[11] == 'sandwiches.pbj 1.0.0 ' assert out_lines[11] == 'sandwiches.pbj 1.0.0 '
def test_execute_list_collection_specific(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory): def test_execute_list_collection_specific(mocker, capsys, mock_from_path, tmp_path_factory):
"""Test listing a specific collection""" """Test listing a specific collection"""
collection_name = 'sandwiches.ham' collection_name = 'sandwiches.ham'
mock_from_path(collection_name)
cliargs(collection_name=collection_name) cliargs(collection_name=collection_name)
init_plugin_loader()
mocker.patch('os.path.exists', path_exists) mocker.patch('os.path.exists', path_exists)
mocker.patch('os.path.isdir', return_value=True) # mocker.patch.object(pathlib.Path, 'is_dir', return_value=True)
mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name) mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name)
mocker.patch('ansible.cli.galaxy._get_collection_widths', return_value=(14, 5)) mocker.patch('ansible.cli.galaxy._get_collection_widths', return_value=(14, 5))
@ -186,15 +155,16 @@ def test_execute_list_collection_specific(mocker, capsys, mock_collection_object
assert out_lines[4] == 'sandwiches.ham 1.0.0 ' assert out_lines[4] == 'sandwiches.ham 1.0.0 '
def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory): def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_from_path, tmp_path_factory):
"""Test listing a specific collection that exists at multiple paths""" """Test listing a specific collection that exists at multiple paths"""
collection_name = 'sandwiches.pbj' collection_name = 'sandwiches.pbj'
mock_from_path(collection_name)
cliargs(collection_name=collection_name) cliargs(collection_name=collection_name)
init_plugin_loader()
mocker.patch('os.path.exists', path_exists) mocker.patch('os.path.exists', path_exists)
mocker.patch('os.path.isdir', return_value=True) # mocker.patch.object(pathlib.Path, 'is_dir', return_value=True)
mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name) mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name)
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name]) gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name])
@ -221,6 +191,8 @@ def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collect
def test_execute_list_collection_specific_invalid_fqcn(mocker, tmp_path_factory): def test_execute_list_collection_specific_invalid_fqcn(mocker, tmp_path_factory):
"""Test an invalid fully qualified collection name (FQCN)""" """Test an invalid fully qualified collection name (FQCN)"""
init_plugin_loader()
collection_name = 'no.good.name' collection_name = 'no.good.name'
cliargs(collection_name=collection_name) cliargs(collection_name=collection_name)
@ -238,6 +210,7 @@ def test_execute_list_collection_no_valid_paths(mocker, capsys, tmp_path_factory
"""Test listing collections when no valid paths are given""" """Test listing collections when no valid paths are given"""
cliargs() cliargs()
init_plugin_loader()
mocker.patch('os.path.exists', return_value=True) mocker.patch('os.path.exists', return_value=True)
mocker.patch('os.path.isdir', return_value=False) mocker.patch('os.path.isdir', return_value=False)
@ -257,13 +230,14 @@ def test_execute_list_collection_no_valid_paths(mocker, capsys, tmp_path_factory
assert 'exists, but it\nis not a directory.' in err assert 'exists, but it\nis not a directory.' in err
def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_collection_objects, tmp_path_factory): def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_from_path, tmp_path_factory):
"""Test listing all collections when one invalid path is given""" """Test listing all collections when one invalid path is given"""
cliargs() cliargs(collections_paths=['nope'])
init_plugin_loader()
mocker.patch('os.path.exists', return_value=True) mocker.patch('os.path.exists', return_value=True)
mocker.patch('os.path.isdir', isdir) mocker.patch('os.path.isdir', isdir)
mocker.patch('ansible.cli.galaxy.GalaxyCLI._resolve_path', side_effect=['/root/.ansible/collections', 'nope'])
mocker.patch('ansible.utils.color.ANSIBLE_COLOR', False) mocker.patch('ansible.utils.color.ANSIBLE_COLOR', False)
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', '-p', 'nope']) gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', '-p', 'nope'])

@ -5,7 +5,7 @@ __metaclass__ = type
import pytest import pytest
from ansible.cli.doc import DocCLI, RoleMixin from ansible.cli.doc import DocCLI, RoleMixin
from ansible.plugins.loader import module_loader from ansible.plugins.loader import module_loader, init_plugin_loader
TTY_IFY_DATA = { TTY_IFY_DATA = {
@ -118,6 +118,7 @@ def test_builtin_modules_list():
args = ['ansible-doc', '-l', 'ansible.builtin', '-t', 'module'] args = ['ansible-doc', '-l', 'ansible.builtin', '-t', 'module']
obj = DocCLI(args=args) obj = DocCLI(args=args)
obj.parse() obj.parse()
init_plugin_loader()
result = obj._list_plugins('module', module_loader) result = obj._list_plugins('module', module_loader)
assert len(result) > 0 assert len(result) > 0

@ -29,7 +29,7 @@ from io import BytesIO
import ansible.errors import ansible.errors
from ansible.executor.module_common import recursive_finder from ansible.executor.module_common import recursive_finder
from ansible.plugins.loader import init_plugin_loader
# These are the modules that are brought in by module_utils/basic.py This may need to be updated # These are the modules that are brought in by module_utils/basic.py This may need to be updated
# when basic.py gains new imports # when basic.py gains new imports
@ -79,6 +79,8 @@ ANSIBLE_LIB = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.pa
@pytest.fixture @pytest.fixture
def finder_containers(): def finder_containers():
init_plugin_loader()
FinderContainers = namedtuple('FinderContainers', ['zf']) FinderContainers = namedtuple('FinderContainers', ['zf'])
zipoutput = BytesIO() zipoutput = BytesIO()

@ -25,6 +25,7 @@ from unittest.mock import patch, MagicMock
from ansible.executor.play_iterator import HostState, PlayIterator, IteratingStates, FailedStates from ansible.executor.play_iterator import HostState, PlayIterator, IteratingStates, FailedStates
from ansible.playbook import Playbook from ansible.playbook import Playbook
from ansible.playbook.play_context import PlayContext from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import init_plugin_loader
from units.mock.loader import DictDataLoader from units.mock.loader import DictDataLoader
from units.mock.path import mock_unfrackpath_noop from units.mock.path import mock_unfrackpath_noop
@ -286,6 +287,7 @@ class TestPlayIterator(unittest.TestCase):
self.assertNotIn(hosts[0], failed_hosts) self.assertNotIn(hosts[0], failed_hosts)
def test_play_iterator_nested_blocks(self): def test_play_iterator_nested_blocks(self):
init_plugin_loader()
fake_loader = DictDataLoader({ fake_loader = DictDataLoader({
"test_play.yml": """ "test_play.yml": """
- hosts: all - hosts: all

@ -23,6 +23,7 @@ from ansible import context
from ansible.cli.galaxy import GalaxyCLI, SERVER_DEF from ansible.cli.galaxy import GalaxyCLI, SERVER_DEF
from ansible.errors import AnsibleError from ansible.errors import AnsibleError
from ansible.galaxy import api, collection, token from ansible.galaxy import api, collection, token
from ansible.plugins.loader import init_plugin_loader
from ansible.module_utils._text import to_bytes, to_native, to_text from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.six.moves import builtins from ansible.module_utils.six.moves import builtins
from ansible.utils import context_objects as co from ansible.utils import context_objects as co
@ -854,57 +855,6 @@ def test_publish_with_wait(galaxy_server, collection_artifact, monkeypatch):
% galaxy_server.api_server % galaxy_server.api_server
def test_find_existing_collections(tmp_path_factory, monkeypatch):
test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
collection1 = os.path.join(test_dir, 'namespace1', 'collection1')
collection2 = os.path.join(test_dir, 'namespace2', 'collection2')
fake_collection1 = os.path.join(test_dir, 'namespace3', 'collection3')
fake_collection2 = os.path.join(test_dir, 'namespace4')
os.makedirs(collection1)
os.makedirs(collection2)
os.makedirs(os.path.split(fake_collection1)[0])
open(fake_collection1, 'wb+').close()
open(fake_collection2, 'wb+').close()
collection1_manifest = json.dumps({
'collection_info': {
'namespace': 'namespace1',
'name': 'collection1',
'version': '1.2.3',
'authors': ['Jordan Borean'],
'readme': 'README.md',
'dependencies': {},
},
'format': 1,
})
with open(os.path.join(collection1, 'MANIFEST.json'), 'wb') as manifest_obj:
manifest_obj.write(to_bytes(collection1_manifest))
mock_warning = MagicMock()
monkeypatch.setattr(Display, 'warning', mock_warning)
actual = list(collection.find_existing_collections(test_dir, artifacts_manager=concrete_artifact_cm))
assert len(actual) == 2
for actual_collection in actual:
if '%s.%s' % (actual_collection.namespace, actual_collection.name) == 'namespace1.collection1':
assert actual_collection.namespace == 'namespace1'
assert actual_collection.name == 'collection1'
assert actual_collection.ver == '1.2.3'
assert to_text(actual_collection.src) == collection1
else:
assert actual_collection.namespace == 'namespace2'
assert actual_collection.name == 'collection2'
assert actual_collection.ver == '*'
assert to_text(actual_collection.src) == collection2
assert mock_warning.call_count == 1
assert mock_warning.mock_calls[0][1][0] == "Collection at '%s' does not have a MANIFEST.json file, nor has it galaxy.yml: " \
"cannot detect version." % to_text(collection2)
def test_download_file(tmp_path_factory, monkeypatch): def test_download_file(tmp_path_factory, monkeypatch):
temp_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')) temp_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))

@ -919,57 +919,6 @@ def test_install_collections_from_tar(collection_artifact, monkeypatch):
assert display_msgs[2] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" % to_text(collection_path) assert display_msgs[2] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" % to_text(collection_path)
def test_install_collections_existing_without_force(collection_artifact, monkeypatch):
collection_path, collection_tar = collection_artifact
temp_path = os.path.split(collection_tar)[0]
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
assert os.path.isdir(collection_path)
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False)
assert os.path.isdir(collection_path)
actual_files = os.listdir(collection_path)
actual_files.sort()
assert actual_files == [b'README.md', b'docs', b'galaxy.yml', b'playbooks', b'plugins', b'roles', b'runme.sh']
# Filter out the progress cursor display calls.
display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1]
assert len(display_msgs) == 1
assert display_msgs[0] == 'Nothing to do. All requested collections are already installed. If you want to reinstall them, consider using `--force`.'
for msg in display_msgs:
assert 'WARNING' not in msg
def test_install_missing_metadata_warning(collection_artifact, monkeypatch):
collection_path, collection_tar = collection_artifact
temp_path = os.path.split(collection_tar)[0]
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
for file in [b'MANIFEST.json', b'galaxy.yml']:
b_path = os.path.join(collection_path, file)
if os.path.isfile(b_path):
os.unlink(b_path)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False)
display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1]
assert 'WARNING' in display_msgs[0]
# Makes sure we don't get stuck in some recursive loop # Makes sure we don't get stuck in some recursive loop
@pytest.mark.parametrize('collection_artifact', [ @pytest.mark.parametrize('collection_artifact', [
{'ansible_namespace.collection': '>=0.0.1'}, {'ansible_namespace.collection': '>=0.0.1'},

@ -10,6 +10,7 @@ import re
from ansible.errors import AnsibleParserError from ansible.errors import AnsibleParserError
from ansible.parsing.mod_args import ModuleArgsParser from ansible.parsing.mod_args import ModuleArgsParser
from ansible.plugins.loader import init_plugin_loader
from ansible.utils.sentinel import Sentinel from ansible.utils.sentinel import Sentinel
@ -119,6 +120,7 @@ class TestModArgsDwim:
assert err.value.args[0] == msg assert err.value.args[0] == msg
def test_multiple_actions_ping_shell(self): def test_multiple_actions_ping_shell(self):
init_plugin_loader()
args_dict = {'ping': 'data=hi', 'shell': 'echo hi'} args_dict = {'ping': 'data=hi', 'shell': 'echo hi'}
m = ModuleArgsParser(args_dict) m = ModuleArgsParser(args_dict)
with pytest.raises(AnsibleParserError) as err: with pytest.raises(AnsibleParserError) as err:
@ -129,6 +131,7 @@ class TestModArgsDwim:
assert actions == set(['ping', 'shell']) assert actions == set(['ping', 'shell'])
def test_bogus_action(self): def test_bogus_action(self):
init_plugin_loader()
args_dict = {'bogusaction': {}} args_dict = {'bogusaction': {}}
m = ModuleArgsParser(args_dict) m = ModuleArgsParser(args_dict)
with pytest.raises(AnsibleParserError) as err: with pytest.raises(AnsibleParserError) as err:

@ -22,6 +22,7 @@ __metaclass__ = type
from units.compat import unittest from units.compat import unittest
from unittest.mock import patch from unittest.mock import patch
from ansible.playbook.task import Task from ansible.playbook.task import Task
from ansible.plugins.loader import init_plugin_loader
from ansible.parsing.yaml import objects from ansible.parsing.yaml import objects
from ansible import errors from ansible import errors
@ -74,6 +75,7 @@ class TestTask(unittest.TestCase):
@patch.object(errors.AnsibleError, '_get_error_lines_from_file') @patch.object(errors.AnsibleError, '_get_error_lines_from_file')
def test_load_task_kv_form_error_36848(self, mock_get_err_lines): def test_load_task_kv_form_error_36848(self, mock_get_err_lines):
init_plugin_loader()
ds = objects.AnsibleMapping(kv_bad_args_ds) ds = objects.AnsibleMapping(kv_bad_args_ds)
ds.ansible_pos = ('test_task_faux_playbook.yml', 1, 1) ds.ansible_pos = ('test_task_faux_playbook.yml', 1, 1)
mock_get_err_lines.return_value = (kv_bad_args_str, '') mock_get_err_lines.return_value = (kv_bad_args_str, '')

@ -22,6 +22,7 @@ __metaclass__ = type
import os import os
import re import re
from importlib import import_module
from ansible import constants as C from ansible import constants as C
from units.compat import unittest from units.compat import unittest
@ -33,6 +34,7 @@ from ansible.module_utils.six.moves import shlex_quote, builtins
from ansible.module_utils._text import to_bytes from ansible.module_utils._text import to_bytes
from ansible.playbook.play_context import PlayContext from ansible.playbook.play_context import PlayContext
from ansible.plugins.action import ActionBase from ansible.plugins.action import ActionBase
from ansible.plugins.loader import init_plugin_loader
from ansible.template import Templar from ansible.template import Templar
from ansible.vars.clean import clean_facts from ansible.vars.clean import clean_facts
@ -109,6 +111,11 @@ class TestActionBase(unittest.TestCase):
self.assertEqual(results, {}) self.assertEqual(results, {})
def test_action_base__configure_module(self): def test_action_base__configure_module(self):
init_plugin_loader()
# Pre-populate the ansible.builtin collection
# so reading the ansible_builtin_runtime.yml happens
# before the mock_open below
import_module('ansible_collections.ansible.builtin')
fake_loader = DictDataLoader({ fake_loader = DictDataLoader({
}) })

@ -29,7 +29,7 @@ from units.compat import unittest
from ansible.errors import AnsibleError from ansible.errors import AnsibleError
from ansible.plugins.cache import CachePluginAdjudicator from ansible.plugins.cache import CachePluginAdjudicator
from ansible.plugins.cache.memory import CacheModule as MemoryCache from ansible.plugins.cache.memory import CacheModule as MemoryCache
from ansible.plugins.loader import cache_loader from ansible.plugins.loader import cache_loader, init_plugin_loader
from ansible.vars.fact_cache import FactCache from ansible.vars.fact_cache import FactCache
import pytest import pytest
@ -183,6 +183,7 @@ class TestFactCache(unittest.TestCase):
assert len(self.cache.keys()) == 0 assert len(self.cache.keys()) == 0
def test_plugin_load_failure(self): def test_plugin_load_failure(self):
init_plugin_loader()
# See https://github.com/ansible/ansible/issues/18751 # See https://github.com/ansible/ansible/issues/18751
# Note no fact_connection config set, so this will fail # Note no fact_connection config set, so this will fail
with mock.patch('ansible.constants.CACHE_PLUGIN', 'json'): with mock.patch('ansible.constants.CACHE_PLUGIN', 'json'):

@ -27,6 +27,7 @@ from unittest.mock import patch
from ansible import constants as C from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleUndefinedVariable from ansible.errors import AnsibleError, AnsibleUndefinedVariable
from ansible.module_utils.six import string_types from ansible.module_utils.six import string_types
from ansible.plugins.loader import init_plugin_loader
from ansible.template import Templar, AnsibleContext, AnsibleEnvironment, AnsibleUndefined from ansible.template import Templar, AnsibleContext, AnsibleEnvironment, AnsibleUndefined
from ansible.utils.unsafe_proxy import AnsibleUnsafe, wrap_var from ansible.utils.unsafe_proxy import AnsibleUnsafe, wrap_var
from units.mock.loader import DictDataLoader from units.mock.loader import DictDataLoader
@ -34,6 +35,7 @@ from units.mock.loader import DictDataLoader
class BaseTemplar(object): class BaseTemplar(object):
def setUp(self): def setUp(self):
init_plugin_loader()
self.test_vars = dict( self.test_vars = dict(
foo="bar", foo="bar",
bam="{{foo}}", bam="{{foo}}",

@ -13,7 +13,7 @@ from ansible.modules import ping as ping_module
from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleCollectionRef from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleCollectionRef
from ansible.utils.collection_loader._collection_finder import (
    _AnsibleCollectionFinder, _AnsibleCollectionLoader, _AnsibleCollectionNSPkgLoader, _AnsibleCollectionPkgLoader,
    _AnsibleCollectionPkgLoaderBase, _AnsibleCollectionRootPkgLoader, _AnsibleNSTraversable, _AnsiblePathHookFinder,
    _get_collection_name_from_path, _get_collection_role_path, _get_collection_metadata, _iter_modules_impl
)
from ansible.utils.collection_loader._collection_config import _EventSource from ansible.utils.collection_loader._collection_config import _EventSource
@ -828,6 +828,52 @@ def test_collectionref_components_invalid(name, subdirs, resource, ref_type, exp
    assert re.search(expected_error_expression, str(curerr.value))
@pytest.mark.skipif(not PY3, reason='importlib.resources only supported for py3')
def test_importlib_resources():
    """Exercise ``importlib.resources.files()`` against the Ansible collection loader.

    Namespace packages must come back as ``_AnsibleNSTraversable`` (multiple
    on-disk roots), while a concrete collection and its subpackages must come
    back as plain filesystem paths.
    """
    # ``files`` only exists in the stdlib from 3.9/3.10 era onward; older
    # interpreters get it from the importlib_resources backport.
    if sys.version_info < (3, 10):
        from importlib_resources import files
    else:
        from importlib.resources import files
    from pathlib import Path

    finder = get_default_finder()
    reset_collections_loader_state(finder)

    ansible_collections_ns = files('ansible_collections')
    ansible_ns = files('ansible_collections.ansible')
    testns = files('ansible_collections.testns')
    testcoll = files('ansible_collections.testns.testcoll')
    testcoll2 = files('ansible_collections.testns.testcoll2')
    module_utils = files('ansible_collections.testns.testcoll.plugins.module_utils')

    # Namespaces span multiple roots; concrete collection packages are single paths.
    for namespace_traversable in (ansible_collections_ns, ansible_ns):
        assert isinstance(namespace_traversable, _AnsibleNSTraversable)
    for concrete_traversable in (testcoll, module_utils):
        assert isinstance(concrete_traversable, Path)

    # Every resolved traversable points at an existing directory.
    for traversable in (ansible_collections_ns, ansible_ns, testcoll, module_utils):
        assert traversable.is_dir()

    base_paths = [Path(p) for p in default_test_collection_paths[:2]]
    first_path, second_path = base_paths

    # Namespace traversables record one path per configured collection root.
    assert testns._paths == [p / 'ansible_collections' / 'testns' for p in base_paths]
    assert ansible_ns._paths == [p / 'ansible_collections' / 'ansible' for p in base_paths]
    assert ansible_collections_ns._paths == [p / 'ansible_collections' for p in base_paths]
    # testcoll2 only exists under the second collection root.
    assert testcoll2 == second_path / 'ansible_collections' / 'testns' / 'testcoll2'

    # glob() on a concrete package behaves like pathlib globbing.
    assert {p.name for p in module_utils.glob('*.py')} == {'__init__.py', 'my_other_util.py', 'my_util.py'}

    nestcoll_mu_init = first_path / 'ansible_collections' / 'testns' / 'testcoll' / 'plugins' / 'module_utils' / '__init__.py'
    assert next(module_utils.glob('__init__.py')) == nestcoll_mu_init
# BEGIN TEST SUPPORT
default_test_collection_paths = [

Loading…
Cancel
Save