Fix 'ansible-galaxy collection verify' to display new files/dirs (#76690)

* Fix 'ansible-galaxy collection verify' to report files/directories not listed in the FILES.json

* changelog

* Fix bug in 2.13+
pull/77170/head
Sloane Hertel 3 years ago committed by GitHub
parent 0d4219f265
commit 7657caa072

@@ -0,0 +1,2 @@
+bugfixes:
+  - ansible-galaxy collection verify - display files/directories not included in the FILES.json as modified content.

@@ -298,11 +298,46 @@ def verify_local_collection(
     _verify_file_hash(b_collection_path, file_manifest_filename, expected_hash, modified_content)
     file_manifest = get_json_from_validation_source(file_manifest_filename)
 
+    collection_dirs = set()
+    collection_files = {
+        os.path.join(b_collection_path, b'MANIFEST.json'),
+        os.path.join(b_collection_path, b'FILES.json'),
+    }
+
     # Use the file manifest to verify individual file checksums
     for manifest_data in file_manifest['files']:
+        name = manifest_data['name']
+
         if manifest_data['ftype'] == 'file':
+            collection_files.add(
+                os.path.join(b_collection_path, to_bytes(name, errors='surrogate_or_strict'))
+            )
             expected_hash = manifest_data['chksum_%s' % manifest_data['chksum_type']]
-            _verify_file_hash(b_collection_path, manifest_data['name'], expected_hash, modified_content)
+            _verify_file_hash(b_collection_path, name, expected_hash, modified_content)
+
+        if manifest_data['ftype'] == 'dir':
+            collection_dirs.add(
+                os.path.join(b_collection_path, to_bytes(name, errors='surrogate_or_strict'))
+            )
+
+    # Find any paths not in the FILES.json
+    for root, dirs, files in os.walk(b_collection_path):
+        for name in files:
+            full_path = os.path.join(root, name)
+            path = to_text(full_path[len(b_collection_path) + 1::], errors='surrogate_or_strict')
+
+            if full_path not in collection_files:
+                modified_content.append(
+                    ModifiedContent(filename=path, expected='the file does not exist', installed='the file exists')
+                )
+        for name in dirs:
+            full_path = os.path.join(root, name)
+            path = to_text(full_path[len(b_collection_path) + 1::], errors='surrogate_or_strict')
+
+            if full_path not in collection_dirs:
+                modified_content.append(
+                    ModifiedContent(filename=path, expected='the directory does not exist', installed='the directory exists')
+                )
 
     if modified_content:
         result.success = False
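For readers following the hunk above outside the ansible source tree, here is a minimal standalone sketch of the untracked-path check: rebuild the known file and directory sets from FILES.json, then walk the installed tree and collect anything the manifest does not list. The helper name find_untracked_paths and the use of str paths are illustrative only; the real code works on bytes paths and appends ModifiedContent entries rather than returning a list.

import json
import os


def find_untracked_paths(collection_path):
    """Return paths under collection_path that FILES.json does not list."""
    with open(os.path.join(collection_path, 'FILES.json')) as f:
        file_manifest = json.load(f)

    # MANIFEST.json and FILES.json describe the collection but are not listed
    # in FILES.json itself, so seed the known set with them (as the patch does).
    known_files = {
        os.path.join(collection_path, 'MANIFEST.json'),
        os.path.join(collection_path, 'FILES.json'),
    }
    known_dirs = set()

    for entry in file_manifest['files']:
        full = os.path.join(collection_path, entry['name'])
        if entry['ftype'] == 'file':
            known_files.add(full)
        elif entry['ftype'] == 'dir':
            known_dirs.add(full)

    untracked = []
    for root, dirs, files in os.walk(collection_path):
        for name in files:
            full = os.path.join(root, name)
            if full not in known_files:
                untracked.append(os.path.relpath(full, collection_path))
        for name in dirs:
            full = os.path.join(root, name)
            if full not in known_dirs:
                untracked.append(os.path.relpath(full, collection_path))
    return untracked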

@@ -176,13 +176,7 @@ class _ComputedReqKindsMixin:
         if not self.may_have_offline_galaxy_info:
             self._source_info = None
         else:
-            # Store Galaxy metadata adjacent to the namespace of the collection
-            # Chop off the last two parts of the path (/ns/coll) to get the dir containing the ns
-            b_src = to_bytes(self.src, errors='surrogate_or_strict')
-            b_path_parts = b_src.split(to_bytes(os.path.sep))[0:-2]
-            b_path = to_bytes(os.path.sep).join(b_path_parts)
-            info_path = self.construct_galaxy_info_path(b_path)
+            info_path = self.construct_galaxy_info_path(to_bytes(self.src, errors='surrogate_or_strict'))
 
             self._source_info = get_validated_source_info(
                 info_path,
@@ -447,10 +441,16 @@ class _ComputedReqKindsMixin:
             return False
         return True
 
-    def construct_galaxy_info_path(self, b_metadata_dir):
+    def construct_galaxy_info_path(self, b_collection_path):
         if not self.may_have_offline_galaxy_info and not self.type == 'galaxy':
             raise TypeError('Only installed collections from a Galaxy server have offline Galaxy info')
 
+        # Store Galaxy metadata adjacent to the namespace of the collection
+        # Chop off the last two parts of the path (/ns/coll) to get the dir containing the ns
+        b_src = to_bytes(b_collection_path, errors='surrogate_or_strict')
+        b_path_parts = b_src.split(to_bytes(os.path.sep))[0:-2]
+        b_metadata_dir = to_bytes(os.path.sep).join(b_path_parts)
+
         # ns.coll-1.0.0.info
         b_dir_name = to_bytes(f"{self.namespace}.{self.name}-{self.ver}.info", errors="surrogate_or_strict")
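The path manipulation that moved into construct_galaxy_info_path() is easier to see with concrete values. A rough sketch, using str paths and a made-up helper name (galaxy_info_dir is not part of ansible), assuming the installed layout .../ansible_collections/<namespace>/<name>:

import os


def galaxy_info_dir(collection_path, namespace, name, version):
    # Drop the trailing <namespace>/<name> components to get the directory
    # that holds the ns.coll-<version>.info metadata directory.
    metadata_parent = os.path.sep.join(collection_path.split(os.path.sep)[:-2])
    return os.path.join(metadata_parent, f'{namespace}.{name}-{version}.info')


print(galaxy_info_dir('/collections/ansible_collections/ansible_test/verify',
                      'ansible_test', 'verify', '1.0.0'))
# -> /collections/ansible_collections/ansible_test.verify-1.0.0.info

The real method performs the same split-and-join on bytes via to_bytes(..., errors='surrogate_or_strict'), which is why callers can now pass the collection path directly instead of pre-chopping it.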

@@ -251,6 +251,16 @@
 - name: append a newline to a module to modify the checksum
   shell: "echo '' >> {{ module_path }}"
 
+- name: create a new module file
+  file:
+    path: '{{ galaxy_dir }}/ansible_collections/ansible_test/verify/plugins/modules/test_new_file.py'
+    state: touch
+
+- name: create a new directory
+  file:
+    path: '{{ galaxy_dir }}/ansible_collections/ansible_test/verify/plugins/modules/test_new_dir'
+    state: directory
+
 - name: verify modified collection locally-only (should fail)
   command: ansible-galaxy collection verify --offline ansible_test.verify
   register: verify
@@ -261,6 +271,8 @@
     - verify.rc != 0
     - "'Collection ansible_test.verify contains modified content in the following files:' in verify.stdout"
     - "'plugins/modules/test_module.py' in verify.stdout"
+    - "'plugins/modules/test_new_file.py' in verify.stdout"
+    - "'plugins/modules/test_new_dir' in verify.stdout"
 
 # TODO: add a test for offline Galaxy signature metadata
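The new test tasks can be approximated outside the integration suite with a short self-contained script: build a throwaway collection directory with a FILES.json, add an extra module file and directory the manifest does not know about, and confirm a walk-based check flags both. Every path and file name below is invented for the demo and is not taken from the real test target.

import json
import os
import tempfile

with tempfile.TemporaryDirectory() as coll:
    os.makedirs(os.path.join(coll, 'plugins', 'modules'))
    manifest = {'files': [
        {'name': 'plugins', 'ftype': 'dir'},
        {'name': 'plugins/modules', 'ftype': 'dir'},
        {'name': 'plugins/modules/test_module.py', 'ftype': 'file'},
    ]}
    with open(os.path.join(coll, 'FILES.json'), 'w') as f:
        json.dump(manifest, f)
    open(os.path.join(coll, 'MANIFEST.json'), 'w').close()
    open(os.path.join(coll, 'plugins', 'modules', 'test_module.py'), 'w').close()

    # The extra content the test adds on top of a pristine install.
    open(os.path.join(coll, 'plugins', 'modules', 'test_new_file.py'), 'w').close()
    os.mkdir(os.path.join(coll, 'plugins', 'modules', 'test_new_dir'))

    known = {os.path.join(coll, e['name']) for e in manifest['files']}
    known |= {os.path.join(coll, 'FILES.json'), os.path.join(coll, 'MANIFEST.json')}

    flagged = []
    for root, dirs, files in os.walk(coll):
        for name in files + dirs:
            full = os.path.join(root, name)
            if full not in known:
                flagged.append(os.path.relpath(full, coll))

    print(sorted(flagged))
    # ['plugins/modules/test_new_dir', 'plugins/modules/test_new_file.py']

Those two relative paths are exactly the strings the assert block above expects to find in verify.stdout.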
