Improve code coverage of unit tests (#80904)

* Improve coverage of validate-modules unit tests

* Remove unused galaxy unit test code

* Fix galaxy unit test teardown logic

* Improve coverage of galaxy unit test code

* Improve coverage of galaxy unit tests

* Remove unused code in galaxy API tests

* Remove unused galaxy collection unit test code

* Improve coverage of galaxy collection unit tests

* Remove unused galaxy unit test code
Matt Clay authored 2 years ago; committed by GitHub
parent a5bb4c7dee
commit 585b82838b

@@ -57,7 +57,7 @@ def test_type_regex(cstring, cexpected): # type: (str, str) -> None
match = TYPE_REGEX.match(cstring)
if cexpected and not match:
assert False, "%s should have matched" % cstring
elif not cexpected and match:
assert False, "%s should not have matched" % cstring
if cexpected:
assert match, f"should have matched: {cstring}"
else:
assert not match, f"should not have matched: {cstring}"
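
The rewrite above replaces failure-only 'assert False' lines, which never execute in a passing run and therefore show up as uncovered test code, with assertions that run on every parametrized case, and switches the messages to f-strings. A minimal runnable sketch of the pattern, using a simplified stand-in for the real TYPE_REGEX defined by the validate-modules sanity test:

    import re

    import pytest

    # Simplified stand-in; the real TYPE_REGEX lives in the validate-modules plugin.
    TYPE_REGEX = re.compile(r'.*(if|or)\s+type\(.*')

    @pytest.mark.parametrize('cstring, cexpected', [
        ('if type(foo) in [1, "one"]:', True),
        ('new_style = type(None)', False),
    ])
    def test_type_regex(cstring, cexpected):
        match = TYPE_REGEX.match(cstring)
        if cexpected:
            # Every case now ends in an assert that actually runs, so the test body
            # has no lines that are reachable only when the test fails.
            assert match, f"should have matched: {cstring}"
        else:
            assert not match, f"should not have matched: {cstring}"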

@@ -19,16 +19,6 @@ from ansible.module_utils.common.text.converters import to_native
from ansible.plugins.loader import init_plugin_loader
def path_exists(path):
if to_native(path) == '/root/.ansible/collections/ansible_collections/sandwiches/ham':
return False
elif to_native(path) == '/usr/share/ansible/collections/ansible_collections/sandwiches/reuben':
return False
elif to_native(path) == 'nope':
return False
return True
def isdir(path):
if to_native(path) == 'nope':
return False
@@ -134,8 +124,6 @@ def test_execute_list_collection_specific(mocker, capsys, mock_from_path, tmp_pa
cliargs(collection_name=collection_name)
init_plugin_loader()
mocker.patch('os.path.exists', path_exists)
# mocker.patch.object(pathlib.Path, 'is_dir', return_value=True)
mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name)
mocker.patch('ansible.cli.galaxy._get_collection_widths', return_value=(14, 5))
@@ -163,8 +151,6 @@ def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_from_pa
cliargs(collection_name=collection_name)
init_plugin_loader()
mocker.patch('os.path.exists', path_exists)
# mocker.patch.object(pathlib.Path, 'is_dir', return_value=True)
mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name)
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name])
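
The hunks above remove the path_exists helper that faked missing collection paths, together with the mocker.patch('os.path.exists', path_exists) calls and the commented-out is_dir patches that referenced it, since the remaining list-collection tests no longer rely on those fake paths. For reference, a minimal sketch of how such a stub is installed with pytest-mock; the test name and paths here are illustrative, not taken from the real file:

    import os

    from ansible.module_utils.common.text.converters import to_native

    def fake_path_exists(path):
        # Only the sentinel 'nope' path is reported as missing.
        return to_native(path) != 'nope'

    def test_path_stub(mocker):  # 'mocker' is the pytest-mock fixture
        mocker.patch('os.path.exists', fake_path_exists)
        assert not os.path.exists('nope')
        assert os.path.exists('/some/other/path')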

@@ -72,7 +72,6 @@ class TestGalaxy(unittest.TestCase):
# making a temp dir for role installation
cls.role_path = os.path.join(tempfile.mkdtemp(), "roles")
if not os.path.isdir(cls.role_path):
os.makedirs(cls.role_path)
# creating a tar file name for class data
@@ -291,7 +290,7 @@ class ValidRoleTests(object):
cls.role_skeleton_path = gc.galaxy.default_role_skeleton_path
@classmethod
def tearDownClass(cls):
def tearDownRole(cls):
shutil.rmtree(cls.test_dir, ignore_errors=True)
def test_metadata(self):
@@ -342,6 +341,10 @@ class TestGalaxyInitDefault(unittest.TestCase, ValidRoleTests):
def setUpClass(cls):
cls.setUpRole(role_name='delete_me')
@classmethod
def tearDownClass(cls):
cls.tearDownRole()
def test_metadata_contents(self):
with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
metadata = yaml.safe_load(mf)
@@ -354,6 +357,10 @@ class TestGalaxyInitAPB(unittest.TestCase, ValidRoleTests):
def setUpClass(cls):
cls.setUpRole('delete_me_apb', galaxy_args=['--type=apb'])
@classmethod
def tearDownClass(cls):
cls.tearDownRole()
def test_metadata_apb_tag(self):
with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
metadata = yaml.safe_load(mf)
@@ -384,6 +391,10 @@ class TestGalaxyInitContainer(unittest.TestCase, ValidRoleTests):
def setUpClass(cls):
cls.setUpRole('delete_me_container', galaxy_args=['--type=container'])
@classmethod
def tearDownClass(cls):
cls.tearDownRole()
def test_metadata_container_tag(self):
with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
metadata = yaml.safe_load(mf)
@@ -415,6 +426,10 @@ class TestGalaxyInitSkeleton(unittest.TestCase, ValidRoleTests):
role_skeleton_path = os.path.join(os.path.split(__file__)[0], 'test_data', 'role_skeleton')
cls.setUpRole('delete_me_skeleton', skeleton_path=role_skeleton_path, use_explicit_type=True)
@classmethod
def tearDownClass(cls):
cls.tearDownRole()
def test_empty_files_dir(self):
files_dir = os.path.join(self.role_dir, 'files')
self.assertTrue(os.path.isdir(files_dir))
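
This group of hunks is the teardown fix: ValidRoleTests is a mixin shared by the TestGalaxyInit* classes, but because each of those classes lists unittest.TestCase before the mixin, TestCase's own no-op tearDownClass shadowed the mixin's version and the temporary role directories were never cleaned up. Renaming the mixin hook to tearDownRole and having every concrete class define a tearDownClass that delegates to it makes the cleanup actually run (and count as executed code). A condensed sketch of the arrangement; the real setUpRole also invokes ansible-galaxy init and records role_dir:

    import shutil
    import tempfile
    import unittest

    class ValidRoleTests(object):
        # Mixin with shared helpers only; deliberately not named tearDownClass so
        # method resolution on the concrete classes cannot silently skip it.
        @classmethod
        def setUpRole(cls, role_name):
            cls.test_dir = tempfile.mkdtemp()
            cls.role_name = role_name

        @classmethod
        def tearDownRole(cls):
            shutil.rmtree(cls.test_dir, ignore_errors=True)

    class TestGalaxyInitDefault(unittest.TestCase, ValidRoleTests):
        @classmethod
        def setUpClass(cls):
            cls.setUpRole(role_name='delete_me')

        @classmethod
        def tearDownClass(cls):
            # Each concrete test class now owns its class-level teardown and
            # explicitly delegates the cleanup to the mixin helper.
            cls.tearDownRole()
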
@@ -1235,12 +1250,7 @@ def test_install_implicit_role_with_collections(requirements_file, monkeypatch):
assert len(mock_role_install.call_args[0][0]) == 1
assert str(mock_role_install.call_args[0][0][0]) == 'namespace.name'
found = False
for mock_call in mock_display.mock_calls:
if 'contains collections which will be ignored' in mock_call[1][0]:
found = True
break
assert not found
assert not any(list('contains collections which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
@pytest.mark.parametrize('requirements_file', ['''
@@ -1267,12 +1277,7 @@ def test_install_explicit_role_with_collections(requirements_file, monkeypatch):
assert len(mock_role_install.call_args[0][0]) == 1
assert str(mock_role_install.call_args[0][0][0]) == 'namespace.name'
found = False
for mock_call in mock_display.mock_calls:
if 'contains collections which will be ignored' in mock_call[1][0]:
found = True
break
assert found
assert any(list('contains collections which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
@pytest.mark.parametrize('requirements_file', ['''
@@ -1299,12 +1304,7 @@ def test_install_role_with_collections_and_path(requirements_file, monkeypatch):
assert len(mock_role_install.call_args[0][0]) == 1
assert str(mock_role_install.call_args[0][0][0]) == 'namespace.name'
found = False
for mock_call in mock_display.mock_calls:
if 'contains collections which will be ignored' in mock_call[1][0]:
found = True
break
assert found
assert any(list('contains collections which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
@pytest.mark.parametrize('requirements_file', ['''
@@ -1331,9 +1331,4 @@ def test_install_collection_with_roles(requirements_file, monkeypatch):
assert mock_role_install.call_count == 0
found = False
for mock_call in mock_display.mock_calls:
if 'contains roles which will be ignored' in mock_call[1][0]:
found = True
break
assert found
assert any(list('contains roles which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
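
All four assertions above collapse a flag-and-break scan of mock_display.mock_calls into a single any() call, and the generator inside it is first materialised with list(). Materialising the generator means every recorded display call is inspected instead of stopping at the first hit, so the rewritten test bodies contain no early-exit branches that a passing run would leave unexecuted. A self-contained illustration of the same check:

    from unittest.mock import MagicMock

    mock_display = MagicMock()
    mock_display('requirements file contains roles which will be ignored')
    mock_display('another, unrelated message')

    warning = 'contains roles which will be ignored'
    # mock_calls entries are (name, args, kwargs) tuples, so mock_call[1][0] is the
    # first positional argument; list() forces the generator to visit every call.
    assert any(list(warning in mock_call[1][0] for mock_call in mock_display.mock_calls))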

@@ -463,7 +463,6 @@ def test_publish_failure(api_version, collection_url, response, expected, collec
def test_wait_import_task(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
if token_ins:
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
@@ -496,7 +495,6 @@ def test_wait_import_task(server_url, api_version, token_type, token_ins, import
def test_wait_import_task_multiple_requests(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
if token_ins:
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
@@ -543,7 +541,6 @@ def test_wait_import_task_multiple_requests(server_url, api_version, token_type,
def test_wait_import_task_with_failure(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
if token_ins:
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
@@ -620,7 +617,6 @@ def test_wait_import_task_with_failure(server_url, api_version, token_type, toke
def test_wait_import_task_with_failure_no_error(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
if token_ins:
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
@@ -693,7 +689,6 @@ def test_wait_import_task_with_failure_no_error(server_url, api_version, token_t
def test_wait_import_task_timeout(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
if token_ins:
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
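
The hunks above tidy the setup shared by the wait_import_task tests; the visible context stubs token_ins.get with a MagicMock via monkeypatch so the request code always sees a predictable token value. A self-contained sketch of that pattern; FakeToken and the test name are stand-ins, not objects from the real test module:

    from unittest.mock import MagicMock

    class FakeToken:
        # Stand-in for the GalaxyToken/KeycloakToken instances used in the real
        # parametrize lists.
        def get(self):
            return 'original token'

    def test_token_is_stubbed(monkeypatch):  # 'monkeypatch' is the built-in pytest fixture
        token_ins = FakeToken()
        mock_token_get = MagicMock()
        mock_token_get.return_value = 'my token'
        monkeypatch.setattr(token_ins, 'get', mock_token_get)

        assert token_ins.get() == 'my token'
        assert mock_token_get.call_count == 1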

@@ -170,28 +170,6 @@ def manifest_info(manifest_template):
return manifest_template()
@pytest.fixture()
def files_manifest_info():
return {
"files": [
{
"name": ".",
"ftype": "dir",
"chksum_type": None,
"chksum_sha256": None,
"format": 1
},
{
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "individual_file_checksum",
"format": 1
}
],
"format": 1}
@pytest.fixture()
def manifest(manifest_info):
b_data = to_bytes(json.dumps(manifest_info))
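
The deleted files_manifest_info fixture was no longer requested by any test, while the surviving manifest fixture still builds its bytes from manifest_info, i.e. one pytest fixture requesting another by name. A much-simplified sketch of that chaining; the dictionary contents here are illustrative rather than copied from the real manifest_template fixture:

    import json

    import pytest

    from ansible.module_utils.common.text.converters import to_bytes

    @pytest.fixture()
    def manifest_info():
        return {'collection_info': {'namespace': 'ns', 'name': 'col', 'version': '1.0.0'}, 'format': 1}

    @pytest.fixture()
    def manifest(manifest_info):
        # A fixture can depend on another fixture simply by naming it as a parameter.
        return to_bytes(json.dumps(manifest_info))

    def test_manifest_round_trips(manifest):
        assert json.loads(manifest)['format'] == 1
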
@@ -479,19 +457,19 @@ def test_build_with_existing_files_and_manifest(collection_input):
with tarfile.open(output_artifact, mode='r') as actual:
members = actual.getmembers()
manifest_file = next(m for m in members if m.path == "MANIFEST.json")
manifest_file = [m for m in members if m.path == "MANIFEST.json"][0]
manifest_file_obj = actual.extractfile(manifest_file.name)
manifest_file_text = manifest_file_obj.read()
manifest_file_obj.close()
assert manifest_file_text != b'{"collection_info": {"version": "6.6.6"}, "version": 1}'
json_file = next(m for m in members if m.path == "MANIFEST.json")
json_file = [m for m in members if m.path == "MANIFEST.json"][0]
json_file_obj = actual.extractfile(json_file.name)
json_file_text = json_file_obj.read()
json_file_obj.close()
assert json_file_text != b'{"files": [], "format": 1}'
sub_manifest_file = next(m for m in members if m.path == "plugins/MANIFEST.json")
sub_manifest_file = [m for m in members if m.path == "plugins/MANIFEST.json"][0]
sub_manifest_file_obj = actual.extractfile(sub_manifest_file.name)
sub_manifest_file_text = sub_manifest_file_obj.read()
sub_manifest_file_obj.close()
@@ -790,11 +768,11 @@ def test_build_with_symlink_inside_collection(collection_input):
with tarfile.open(output_artifact, mode='r') as actual:
members = actual.getmembers()
linked_folder = next(m for m in members if m.path == 'playbooks/roles/linked')
linked_folder = [m for m in members if m.path == 'playbooks/roles/linked'][0]
assert linked_folder.type == tarfile.SYMTYPE
assert linked_folder.linkname == '../../roles/linked'
linked_file = next(m for m in members if m.path == 'docs/README.md')
linked_file = [m for m in members if m.path == 'docs/README.md'][0]
assert linked_file.type == tarfile.SYMTYPE
assert linked_file.linkname == '../README.md'
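
Both hunks above replace next(generator) member lookups with a list comprehension indexed at [0]. With next(), the generator is abandoned as soon as the first matching tar member is found, so its remaining arcs never execute and branch coverage reports the line as only partially covered; the list comprehension walks every member before indexing, leaving no unexecuted arcs behind. A self-contained illustration using an in-memory archive:

    import io
    import tarfile

    # Build a tiny in-memory tar so the lookup below actually has members to scan.
    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode='w') as tar:
        for name in ('MANIFEST.json', 'FILES.json'):
            info = tarfile.TarInfo(name)
            info.size = 2
            tar.addfile(info, io.BytesIO(b'{}'))

    buf.seek(0)
    with tarfile.open(fileobj=buf, mode='r') as actual:
        members = actual.getmembers()
        # The comprehension visits every member, unlike next(...), which would stop
        # at the first match and leave the rest of the generator unexecuted.
        manifest_file = [m for m in members if m.path == 'MANIFEST.json'][0]
        assert manifest_file.name == 'MANIFEST.json'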

@@ -52,78 +52,6 @@ def call_galaxy_cli(args):
co.GlobalCLIArgs._Singleton__instance = orig
def artifact_json(namespace, name, version, dependencies, server):
json_str = json.dumps({
'artifact': {
'filename': '%s-%s-%s.tar.gz' % (namespace, name, version),
'sha256': '2d76f3b8c4bab1072848107fb3914c345f71a12a1722f25c08f5d3f51f4ab5fd',
'size': 1234,
},
'download_url': '%s/download/%s-%s-%s.tar.gz' % (server, namespace, name, version),
'metadata': {
'namespace': namespace,
'name': name,
'dependencies': dependencies,
},
'version': version
})
return to_text(json_str)
def artifact_versions_json(namespace, name, versions, galaxy_api, available_api_versions=None):
results = []
available_api_versions = available_api_versions or {}
api_version = 'v2'
if 'v3' in available_api_versions:
api_version = 'v3'
for version in versions:
results.append({
'href': '%s/api/%s/%s/%s/versions/%s/' % (galaxy_api.api_server, api_version, namespace, name, version),
'version': version,
})
if api_version == 'v2':
json_str = json.dumps({
'count': len(versions),
'next': None,
'previous': None,
'results': results
})
if api_version == 'v3':
response = {'meta': {'count': len(versions)},
'data': results,
'links': {'first': None,
'last': None,
'next': None,
'previous': None},
}
json_str = json.dumps(response)
return to_text(json_str)
def error_json(galaxy_api, errors_to_return=None, available_api_versions=None):
errors_to_return = errors_to_return or []
available_api_versions = available_api_versions or {}
response = {}
api_version = 'v2'
if 'v3' in available_api_versions:
api_version = 'v3'
if api_version == 'v2':
assert len(errors_to_return) <= 1
if errors_to_return:
response = errors_to_return[0]
if api_version == 'v3':
response['errors'] = errors_to_return
json_str = json.dumps(response)
return to_text(json_str)
@pytest.fixture(autouse='function')
def reset_cli_args():
co.GlobalCLIArgs._Singleton__instance = None
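
The trailing context above shows the autouse reset_cli_args fixture that the cleanup keeps: it clears the GlobalCLIArgs singleton so CLI option state cannot leak from one test to the next. Roughly, and noting that the real decorator passes autouse='function', which pytest simply treats as a truthy autouse flag:

    import pytest

    from ansible.utils import context_objects as co

    @pytest.fixture(autouse=True)
    def reset_cli_args():
        # Drop any previously built GlobalCLIArgs singleton so every test starts
        # from a clean CLI context.
        co.GlobalCLIArgs._Singleton__instance = None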
