[cloud][test] Use pytest conditional imports instead of nose/SkipTest (#22019)

pull/22217/head
Ryan Brown 7 years ago committed by GitHub
parent 7b2c013e5a
commit fc0ae5ee6b
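The pattern adopted across these files is pytest.importorskip(), which imports the named module and skips the whole test module at collection time if the import fails, replacing the manual nose SkipTest boilerplate. A minimal sketch of the pattern, assuming boto3 and botocore as the dependencies; the test function is a hypothetical placeholder, not taken from this commit:

import pytest

# Skip this entire test module during collection if either dependency is missing.
boto3 = pytest.importorskip("boto3")
botocore = pytest.importorskip("botocore")

def test_boto3_exposes_client_factory():
    # Hypothetical placeholder test; it only runs when both imports above succeed.
    assert hasattr(boto3, "client")

Because importorskip() returns the imported module, the assignment doubles as the import itself, so no HAS_BOTO3 flag or explicit raise SkipTest(...) is needed.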

@@ -1,17 +1,9 @@
-from nose.plugins.skip import SkipTest
-try:
-    import boto3
-    import botocore
-    HAS_BOTO3 = True
-except ImportError:
-    HAS_BOTO3 = False
-if not HAS_BOTO3:
-    raise SkipTest("test_ec2_vpc_nat_gateway.py requires the python module 'boto3' and 'botocore'")
+import pytest
 import unittest
+boto3 = pytest.importorskip("boto3")
+botocore = pytest.importorskip("botocore")
 from collections import namedtuple
 from ansible.parsing.dataloader import DataLoader
 from ansible.vars import VariableManager

@@ -1,17 +1,9 @@
-from nose.plugins.skip import SkipTest
-try:
-    import boto3
-    import botocore
-    HAS_BOTO3 = True
-except ImportError:
-    HAS_BOTO3 = False
-if not HAS_BOTO3:
-    raise SkipTest("test_kinesis_stream.py requires the python module 'boto3' and 'botocore'")
+import pytest
 import unittest
+boto3 = pytest.importorskip("boto3")
+botocore = pytest.importorskip("botocore")
 import ansible.modules.cloud.amazon.kinesis_stream as kinesis_stream
 aws_region = 'us-west-2'

@@ -19,15 +19,14 @@
 # Make coding more python3-ish
 from __future__ import (absolute_import, division, print_function)
-from nose.plugins.skip import SkipTest
+import pytest
+boto3 = pytest.importorskip("boto3")
 import json
 import copy
 from ansible.module_utils._text import to_bytes
 from ansible.module_utils import basic
 from ansible.compat.tests.mock import MagicMock, Mock, patch
 from ansible.module_utils.ec2 import HAS_BOTO3
-if not HAS_BOTO3:
-    raise SkipTest("test_ec2_asg.py requires the `boto3`, and `botocore` modules")
 # lambda is a keyword so we have to hack this.
 _temp = __import__("ansible.modules.cloud.amazon.lambda")

@@ -1,13 +1,5 @@
-from nose.plugins.skip import SkipTest
-try:
-    import boto
-    HAS_BOTO = True
-except ImportError:
-    HAS_BOTO = False
-if not HAS_BOTO:
-    raise SkipTest("test_s3.py requires the python module 'boto'")
+import pytest
+boto = pytest.importorskip("boto")
 import unittest
 import ansible.modules.cloud.amazon.s3 as s3
