mirror of https://github.com/ansible/ansible.git
Remove unused test/support plugins (#76078)
parent
227f4398c2
commit
2f3fbaac3c
@ -1,190 +0,0 @@
|
|||||||
# (c) 2018, John Imison <john+github@imison.net>
|
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = """
|
|
||||||
lookup: rabbitmq
|
|
||||||
author: John Imison <@Im0>
|
|
||||||
version_added: "2.8"
|
|
||||||
short_description: Retrieve messages from an AMQP/AMQPS RabbitMQ queue.
|
|
||||||
description:
|
|
||||||
- This lookup uses a basic get to retrieve all, or a limited number C(count), messages from a RabbitMQ queue.
|
|
||||||
options:
|
|
||||||
url:
|
|
||||||
description:
|
|
||||||
- An URI connection string to connect to the AMQP/AMQPS RabbitMQ server.
|
|
||||||
- For more information refer to the URI spec U(https://www.rabbitmq.com/uri-spec.html).
|
|
||||||
required: True
|
|
||||||
queue:
|
|
||||||
description:
|
|
||||||
- The queue to get messages from.
|
|
||||||
required: True
|
|
||||||
count:
|
|
||||||
description:
|
|
||||||
- How many messages to collect from the queue.
|
|
||||||
- If not set, defaults to retrieving all the messages from the queue.
|
|
||||||
requirements:
|
|
||||||
- The python pika package U(https://pypi.org/project/pika/).
|
|
||||||
notes:
|
|
||||||
- This lookup implements BlockingChannel.basic_get to get messages from a RabbitMQ server.
|
|
||||||
- After retrieving a message from the server, receipt of the message is acknowledged and the message on the server is deleted.
|
|
||||||
- Pika is a pure-Python implementation of the AMQP 0-9-1 protocol that tries to stay fairly independent of the underlying network support library.
|
|
||||||
- More information about pika can be found at U(https://pika.readthedocs.io/en/stable/).
|
|
||||||
- This plugin is tested against RabbitMQ. Other AMQP 0.9.1 protocol based servers may work but not tested/guaranteed.
|
|
||||||
- Assigning the return messages to a variable under C(vars) may result in unexpected results as the lookup is evaluated every time the
|
|
||||||
variable is referenced.
|
|
||||||
- Currently this plugin only handles text based messages from a queue. Unexpected results may occur when retrieving binary data.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
EXAMPLES = """
|
|
||||||
- name: Get all messages off a queue
|
|
||||||
debug:
|
|
||||||
msg: "{{ lookup('rabbitmq', url='amqp://guest:guest@192.168.0.10:5672/%2F', queue='hello') }}"
|
|
||||||
|
|
||||||
|
|
||||||
# If you are intending on using the returned messages as a variable in more than
|
|
||||||
# one task (eg. debug, template), it is recommended to set_fact.
|
|
||||||
|
|
||||||
- name: Get 2 messages off a queue and set a fact for re-use
|
|
||||||
set_fact:
|
|
||||||
messages: "{{ lookup('rabbitmq', url='amqp://guest:guest@192.168.0.10:5672/%2F', queue='hello', count=2) }}"
|
|
||||||
|
|
||||||
- name: Dump out contents of the messages
|
|
||||||
debug:
|
|
||||||
var: messages
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
RETURN = """
|
|
||||||
_list:
|
|
||||||
description:
|
|
||||||
- A list of dictionaries with keys and value from the queue.
|
|
||||||
type: list
|
|
||||||
contains:
|
|
||||||
content_type:
|
|
||||||
description: The content_type on the message in the queue.
|
|
||||||
type: str
|
|
||||||
delivery_mode:
|
|
||||||
description: The delivery_mode on the message in the queue.
|
|
||||||
type: str
|
|
||||||
delivery_tag:
|
|
||||||
description: The delivery_tag on the message in the queue.
|
|
||||||
type: str
|
|
||||||
exchange:
|
|
||||||
description: The exchange the message came from.
|
|
||||||
type: str
|
|
||||||
message_count:
|
|
||||||
description: The message_count for the message on the queue.
|
|
||||||
type: str
|
|
||||||
msg:
|
|
||||||
description: The content of the message.
|
|
||||||
type: str
|
|
||||||
redelivered:
|
|
||||||
description: The redelivered flag. True if the message has been delivered before.
|
|
||||||
type: bool
|
|
||||||
routing_key:
|
|
||||||
description: The routing_key on the message in the queue.
|
|
||||||
type: str
|
|
||||||
headers:
|
|
||||||
description: The headers for the message returned from the queue.
|
|
||||||
type: dict
|
|
||||||
json:
|
|
||||||
description: If application/json is specified in content_type, json will be loaded into variables.
|
|
||||||
type: dict
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
|
|
||||||
from ansible.errors import AnsibleError, AnsibleParserError
|
|
||||||
from ansible.plugins.lookup import LookupBase
|
|
||||||
from ansible.module_utils._text import to_native, to_text
|
|
||||||
from ansible.utils.display import Display
|
|
||||||
|
|
||||||
# pika is an optional third-party dependency; record its availability instead
# of failing at import time so a clean AnsibleError can be raised in run().
try:
    import pika
    # NOTE(review): 'spec' appears unused in this file — confirm before removing.
    from pika import spec
    HAS_PIKA = True
except ImportError:
    HAS_PIKA = False

# Module-level display handle used for verbose (-vvv) debug output.
display = Display()
|
|
||||||
|
|
||||||
|
|
||||||
class LookupModule(LookupBase):
    """Lookup plugin that drains messages from a RabbitMQ queue via basic_get."""

    def run(self, terms, variables=None, url=None, queue=None, count=None):
        """Fetch messages from *queue* on the AMQP/AMQPS broker at *url*.

        :param terms: standard lookup terms (only echoed in debug output).
        :param variables: ansible variables (only echoed in debug output).
        :param url: AMQP/AMQPS URI of the broker (required).
        :param queue: name of the queue to consume from (required).
        :param count: maximum number of messages to fetch; None means all.
        :returns: a single-element list wrapping the list of message dicts.
        :raises AnsibleError: on missing arguments or any broker failure.
        """
        if not HAS_PIKA:
            raise AnsibleError('pika python package is required for rabbitmq lookup.')
        if not url:
            raise AnsibleError('URL is required for rabbitmq lookup.')
        if not queue:
            raise AnsibleError('Queue is required for rabbitmq lookup.')

        display.vvv(u"terms:%s : variables:%s url:%s queue:%s count:%s" % (terms, variables, url, queue, count))

        try:
            parameters = pika.URLParameters(url)
        except Exception as e:
            raise AnsibleError("URL malformed: %s" % to_native(e))

        try:
            connection = pika.BlockingConnection(parameters)
        except Exception as e:
            raise AnsibleError("Connection issue: %s" % to_native(e))

        try:
            conn_channel = connection.channel()
        except pika.exceptions.AMQPChannelError as e:
            try:
                connection.close()
            except pika.exceptions.AMQPConnectionError as ie:
                # BUG FIX: the format values must be a tuple. The original code
                # applied '%' to only the first value (one argument for two %s
                # specifiers) and passed the second as an extra positional
                # argument to AnsibleError, raising a TypeError instead of the
                # intended error message.
                raise AnsibleError("Channel and connection closing issues: %s / %s" % (to_native(e), to_native(ie)))
            raise AnsibleError("Channel issue: %s" % to_native(e))

        ret = []
        idx = 0

        while True:
            method_frame, properties, body = conn_channel.basic_get(queue=queue)
            if method_frame:
                display.vvv(u"%s, %s, %s " % (method_frame, properties, to_text(body)))

                # TODO: In the future consider checking content_type and handle text/binary data differently.
                msg_details = dict({
                    'msg': to_text(body),
                    'message_count': method_frame.message_count,
                    'routing_key': method_frame.routing_key,
                    'delivery_tag': method_frame.delivery_tag,
                    'redelivered': method_frame.redelivered,
                    'exchange': method_frame.exchange,
                    'delivery_mode': properties.delivery_mode,
                    'content_type': properties.content_type,
                    'headers': properties.headers
                })
                if properties.content_type == 'application/json':
                    try:
                        msg_details['json'] = json.loads(msg_details['msg'])
                    except ValueError as e:
                        raise AnsibleError("Unable to decode JSON for message %s: %s" % (method_frame.delivery_tag, to_native(e)))

                ret.append(msg_details)
                # Acknowledge receipt; this deletes the message on the server.
                conn_channel.basic_ack(method_frame.delivery_tag)
                idx += 1
                # message_count is the number of messages still queued.
                if method_frame.message_count == 0 or idx == count:
                    break
            # If we didn't get a method_frame, exit.
            else:
                break

        if connection.is_closed:
            return [ret]
        else:
            # Best-effort close; the messages were already acknowledged.
            try:
                connection.close()
            except pika.exceptions.AMQPConnectionError:
                pass
            return [ret]
|
|
File diff suppressed because it is too large
Load Diff
@ -1,142 +0,0 @@
|
|||||||
# This code is part of Ansible, but is an independent component.
|
|
||||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
|
||||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
|
||||||
# still belong to the author of the module, and may assign their own license
|
|
||||||
# to the complete work.
|
|
||||||
#
|
|
||||||
# Copyright (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
|
|
||||||
# All rights reserved.
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without modification,
|
|
||||||
# are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
||||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
|
||||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
|
||||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
|
||||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
|
||||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
|
|
||||||
class SQLParseError(Exception):
    """Raised when an SQL identifier cannot be parsed or validated."""
|
|
||||||
|
|
||||||
|
|
||||||
class UnclosedQuoteError(SQLParseError):
    """Raised when a quoted identifier is missing its closing quote."""
|
|
||||||
|
|
||||||
|
|
||||||
# maps a type of identifier to the maximum number of dot levels that are
# allowed to specify that identifier. For example, a database column can be
# specified by up to 4 levels: database.schema.table.column
_PG_IDENTIFIER_TO_DOT_LEVEL = dict(
    database=1,
    schema=2,
    table=3,
    column=4,
    role=1,
    tablespace=1,
    sequence=3,
    publication=1,
)
# MySQL has no schema level between database and table, so the maximum
# depths are one shallower than the PostgreSQL ones above.
_MYSQL_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, table=2, column=3, role=1, vars=1)
|
|
||||||
|
|
||||||
|
|
||||||
def _find_end_quote(identifier, quote_char):
|
|
||||||
accumulate = 0
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
quote = identifier.index(quote_char)
|
|
||||||
except ValueError:
|
|
||||||
raise UnclosedQuoteError
|
|
||||||
accumulate = accumulate + quote
|
|
||||||
try:
|
|
||||||
next_char = identifier[quote + 1]
|
|
||||||
except IndexError:
|
|
||||||
return accumulate
|
|
||||||
if next_char == quote_char:
|
|
||||||
try:
|
|
||||||
identifier = identifier[quote + 2:]
|
|
||||||
accumulate = accumulate + 2
|
|
||||||
except IndexError:
|
|
||||||
raise UnclosedQuoteError
|
|
||||||
else:
|
|
||||||
return accumulate
|
|
||||||
|
|
||||||
|
|
||||||
def _identifier_parse(identifier, quote_char):
    """Recursively split a dotted SQL identifier into its quoted fragments.

    Returns a list of fragments, each wrapped in quote_char with any embedded
    quote characters doubled. Raises SQLParseError on malformed input.
    """
    if not identifier:
        raise SQLParseError('Identifier name unspecified or unquoted trailing dot')

    already_quoted = False
    if identifier.startswith(quote_char):
        already_quoted = True
        try:
            # +1 converts the index (computed on identifier[1:]) back to an
            # index into the full string.
            end_quote = _find_end_quote(identifier[1:], quote_char=quote_char) + 1
        except UnclosedQuoteError:
            # No closing quote: fall through and treat the whole thing as an
            # unquoted identifier below.
            already_quoted = False
        else:
            if end_quote < len(identifier) - 1:
                if identifier[end_quote + 1] == '.':
                    # Quoted fragment followed by a dot: recurse on the rest.
                    dot = end_quote + 1
                    first_identifier = identifier[:dot]
                    next_identifier = identifier[dot + 1:]
                    further_identifiers = _identifier_parse(next_identifier, quote_char)
                    further_identifiers.insert(0, first_identifier)
                else:
                    # Trailing characters after the closing quote that are not
                    # a dot mean the user's quoting is inconsistent.
                    raise SQLParseError('User escaped identifiers must escape extra quotes')
            else:
                # The closing quote ends the string: a single quoted fragment.
                further_identifiers = [identifier]

    if not already_quoted:
        try:
            dot = identifier.index('.')
        except ValueError:
            # No dot: quote the whole identifier, doubling embedded quotes.
            identifier = identifier.replace(quote_char, quote_char * 2)
            identifier = ''.join((quote_char, identifier, quote_char))
            further_identifiers = [identifier]
        else:
            if dot == 0 or dot >= len(identifier) - 1:
                # Leading or trailing dot is part of the name itself, not a
                # separator: quote the whole identifier as one fragment.
                identifier = identifier.replace(quote_char, quote_char * 2)
                identifier = ''.join((quote_char, identifier, quote_char))
                further_identifiers = [identifier]
            else:
                # Split on the first dot and recurse on the remainder.
                first_identifier = identifier[:dot]
                next_identifier = identifier[dot + 1:]
                further_identifiers = _identifier_parse(next_identifier, quote_char)
                first_identifier = first_identifier.replace(quote_char, quote_char * 2)
                first_identifier = ''.join((quote_char, first_identifier, quote_char))
                further_identifiers.insert(0, first_identifier)

    return further_identifiers
|
|
||||||
|
|
||||||
|
|
||||||
def pg_quote_identifier(identifier, id_type):
    """Quote a dotted PostgreSQL identifier, validating its depth against the
    maximum number of levels allowed for *id_type*."""
    fragments = _identifier_parse(identifier, quote_char='"')
    max_depth = _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]
    if len(fragments) > max_depth:
        raise SQLParseError('PostgreSQL does not support %s with more than %i dots' % (id_type, max_depth))
    return '.'.join(fragments)
|
|
||||||
|
|
||||||
|
|
||||||
def mysql_quote_identifier(identifier, id_type):
    """Quote a dotted MySQL identifier, validating its depth against the
    maximum number of levels allowed for *id_type*.

    A fragment that is exactly a quoted asterisk is special-cased back to a
    bare ``*`` (e.g. for column grants on all columns).
    """
    fragments = _identifier_parse(identifier, quote_char='`')
    max_depth = _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]
    if (len(fragments) - 1) > max_depth:
        raise SQLParseError('MySQL does not support %s with more than %i dots' % (id_type, max_depth))

    return '.'.join('*' if fragment == '`*`' else fragment for fragment in fragments)
|
|
@ -1,364 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# This code is part of Ansible, but is an independent component.
|
|
||||||
# This particular file snippet, and this file snippet only, is licensed under the
|
|
||||||
# Modified BSD License. Modules you write using this snippet, which is embedded
|
|
||||||
# dynamically by Ansible, still belong to the author of the module, and may assign
|
|
||||||
# their own license to the complete work.
|
|
||||||
#
|
|
||||||
# Copyright (c), Entrust Datacard Corporation, 2019
|
|
||||||
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
|
|
||||||
|
|
||||||
# Redistribution and use in source and binary forms, with or without modification,
|
|
||||||
# are permitted provided that the following conditions are met:
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
||||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
|
||||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
|
||||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
|
||||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
|
||||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import time
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
from ansible.module_utils._text import to_text, to_native
|
|
||||||
from ansible.module_utils.basic import missing_required_lib
|
|
||||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
|
||||||
from ansible.module_utils.six.moves.urllib.error import HTTPError
|
|
||||||
from ansible.module_utils.urls import Request
|
|
||||||
|
|
||||||
YAML_IMP_ERR = None
|
|
||||||
try:
|
|
||||||
import yaml
|
|
||||||
except ImportError:
|
|
||||||
YAML_FOUND = False
|
|
||||||
YAML_IMP_ERR = traceback.format_exc()
|
|
||||||
else:
|
|
||||||
YAML_FOUND = True
|
|
||||||
|
|
||||||
valid_file_format = re.compile(r".*(\.)(yml|yaml|json)$")
|
|
||||||
|
|
||||||
|
|
||||||
def ecs_client_argument_spec():
    """Return the common AnsibleModule argument spec shared by all modules
    that talk to the Entrust ECS API."""
    default_spec_url = 'https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml'
    return {
        'entrust_api_user': {'type': 'str', 'required': True},
        'entrust_api_key': {'type': 'str', 'required': True, 'no_log': True},
        'entrust_api_client_cert_path': {'type': 'path', 'required': True},
        'entrust_api_client_cert_key_path': {'type': 'path', 'required': True, 'no_log': True},
        'entrust_api_specification_path': {'type': 'path', 'default': default_spec_url},
    }
|
|
||||||
|
|
||||||
|
|
||||||
class SessionConfigurationException(Exception):
    """Raised if we cannot configure a session with the API."""
|
|
||||||
|
|
||||||
|
|
||||||
class RestOperationException(Exception):
    """Encapsulate a REST API error payload.

    Exposes the API status code, the list of individual error messages, and
    a single space-joined message string.
    """

    def __init__(self, error):
        # Status code reported by the API, normalized to a native string.
        self.status = to_native(error.get("status", None))
        # Collect each individual error message from the payload.
        collected = []
        for entry in error.get("errors", {}):
            collected.append(to_native(entry.get("message")))
        self.errors = collected
        self.message = to_native(" ".join(collected))
|
|
||||||
|
|
||||||
|
|
||||||
def generate_docstring(operation_spec):
    """Generate a docstring for an operation defined in operation_spec (swagger)"""
    # Start from the operation's own description, then append its arguments.
    pieces = [operation_spec.get("description", "No Description"), "\n\n"]

    params = operation_spec.get("parameters", [])
    if len(params) != 0:
        pieces.append("\tArguments:\n\n")
        for param in params:
            requirement = "Required" if param.get("required", False) else "Not Required"
            pieces.append("{0} ({1}:{2}): {3}\n".format(
                param.get("name"),
                param.get("type", "No Type"),
                requirement,
                param.get("description"),
            ))

    return "".join(pieces)
|
|
||||||
|
|
||||||
|
|
||||||
def bind(instance, method, operation_spec):
    """Bind *method* to *instance*, naming and documenting the resulting
    callable from the swagger operation_spec."""
    def bound(*args, **kwargs):
        return method(instance, *args, **kwargs)

    # Make sure we don't confuse users; add the proper name and documentation to the function.
    # Users can use !help(<function>) to get help on the function from interactive python or pdb
    bound.__name__ = str(operation_spec.get("operationId").split("Using")[0])
    bound.__doc__ = generate_docstring(operation_spec)
    return bound
|
|
||||||
|
|
||||||
|
|
||||||
class RestOperation(object):
    """A single REST call (HTTP method + URL template + swagger parameter
    spec) bound to a session; restmethod() performs the actual request."""

    def __init__(self, session, uri, method, parameters=None):
        self.session = session
        self.method = method
        if parameters is None:
            self.parameters = {}
        else:
            self.parameters = parameters
        # Full URL template; {placeholders} are filled from path parameters
        # at call time in restmethod().
        self.url = "{scheme}://{host}{base_path}{uri}".format(scheme="https", host=session._spec.get("host"), base_path=session._spec.get("basePath"), uri=uri)

    def restmethod(self, *args, **kwargs):
        """Do the hard work of making the request here"""

        # gather named path parameters and do substitution on the URL
        if self.parameters:
            path_parameters = {}
            body_parameters = {}
            query_parameters = {}
            # Sort each keyword argument into path/body/query buckets based
            # on the swagger "in" location of the matching parameter spec.
            for x in self.parameters:
                expected_location = x.get("in")
                key_name = x.get("name", None)
                key_value = kwargs.get(key_name, None)
                if expected_location == "path" and key_name and key_value:
                    path_parameters.update({key_name: key_value})
                elif expected_location == "body" and key_name and key_value:
                    body_parameters.update({key_name: key_value})
                elif expected_location == "query" and key_name and key_value:
                    query_parameters.update({key_name: key_value})

            # The API takes at most one body object: unwrap the first (and
            # presumably only) body parameter's value.
            if len(body_parameters.keys()) >= 1:
                body_parameters = body_parameters.get(list(body_parameters.keys())[0])
            else:
                body_parameters = None
        else:
            path_parameters = {}
            query_parameters = {}
            body_parameters = None

        # This will fail if we have not set path parameters with a KeyError
        url = self.url.format(**path_parameters)
        if query_parameters:
            # modify the URL to add path parameters
            url = url + "?" + urlencode(query_parameters)

        try:
            if body_parameters:
                body_parameters_json = json.dumps(body_parameters)
                response = self.session.request.open(method=self.method, url=url, data=body_parameters_json)
            else:
                response = self.session.request.open(method=self.method, url=url)
            request_error = False
        except HTTPError as e:
            # An HTTPError has the same methods available as a valid response from request.open
            response = e
            request_error = True

        # Return the result if JSON and success ({} for empty responses)
        # Raise an exception if there was a failure.
        try:
            result_code = response.getcode()
            result = json.loads(response.read())
        except ValueError:
            # Empty or non-JSON body is treated as an empty result.
            result = {}

        if result or result == {}:
            if result_code and result_code < 400:
                return result
            else:
                raise RestOperationException(result)

        # Raise a generic RestOperationException if this fails
        raise RestOperationException({"status": result_code, "errors": [{"message": "REST Operation Failed"}]})
|
|
||||||
|
|
||||||
|
|
||||||
class Resource(object):
    """ Implement basic CRUD operations against a path. """

    def __init__(self, session):
        self.session = session
        self.parameters = {}

        # Walk every path/method pair in the swagger spec and attach a bound
        # RestOperation method to this instance for each one.
        for url in session._spec.get("paths").keys():
            methods = session._spec.get("paths").get(url)
            for method in methods.keys():
                operation_spec = methods.get(method)
                operation_name = operation_spec.get("operationId", None)
                parameters = operation_spec.get("parameters")

                # Synthesize an operation name when the spec omits operationId.
                if not operation_name:
                    if method.lower() == "post":
                        operation_name = "Create"
                    elif method.lower() == "get":
                        operation_name = "Get"
                    elif method.lower() == "put":
                        operation_name = "Update"
                    elif method.lower() == "delete":
                        operation_name = "Delete"
                    elif method.lower() == "patch":
                        operation_name = "Patch"
                    else:
                        raise SessionConfigurationException(to_native("Invalid REST method type {0}".format(method)))

                    # Get the non-parameter parts of the URL and append to the operation name
                    # e.g /application/version -> GetApplicationVersion
                    # e.g. /application/{id} -> GetApplication
                    # This may lead to duplicates, which we must prevent.
                    operation_name += re.sub(r"{(.*)}", "", url).replace("/", " ").title().replace(" ", "")
                    operation_spec["operationId"] = operation_name

                op = RestOperation(session, url, method, parameters)
                setattr(self, operation_name, bind(self, op.restmethod, operation_spec))
|
|
||||||
|
|
||||||
|
|
||||||
# Session to encapsulate the connection parameters of the module_utils Request object, the api spec, etc
|
|
||||||
# Session to encapsulate the connection parameters of the module_utils Request object, the api spec, etc
class ECSSession(object):
    def __init__(self, name, **kwargs):
        """
        Initialize our session
        """

        self._set_config(name, **kwargs)

    def client(self):
        """Build a Resource with one bound method per spec operation."""
        resource = Resource(self)
        return resource

    def _set_config(self, name, **kwargs):
        """Configure authentication, client certificates and load the
        OpenAPI specification for this session.

        :raises SessionConfigurationException: on any missing or invalid
            configuration value, or failure to fetch the specification.
        """
        headers = {
            "Content-Type": "application/json",
            "Connection": "keep-alive",
        }
        self.request = Request(headers=headers, timeout=60)

        # Only one configuration source today (module variables), but keep
        # the loop so additional configurators can be chained in later.
        configurators = [self._read_config_vars]
        for configurator in configurators:
            self._config = configurator(name, **kwargs)
            if self._config:
                break
        if self._config is None:
            raise SessionConfigurationException(to_native("No Configuration Found."))

        # set up auth if passed
        entrust_api_user = self.get_config("entrust_api_user")
        entrust_api_key = self.get_config("entrust_api_key")
        if entrust_api_user and entrust_api_key:
            self.request.url_username = entrust_api_user
            self.request.url_password = entrust_api_key
        else:
            raise SessionConfigurationException(to_native("User and key must be provided."))

        # set up client certificate if passed (support all-in one or cert + key)
        entrust_api_cert = self.get_config("entrust_api_cert")
        entrust_api_cert_key = self.get_config("entrust_api_cert_key")
        if entrust_api_cert:
            self.request.client_cert = entrust_api_cert
            if entrust_api_cert_key:
                self.request.client_key = entrust_api_cert_key
        else:
            raise SessionConfigurationException(to_native("Client certificate for authentication to the API must be provided."))

        # set up the spec
        entrust_api_specification_path = self.get_config("entrust_api_specification_path")

        if not entrust_api_specification_path.startswith("http") and not os.path.isfile(entrust_api_specification_path):
            raise SessionConfigurationException(to_native("OpenAPI specification was not found at location {0}.".format(entrust_api_specification_path)))
        if not valid_file_format.match(entrust_api_specification_path):
            raise SessionConfigurationException(to_native("OpenAPI specification filename must end in .json, .yml or .yaml"))

        self.verify = True

        if entrust_api_specification_path.startswith("http"):
            try:
                http_response = Request().open(method="GET", url=entrust_api_specification_path)
                http_response_contents = http_response.read()
                if entrust_api_specification_path.endswith(".json"):
                    # BUG FIX: http_response_contents is a bytes/str payload,
                    # not a file-like object, so json.load() raised an
                    # AttributeError here; json.loads() parses it correctly.
                    self._spec = json.loads(http_response_contents)
                elif entrust_api_specification_path.endswith(".yml") or entrust_api_specification_path.endswith(".yaml"):
                    self._spec = yaml.safe_load(http_response_contents)
            except HTTPError as e:
                raise SessionConfigurationException(to_native("Error downloading specification from address '{0}', received error code '{1}'".format(
                    entrust_api_specification_path, e.getcode())))
        else:
            with open(entrust_api_specification_path) as f:
                if ".json" in entrust_api_specification_path:
                    self._spec = json.load(f)
                elif ".yml" in entrust_api_specification_path or ".yaml" in entrust_api_specification_path:
                    self._spec = yaml.safe_load(f)

    def get_config(self, item):
        """Return a configuration value, or None when it is not set."""
        return self._config.get(item, None)

    def _read_config_vars(self, name, **kwargs):
        """ Read configuration from variables passed to the module. """
        config = {}

        entrust_api_specification_path = kwargs.get("entrust_api_specification_path")
        if not entrust_api_specification_path or (not entrust_api_specification_path.startswith("http") and not os.path.isfile(entrust_api_specification_path)):
            raise SessionConfigurationException(
                to_native(
                    "Parameter provided for entrust_api_specification_path of value '{0}' was not a valid file path or HTTPS address.".format(
                        entrust_api_specification_path
                    )
                )
            )

        for required_file in ["entrust_api_cert", "entrust_api_cert_key"]:
            file_path = kwargs.get(required_file)
            if not file_path or not os.path.isfile(file_path):
                raise SessionConfigurationException(
                    to_native("Parameter provided for {0} of value '{1}' was not a valid file path.".format(required_file, file_path))
                )

        for required_var in ["entrust_api_user", "entrust_api_key"]:
            if not kwargs.get(required_var):
                raise SessionConfigurationException(to_native("Parameter provided for {0} was missing.".format(required_var)))

        config["entrust_api_cert"] = kwargs.get("entrust_api_cert")
        config["entrust_api_cert_key"] = kwargs.get("entrust_api_cert_key")
        config["entrust_api_specification_path"] = kwargs.get("entrust_api_specification_path")
        config["entrust_api_user"] = kwargs.get("entrust_api_user")
        config["entrust_api_key"] = kwargs.get("entrust_api_key")

        return config
|
|
||||||
|
|
||||||
|
|
||||||
def ECSClient(entrust_api_user=None, entrust_api_key=None, entrust_api_cert=None, entrust_api_cert_key=None, entrust_api_specification_path=None):
    """Create an ECS client.

    :param entrust_api_user: API user name for HTTP basic auth.
    :param entrust_api_key: API key for HTTP basic auth.
    :param entrust_api_cert: path to the client certificate (or combined
        cert+key) file.
    :param entrust_api_cert_key: path to the client certificate key file.
    :param entrust_api_specification_path: path or HTTPS URL of the OpenAPI
        specification; defaults to the hosted cms-api-2.1.0.yaml.
    :returns: a Resource whose methods are generated from the specification.
    :raises SessionConfigurationException: if PyYAML is missing or the
        configuration is invalid.
    """

    if not YAML_FOUND:
        raise SessionConfigurationException(missing_required_lib("PyYAML"), exception=YAML_IMP_ERR)

    if entrust_api_specification_path is None:
        entrust_api_specification_path = "https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml"

    # Not functionally necessary with current uses of this module_util, but better to be explicit for future use cases
    entrust_api_user = to_text(entrust_api_user)
    entrust_api_key = to_text(entrust_api_key)
    # CONSISTENCY FIX: entrust_api_cert was the only argument not normalized
    # with to_text alongside its siblings.
    entrust_api_cert = to_text(entrust_api_cert)
    entrust_api_cert_key = to_text(entrust_api_cert_key)
    entrust_api_specification_path = to_text(entrust_api_specification_path)

    return ECSSession(
        "ecs",
        entrust_api_user=entrust_api_user,
        entrust_api_key=entrust_api_key,
        entrust_api_cert=entrust_api_cert,
        entrust_api_cert_key=entrust_api_cert_key,
        entrust_api_specification_path=entrust_api_specification_path,
    ).client()
|
|
@ -1,106 +0,0 @@
|
|||||||
# This code is part of Ansible, but is an independent component.
|
|
||||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
|
||||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
|
||||||
# still belong to the author of the module, and may assign their own license
|
|
||||||
# to the complete work.
|
|
||||||
#
|
|
||||||
# Copyright (c), Jonathan Mainguy <jon@soh.re>, 2015
|
|
||||||
# Most of this was originally added by Sven Schliesing @muffl0n in the mysql_user.py module
|
|
||||||
# All rights reserved.
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without modification,
|
|
||||||
# are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
||||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
|
||||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
|
||||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
|
||||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
|
||||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
try:
|
|
||||||
import pymysql as mysql_driver
|
|
||||||
_mysql_cursor_param = 'cursor'
|
|
||||||
except ImportError:
|
|
||||||
try:
|
|
||||||
import MySQLdb as mysql_driver
|
|
||||||
import MySQLdb.cursors
|
|
||||||
_mysql_cursor_param = 'cursorclass'
|
|
||||||
except ImportError:
|
|
||||||
mysql_driver = None
|
|
||||||
|
|
||||||
mysql_driver_fail_msg = 'The PyMySQL (Python 2.7 and Python 3.X) or MySQL-python (Python 2.X) module is required.'
|
|
||||||
|
|
||||||
|
|
||||||
def mysql_connect(module, login_user=None, login_password=None, config_file='', ssl_cert=None, ssl_key=None, ssl_ca=None, db=None, cursor_class=None,
                  connect_timeout=30, autocommit=False):
    """Open a MySQL connection and return a ``(cursor, connection)`` tuple.

    Connection settings are assembled from ``module.params`` (socket or
    host/port), an optional MySQL config file, and explicit keyword
    arguments, which take precedence over the config file.
    """
    config = {}

    # Any SSL material at all means the driver needs an 'ssl' sub-dict.
    if ssl_ca is not None or ssl_key is not None or ssl_cert is not None:
        config['ssl'] = {}

    # A UNIX socket takes precedence over TCP host/port.
    if module.params['login_unix_socket']:
        config['unix_socket'] = module.params['login_unix_socket']
    else:
        config['host'] = module.params['login_host']
        config['port'] = module.params['login_port']

    if os.path.exists(config_file):
        config['read_default_file'] = config_file

    # Explicit credentials/settings override anything in the config file;
    # unset (None) values are simply omitted.
    for option, value in (('user', login_user),
                          ('passwd', login_password),
                          ('db', db),
                          ('connect_timeout', connect_timeout)):
        if value is not None:
            config[option] = value

    for ssl_option, value in (('cert', ssl_cert), ('key', ssl_key), ('ca', ssl_ca)):
        if value is not None:
            config['ssl'][ssl_option] = value

    if _mysql_cursor_param == 'cursor':
        # PyMySQL accepts autocommit directly in connect().
        db_connection = mysql_driver.connect(autocommit=autocommit, **config)
    else:
        # MySQLdb requires autocommit to be toggled after connecting.
        db_connection = mysql_driver.connect(**config)
        if autocommit:
            db_connection.autocommit(True)

    if cursor_class == 'DictCursor':
        cursor = db_connection.cursor(**{_mysql_cursor_param: mysql_driver.cursors.DictCursor})
    else:
        cursor = db_connection.cursor()
    return cursor, db_connection
|
|
||||||
|
|
||||||
|
|
||||||
def mysql_common_argument_spec():
    """Return the argument-spec entries shared by all MySQL modules.

    Covers credentials, connection endpoint (host/port or UNIX socket),
    MySQL config file location, timeout, and client TLS material.
    """
    return {
        'login_user': dict(type='str', default=None),
        'login_password': dict(type='str', no_log=True),
        'login_host': dict(type='str', default='localhost'),
        'login_port': dict(type='int', default=3306),
        'login_unix_socket': dict(type='str'),
        'config_file': dict(type='path', default='~/.my.cnf'),
        'connect_timeout': dict(type='int', default=30),
        'client_cert': dict(type='path', aliases=['ssl_cert']),
        'client_key': dict(type='path', aliases=['ssl_key']),
        'ca_cert': dict(type='path', aliases=['ssl_ca']),
    }
|
|
@ -1,330 +0,0 @@
|
|||||||
# This code is part of Ansible, but is an independent component.
|
|
||||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
|
||||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
|
||||||
# still belong to the author of the module, and may assign their own license
|
|
||||||
# to the complete work.
|
|
||||||
#
|
|
||||||
# Copyright (c), Ted Timmons <ted@timmons.me>, 2017.
|
|
||||||
# Most of this was originally added by other creators in the postgresql_user module.
|
|
||||||
# All rights reserved.
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without modification,
|
|
||||||
# are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
||||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
|
||||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
|
||||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
|
||||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
|
||||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
psycopg2 = None # This line needs for unit tests
|
|
||||||
try:
|
|
||||||
import psycopg2
|
|
||||||
HAS_PSYCOPG2 = True
|
|
||||||
except ImportError:
|
|
||||||
HAS_PSYCOPG2 = False
|
|
||||||
|
|
||||||
from ansible.module_utils.basic import missing_required_lib
|
|
||||||
from ansible.module_utils._text import to_native
|
|
||||||
from ansible.module_utils.six import iteritems
|
|
||||||
from ansible.module_utils.compat.version import LooseVersion
|
|
||||||
|
|
||||||
|
|
||||||
def postgres_common_argument_spec():
    """
    Return a dictionary with connection options.

    The options are commonly used by most of PostgreSQL modules.
    """
    return {
        'login_user': dict(default='postgres'),
        'login_password': dict(default='', no_log=True),
        'login_host': dict(default=''),
        'login_unix_socket': dict(default=''),
        'port': dict(type='int', default=5432, aliases=['login_port']),
        'ssl_mode': dict(default='prefer', choices=['allow', 'disable', 'prefer', 'require', 'verify-ca', 'verify-full']),
        'ca_cert': dict(aliases=['ssl_rootcert']),
    }
|
|
||||||
|
|
||||||
|
|
||||||
def ensure_required_libs(module):
    """Check required libraries.

    Fails the module when psycopg2 is missing, or when it is too old to
    honour the ca_cert option.
    """
    if not HAS_PSYCOPG2:
        module.fail_json(msg=missing_required_lib('psycopg2'))

    # ca_cert maps to libpq's sslrootcert, which psycopg2 supports from 2.4.3.
    wants_ca_cert = module.params.get('ca_cert')
    if wants_ca_cert and LooseVersion(psycopg2.__version__) < LooseVersion('2.4.3'):
        module.fail_json(msg='psycopg2 must be at least 2.4.3 in order to use the ca_cert parameter')
|
|
||||||
|
|
||||||
|
|
||||||
def connect_to_db(module, conn_params, autocommit=False, fail_on_conn=True):
    """Connect to a PostgreSQL database.

    Return psycopg2 connection object.

    Args:
        module (AnsibleModule) -- object of ansible.module_utils.basic.AnsibleModule class
        conn_params (dict) -- dictionary with connection parameters

    Kwargs:
        autocommit (bool) -- commit automatically (default False)
        fail_on_conn (bool) -- fail if connection failed or just warn and return None (default True)
    """
    # Verifies psycopg2 availability/version before touching the driver.
    ensure_required_libs(module)

    db_connection = None
    try:
        db_connection = psycopg2.connect(**conn_params)
        if autocommit:
            # set_session(autocommit=...) appeared in psycopg2 2.4.2; older
            # versions fall back to the isolation-level constant.
            if LooseVersion(psycopg2.__version__) >= LooseVersion('2.4.2'):
                db_connection.set_session(autocommit=True)
            else:
                db_connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)

        # Switch role, if specified:
        if module.params.get('session_role'):
            # NOTE(review): psycopg2.extras is referenced although only
            # "import psycopg2" is visible at the top of this file -- confirm
            # psycopg2.extras is importable in the supported driver versions.
            cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor)

            try:
                cursor.execute('SET ROLE "%s"' % module.params['session_role'])
            except Exception as e:
                module.fail_json(msg="Could not switch role: %s" % to_native(e))
            finally:
                # The temporary cursor is closed whether SET ROLE worked or not.
                cursor.close()

    except TypeError as e:
        # psycopg2 raises TypeError for unknown connect() keyword arguments;
        # a complaint about 'sslrootcert' means the ca_cert option is not
        # supported by this server/driver combination.
        if 'sslrootcert' in e.args[0]:
            module.fail_json(msg='Postgresql server must be at least '
                                 'version 8.4 to support sslrootcert')

        if fail_on_conn:
            module.fail_json(msg="unable to connect to database: %s" % to_native(e))
        else:
            # Best-effort mode: report the problem and return None below.
            module.warn("PostgreSQL server is unavailable: %s" % to_native(e))
            db_connection = None

    except Exception as e:
        if fail_on_conn:
            module.fail_json(msg="unable to connect to database: %s" % to_native(e))
        else:
            module.warn("PostgreSQL server is unavailable: %s" % to_native(e))
            db_connection = None

    return db_connection
|
|
||||||
|
|
||||||
|
|
||||||
def exec_sql(obj, query, query_params=None, ddl=False, add_to_executed=True, dont_exec=False):
    """Execute SQL.

    Auxiliary function for PostgreSQL user classes.

    Returns a query result if possible or True/False if ddl=True arg was passed.
    It is necessary for statements that don't return any result (like DDL queries).

    Args:
        obj (obj) -- must be an object of a user class.
            The object must have module (AnsibleModule class object) and
            cursor (psycopg cursor object) attributes
        query (str) -- SQL query to execute

    Kwargs:
        query_params (dict or tuple) -- Query parameters to prevent SQL injections,
            could be a dict or tuple
        ddl (bool) -- must return True or False instead of rows (typical for DDL queries)
            (default False)
        add_to_executed (bool) -- append the query to obj.executed_queries attribute
        dont_exec (bool) -- used with add_to_executed=True to generate a query, add it
            to obj.executed_queries list and return True (default False)
    """

    if dont_exec:
        # This is usually needed to return queries in check_mode
        # without execution.  mogrify() renders the parameters into the
        # query string exactly as execute() would.
        query = obj.cursor.mogrify(query, query_params)
        if add_to_executed:
            obj.executed_queries.append(query)

        return True

    try:
        if query_params is not None:
            obj.cursor.execute(query, query_params)
        else:
            obj.cursor.execute(query)

        if add_to_executed:
            # Record the fully-rendered statement, not the template.
            if query_params is not None:
                obj.executed_queries.append(obj.cursor.mogrify(query, query_params))
            else:
                obj.executed_queries.append(query)

        if not ddl:
            res = obj.cursor.fetchall()
            return res
        # DDL statements have no result set; report success instead.
        return True
    except Exception as e:
        obj.module.fail_json(msg="Cannot execute SQL '%s': %s" % (query, to_native(e)))

    # NOTE: fail_json normally exits the process; this return is reached only
    # when fail_json is mocked (e.g. in unit tests).
    return False
|
|
||||||
|
|
||||||
|
|
||||||
def get_conn_params(module, params_dict, warn_db_default=True):
    """Get connection parameters from the passed dictionary.

    Return a dictionary with parameters to connect to PostgreSQL server.

    Args:
        module (AnsibleModule) -- object of ansible.module_utils.basic.AnsibleModule class
        params_dict (dict) -- dictionary with variables

    Kwargs:
        warn_db_default (bool) -- warn that the default DB is used (default True)
    """
    # To pick up libpq defaults, a keyword argument must simply be absent;
    # empty and None values are therefore filtered out below instead of
    # being forwarded.
    params_map = {
        "login_host": "host",
        "login_user": "user",
        "login_password": "password",
        "port": "port",
        "ssl_mode": "sslmode",
        "ca_cert": "sslrootcert",
    }

    # Modules disagree on the name of the database option; take the first
    # one that is set, otherwise (optionally) warn about the default DB.
    for db_option in ("db", "database", "login_db"):
        if params_dict.get(db_option):
            params_map[db_option] = "database"
            break
    else:
        if warn_db_default:
            module.warn('Database name has not been passed, '
                        'used default database to connect to.')

    kw = {params_map[key]: value
          for key, value in params_dict.items()
          if key in params_map and value != '' and value is not None}

    # If a login_unix_socket is specified, incorporate it here: a socket
    # path replaces a local (or unset) host.
    current_host = kw.get("host")
    if (current_host is None or current_host == "localhost") and params_dict["login_unix_socket"] != "":
        kw["host"] = params_dict["login_unix_socket"]

    return kw
|
|
||||||
|
|
||||||
|
|
||||||
class PgMembership(object):
    """Manage membership of target roles in PostgreSQL groups (roles).

    Every statement issued (or generated) on the way is collected in
    ``self.executed_queries`` via exec_sql().
    """

    def __init__(self, module, cursor, groups, target_roles, fail_on_role=True):
        # module -- AnsibleModule object, used for warn/fail/exit reporting.
        self.module = module
        # cursor -- psycopg2 cursor handed to exec_sql().
        self.cursor = cursor
        # Role and group names are stripped of surrounding whitespace up front.
        self.target_roles = [r.strip() for r in target_roles]
        self.groups = [r.strip() for r in groups]
        self.executed_queries = []
        # Per-group lists of roles actually granted/revoked during this run.
        self.granted = {}
        self.revoked = {}
        # When True a non-existent role is fatal; otherwise it is skipped.
        self.fail_on_role = fail_on_role
        self.non_existent_roles = []
        self.changed = False
        # Filters self.groups / self.target_roles down to roles that exist.
        self.__check_roles_exist()

    def grant(self):
        """Grant each target role membership of each group; return changed flag."""
        for group in self.groups:
            self.granted[group] = []

            for role in self.target_roles:
                # If role is in a group now, pass:
                if self.__check_membership(group, role):
                    continue

                query = 'GRANT "%s" TO "%s"' % (group, role)
                self.changed = exec_sql(self, query, ddl=True)

                if self.changed:
                    self.granted[group].append(role)

        return self.changed

    def revoke(self):
        """Revoke each target role's membership of each group; return changed flag."""
        for group in self.groups:
            self.revoked[group] = []

            for role in self.target_roles:
                # If role is not in a group now, pass:
                if not self.__check_membership(group, role):
                    continue

                query = 'REVOKE "%s" FROM "%s"' % (group, role)
                self.changed = exec_sql(self, query, ddl=True)

                if self.changed:
                    self.revoked[group].append(role)

        return self.changed

    def __check_membership(self, src_role, dst_role):
        """Return True when dst_role is currently a member of src_role."""
        # Builds the array of all role names dst_role belongs to.
        query = ("SELECT ARRAY(SELECT b.rolname FROM "
                 "pg_catalog.pg_auth_members m "
                 "JOIN pg_catalog.pg_roles b ON (m.roleid = b.oid) "
                 "WHERE m.member = r.oid) "
                 "FROM pg_catalog.pg_roles r "
                 "WHERE r.rolname = %(dst_role)s")

        res = exec_sql(self, query, query_params={'dst_role': dst_role}, add_to_executed=False)
        membership = []
        if res:
            # First column of the first row holds the membership array.
            membership = res[0][0]

        if not membership:
            return False

        if src_role in membership:
            return True

        return False

    def __check_roles_exist(self):
        """Fail on (or warn about) non-existent roles and drop them from the lists."""
        existent_groups = self.__roles_exist(self.groups)
        existent_roles = self.__roles_exist(self.target_roles)

        for group in self.groups:
            if group not in existent_groups:
                if self.fail_on_role:
                    self.module.fail_json(msg="Role %s does not exist" % group)
                else:
                    self.module.warn("Role %s does not exist, pass" % group)
                    self.non_existent_roles.append(group)

        for role in self.target_roles:
            if role not in existent_roles:
                if self.fail_on_role:
                    self.module.fail_json(msg="Role %s does not exist" % role)
                else:
                    self.module.warn("Role %s does not exist, pass" % role)

                # A missing target role that shares its name with a group is
                # reported separately instead of being queued for skipping.
                if role not in self.groups:
                    self.non_existent_roles.append(role)

                else:
                    if self.fail_on_role:
                        self.module.exit_json(msg="Role role '%s' is a member of role '%s'" % (role, role))
                    else:
                        self.module.warn("Role role '%s' is a member of role '%s', pass" % (role, role))

        # Update role lists, excluding non existent roles:
        self.groups = [g for g in self.groups if g not in self.non_existent_roles]

        self.target_roles = [r for r in self.target_roles if r not in self.non_existent_roles]

    def __roles_exist(self, roles):
        """Return the subset of *roles* that exist on the server."""
        tmp = ["'" + x + "'" for x in roles]
        # NOTE(review): role names are interpolated directly into the IN (...)
        # list; looks injection-prone if names can come from untrusted input
        # -- confirm callers sanitise them.
        query = "SELECT rolname FROM pg_roles WHERE rolname IN (%s)" % ','.join(tmp)
        return [x[0] for x in exec_sql(self, query, add_to_executed=False)]
|
|
@ -1,220 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
#
|
|
||||||
# Copyright: (c) 2016, Jorge Rodriguez <jorge.rodriguez@tiriel.eu>
|
|
||||||
# Copyright: (c) 2018, John Imison <john+github@imison.net>
|
|
||||||
#
|
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
from ansible.module_utils._text import to_native
|
|
||||||
from ansible.module_utils.basic import missing_required_lib
|
|
||||||
from ansible.module_utils.six.moves.urllib import parse as urllib_parse
|
|
||||||
from mimetypes import MimeTypes
|
|
||||||
|
|
||||||
import os
|
|
||||||
import json
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
PIKA_IMP_ERR = None
|
|
||||||
try:
|
|
||||||
import pika
|
|
||||||
import pika.exceptions
|
|
||||||
from pika import spec
|
|
||||||
HAS_PIKA = True
|
|
||||||
except ImportError:
|
|
||||||
PIKA_IMP_ERR = traceback.format_exc()
|
|
||||||
HAS_PIKA = False
|
|
||||||
|
|
||||||
|
|
||||||
def rabbitmq_argument_spec():
    """Return the argument-spec entries shared by RabbitMQ HTTP-API modules.

    Covers management-API credentials, endpoint, TLS material and vhost.
    """
    return {
        'login_user': dict(type='str', default='guest'),
        'login_password': dict(type='str', default='guest', no_log=True),
        'login_host': dict(type='str', default='localhost'),
        'login_port': dict(type='str', default='15672'),
        'login_protocol': dict(type='str', default='http', choices=['http', 'https']),
        'ca_cert': dict(type='path', aliases=['cacert']),
        'client_cert': dict(type='path', aliases=['cert']),
        'client_key': dict(type='path', aliases=['key']),
        'vhost': dict(type='str', default='/'),
    }
|
|
||||||
|
|
||||||
|
|
||||||
# notification/rabbitmq_basic_publish.py
|
|
||||||
class RabbitClient():
    """Thin wrapper around a blocking pika connection to RabbitMQ.

    On construction it validates the module parameters, builds (or accepts)
    an AMQP/AMQPS URL, opens the connection and a channel, and then offers
    basic message publishing.
    """

    def __init__(self, module):
        self.module = module
        self.params = module.params
        self.check_required_library()
        self.check_host_params()
        # NOTE(review): keys such as 'headers', 'cafile', 'certfile' and
        # 'keyfile' are not part of rabbitmq_argument_spec() below -- they
        # are presumably supplied by the calling module's own spec; confirm.
        self.url = self.params['url']
        self.proto = self.params['proto']
        self.username = self.params['username']
        self.password = self.params['password']
        self.host = self.params['host']
        self.port = self.params['port']
        self.vhost = self.params['vhost']
        self.queue = self.params['queue']
        self.headers = self.params['headers']
        self.cafile = self.params['cafile']
        self.certfile = self.params['certfile']
        self.keyfile = self.params['keyfile']

        # A host implies the URL must be assembled from the discrete parts.
        if self.host is not None:
            self.build_url()

        # TLS material is appended to the URL as pika ssl_options.
        if self.cafile is not None:
            self.append_ssl_certs()

        self.connect_to_rabbitmq()

    def check_required_library(self):
        """Fail the module when the pika client library is not importable."""
        if not HAS_PIKA:
            self.module.fail_json(msg=missing_required_lib("pika"), exception=PIKA_IMP_ERR)

    def check_host_params(self):
        """Validate that either url OR the discrete connection parts were given."""
        # Fail if url is specified and other conflicting parameters have been specified
        if self.params['url'] is not None and any(self.params[k] is not None for k in ['proto', 'host', 'port', 'password', 'username', 'vhost']):
            self.module.fail_json(msg="url and proto, host, port, vhost, username or password cannot be specified at the same time.")

        # Fail if url not specified and there is a missing parameter to build the url
        if self.params['url'] is None and any(self.params[k] is None for k in ['proto', 'host', 'port', 'password', 'username', 'vhost']):
            self.module.fail_json(msg="Connection parameters must be passed via url, or, proto, host, port, vhost, username or password.")

    def append_ssl_certs(self):
        """Append ca/cert/key file paths to the URL as pika ssl_options."""
        ssl_options = {}
        if self.cafile:
            ssl_options['cafile'] = self.cafile
        if self.certfile:
            ssl_options['certfile'] = self.certfile
        if self.keyfile:
            ssl_options['keyfile'] = self.keyfile

        # pika.URLParameters understands ssl_options as a URL-encoded JSON
        # query argument.
        self.url = self.url + '?ssl_options=' + urllib_parse.quote(json.dumps(ssl_options))

    @staticmethod
    def rabbitmq_argument_spec():
        """Argument-spec fragment for modules connecting over AMQP/AMQPS."""
        return dict(
            url=dict(type='str'),
            proto=dict(type='str', choices=['amqp', 'amqps']),
            host=dict(type='str'),
            port=dict(type='int'),
            username=dict(type='str'),
            password=dict(type='str', no_log=True),
            vhost=dict(type='str'),
            queue=dict(type='str')
        )

    ''' Consider some file size limits here '''
    def _read_file(self, path):
        """Return the raw bytes of *path*; fail the module on I/O errors."""
        try:
            with open(path, "rb") as file_handle:
                return file_handle.read()
        except IOError as e:
            self.module.fail_json(msg="Unable to open file %s: %s" % (path, to_native(e)))

    @staticmethod
    def _check_file_mime_type(path):
        """Return the (type, encoding) MIME guess for *path*."""
        mime = MimeTypes()
        return mime.guess_type(path)

    def build_url(self):
        """Assemble the AMQP(S) URL from the discrete connection parameters."""
        self.url = '{0}://{1}:{2}@{3}:{4}/{5}'.format(self.proto,
                                                      self.username,
                                                      self.password,
                                                      self.host,
                                                      self.port,
                                                      self.vhost)

    def connect_to_rabbitmq(self):
        """
        Function to connect to rabbitmq using username and password
        """
        try:
            parameters = pika.URLParameters(self.url)
        except Exception as e:
            self.module.fail_json(msg="URL malformed: %s" % to_native(e))

        try:
            self.connection = pika.BlockingConnection(parameters)
        except Exception as e:
            self.module.fail_json(msg="Connection issue: %s" % to_native(e))

        try:
            self.conn_channel = self.connection.channel()
        except pika.exceptions.AMQPChannelError as e:
            # A channel-level failure leaves the connection open; close it
            # before failing.
            self.close_connection()
            self.module.fail_json(msg="Channel issue: %s" % to_native(e))

    def close_connection(self):
        """Close the AMQP connection, ignoring connection-level errors."""
        try:
            self.connection.close()
        except pika.exceptions.AMQPConnectionError:
            pass

    def basic_publish(self):
        """Declare the queue (when needed) and publish body/src to it.

        Returns True on confirmed delivery, False when the message was
        unroutable.
        """
        self.content_type = self.params.get("content_type")

        # NOTE(review): if neither 'body' nor 'src' is provided, 'args' is
        # never bound and the args[...] accesses below raise
        # UnboundLocalError; presumably the calling module enforces one of
        # them -- confirm.
        if self.params.get("body") is not None:
            args = dict(
                body=self.params.get("body"),
                exchange=self.params.get("exchange"),
                routing_key=self.params.get("routing_key"),
                properties=pika.BasicProperties(content_type=self.content_type, delivery_mode=1, headers=self.headers))

        # If src (file) is defined and content_type is left as default, do a mime lookup on the file
        if self.params.get("src") is not None and self.content_type == 'text/plain':
            self.content_type = RabbitClient._check_file_mime_type(self.params.get("src"))[0]
            self.headers.update(
                filename=os.path.basename(self.params.get("src"))
            )

            args = dict(
                body=self._read_file(self.params.get("src")),
                exchange=self.params.get("exchange"),
                routing_key=self.params.get("routing_key"),
                properties=pika.BasicProperties(content_type=self.content_type,
                                                delivery_mode=1,
                                                headers=self.headers
                                                ))
        elif self.params.get("src") is not None:
            args = dict(
                body=self._read_file(self.params.get("src")),
                exchange=self.params.get("exchange"),
                routing_key=self.params.get("routing_key"),
                properties=pika.BasicProperties(content_type=self.content_type,
                                                delivery_mode=1,
                                                headers=self.headers
                                                ))

        try:
            # If queue is not defined, RabbitMQ will return the queue name of the automatically generated queue.
            if self.queue is None:
                result = self.conn_channel.queue_declare(durable=self.params.get("durable"),
                                                         exclusive=self.params.get("exclusive"),
                                                         auto_delete=self.params.get("auto_delete"))
                self.conn_channel.confirm_delivery()
                self.queue = result.method.queue
            else:
                self.conn_channel.queue_declare(queue=self.queue,
                                                durable=self.params.get("durable"),
                                                exclusive=self.params.get("exclusive"),
                                                auto_delete=self.params.get("auto_delete"))
                self.conn_channel.confirm_delivery()
        except Exception as e:
            self.module.fail_json(msg="Queue declare issue: %s" % to_native(e))

        # https://github.com/ansible/ansible/blob/devel/lib/ansible/module_utils/cloudstack.py#L150
        if args['routing_key'] is None:
            args['routing_key'] = self.queue

        if args['exchange'] is None:
            args['exchange'] = ''

        try:
            self.conn_channel.basic_publish(**args)
            return True
        except pika.exceptions.UnroutableError:
            return False
|
|
Loading…
Reference in New Issue