mirror of https://github.com/ansible/ansible.git

Adding new yaml parsing classes

parent ad9ab8e033
commit 8ab0749217
@@ -0,0 +1,7 @@
from yaml import load
from parsing.yaml.loader import AnsibleLoader

def safe_load(stream):
    ''' implements yaml.safe_load(), except using our custom loader class '''
    return load(stream, AnsibleLoader)
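A minimal usage sketch of the wrapper above, assuming this file is the package's __init__.py so that safe_load is importable from parsing.yaml; the YAML text and key names are illustrative:

# Hedged example: assumes the safe_load() above lives in parsing/yaml/__init__.py
# and that the parsing package is on the import path.
from parsing.yaml import safe_load

data = safe_load("foo: bar\nbaz: qux\n")

# the custom loader returns an AnsibleMapping, so position metadata is attached
print(data)                  # a dict-like AnsibleMapping with the parsed keys
print(data._line_number)     # approximate line where the top-level mapping starts
print(data._column_number)   # approximate column where the top-level mapping starts
print(data._data_source)     # name of the stream, e.g. '<unicode string>'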
@@ -0,0 +1,28 @@
from yaml.composer import Composer
from yaml.nodes import MappingNode

class AnsibleComposer(Composer):
    def __init__(self):
        self.__mapping_starts = []
        super(AnsibleComposer, self).__init__()

    def compose_node(self, parent, index):
        # the line number where the previous token has ended (plus empty lines)
        node = Composer.compose_node(self, parent, index)
        if isinstance(node, MappingNode):
            node.__datasource__ = self.name
            try:
                (cur_line, cur_column) = self.__mapping_starts.pop()
            except IndexError:
                cur_line = None
                cur_column = None
            node.__line__ = cur_line
            node.__column__ = cur_column
        return node

    def compose_mapping_node(self, anchor):
        # the column here will point at the position in the file immediately
        # after the first key is found, which could be a space or a newline.
        # We could back this up to find the beginning of the key, but this
        # should be good enough to determine the error location.
        self.__mapping_starts.append((self.line + 1, self.column + 1))
        return Composer.compose_mapping_node(self, anchor)
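To see what the composer attaches, the document can be composed to a node without constructing Python objects. A short sketch, assuming the AnsibleLoader defined later in this commit and an illustrative YAML string:

# Illustrative only: inspect the raw MappingNode that AnsibleComposer produces,
# using the AnsibleLoader class defined further down in this commit.
from parsing.yaml.loader import AnsibleLoader

loader = AnsibleLoader("foo:\n  bar: baz\n")
node = loader.get_single_node()    # Composer API: compose without constructing

print(type(node).__name__)         # MappingNode
print(node.__line__)               # position pushed by compose_mapping_node
print(node.__column__)
print(node.__datasource__)         # stream name recorded from the Reader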
@@ -0,0 +1,28 @@
from yaml.constructor import Constructor
from parsing.yaml.objects import AnsibleMapping

class AnsibleConstructor(Constructor):
    def construct_yaml_map(self, node):
        data = AnsibleMapping()
        yield data
        value = self.construct_mapping(node)
        data.update(value)
        data._line_number = value._line_number
        data._column_number = value._column_number
        data._data_source = value._data_source

    def construct_mapping(self, node, deep=False):
        ret = AnsibleMapping(super(Constructor, self).construct_mapping(node, deep))
        ret._line_number = node.__line__
        ret._column_number = node.__column__
        ret._data_source = node.__datasource__
        return ret

AnsibleConstructor.add_constructor(
    u'tag:yaml.org,2002:map',
    AnsibleConstructor.construct_yaml_map)

AnsibleConstructor.add_constructor(
    u'tag:yaml.org,2002:python/dict',
    AnsibleConstructor.construct_yaml_map)
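Because the plain map tag is routed through construct_yaml_map, nested mappings also come back as AnsibleMapping objects carrying their own metadata. A hedged sketch with illustrative keys:

# Illustrative: every mapping in the document, not just the top-level one,
# is constructed as an AnsibleMapping with its own position metadata.
from yaml import load
from parsing.yaml.loader import AnsibleLoader
from parsing.yaml.objects import AnsibleMapping

data = load("outer:\n  inner: value\n", AnsibleLoader)

print(isinstance(data, AnsibleMapping))           # True
print(isinstance(data['outer'], AnsibleMapping))  # True
print(data['outer']._line_number)                 # position of the nested mapping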
@@ -0,0 +1,17 @@
from yaml.reader import Reader
from yaml.scanner import Scanner
from yaml.parser import Parser
from yaml.resolver import Resolver

from parsing.yaml.composer import AnsibleComposer
from parsing.yaml.constructor import AnsibleConstructor

class AnsibleLoader(Reader, Scanner, Parser, AnsibleComposer, AnsibleConstructor, Resolver):
    def __init__(self, stream):
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        AnsibleComposer.__init__(self)
        AnsibleConstructor.__init__(self)
        Resolver.__init__(self)
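The Reader records the stream's name, so loading from an open file tags each mapping with the file path. A sketch with a hypothetical file name, assuming the file's top level is a mapping:

# 'example_play.yml' is a hypothetical file; _data_source picks up the
# Reader's stream name, which is the path when loading from an open file.
from yaml import load
from parsing.yaml.loader import AnsibleLoader

with open('example_play.yml') as f:
    data = load(f, AnsibleLoader)

print(data._data_source)    # 'example_play.yml'
print(data._line_number)    # approximate line of the top-level mapping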
@@ -0,0 +1,14 @@
class AnsibleBaseYAMLObject(object):
    '''
    the base class used to sub-class python built-in objects
    so that we can add attributes to them during yaml parsing
    '''
    _data_source = None
    _line_number = None
    _column_number = None

class AnsibleMapping(AnsibleBaseYAMLObject, dict):
    ''' sub class for dictionaries '''
    pass
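A minimal illustration of why the thin subclass exists: a plain dict cannot take arbitrary attributes, while AnsibleMapping can, which is what lets the constructor attach position metadata during parsing:

# Illustrative comparison only; assumes the parsing package is importable.
from parsing.yaml.objects import AnsibleMapping

m = AnsibleMapping(foo='bar')
m._line_number = 42            # works: the subclass gives instances a __dict__
print(m['foo'], m._line_number)

d = dict(foo='bar')
try:
    d._line_number = 42        # fails: built-in dict rejects new attributes
except AttributeError as err:
    print(err)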