parent: split out minify.py and add stub where master can install it.

This needs a cleaner mechanism to install it; at least this one is
documented.
pull/262/head
David Wilson 6 years ago
parent 325d13538f
commit 9492dbc4d7

@ -555,6 +555,7 @@ class Importer(object):
'jail', 'jail',
'lxc', 'lxc',
'master', 'master',
'minify',
'parent', 'parent',
'select', 'select',
'service', 'service',

@ -52,7 +52,9 @@ if not hasattr(pkgutil, 'find_loader'):
# been kept intentionally 2.3 compatible so we can reuse it. # been kept intentionally 2.3 compatible so we can reuse it.
from mitogen.compat import pkgutil from mitogen.compat import pkgutil
import mitogen
import mitogen.core import mitogen.core
import mitogen.minify
import mitogen.parent import mitogen.parent
from mitogen.core import LOG from mitogen.core import LOG
@ -79,6 +81,19 @@ def get_child_modules(path):
return [name for _, name, _ in it] return [name for _, name, _ in it]
def get_core_source():
"""
Master version of parent.get_core_source().
"""
source = inspect.getsource(mitogen.core)
return mitogen.minify.minimize_source(source)
if mitogen.is_master:
# TODO: find a less surprising way of installing this.
mitogen.parent.get_core_source = get_core_source
LOAD_CONST = dis.opname.index('LOAD_CONST') LOAD_CONST = dis.opname.index('LOAD_CONST')
IMPORT_NAME = dis.opname.index('IMPORT_NAME') IMPORT_NAME = dis.opname.index('IMPORT_NAME')

@ -0,0 +1,134 @@
# Copyright 2017, Alex Willmer
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import sys
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
if sys.version_info < (2, 7, 11):
from mitogen.compat import tokenize
else:
import tokenize
try:
from functools import lru_cache
except ImportError:
from mitogen.compat.functools import lru_cache
@lru_cache()
def minimize_source(source):
    """Return `source` with most comments and docstrings removed.

    The token stream produced by `tokenize` is passed through three filters
    in order -- comment stripping, docstring removal, then re-indentation --
    and reassembled into source text.  Results are memoized, since the same
    module source is minified repeatedly.
    """
    readline = BytesIO(source).readline
    stream = tokenize.generate_tokens(readline)
    filtered = strip_docstrings(strip_comments(stream))
    return tokenize.untokenize(reindent(filtered))
def strip_comments(tokens):
    """Drop COMMENT tokens from a `tokenize` stream.

    Comments on lines 1-2 are kept, to preserve any hashbang or coding
    declaration.  Newline token columns are rewritten so that trailing
    whitespace (left behind where a comment was removed) is dropped from
    all lines.
    """
    last_typ = None
    last_end_col = 0
    for typ, tok, start, end, line in tokens:
        srow, scol = start
        erow, ecol = end
        if typ in (tokenize.NL, tokenize.NEWLINE):
            # Snap the newline up against the previous token's end, or to
            # column zero on an entirely blank line.
            scol = 0 if last_typ in (tokenize.NL, tokenize.NEWLINE) else last_end_col
            ecol = scol + 1
        elif typ == tokenize.COMMENT and srow > 2:
            continue
        last_typ = typ
        last_end_col = ecol
        yield typ, tok, (srow, scol), (erow, ecol), line
def strip_docstrings(tokens):
    """Replace docstring tokens with NL tokens in a `tokenize` stream.
    Any STRING token not part of an expression is deemed a docstring.
    Indented docstrings are not yet recognised.
    """
    # Tokens buffered since the start of the current logical line; flushed
    # either as blank lines (docstring case) or verbatim (statement case).
    stack = []
    # Two-state machine: 'wait_string' scans the start of a logical line for
    # a potential docstring; 'wait_newline' passes an ordinary statement
    # through until its terminating NEWLINE.
    state = 'wait_string'
    for t in tokens:
        typ = t[0]
        if state == 'wait_string':
            if typ in (tokenize.NL, tokenize.COMMENT):
                yield t
            elif typ in (tokenize.DEDENT, tokenize.INDENT, tokenize.STRING):
                # Possibly (part of) a docstring; defer judgement until we
                # see what terminates the logical line.
                stack.append(t)
            elif typ == tokenize.NEWLINE:
                # Bare STRING(s) terminated by NEWLINE: a docstring.  Emit
                # one NL per covered source line so row numbers stay
                # consistent for untokenize().
                stack.append(t)
                start_line, end_line = stack[0][2][0], stack[-1][3][0]+1
                for i in range(start_line, end_line):
                    yield tokenize.NL, '\n', (i, 0), (i,1), '\n'
                # Re-emit buffered INDENT/DEDENT tokens so block structure
                # survives, repositioned after the removed docstring.
                # NOTE(review): relies on `i` leaking from the loop above,
                # i.e. assumes the range was non-empty -- TODO confirm.
                for t in stack:
                    if t[0] in (tokenize.DEDENT, tokenize.INDENT):
                        yield t[0], t[1], (i+1, t[2][1]), (i+1, t[3][1]), t[4]
                del stack[:]
            else:
                # Not a docstring after all (e.g. the STRING was part of an
                # expression); flush the buffer untouched.
                stack.append(t)
                for t in stack: yield t
                del stack[:]
                state = 'wait_newline'
        elif state == 'wait_newline':
            # Inside an ordinary statement: pass tokens through until the
            # logical line ends, then resume scanning for docstrings.
            if typ == tokenize.NEWLINE:
                state = 'wait_string'
            yield t
def reindent(tokens, indent=' '):
    """Replace existing indentation in a token stream, with `indent`.

    Each nesting level becomes one copy of `indent`, regardless of how wide
    the original indentation was; token columns are shifted to match.
    """
    saved_widths = []
    old_width = 0
    depth = 0
    for typ, tok, start, end, line in tokens:
        srow, scol = start
        erow, ecol = end
        if typ == tokenize.INDENT:
            # Entering a block: remember the outer width, adopt this one.
            saved_widths.append(old_width)
            old_width = len(tok)
            depth += 1
            tok = indent * depth
        elif typ == tokenize.DEDENT:
            # Leaving a block: restore the enclosing indentation width.
            old_width = saved_widths.pop()
            depth -= 1
        # Shift every token left/right by the difference between the old
        # and new indentation at this depth.
        scol = max(0, scol - old_width + depth)
        if srow == erow:
            ecol = scol + len(tok)
        yield typ, tok, (srow, scol), (erow, ecol), line

@ -52,21 +52,6 @@ import zlib
# Absolute imports for <2.5. # Absolute imports for <2.5.
select = __import__('select') select = __import__('select')
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
if sys.version_info < (2, 7, 11):
from mitogen.compat import tokenize
else:
import tokenize
try:
from functools import lru_cache
except ImportError:
from mitogen.compat.functools import lru_cache
import mitogen.core import mitogen.core
from mitogen.core import LOG from mitogen.core import LOG
from mitogen.core import IOLOG from mitogen.core import IOLOG
@ -82,101 +67,21 @@ def get_log_level():
return (LOG.level or logging.getLogger().level or logging.INFO) return (LOG.level or logging.getLogger().level or logging.INFO)
def is_immediate_child(msg, stream): def get_core_source():
"""
Handler policy that requires messages to arrive only from immediately
connected children.
"""
return msg.src_id == stream.remote_id
@lru_cache()
def minimize_source(source):
"""Remove most comments and docstrings from Python source code.
""" """
tokens = tokenize.generate_tokens(BytesIO(source).readline) In non-masters, simply fetch the cached mitogen.core source code via the
tokens = strip_comments(tokens) import mechanism. In masters, this function is replaced with a version that
tokens = strip_docstrings(tokens) performs minification directly.
tokens = reindent(tokens)
return tokenize.untokenize(tokens)
def strip_comments(tokens):
"""Drop comment tokens from a `tokenize` stream.
Comments on lines 1-2 are kept, to preserve hashbang and encoding.
Trailing whitespace is removed from all lines. Trailing whitespace is removed from all lines.
""" """
prev_typ = None return inspect.getsource(mitogen.core)
prev_end_col = 0
for typ, tok, (start_row, start_col), (end_row, end_col), line in tokens:
if typ in (tokenize.NL, tokenize.NEWLINE):
if prev_typ in (tokenize.NL, tokenize.NEWLINE):
start_col = 0
else:
start_col = prev_end_col
end_col = start_col + 1
elif typ == tokenize.COMMENT and start_row > 2:
continue
prev_typ = typ
prev_end_col = end_col
yield typ, tok, (start_row, start_col), (end_row, end_col), line
def strip_docstrings(tokens):
"""Replace docstring tokens with NL tokens in a `tokenize` stream.
Any STRING token not part of an expression is deemed a docstring. def is_immediate_child(msg, stream):
Indented docstrings are not yet recognised.
""" """
stack = [] Handler policy that requires messages to arrive only from immediately
state = 'wait_string' connected children.
for t in tokens:
typ = t[0]
if state == 'wait_string':
if typ in (tokenize.NL, tokenize.COMMENT):
yield t
elif typ in (tokenize.DEDENT, tokenize.INDENT, tokenize.STRING):
stack.append(t)
elif typ == tokenize.NEWLINE:
stack.append(t)
start_line, end_line = stack[0][2][0], stack[-1][3][0]+1
for i in range(start_line, end_line):
yield tokenize.NL, '\n', (i, 0), (i,1), '\n'
for t in stack:
if t[0] in (tokenize.DEDENT, tokenize.INDENT):
yield t[0], t[1], (i+1, t[2][1]), (i+1, t[3][1]), t[4]
del stack[:]
else:
stack.append(t)
for t in stack: yield t
del stack[:]
state = 'wait_newline'
elif state == 'wait_newline':
if typ == tokenize.NEWLINE:
state = 'wait_string'
yield t
def reindent(tokens, indent=' '):
"""Replace existing indentation in a token stream, with `indent`.
""" """
old_levels = [] return msg.src_id == stream.remote_id
old_level = 0
new_level = 0
for typ, tok, (start_row, start_col), (end_row, end_col), line in tokens:
if typ == tokenize.INDENT:
old_levels.append(old_level)
old_level = len(tok)
new_level += 1
tok = indent * new_level
elif typ == tokenize.DEDENT:
old_level = old_levels.pop()
new_level -= 1
start_col = max(0, start_col - old_level + new_level)
if start_row == end_row:
end_col = start_col + len(tok)
yield typ, tok, (start_row, start_col), (end_row, end_col), line
def flags(names): def flags(names):
@ -498,8 +403,7 @@ def stream_by_method_name(name):
@mitogen.core.takes_econtext @mitogen.core.takes_econtext
def _proxy_connect(name, method_name, kwargs, econtext): def _proxy_connect(name, method_name, kwargs, econtext):
upgrade_router(econtext)
mitogen.parent.upgrade_router(econtext)
try: try:
context = econtext.router._connect( context = econtext.router._connect(
klass=stream_by_method_name(method_name), klass=stream_by_method_name(method_name),
@ -921,11 +825,11 @@ class Stream(mitogen.core.Stream):
} }
def get_preamble(self): def get_preamble(self):
source = inspect.getsource(mitogen.core) source = get_core_source()
source += '\nExternalContext(%r).main()\n' % ( source += '\nExternalContext(%r).main()\n' % (
self.get_econtext_config(), self.get_econtext_config(),
) )
return zlib.compress(minimize_source(source), 9) return zlib.compress(source, 9)
create_child = staticmethod(create_child) create_child = staticmethod(create_child)
create_child_args = {} create_child_args = {}

@ -149,6 +149,7 @@ class Error(Exception):
""" """
Raised when an error occurs configuring a service or pool. Raised when an error occurs configuring a service or pool.
""" """
pass # cope with minimize_source() bug.
class Policy(object): class Policy(object):

@ -8,6 +8,7 @@ import zlib
import mitogen.fakessh import mitogen.fakessh
import mitogen.master import mitogen.master
import mitogen.minify
import mitogen.parent import mitogen.parent
import mitogen.service import mitogen.service
import mitogen.ssh import mitogen.ssh
@ -34,16 +35,16 @@ print(
) )
for mod in ( for mod in (
mitogen.master,
mitogen.parent, mitogen.parent,
mitogen.service,
mitogen.ssh, mitogen.ssh,
mitogen.sudo, mitogen.sudo,
mitogen.service,
mitogen.fakessh, mitogen.fakessh,
mitogen.master,
): ):
original = inspect.getsource(mod) original = inspect.getsource(mod)
original_size = len(original) original_size = len(original)
minimized = mitogen.parent.minimize_source(original) minimized = mitogen.minify.minimize_source(original)
minimized_size = len(minimized) minimized_size = len(minimized)
compressed = zlib.compress(minimized, 9) compressed = zlib.compress(minimized, 9)
compressed_size = len(compressed) compressed_size = len(compressed)

Loading…
Cancel
Save