- Added source transformation framework for non-2.4 parser implementations
author     Jason Kirtland <jek@discorporate.us>
           Sat, 19 Jan 2008 23:11:47 +0000 (23:11 +0000)
committer  Jason Kirtland <jek@discorporate.us>
           Sat, 19 Jan 2008 23:11:47 +0000 (23:11 +0000)
- test/clone.py can create and update (transformed) copies of the test suite
- Added Python 2.4 decorator -> 2.3 source transform

test/clone.py [new file with mode: 0644]
test/testlib/filters.py [new file with mode: 0644]

diff --git a/test/clone.py b/test/clone.py
new file mode 100644 (file)
index 0000000..f56ab8c
--- /dev/null
@@ -0,0 +1,176 @@
+# only tested with cpython!
+import optparse, os, shutil, sys
+from os import path
+from testlib import filters
+
+__doc__ = """
+Creates and maintains a 'clone' of the test suite, optionally transforming
+the source code through a filter.  The primary purpose of this utility is
+to allow the tests to run on Python VMs that do not implement a parser that
+groks 2.4-style @decorators.
+
+Creating a clone:
+
+  Create a new, exact clone of the suite:
+  $ python test/clone.py -c myclone
+
+  Create a new clone using the 2.3 filter:
+  $ python test/clone.py -c --filter=py23 myclone
+
+After the clone is set up, changes in the master can be pulled into the clone
+with the -u or --update switch.  If the clone was created with a filter, it
+will be applied automatically when updating.
+
+  Update the clone:
+  $ python test/clone.py -u myclone
+
+The updating algorithm is very simple: if the version in test/ is newer than
+the one in your clone, the clone version is overwritten.
+"""
+
+options = None
+clone, clone_path = None, None
+filter = lambda x: x[:]
+
+def optparser():
+    parser = optparse.OptionParser(
+        usage=('usage: %prog [options] CLONE-NAME\n' + __doc__).rstrip())
+    parser.add_option('-n', '--dry-run', dest='dryrun',
+                      action='store_true',
+                      help=('Do not actually change any files; '
+                            'just print what would happen.'))
+    parser.add_option('-u', '--update', dest='update', action='store_true',
+                      help='Update an existing clone.')
+    parser.add_option('-c', '--create', dest='create', action='store_true',
+                      help='Create a new clone.')
+    parser.add_option('--filter', dest='filter',
+                      help='Run source code through a filter.')
+    parser.add_option('-l', '--filter-list', dest='filter_list',
+                      action='store_true',
+                      help='Show available filters.')
+    parser.add_option('-f', '--force', dest='force', action='store_true',
+                      help='Overwrite clone files even if unchanged.')
+    parser.add_option('-q', '--quiet', dest='quiet', action='store_true',
+                      help='Run quietly.')
+    parser.set_defaults(update=False, create=False,
+                        dryrun=False, filter_list=False,
+                        force=False, quiet=False)
+    return parser
+
+def config():
+    global clone, clone_path, options, filter
+
+    parser = optparser()
+    (options, args) = parser.parse_args()
+
+    if options.filter_list:
+        if options.quiet:
+            print '\n'.join(filters.__all__)
+        else:
+            print 'Available filters:'
+            for name in filters.__all__:
+                print '\t%s' % name
+        sys.exit(0)
+
+    if not options.update and not options.create:
+        parser.error('One of -u or -c is required.')
+
+    if len(args) != 1:
+        parser.error('A clone name is required.')
+
+    clone = args[0]
+    clone_path = path.abspath(clone)
+
+    if options.update and not path.exists(clone_path):
+        parser.error(
+            'Clone %s does not exist; create it with --create first.' % clone)
+    if options.create and path.exists(clone_path):
+        parser.error('Clone %s already exists.' % clone)
+
+    if options.filter:
+        if options.filter not in filters.__all__:
+            parser.error(('Filter "%s" unknown; use --filter-list to see '
+                          'available filters.') % options.filter)
+        filter = getattr(filters, options.filter)
+
+def setup():
+    global filter
+
+    if options.create:
+        if not options.quiet:
+            print "mkdir %s" % clone_path
+        if not options.dryrun:
+            os.mkdir(clone_path)
+
+        if options.filter and not options.dryrun:
+            if not options.quiet:
+                print 'storing filter "%s" in %s/.filter' % (
+                    options.filter, clone)
+            stash = open(path.join(clone_path, '.filter'), 'w')
+            stash.write(options.filter)
+            stash.close()
+    else:
+        stash_file = path.join(clone_path, '.filter')
+        if path.exists(stash_file):
+            stash = open(stash_file)
+            stashed = stash.read().strip()
+            stash.close()
+            if options.filter:
+                if (options.filter != stashed and stashed in filters.__all__ and
+                    not options.quiet):
+                    print (('Warning: --filter=%s overrides %s specified in '
+                            '%s/.filter') % (options.filter, stashed, clone))
+            else:
+                if stashed not in filters.__all__:
+                    sys.stderr.write(
+                        'Filter "%s" in %s/.filter is not valid, aborting.' %
+                        (stashed, clone))
+                    sys.exit(-1)
+            filter = getattr(filters, stashed)
+
+def sync():
+    source_path, _ = path.split(path.abspath(__file__))
+
+    ls = lambda root: [fn
+                       for fn in os.listdir(root)
+                       if (fn.endswith('.py') and not fn.startswith('.'))]
+
+    def walker(x, dirname, fnames):
+        if '.svn' in fnames:
+            fnames.remove('.svn')
+
+        rel_path = dirname[len(source_path) + 1:]
+        dest_path = path.join(clone_path, rel_path)
+
+        if not path.exists(dest_path):
+            if not options.quiet:
+                print "mkdir %s/%s" % (clone, rel_path)
+            if not options.dryrun:
+                os.mkdir(dest_path)
+
+        for filename in ls(dirname):
+            source_file = path.join(source_path, rel_path, filename)
+            dest_file = path.join(dest_path, filename)
+
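+            # copy if forced, if missing from the clone, or if the master's
+            # change time (os.stat()[-1], i.e. st_ctime) is newer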
+            if (options.force or
+                (not path.exists(dest_file) or
+                 os.stat(source_file)[-1] > os.stat(dest_file)[-1])):
+                if not options.quiet:
+                    print "syncing %s" % path.join(rel_path, filename)
+
+                raw = open(source_file)
+                filtered = filter(raw.readlines())
+                raw.close()
+
+                if not options.dryrun:
+                    synced = open(dest_file, 'w')
+                    synced.writelines(filtered)
+                    synced.close()
+
+    os.path.walk(source_path, walker, None)
+
+if __name__ == '__main__':
+    config()
+    setup()
+    sync()
diff --git a/test/testlib/filters.py b/test/testlib/filters.py
new file mode 100644 (file)
index 0000000..eb7eff2
--- /dev/null
@@ -0,0 +1,242 @@
+"""A collection of Python source transformers.
+
+Supports the 'clone' utility, providing source code transforms that let the
+test suite run on pre-2.4 parser implementations.
+
+Includes::
+
+  py23
+     Converts 2.4-level source code into 2.3-parsable source.
+     Currently only rewrites @decorators, but generator transformations
+     are possible.
+  py23_decorators
+     py23 is currently an alias for py23_decorators.
+"""
+
+import sys
+from StringIO import StringIO
+from tokenize import *
+
+__all__ = ['py23_decorators', 'py23']
+
+
+def py23_decorators(lines):
+    """Translates @decorators in source lines to 2.3 syntax."""
+
+    tokens = peekable(generate_tokens(iter(lines).next))
+    text = untokenize(backport_decorators(tokens))
+    return [x + '\n' for x in text.split('\n')]
+
+py23 = py23_decorators
+
+
+def backport_decorators(stream):
+    """Restates @decorators in 2.3 syntax
+
+    Operates on token streams. Converts::
+
+      @foo
+      @bar(1, 2)
+      def quux():
+          pass
+
+    into::
+
+      def quux():
+          pass
+      quux = bar(1, 2)(quux)
+      quux = foo(quux)
+
+    Fails on decorated one-liners::
+
+      @decorator
+      def fn(): pass
+    """
+
+    if not hasattr(stream, 'peek'):
+        stream = peekable(iter(stream))
+
+    stack = [_DecoratorState('')]
+    emit = []
+    for ttype, tok, _, _, _ in stream:
+        current = stack[-1]
+        if ttype == INDENT:
+            current = _DecoratorState(tok)
+            stack.append(current)
+        elif ttype == DEDENT:
+            previous = stack.pop()
+            assert not previous.decorations
+            current = stack[-1]
+            if current.decorations:
+                ws = pop_trailing_whitespace(emit)
+
+                emit.append((ttype, tok))
+                for decorator, misc in reversed(current.decorations):
+                    if not decorator or decorator[0][1] != '@':
+                        emit.extend(decorator)
+                    else:
+                        emit.extend(
+                            [(NAME, current.fn_name), (OP, '=')] +
+                            decorator[1:] +
+                            [(OP, '('), (NAME, current.fn_name), (OP, ')')])
+                    emit.extend(misc)
+                current.decorations = []
+                emit.extend(ws)
+                continue
+        elif ttype == OP and tok == '@':
+            current.in_decorator = True
+            decoration = [(ttype, tok)]
+            current.decorations.append((decoration, []))
+            current.consume_identifier(stream)
+            if stream.peek()[1] == '(':
+                current.consume_parened(stream)
+            continue
+        elif ttype == NAME and tok == 'def':
+            current.in_decorator = False
+            current.fn_name = stream.peek()[1]
+        elif current.in_decorator:
+            current.append_misc((ttype, tok))
+            continue
+
+        emit.append((ttype, tok))
+    return emit
+
+class _DecoratorState(object):
+    """Holds state for restating decorators as function calls."""
+
+    in_decorator = False
+    fn_name = None
+    def __init__(self, indent):
+        self.indent = indent
+        self.decorations = []
+    def append_misc(self, token):
+        if not self.decorations:
+            self.decorations.append(([], []))
+        self.decorations[-1][1].append(token)
+    def consume_identifier(self, stream):
+        while True:
+            typ, value = stream.peek()[:2]
+            if not (typ == NAME or (typ == OP and value == '.')):
+                break
+            self.decorations[-1][0].append(stream.next()[:2])
+    def consume_parened(self, stream):
+        """Consume a (paren) sequence from a token seq starting with ("""
+        depth, offsets = 0, {'(':1, ')':-1}
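+        # track paren nesting: '(' opens, ')' closes; stop once balanced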
+        while True:
+            typ, value = stream.next()[:2]
+            if typ == OP:
+                depth += offsets.get(value, 0)
+            self.decorations[-1][0].append((typ, value))
+            if depth == 0:
+                break
+
+def pop_trailing_whitespace(tokens):
+    """Removes trailing whitespace tokens from a token list."""
+
+    popped = []
+    for token in reversed(list(tokens)):
+        if token[0] not in (NL, COMMENT):
+            break
+        popped.append(tokens.pop())
+    return popped
+
+def untokenize(iterable):
+    """Turns a stream of tokens into a Python source str.
+
+    A PEP-8-ish variant of Python 2.5+'s tokenize.untokenize.  Produces output
+    that's not perfect, but is at least readable.  The stdlib version is
+    basically unusable.
+    """
+
+    if not hasattr(iterable, 'peek'):
+        iterable = peekable(iter(iterable))
+
+    startline = False
+    indents = []
+    toks = []
+    toks_append = toks.append
+
+    # this is pretty roughly hacked.  i think it could get very close to
+    # perfect by rewriting to operate over a sliding window of
+    # (prev, current, next) token sets + making some grouping macros to
+    # include all the tokens and operators this omits.
+    for tok in iterable:
+        toknum, tokval = tok[:2]
+
+        try:
+            next_num, next_val = iterable.peek()[:2]
+        except StopIteration:
+            next_num, next_val = None, None
+
+        if toknum == NAME:
+            if tokval == 'in':
+                tokval += ' '
+            elif next_num == OP:
+                if next_val not in ('(', ')', '[', ']', '{', '}',
+                                      ':', '.', ',',):
+                    tokval += ' '
+            elif next_num != NEWLINE:
+                tokval += ' '
+        elif toknum == OP:
+            if tokval in ('(', '@', '.', '[', '{', '*', '**'):
+                pass
+            elif tokval in ('%', ':') and next_num not in (NEWLINE, ):
+                tokval += ' '
+            elif next_num in (NAME, COMMENT,
+                              NUMBER, STRING):
+                tokval += ' '
+            elif (tokval in (')', ']', '}') and next_num == OP and
+                  '=' in next_val):
+                tokval += ' '
+            elif tokval == ',' or '=' in tokval:
+                tokval += ' '
+        elif toknum in (NUMBER, STRING):
+            if next_num == OP and next_val not in (')', ']', '}', ',', ':'):
+                tokval += ' '
+            elif next_num == NAME:
+                tokval += ' '
+
+        # would be nice to indent continued lines...
+        if toknum == INDENT:
+            indents.append(tokval)
+            continue
+        elif toknum == DEDENT:
+            indents.pop()
+            continue
+        elif toknum in (NEWLINE, COMMENT, NL):
+            startline = True
+        elif startline and indents:
+            toks_append(indents[-1])
+            startline = False
+        toks_append(tokval)
+    return ''.join(toks)
+
+
+class peekable(object):
+    """A iterator wrapper that allows peek()ing at the next value."""
+
+    def __init__(self, iterator):
+        self.iterator = iterator
+        self.buffer = []
+    def next(self):
+        if self.buffer:
+            return self.buffer.pop(0)
+        return self.iterator.next()
+    def peek(self):
+        if self.buffer:
+            return self.buffer[0]
+        x = self.iterator.next()
+        self.buffer.append(x)
+        return x
+    def __iter__(self):
+        return self
+
+if __name__ == '__main__':
+    # Runnable as a script: converts the named file (default: this file) to 2.3.
+    input = open(len(sys.argv) == 2 and sys.argv[1] or __file__)
+
+    tokens = generate_tokens(input.readline)
+    back = backport_decorators(tokens)
+    print untokenize(back)
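
For illustration, a minimal sketch of the py23 filter applied to a
single-decorator variant of the docstring's own example (assumes the new
testlib.filters is importable; CPython only, per the note in clone.py)::

  from testlib import filters

  source = [
      '@foo\n',
      'def quux():\n',
      '    pass\n',
  ]
  print ''.join(filters.py23(source))

  # prints (modulo exact spacing):
  #
  #   def quux():
  #       pass
  #   quux = foo(quux)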