http://git-wip-us.apache.org/repos/asf/impala/blob/49413d9c/shell/ext-py/sqlparse-0.1.19/tests/test_regressions.py
----------------------------------------------------------------------
diff --git a/shell/ext-py/sqlparse-0.1.19/tests/test_regressions.py b/shell/ext-py/sqlparse-0.1.19/tests/test_regressions.py
new file mode 100644
index 0000000..a64b400
--- /dev/null
+++ b/shell/ext-py/sqlparse-0.1.19/tests/test_regressions.py
@@ -0,0 +1,276 @@
+# -*- coding: utf-8 -*-
+
+import sys
+
+from tests.utils import TestCaseBase, load_file
+
+import sqlparse
+from sqlparse import sql
+from sqlparse import tokens as T
+
+
+class RegressionTests(TestCaseBase):
+
+    def test_issue9(self):
+        # make sure where doesn't consume parenthesis
+        p = sqlparse.parse('(where 1)')[0]
+        self.assert_(isinstance(p, sql.Statement))
+        self.assertEqual(len(p.tokens), 1)
+        self.assert_(isinstance(p.tokens[0], sql.Parenthesis))
+        prt = p.tokens[0]
+        self.assertEqual(len(prt.tokens), 3)
+        self.assertEqual(prt.tokens[0].ttype, T.Punctuation)
+        self.assertEqual(prt.tokens[-1].ttype, T.Punctuation)
+
+    def test_issue13(self):
+        parsed = sqlparse.parse(("select 'one';\n"
+                                 "select 'two\\'';\n"
+                                 "select 'three';"))
+        self.assertEqual(len(parsed), 3)
+        self.assertEqual(str(parsed[1]).strip(), "select 'two\\'';")
+
+    def test_issue26(self):
+        # parse stand-alone comments
+        p = sqlparse.parse('--hello')[0]
+        self.assertEqual(len(p.tokens), 1)
+        self.assert_(p.tokens[0].ttype is T.Comment.Single)
+        p = sqlparse.parse('-- hello')[0]
+        self.assertEqual(len(p.tokens), 1)
+        self.assert_(p.tokens[0].ttype is T.Comment.Single)
+        p = sqlparse.parse('--hello\n')[0]
+        self.assertEqual(len(p.tokens), 1)
+        self.assert_(p.tokens[0].ttype is T.Comment.Single)
+        p = sqlparse.parse('--')[0]
+        self.assertEqual(len(p.tokens), 1)
+        self.assert_(p.tokens[0].ttype is T.Comment.Single)
+        p = sqlparse.parse('--\n')[0]
+        self.assertEqual(len(p.tokens), 1)
+        self.assert_(p.tokens[0].ttype is T.Comment.Single)
+
+    def test_issue34(self):
+        t = sqlparse.parse("create")[0].token_first()
+        self.assertEqual(t.match(T.Keyword.DDL, "create"), True)
+        self.assertEqual(t.match(T.Keyword.DDL, "CREATE"), True)
+
+    def test_issue35(self):
+        # missing space before LIMIT
+        sql = sqlparse.format("select * from foo where bar = 1 limit 1",
+                              reindent=True)
+        self.ndiffAssertEqual(sql, "\n".join(["select *",
+                                              "from foo",
+                                              "where bar = 1 limit 1"]))
+
+    def test_issue38(self):
+        sql = sqlparse.format("SELECT foo; -- comment",
+                              strip_comments=True)
+        self.ndiffAssertEqual(sql, "SELECT foo;")
+        sql = sqlparse.format("/* foo */", strip_comments=True)
+        self.ndiffAssertEqual(sql, "")
+
+    def test_issue39(self):
+        p = sqlparse.parse('select user.id from user')[0]
+        self.assertEqual(len(p.tokens), 7)
+        idt = p.tokens[2]
+        self.assertEqual(idt.__class__, sql.Identifier)
+        self.assertEqual(len(idt.tokens), 3)
+        self.assertEqual(idt.tokens[0].match(T.Name, 'user'), True)
+        self.assertEqual(idt.tokens[1].match(T.Punctuation, '.'), True)
+        self.assertEqual(idt.tokens[2].match(T.Name, 'id'), True)
+
+    def test_issue40(self):
+        # make sure identifier lists in subselects are grouped
+        p = sqlparse.parse(('SELECT id, name FROM '
+                            '(SELECT id, name FROM bar) as foo'))[0]
+        self.assertEqual(len(p.tokens), 7)
+        self.assertEqual(p.tokens[2].__class__, sql.IdentifierList)
+        self.assertEqual(p.tokens[-1].__class__, sql.Identifier)
+        self.assertEqual(p.tokens[-1].get_name(), u'foo')
+        sp = p.tokens[-1].tokens[0]
+        self.assertEqual(sp.tokens[3].__class__, sql.IdentifierList)
+        # make sure that formatting works as expected
+        self.ndiffAssertEqual(
+            sqlparse.format(('SELECT id, name FROM '
+                             '(SELECT id, name FROM bar)'),
+                            reindent=True),
+            ('SELECT id,\n'
+             '       name\n'
+             'FROM\n'
+             '  (SELECT id,\n'
+             '          name\n'
+             '   FROM bar)'))
+        self.ndiffAssertEqual(
+            sqlparse.format(('SELECT id, name FROM '
+                             '(SELECT id, name FROM bar) as foo'),
+                            reindent=True),
+            ('SELECT id,\n'
+             '       name\n'
+             'FROM\n'
+             '  (SELECT id,\n'
+             '          name\n'
+             '   FROM bar) as foo'))
+
+
+def test_issue78():
+    # the bug author provided these nice examples, let's use them!
+    def _get_identifier(sql):
+        p = sqlparse.parse(sql)[0]
+        return p.tokens[2]
+    results = (('get_name', 'z'),
+               ('get_real_name', 'y'),
+               ('get_parent_name', 'x'),
+               ('get_alias', 'z'),
+               ('get_typecast', 'text'))
+    variants = (
+        'select x.y::text as z from foo',
+        'select x.y::text as "z" from foo',
+        'select x."y"::text as z from foo',
+        'select x."y"::text as "z" from foo',
+        'select "x".y::text as z from foo',
+        'select "x".y::text as "z" from foo',
+        'select "x"."y"::text as z from foo',
+        'select "x"."y"::text as "z" from foo',
+    )
+    for variant in variants:
+        i = _get_identifier(variant)
+        assert isinstance(i, sql.Identifier)
+        for func_name, result in results:
+            func = getattr(i, func_name)
+            assert func() == result
+
+
+def test_issue83():
+    sql = """
+CREATE OR REPLACE FUNCTION func_a(text)
+  RETURNS boolean  LANGUAGE plpgsql STRICT IMMUTABLE AS
+$_$
+BEGIN
+ ...
+END;
+$_$;
+
+CREATE OR REPLACE FUNCTION func_b(text)
+  RETURNS boolean  LANGUAGE plpgsql STRICT IMMUTABLE AS
+$_$
+BEGIN
+ ...
+END;
+$_$;
+
+ALTER TABLE..... ;"""
+    t = sqlparse.split(sql)
+    assert len(t) == 3
+
+
+def test_comment_encoding_when_reindent():
+    # There was a UnicodeEncodeError in the reindent filter that
+    # cast every comment followed by a keyword to str.
+    sql = u'select foo -- Comment containing Ümläuts\nfrom bar'
+    formatted = sqlparse.format(sql, reindent=True)
+    assert formatted == sql
+
+
+def test_parse_sql_with_binary():
+    # See https://github.com/andialbrecht/sqlparse/pull/88
+    digest = '\x82|\xcb\x0e\xea\x8aplL4\xa1h\x91\xf8N{'
+    sql = 'select * from foo where bar = \'%s\'' % digest
+    formatted = sqlparse.format(sql, reindent=True)
+    tformatted = 'select *\nfrom foo\nwhere bar = \'%s\'' % digest
+    if sys.version_info < (3,):
+        tformatted = tformatted.decode('unicode-escape')
+    assert formatted == tformatted
+
+
+def test_dont_alias_keywords():
+    # The _group_left_right function had a bug where the check for the
+    # left side wasn't handled correctly. In one case this resulted in
+    # a keyword turning into an identifier.
+    p = sqlparse.parse('FROM AS foo')[0]
+    assert len(p.tokens) == 5
+    assert p.tokens[0].ttype is T.Keyword
+    assert p.tokens[2].ttype is T.Keyword
+
+
+def test_format_accepts_encoding():  # issue20
+    sql = load_file('test_cp1251.sql', 'cp1251')
+    formatted = sqlparse.format(sql, reindent=True, encoding='cp1251')
+    if sys.version_info < (3,):
+        tformatted = u'insert into foo\nvalues (1); -- Песня про надежду\n'
+    else:
+        tformatted = 'insert into foo\nvalues (1); -- Песня про надежду\n'
+    assert formatted == tformatted
+
+
+def test_issue90():
+    sql = ('UPDATE "gallery_photo" SET "owner_id" = 4018, "deleted_at" = NULL,'
+           ' "width" = NULL, "height" = NULL, "rating_votes" = 0,'
+           ' "rating_score" = 0, "thumbnail_width" = NULL,'
+           ' "thumbnail_height" = NULL, "price" = 1, "description" = NULL')
+    formatted = sqlparse.format(sql, reindent=True)
+    tformatted = '\n'.join(['UPDATE "gallery_photo"',
+                            'SET "owner_id" = 4018,',
+                            '    "deleted_at" = NULL,',
+                            '    "width" = NULL,',
+                            '    "height" = NULL,',
+                            '    "rating_votes" = 0,',
+                            '    "rating_score" = 0,',
+                            '    "thumbnail_width" = NULL,',
+                            '    "thumbnail_height" = NULL,',
+                            '    "price" = 1,',
+                            '    "description" = NULL'])
+    assert formatted == tformatted
+
+
+def test_except_formatting():
+    sql = 'SELECT 1 FROM foo WHERE 2 = 3 EXCEPT SELECT 2 FROM bar WHERE 1 = 2'
+    formatted = sqlparse.format(sql, reindent=True)
+    tformatted = '\n'.join([
+        'SELECT 1',
+        'FROM foo',
+        'WHERE 2 = 3',
+        'EXCEPT',
+        'SELECT 2',
+        'FROM bar',
+        'WHERE 1 = 2'
+    ])
+    assert formatted == tformatted
+
+
+def test_null_with_as():
+    sql = 'SELECT NULL AS c1, NULL AS c2 FROM t1'
+    formatted = sqlparse.format(sql, reindent=True)
+    tformatted = '\n'.join([
+        'SELECT NULL AS c1,',
+        '       NULL AS c2',
+        'FROM t1'
+    ])
+    assert formatted == tformatted
+
+
+def test_issue193_splitting_function():
+    sql = """CREATE FUNCTION a(x VARCHAR(20)) RETURNS VARCHAR(20)
+BEGIN
+ DECLARE y VARCHAR(20);
+ RETURN x;
+END;
+SELECT * FROM a.b;"""
+    splitted = sqlparse.split(sql)
+    assert len(splitted) == 2
+
+def test_issue194_splitting_function():
+    sql = """CREATE FUNCTION a(x VARCHAR(20)) RETURNS VARCHAR(20)
+BEGIN
+ DECLARE y VARCHAR(20);
+ IF (1 = 1) THEN
+ SET x = y;
+ END IF;
+ RETURN x;
+END;
+SELECT * FROM a.b;"""
+    splitted = sqlparse.split(sql)
+    assert len(splitted) == 2
+
+
+def test_issue186_get_type():
+    sql = "-- comment\ninsert into foo"
+    p = sqlparse.parse(sql)[0]
+    assert p.get_type() == 'INSERT'

http://git-wip-us.apache.org/repos/asf/impala/blob/49413d9c/shell/ext-py/sqlparse-0.1.19/tests/test_split.py
----------------------------------------------------------------------
diff --git a/shell/ext-py/sqlparse-0.1.19/tests/test_split.py b/shell/ext-py/sqlparse-0.1.19/tests/test_split.py
new file mode 100644
index 0000000..54e8d04
--- /dev/null
+++ b/shell/ext-py/sqlparse-0.1.19/tests/test_split.py
@@ -0,0 +1,144 @@
+# -*- coding: utf-8 -*-
+
+# Tests splitting functions.
+
+import unittest
+
+from tests.utils import load_file, TestCaseBase
+
+import sqlparse
+
+
+class SQLSplitTest(TestCaseBase):
+    """Tests sqlparse.sqlsplit()."""
+
+    _sql1 = 'select * from foo;'
+    _sql2 = 'select * from bar;'
+
+    def test_split_semicolon(self):
+        sql2 = 'select * from foo where bar = \'foo;bar\';'
+        stmts = sqlparse.parse(''.join([self._sql1, sql2]))
+        self.assertEqual(len(stmts), 2)
+        self.ndiffAssertEqual(unicode(stmts[0]), self._sql1)
+        self.ndiffAssertEqual(unicode(stmts[1]), sql2)
+
+    def test_split_backslash(self):
+        stmts = sqlparse.parse(r"select '\\'; select '\''; select '\\\'';")
+        self.assertEqual(len(stmts), 3)
+
+    def test_create_function(self):
+        sql = load_file('function.sql')
+        stmts = sqlparse.parse(sql)
+        self.assertEqual(len(stmts), 1)
+        self.ndiffAssertEqual(unicode(stmts[0]), sql)
+
+    def test_create_function_psql(self):
+        sql = load_file('function_psql.sql')
+        stmts = sqlparse.parse(sql)
+        self.assertEqual(len(stmts), 1)
+        self.ndiffAssertEqual(unicode(stmts[0]), sql)
+
+    def test_create_function_psql3(self):
+        sql = load_file('function_psql3.sql')
+        stmts = sqlparse.parse(sql)
+        self.assertEqual(len(stmts), 1)
+        self.ndiffAssertEqual(unicode(stmts[0]), sql)
+
+    def test_create_function_psql2(self):
+        sql = load_file('function_psql2.sql')
+        stmts = sqlparse.parse(sql)
+        self.assertEqual(len(stmts), 1)
+        self.ndiffAssertEqual(unicode(stmts[0]), sql)
+
+    def test_dashcomments(self):
+        sql = load_file('dashcomment.sql')
+        stmts = sqlparse.parse(sql)
+        self.assertEqual(len(stmts), 3)
+        self.ndiffAssertEqual(''.join(unicode(q) for q in stmts), sql)
+
+    def test_dashcomments_eol(self):
+        stmts = sqlparse.parse('select foo; -- comment\n')
+        self.assertEqual(len(stmts), 1)
+        stmts = sqlparse.parse('select foo; -- comment\r')
+        self.assertEqual(len(stmts), 1)
+        stmts = sqlparse.parse('select foo; -- comment\r\n')
+        self.assertEqual(len(stmts), 1)
+        stmts = sqlparse.parse('select foo; -- comment')
+        self.assertEqual(len(stmts), 1)
+
+    def test_begintag(self):
+        sql = load_file('begintag.sql')
+        stmts = sqlparse.parse(sql)
+        self.assertEqual(len(stmts), 3)
+        self.ndiffAssertEqual(''.join(unicode(q) for q in stmts), sql)
+
+    def test_begintag_2(self):
+        sql = load_file('begintag_2.sql')
+        stmts = sqlparse.parse(sql)
+        self.assertEqual(len(stmts), 1)
+        self.ndiffAssertEqual(''.join(unicode(q) for q in stmts), sql)
+
+    def test_dropif(self):
+        sql = 'DROP TABLE IF EXISTS FOO;\n\nSELECT * FROM BAR;'
+        stmts = sqlparse.parse(sql)
+        self.assertEqual(len(stmts), 2)
+        self.ndiffAssertEqual(''.join(unicode(q) for q in stmts), sql)
+
+    def test_comment_with_umlaut(self):
+        sql = (u'select * from foo;\n'
+               u'-- Testing an umlaut: ä\n'
+               u'select * from bar;')
+        stmts = sqlparse.parse(sql)
+        self.assertEqual(len(stmts), 2)
+        self.ndiffAssertEqual(''.join(unicode(q) for q in stmts), sql)
+
+    def test_comment_end_of_line(self):
+        sql = ('select * from foo; -- foo\n'
+               'select * from bar;')
+        stmts = sqlparse.parse(sql)
+        self.assertEqual(len(stmts), 2)
+        self.ndiffAssertEqual(''.join(unicode(q) for q in stmts), sql)
+        # make sure the comment belongs to first query
+        self.ndiffAssertEqual(unicode(stmts[0]), 'select * from foo; -- foo\n')
+
+    def test_casewhen(self):
+        sql = ('SELECT case when val = 1 then 2 else null end as foo;\n'
+               'comment on table actor is \'The actor table.\';')
+        stmts = sqlparse.split(sql)
+        self.assertEqual(len(stmts), 2)
+
+    def test_cursor_declare(self):
+        sql = ('DECLARE CURSOR "foo" AS SELECT 1;\n'
+               'SELECT 2;')
+        stmts = sqlparse.split(sql)
+        self.assertEqual(len(stmts), 2)
+
+    def test_if_function(self):  # see issue 33
+        # don't let IF as a function confuse the splitter
+        sql = ('CREATE TEMPORARY TABLE tmp '
+               'SELECT IF(a=1, a, b) AS o FROM one; '
+               'SELECT t FROM two')
+        stmts = sqlparse.split(sql)
+        self.assertEqual(len(stmts), 2)
+
+    def test_split_stream(self):
+        import types
+        from cStringIO import StringIO
+
+        stream = StringIO("SELECT 1; SELECT 2;")
+        stmts = sqlparse.parsestream(stream)
+        self.assertEqual(type(stmts), types.GeneratorType)
+        self.assertEqual(len(list(stmts)), 2)
+
+    def test_encoding_parsestream(self):
+        from cStringIO import StringIO
+        stream = StringIO("SELECT 1; SELECT 2;")
+        stmts = list(sqlparse.parsestream(stream))
+        self.assertEqual(type(stmts[0].tokens[0].value), unicode)
+
+
+def test_split_simple():
+    stmts = sqlparse.split('select * from foo; select * from bar;')
+    assert len(stmts) == 2
+    assert stmts[0] == 'select * from foo;'
+    assert stmts[1] == 'select * from bar;'

http://git-wip-us.apache.org/repos/asf/impala/blob/49413d9c/shell/ext-py/sqlparse-0.1.19/tests/test_tokenize.py
----------------------------------------------------------------------
diff --git a/shell/ext-py/sqlparse-0.1.19/tests/test_tokenize.py b/shell/ext-py/sqlparse-0.1.19/tests/test_tokenize.py
new file mode 100644
index 0000000..0b23fa8
--- /dev/null
+++ b/shell/ext-py/sqlparse-0.1.19/tests/test_tokenize.py
@@ -0,0 +1,190 @@
+# -*- coding: utf-8 -*-
+
+import sys
+import types
+import unittest
+
+import pytest
+
+import sqlparse
+from sqlparse import lexer
+from sqlparse import sql
+from sqlparse.tokens import *
+
+
+class TestTokenize(unittest.TestCase):
+
+    def test_simple(self):
+        s = 'select * from foo;'
+        stream = lexer.tokenize(s)
+        self.assert_(isinstance(stream, types.GeneratorType))
+        tokens = list(stream)
+        self.assertEqual(len(tokens), 8)
+        self.assertEqual(len(tokens[0]), 2)
+        self.assertEqual(tokens[0], (Keyword.DML, u'select'))
+        self.assertEqual(tokens[-1], (Punctuation, u';'))
+
+    def test_backticks(self):
+        s = '`foo`.`bar`'
+        tokens = list(lexer.tokenize(s))
+        self.assertEqual(len(tokens), 3)
+        self.assertEqual(tokens[0], (Name, u'`foo`'))
+
+    def test_linebreaks(self):  # issue1
+        s = 'foo\nbar\n'
+        tokens = lexer.tokenize(s)
+        self.assertEqual(''.join(str(x[1]) for x in tokens), s)
+        s = 'foo\rbar\r'
+        tokens = lexer.tokenize(s)
+        self.assertEqual(''.join(str(x[1]) for x in tokens), s)
+        s = 'foo\r\nbar\r\n'
+        tokens = lexer.tokenize(s)
+        self.assertEqual(''.join(str(x[1]) for x in tokens), s)
+        s = 'foo\r\nbar\n'
+        tokens = lexer.tokenize(s)
+        self.assertEqual(''.join(str(x[1]) for x in tokens), s)
+
+    def test_inline_keywords(self):  # issue 7
+        s = "create created_foo"
+        tokens = list(lexer.tokenize(s))
+        self.assertEqual(len(tokens), 3)
+        self.assertEqual(tokens[0][0], Keyword.DDL)
+        self.assertEqual(tokens[2][0], Name)
+        self.assertEqual(tokens[2][1], u'created_foo')
+        s = "enddate"
+        tokens = list(lexer.tokenize(s))
+        self.assertEqual(len(tokens), 1)
+        self.assertEqual(tokens[0][0], Name)
+        s = "join_col"
+        tokens = list(lexer.tokenize(s))
+        self.assertEqual(len(tokens), 1)
+        self.assertEqual(tokens[0][0], Name)
+        s = "left join_col"
+        tokens = list(lexer.tokenize(s))
+        self.assertEqual(len(tokens), 3)
+        self.assertEqual(tokens[2][0], Name)
+        self.assertEqual(tokens[2][1], 'join_col')
+
+    def test_negative_numbers(self):
+        s = "values(-1)"
+        tokens = list(lexer.tokenize(s))
+        self.assertEqual(len(tokens), 4)
+        self.assertEqual(tokens[2][0], Number.Integer)
+        self.assertEqual(tokens[2][1], '-1')
+
+    # Somehow this test fails on Python 3.2
+    @pytest.mark.skipif('sys.version_info >= (3,0)')
+    def test_tab_expansion(self):
+        s = "\t"
+        lex = lexer.Lexer()
+        lex.tabsize = 5
+        tokens = list(lex.get_tokens(s))
+        self.assertEqual(tokens[0][1], " " * 5)
+
+
+class TestToken(unittest.TestCase):
+
+    def test_str(self):
+        token = sql.Token(None, 'FoO')
+        self.assertEqual(str(token), 'FoO')
+
+    def test_repr(self):
+        token = sql.Token(Keyword, 'foo')
+        tst = "<Keyword 'foo' at 0x"
+        self.assertEqual(repr(token)[:len(tst)], tst)
+        token = sql.Token(Keyword, '1234567890')
+        tst = "<Keyword '123456...' at 0x"
+        self.assertEqual(repr(token)[:len(tst)], tst)
+
+    def test_flatten(self):
+        token = sql.Token(Keyword, 'foo')
+        gen = token.flatten()
+        self.assertEqual(type(gen), types.GeneratorType)
+        lgen = list(gen)
+        self.assertEqual(lgen, [token])
+
+
+class TestTokenList(unittest.TestCase):
+
+    def test_repr(self):
+        p = sqlparse.parse('foo, bar, baz')[0]
+        tst = "<IdentifierList 'foo, b...' at 0x"
+        self.assertEqual(repr(p.tokens[0])[:len(tst)], tst)
+
+    def test_token_first(self):
+        p = sqlparse.parse(' select foo')[0]
+        first = p.token_first()
+        self.assertEqual(first.value, 'select')
+        self.assertEqual(p.token_first(ignore_whitespace=False).value, ' ')
+        self.assertEqual(sql.TokenList([]).token_first(), None)
+
+    def test_token_matching(self):
+        t1 = sql.Token(Keyword, 'foo')
+        t2 = sql.Token(Punctuation, ',')
+        x = sql.TokenList([t1, t2])
+        self.assertEqual(x.token_matching(0, [lambda t: t.ttype is Keyword]),
+                         t1)
+        self.assertEqual(x.token_matching(0,
+                                          [lambda t: t.ttype is Punctuation]),
+                         t2)
+        self.assertEqual(x.token_matching(1, [lambda t: t.ttype is Keyword]),
+                         None)
+
+
+class TestStream(unittest.TestCase):
+    def test_simple(self):
+        from cStringIO import StringIO
+
+        stream = StringIO("SELECT 1; SELECT 2;")
+        lex = lexer.Lexer()
+
+        tokens = lex.get_tokens(stream)
+        self.assertEqual(len(list(tokens)), 9)
+
+        stream.seek(0)
+        lex.bufsize = 4
+        tokens = list(lex.get_tokens(stream))
+        self.assertEqual(len(tokens), 9)
+
+        stream.seek(0)
+        lex.bufsize = len(stream.getvalue())
+        tokens = list(lex.get_tokens(stream))
+        self.assertEqual(len(tokens), 9)
+
+    def test_error(self):
+        from cStringIO import StringIO
+
+        stream = StringIO("FOOBAR{")
+
+        lex = lexer.Lexer()
+        lex.bufsize = 4
+        tokens = list(lex.get_tokens(stream))
+        self.assertEqual(len(tokens), 2)
+        self.assertEqual(tokens[1][0], Error)
+
+
+@pytest.mark.parametrize('expr', ['JOIN', 'LEFT JOIN', 'LEFT OUTER JOIN',
+                                  'FULL OUTER JOIN', 'NATURAL JOIN',
+                                  'CROSS JOIN', 'STRAIGHT JOIN',
+                                  'INNER JOIN', 'LEFT INNER JOIN'])
+def test_parse_join(expr):
+    p = sqlparse.parse('%s foo' % expr)[0]
+    assert len(p.tokens) == 3
+    assert p.tokens[0].ttype is Keyword
+
+
+def test_parse_endifloop():
+    p = sqlparse.parse('END IF')[0]
+    assert len(p.tokens) == 1
+    assert p.tokens[0].ttype is Keyword
+    p = sqlparse.parse('END   IF')[0]
+    assert len(p.tokens) == 1
+    p = sqlparse.parse('END\t\nIF')[0]
+    assert len(p.tokens) == 1
+    assert p.tokens[0].ttype is Keyword
+    p = sqlparse.parse('END LOOP')[0]
+    assert len(p.tokens) == 1
+    assert p.tokens[0].ttype is Keyword
+    p = sqlparse.parse('END  LOOP')[0]
+    assert len(p.tokens) == 1
+    assert p.tokens[0].ttype is Keyword

http://git-wip-us.apache.org/repos/asf/impala/blob/49413d9c/shell/ext-py/sqlparse-0.1.19/tests/utils.py
----------------------------------------------------------------------
diff --git a/shell/ext-py/sqlparse-0.1.19/tests/utils.py b/shell/ext-py/sqlparse-0.1.19/tests/utils.py
new file mode 100644
index 0000000..9eb46bf
--- /dev/null
+++ b/shell/ext-py/sqlparse-0.1.19/tests/utils.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+
+"""Helpers for testing."""
+
+import codecs
+import difflib
+import os
+import unittest
+from StringIO import StringIO
+
+import sqlparse.utils
+
+NL = '\n'
+DIR_PATH = os.path.abspath(os.path.dirname(__file__))
+PARENT_DIR = os.path.dirname(DIR_PATH)
+FILES_DIR = os.path.join(DIR_PATH, 'files')
+
+
+def load_file(filename, encoding='utf-8'):
+    """Opens filename with encoding and return it's contents."""
+    f = codecs.open(os.path.join(FILES_DIR, filename), 'r', encoding)
+    data = f.read()
+    f.close()
+    return data
+
+
+class TestCaseBase(unittest.TestCase):
+    """Base class for test cases with some additional checks."""
+
+    # Adopted from Python's tests.
+    def ndiffAssertEqual(self, first, second):
+        """Like failUnlessEqual except use ndiff for readable output."""
+        if first != second:
+            sfirst = unicode(first)
+            ssecond = unicode(second)
+            # Using the built-in .splitlines() method here will cause incorrect
+            # results when splitting statements that have quoted CR/CR+LF
+            # characters.
+            sfirst = sqlparse.utils.split_unquoted_newlines(sfirst)
+            ssecond = sqlparse.utils.split_unquoted_newlines(ssecond)
+            diff = difflib.ndiff(sfirst, ssecond)
+            fp = StringIO()
+            fp.write(NL)
+            fp.write(NL.join(diff))
+            print fp.getvalue()
+            raise self.failureException, fp.getvalue()

http://git-wip-us.apache.org/repos/asf/impala/blob/49413d9c/shell/ext-py/sqlparse-0.1.19/tox.ini
----------------------------------------------------------------------
diff --git a/shell/ext-py/sqlparse-0.1.19/tox.ini b/shell/ext-py/sqlparse-0.1.19/tox.ini
new file mode 100644
index 0000000..e797ca9
--- /dev/null
+++ b/shell/ext-py/sqlparse-0.1.19/tox.ini
@@ -0,0 +1,37 @@
+[tox]
+envlist=py26,py27,py32,py33,py34,pypy
+
+[testenv]
+deps=
+  pytest
+  pytest-cov
+commands=
+  sqlformat --version  # Sanity check.
+  py.test --cov=sqlparse/ tests
+
+[testenv:py32]
+changedir={envdir}
+commands=
+  sqlformat --version  # Sanity check.
+  rm -rf tests/
+  cp -r {toxinidir}/tests/ tests/
+  2to3 -w --no-diffs -n tests/
+  py.test --cov={envdir}/lib/python3.2/site-packages/sqlparse/ tests
+
+[testenv:py33]
+changedir={envdir}
+commands=
+  sqlformat --version  # Sanity check.
+  rm -rf tests/
+  cp -r {toxinidir}/tests/ tests/
+  2to3 -w --no-diffs -n tests/
+  py.test --cov={envdir}/lib/python3.3/site-packages/sqlparse/ tests
+
+[testenv:py34]
+changedir={envdir}
+commands=
+  sqlformat --version  # Sanity check.
+  rm -rf tests/
+  cp -r {toxinidir}/tests/ tests/
+  2to3 -w --no-diffs -n tests/
+  py.test --cov={envdir}/lib/python3.4/site-packages/sqlparse/ tests
