3
from mako.lexer import Lexer
4
from mako import exceptions
5
from util import flatten_result, result_lines
6
from mako.template import Template
9
class LexerTest(unittest.TestCase):
10
def test_text_and_tag(self):
19
node = Lexer(template).parse()
20
assert repr(node) == r"""TemplateNode({}, [Text(u'\n<b>Hello world</b>\n ', (1, 1)), DefTag(u'def', {u'name': u'foo()'}, (3, 9), ["Text(u'\\n this is a def.\\n ', (3, 28))"]), Text(u'\n \n and some more text.\n', (5, 16))])"""
22
def test_unclosed_tag(self):
29
nodes = Lexer(template).parse()
31
except exceptions.SyntaxException, e:
32
assert str(e) == "Unclosed tag: <%def> at line: 5 char: 9"
34
def test_onlyclosed_tag(self):
45
nodes = Lexer(template).parse()
47
except exceptions.SyntaxException, e:
48
assert str(e) == "Closing tag without opening tag: </%namespace> at line: 6 char: 13"
50
def test_unmatched_tag(self):
52
<%namespace name="bar">
62
nodes = Lexer(template).parse()
64
except exceptions.SyntaxException, e:
65
assert str(e) == "Closing tag </%namespace> does not match tag: <%def> at line: 5 char: 13"
67
def test_nonexistent_tag(self):
72
node = Lexer(template).parse()
74
except exceptions.CompileException, e:
75
assert str(e) == "No such tag: 'lala' at line: 2 char: 13"
77
def test_text_tag(self):
87
<%illegal compionent>/></>
88
<%def name="laal()">def</%def>
93
<%def name="foo()">this is foo</%def>
99
node = Lexer(template).parse()
100
assert repr(node) == r"""TemplateNode({}, [Text(u'\n', (1, 1)), Comment(u'comment', (2, 1)), ControlLine(u'if', u'if foo:', False, (3, 1)), Text(u' hi\n', (4, 1)), ControlLine(u'if', u'endif', True, (5, 1)), Text(u' ', (6, 1)), TextTag(u'text', {}, (6, 9), ['Text(u\'\\n # more code\\n \\n % more code\\n <%illegal compionent>/></>\\n <%def name="laal()">def</%def>\\n \\n \\n \', (6, 16))']), Text(u'\n\n ', (14, 17)), DefTag(u'def', {u'name': u'foo()'}, (16, 9), ["Text(u'this is foo', (16, 28))"]), Text(u'\n \n', (16, 46)), ControlLine(u'if', u'if bar:', False, (18, 1)), Text(u' code\n', (19, 1)), ControlLine(u'if', u'endif', True, (20, 1)), Text(u' ', (21, 1))])"""
102
def test_def_syntax(self):
109
node = Lexer(template).parse()
111
except exceptions.CompileException, e:
112
assert str(e) == "Missing attribute(s): 'name' at line: 2 char: 9"
114
def test_def_syntax_2(self):
121
node = Lexer(template).parse()
123
except exceptions.CompileException, e:
124
assert str(e) == "Missing parenthesis in %def at line: 2 char: 9"
126
def test_expr_in_attribute(self):
127
"""test some slightly trickier expressions.
129
you can still trip up the expression parsing, though, unless we integrated really deeply somehow with AST."""
131
<%call expr="foo>bar and 'lala' or 'hoho'"/>
132
<%call expr='foo<bar and hoho>lala and "x" + "y"'/>
134
nodes = Lexer(template).parse()
136
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n ', (1, 1)), CallTag(u'call', {u'expr': u"foo>bar and 'lala' or 'hoho'"}, (2, 13), []), Text(u'\n ', (2, 57)), CallTag(u'call', {u'expr': u'foo<bar and hoho>lala and "x" + "y"'}, (3, 13), []), Text(u'\n ', (3, 64))])"""
138
def test_pagetag(self):
140
<%page cached="True", args="a, b"/>
144
nodes = Lexer(template).parse()
145
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n ', (1, 1)), PageTag(u'page', {u'cached': u'True', u'args': u'a, b'}, (2, 13), []), Text(u'\n \n some template\n ', (2, 48))])"""
147
def test_nesting(self):
150
<%namespace name="ns">
151
<%def name="lala(hi, there)">
152
<%call expr="something()"/>
157
nodes = Lexer(template).parse()
158
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n \n ', (1, 1)), NamespaceTag(u'namespace', {u'name': u'ns'}, (3, 9), ["Text(u'\\n ', (3, 31))", 'DefTag(u\'def\', {u\'name\': u\'lala(hi, there)\'}, (4, 13), ["Text(u\'\\\\n \', (4, 42))", "CallTag(u\'call\', {u\'expr\': u\'something()\'}, (5, 17), [])", "Text(u\'\\\\n \', (5, 44))"])', "Text(u'\\n ', (6, 20))"]), Text(u'\n \n ', (7, 22))])"""
176
nodes = Lexer(template).parse()
178
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n some text\n \n ', (1, 1)), Code(u'\nprint "hi"\nfor x in range(1,5):\n print x\n \n', False, (4, 9)), Text(u'\n \n more text\n \n ', (8, 11)), Code(u'\nimport foo\n \n', True, (12, 9)), Text(u'\n ', (14, 11))])"""
180
def test_code_and_tags(self):
182
<%namespace name="foo">
198
result: <%call expr="foo.x(result)"/>
200
nodes = Lexer(template).parse()
201
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n', (1, 1)), NamespaceTag(u'namespace', {u'name': u'foo'}, (2, 1), ["Text(u'\\n ', (2, 24))", 'DefTag(u\'def\', {u\'name\': u\'x()\'}, (3, 5), ["Text(u\'\\\\n this is x\\\\n \', (3, 22))"])', "Text(u'\\n ', (5, 12))", 'DefTag(u\'def\', {u\'name\': u\'y()\'}, (6, 5), ["Text(u\'\\\\n this is y\\\\n \', (6, 22))"])', "Text(u'\\n', (8, 12))"]), Text(u'\n\n', (9, 14)), Code(u'\nresult = []\ndata = get_data()\nfor x in data:\n result.append(x+7)\n\n', False, (11, 1)), Text(u'\n\n result: ', (16, 3)), CallTag(u'call', {u'expr': u'foo.x(result)'}, (18, 13), []), Text(u'\n', (18, 42))])"""
203
def test_expression(self):
205
this is some ${text} and this is ${textwith | escapes, moreescapes}
207
give me ${foo()} and ${bar()}
211
nodes = Lexer(template).parse()
212
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n this is some ', (1, 1)), Expression(u'text', [], (2, 22)), Text(u' and this is ', (2, 29)), Expression(u'textwith ', ['escapes', 'moreescapes'], (2, 42)), Text(u'\n ', (2, 76)), DefTag(u'def', {u'name': u'hi()'}, (3, 9), ["Text(u'\\n give me ', (3, 27))", "Expression(u'foo()', [], (4, 21))", "Text(u' and ', (4, 29))", "Expression(u'bar()', [], (4, 34))", "Text(u'\\n ', (4, 42))"]), Text(u'\n ', (5, 16)), Expression(u'hi()', [], (6, 9)), Text(u'\n', (6, 16))])"""
215
def test_tricky_expression(self):
220
nodes = Lexer(template).parse()
221
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n \n ', (1, 1)), Expression(u'x and "|" or "hi"', [], (3, 13)), Text(u'\n ', (3, 33))])"""
225
${hello + '''heres '{|}' text | | }''' | escape1}
227
nodes = Lexer(template).parse()
228
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n \n ', (1, 1)), Expression(u"hello + '''heres '{|}' text | | }''' ", ['escape1'], (3, 13)), Text(u'\n ', (3, 62))])"""
230
def test_tricky_code(self):
    """A '%>' that appears inside a Python string literal must not
    terminate the <% %> code block prematurely.

    The expected repr shows the lexer keeps ``'hi %>'`` inside the
    single Code node and closes the block only at the final ``%>``.
    """
    # NOTE: the template's "print 'hi %>'" is Mako template *data*
    # (Python 2 print statement inside the rendered code block), not
    # code executed by this test itself.
    template = """<% print 'hi %>' %>"""
    nodes = Lexer(template).parse()
    assert repr(nodes) == r"""TemplateNode({}, [Code(u"print 'hi %>' \n", False, (1, 1))])"""
237
lines = src.split('\n')
240
nodes = Lexer(template).parse()
242
def test_tricky_code_2(self):
247
nodes = Lexer(template).parse()
248
assert repr(nodes) == r"""TemplateNode({}, [Code(u" \n # someone's comment\n \n", False, (1, 1)), Text(u'\n ', (3, 11))])"""
254
x = 7 # someone's '''comment
258
# someone else's comment
259
%> '''and now some text '''"""
260
nodes = Lexer(template).parse()
261
assert repr(nodes) == r"""TemplateNode({}, [Code(u"\nprint 'hi'\n# this is a comment\n# another comment\nx = 7 # someone's '''comment\nprint '''\n there\n '''\n# someone else's comment\n \n", False, (1, 1)), Text(u" '''and now some text '''", (10, 11))])"""
263
def test_control_lines(self):
267
mroe text la la blah blah
271
% for l in range(1,5):
272
tex tesl asdl l is ${l} kfmas d
277
nodes = Lexer(template).parse()
279
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\ntext text la la\n', (1, 1)), ControlLine(u'if', u'if foo():', False, (3, 1)), Text(u' mroe text la la blah blah\n', (4, 1)), ControlLine(u'if', u'endif', True, (5, 1)), Text(u'\n and osme more stuff\n', (6, 1)), ControlLine(u'for', u'for l in range(1,5):', False, (8, 1)), Text(u' tex tesl asdl l is ', (9, 1)), Expression(u'l', [], (9, 24)), Text(u' kfmas d\n', (9, 28)), ControlLine(u'for', u'endfor', True, (10, 1)), Text(u' tetx text\n \n', (11, 1))])"""
281
def test_control_lines_2(self):
286
% for file in requestattr['toc'].filenames:
290
nodes = Lexer(template).parse()
291
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n\n\n', (1, 1)), ControlLine(u'for', u"for file in requestattr['toc'].filenames:", False, (4, 1)), Text(u' x\n', (5, 1)), ControlLine(u'for', u'endfor', True, (6, 1))])"""
293
def test_long_control_lines(self):
297
requestattr['toc'].filenames:
301
nodes = Lexer(template).parse()
302
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n', (1, 1)), ControlLine(u'for', u"for file in \\\n requestattr['toc'].filenames:", False, (2, 1)), Text(u' x\n', (4, 1)), ControlLine(u'for', u'endfor', True, (5, 1)), Text(u' ', (6, 1))])"""
304
def test_unmatched_control(self):
308
% for x in range(1,5):
312
nodes = Lexer(template).parse()
314
except exceptions.SyntaxException, e:
315
assert str(e) == "Keyword 'endif' doesn't match keyword 'for' at line: 5 char: 1"
317
def test_unmatched_control_2(self):
321
% for x in range(1,5):
325
nodes = Lexer(template).parse()
327
except exceptions.SyntaxException, e:
328
assert str(e) == "Unterminated control keyword: 'if' at line: 3 char: 1"
330
def test_unmatched_control_3(self):
334
% for x in range(1,5):
339
nodes = Lexer(template).parse()
341
except exceptions.SyntaxException, e:
342
assert str(e) == "Keyword 'endlala' doesn't match keyword 'for' at line: 5 char: 1"
344
def test_ternary_control(self):
356
nodes = Lexer(template).parse()
357
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n', (1, 1)), ControlLine(u'if', u'if x:', False, (2, 1)), Text(u' hi\n', (3, 1)), ControlLine(u'elif', u'elif y+7==10:', False, (4, 1)), Text(u' there\n', (5, 1)), ControlLine(u'elif', u'elif lala:', False, (6, 1)), Text(u' lala\n', (7, 1)), ControlLine(u'else', u'else:', False, (8, 1)), Text(u' hi\n', (9, 1)), ControlLine(u'if', u'endif', True, (10, 1))])"""
359
def test_integration(self):
360
template = """<%namespace name="foo" file="somefile.html"/>
361
## inherit from foobar.html
362
<%inherit file="foobar.html"/>
364
<%def name="header()">
367
<%def name="footer()">
375
<td>Hello ${x| h}</td>
381
nodes = Lexer(template).parse()
382
assert repr(nodes) == r"""TemplateNode({}, [NamespaceTag(u'namespace', {u'name': u'foo', u'file': u'somefile.html'}, (1, 1), []), Text(u'\n', (1, 46)), Comment(u'inherit from foobar.html', (2, 1)), InheritTag(u'inherit', {u'file': u'foobar.html'}, (3, 1), []), Text(u'\n\n', (3, 31)), DefTag(u'def', {u'name': u'header()'}, (5, 1), ["Text(u'\\n <div>header</div>\\n', (5, 23))"]), Text(u'\n', (7, 8)), DefTag(u'def', {u'name': u'footer()'}, (8, 1), ["Text(u'\\n <div> footer</div>\\n', (8, 23))"]), Text(u'\n\n<table>\n', (10, 8)), ControlLine(u'for', u'for j in data():', False, (13, 1)), Text(u' <tr>\n', (14, 1)), ControlLine(u'for', u'for x in j:', False, (15, 1)), Text(u' <td>Hello ', (16, 1)), Expression(u'x', ['h'], (16, 23)), Text(u'</td>\n', (16, 30)), ControlLine(u'for', u'endfor', True, (17, 1)), Text(u' </tr>\n', (18, 1)), ControlLine(u'for', u'endfor', True, (19, 1)), Text(u'</table>\n', (20, 1))])"""
384
def test_comment_after_statement(self):
392
nodes = Lexer(template).parse()
393
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n', (1, 1)), ControlLine(u'if', u'if x: #comment', False, (2, 1)), Text(u' hi\n', (3, 1)), ControlLine(u'else', u'else: #next', False, (4, 1)), Text(u' hi\n', (5, 1)), ControlLine(u'if', u'endif #end', True, (6, 1))])"""
396
template = file("./test_htdocs/crlf.html").read()
397
nodes = Lexer(template).parse()
398
assert repr(nodes) == r"""TemplateNode({}, [Text(u'<html>\r\n\r\n', (1, 1)), PageTag(u'page', {u'args': u"a=['foo',\n 'bar']"}, (3, 1), []), Text(u'\r\n\r\nlike the name says.\r\n\r\n', (4, 26)), ControlLine(u'for', u'for x in [1,2,3]:', False, (8, 1)), Text(u' ', (9, 1)), Expression(u'x', [], (9, 9)), Text(u'', (9, 13)), ControlLine(u'for', u'endfor', True, (10, 1)), Text(u'\r\n', (11, 1)), Expression(u"trumpeter == 'Miles' and trumpeter or \\\n 'Dizzy'", [], (12, 1)), Text(u'\r\n\r\n', (13, 15)), DefTag(u'def', {u'name': u'hi()'}, (15, 1), ["Text(u'\\r\\n hi!\\r\\n', (15, 19))"]), Text(u'\r\n\r\n</html>\r\n', (17, 8))])"""
399
assert flatten_result(Template(template).render()) == """<html> like the name says. 1 2 3 Dizzy </html>"""
401
def test_comments(self):
405
# other non comment stuff
413
this is ## not a comment
421
nodes = Lexer(template).parse()
422
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n<style>\n #someselector\n # other non comment stuff\n</style>\n', (1, 1)), Comment(u'a comment', (6, 1)), Text(u'\n# also not a comment\n\n', (7, 1)), Comment(u'this is a comment', (10, 1)), Text(u' \nthis is ## not a comment\n\n', (11, 1)), Comment(u' multiline\ncomment\n', (14, 1)), Text(u'\n\nhi\n', (16, 8))])"""
435
nodes = Lexer(template).parse()
436
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n ', (1, 1)), Comment(u'\n this is a comment\n ', (2, 9)), Text(u'\n ', (4, 16)), DefTag(u'def', {u'name': u'foo()'}, (5, 9), ["Text(u'\\n ', (5, 28))", "Comment(u'\\n this is the foo func\\n ', (6, 13))", "Text(u'\\n ', (8, 20))"]), Text(u'\n ', (9, 16))])"""
438
def test_preprocess(self):
440
return re.sub(r'(?<=\n)\s*#[^#]', "##", text)
446
nodes = Lexer(template, preprocessor=preproc).parse()
447
assert repr(nodes) == r"""TemplateNode({}, [Text(u'\n hi\n', (1, 1)), Comment(u'old style comment', (3, 1)), Comment(u'another comment', (4, 1))])"""
449
if __name__ == '__main__':