package tokens_test

import (
	"testing"

	"github.com/noirbizarre/gonja/tokens"
	"github.com/stretchr/testify/assert"
)

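// tok is a minimal (type, value) pair describing a token the lexer is
// expected to emit.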
type tok struct {
	typ tokens.Type
	val string
}

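// String quotes the token value so that failed assertions print readably.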
func (t tok) String() string {
	return `"` + t.val + `"`
}

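// Expected tokens shared across the test cases below.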
var (
	EOF            = tok{tokens.EOF, ""}
	varBegin       = tok{tokens.VariableBegin, "{{"}
	varEnd         = tok{tokens.VariableEnd, "}}"}
	blockBegin     = tok{tokens.BlockBegin, "{%"}
	blockBeginTrim = tok{tokens.BlockBegin, "{%-"}
	blockEnd       = tok{tokens.BlockEnd, "%}"}
	blockEndTrim   = tok{tokens.BlockEnd, "-%}"}
	lParen         = tok{tokens.Lparen, "("}
	rParen         = tok{tokens.Rparen, ")"}
	lBrace         = tok{tokens.Lbrace, "{"}
	rBrace         = tok{tokens.Rbrace, "}"}
	lBracket       = tok{tokens.Lbracket, "["}
	rBracket       = tok{tokens.Rbracket, "]"}
	space          = tok{tokens.Whitespace, " "}
)

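// data builds an expected raw template data token.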
func data(text string) tok {
	return tok{tokens.Data, text}
}

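// name builds an expected identifier token.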
func name(text string) tok {
	return tok{tokens.Name, text}
}

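// str builds an expected string literal token.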
func str(text string) tok {
	return tok{tokens.String, text}
}

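// error builds an expected error token; note that it shadows the predeclared
// error identifier within this test file.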
func error(text string) tok {
	return tok{tokens.Error, text}
}

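// lexerCases is the shared table of inputs and expected token sequences,
// exercised by TestLexer, TestLex and TestStreamSlice.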
var lexerCases = []struct {
	name     string
	input    string
	expected []tok
}{
	{"empty", "", []tok{EOF}},
	{"data", "Hello World", []tok{
		data("Hello World"),
		EOF,
	}},
	{"comment", "{# a comment #}", []tok{
		tok{tokens.CommentBegin, "{#"},
		data(" a comment "),
		tok{tokens.CommentEnd, "#}"},
		EOF,
	}},
	{"mixed comment", "Hello, {# comment #}World", []tok{
		data("Hello, "),
		tok{tokens.CommentBegin, "{#"},
		data(" comment "),
		tok{tokens.CommentEnd, "#}"},
		data("World"),
		EOF,
	}},
	{"simple variable", "{{ foo }}", []tok{
		varBegin,
		space,
		name("foo"),
		space,
		varEnd,
		EOF,
	}},
	{"basic math expression", "{{ (a - b) + c }}", []tok{
		varBegin, space,
		lParen, name("a"), space, tok{tokens.Sub, "-"}, space, name("b"), rParen,
		space, tok{tokens.Add, "+"}, space, name("c"),
		space, varEnd,
		EOF,
	}},
	{"blocks", "Hello. {% if true %}World{% else %}Nobody{% endif %}", []tok{
		data("Hello. "),
		blockBegin, space, name("if"), space, name("true"), space, blockEnd,
		data("World"),
		blockBegin, space, name("else"), space, blockEnd,
		data("Nobody"),
		blockBegin, space, name("endif"), space, blockEnd,
		EOF,
	}},
	{"blocks with trim control", "Hello. {%- if true -%}World{%- else -%}Nobody{%- endif -%}", []tok{
		data("Hello. "),
		blockBeginTrim, space, name("if"), space, name("true"), space, blockEndTrim,
		data("World"),
		blockBeginTrim, space, name("else"), space, blockEndTrim,
		data("Nobody"),
		blockBeginTrim, space, name("endif"), space, blockEndTrim,
		EOF,
	}},
	{"Ignore tags in comment", "<html>{# ignore {% tags %} in comments ##}</html>", []tok{
		data("<html>"),
		tok{tokens.CommentBegin, "{#"},
		data(" ignore {% tags %} in comments #"),
		tok{tokens.CommentEnd, "#}"},
		data("</html>"),
		EOF,
	}},
	{"Mixed content", "{# comment #}{% if foo -%} bar {%- elif baz %} bing{%endif %}", []tok{
		tok{tokens.CommentBegin, "{#"},
		data(" comment "),
		tok{tokens.CommentEnd, "#}"},
		blockBegin, space, name("if"), space, name("foo"), space, blockEndTrim,
		data(" bar "),
		blockBeginTrim, space, name("elif"), space, name("baz"), space, blockEnd,
		data(" bing"),
		blockBegin, name("endif"), tok{tokens.Whitespace, " "}, blockEnd,
		EOF,
	}},
	{"mixed tokens with doubles", "{{ +--+ /+//,|*/**=>>=<=< == }}", []tok{
		varBegin,
		space,
		tok{tokens.Add, "+"}, tok{tokens.Sub, "-"}, tok{tokens.Sub, "-"}, tok{tokens.Add, "+"},
		space,
		tok{tokens.Div, "/"}, tok{tokens.Add, "+"}, tok{tokens.Floordiv, "//"},
		tok{tokens.Comma, ","},
		tok{tokens.Pipe, "|"},
		tok{tokens.Mul, "*"},
		tok{tokens.Div, "/"},
		tok{tokens.Pow, "**"},
		tok{tokens.Assign, "="},
		tok{tokens.Gt, ">"},
		tok{tokens.Gteq, ">="},
		tok{tokens.Lteq, "<="},
		tok{tokens.Lt, "<"},
		space,
		tok{tokens.Eq, "=="},
		space,
		varEnd,
		EOF,
	}},
	{"delimiters", "{{ ([{}]()) }}", []tok{
		varBegin, space,
		lParen, lBracket, lBrace, rBrace, rBracket, lParen, rParen, rParen,
		space, varEnd,
		EOF,
	}},
	{"Unbalanced delimiters", "{{ ([{]) }}", []tok{
		varBegin, space,
		lParen, lBracket, lBrace,
		error(`Unbalanced delimiters, expected "}", got "]"`),
	}},
	{"Unexpected delimiter", "{{ ()) }}", []tok{
		varBegin, space,
		lParen, rParen,
		error(`Unexpected delimiter ")"`),
	}},
	{"Unbalance over end block", "{{ ({a:b, {a:b}}) }}", []tok{
		varBegin, space,
		lParen,
		lBrace, name("a"), tok{tokens.Colon, ":"}, name("b"), tok{tokens.Comma, ","},
		space,
		lBrace, name("a"), tok{tokens.Colon, ":"}, name("b"), rBrace, rBrace,
		rParen,
		space, varEnd,
		EOF,
	}},
	{"string with double quote", `{{ "Hello, " + "World" }}`, []tok{
		varBegin, space,
		str("Hello, "),
		space, tok{tokens.Add, "+"}, space,
		str("World"),
		space, varEnd,
		EOF,
	}},
	{"string with single quote", `{{ 'Hello, ' + 'World' }}`, []tok{
		varBegin, space,
		str("Hello, "),
		space, tok{tokens.Add, "+"}, space,
		str("World"),
		space, varEnd,
		EOF,
	}},
	{"single quotes inside double quotes string", `{{ "'quoted' test" }}`, []tok{
		varBegin, space, str("'quoted' test"), space, varEnd, EOF,
	}},
	{"escaped string", `{{ "Hello, \"World\"" }}`, []tok{
		varBegin, space,
		str(`Hello, "World"`),
		space, varEnd,
		EOF,
	}},
	{"escaped string mixed", `{{ "Hello,\n \'World\'" }}`, []tok{
		varBegin, space,
		str(`Hello,\n 'World'`),
		space, varEnd,
		EOF,
	}},
	{"if statement", `{% if 5.5 == 5.500000 %}5.5 is 5.500000{% endif %}`, []tok{
		blockBegin, space, name("if"), space,
		tok{tokens.Float, "5.5"}, space, tok{tokens.Eq, "=="}, space, tok{tokens.Float, "5.500000"},
		space, blockEnd,
		data("5.5 is 5.500000"),
		blockBegin, space, name("endif"), space, blockEnd,
		EOF,
	}},
}

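// tokenSlice drains the lexer's token channel into a slice, returning once
// the lexer closes it.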
func tokenSlice(c chan *tokens.Token) []*tokens.Token {
	toks := []*tokens.Token{}
	for token := range c {
		toks = append(toks, token)
	}
	return toks
}

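// TestLexer runs the lexer on each case and compares the emitted token types
// and values with the expected sequence.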
func TestLexer(t *testing.T) {
	for _, lc := range lexerCases {
		test := lc
		t.Run(test.name, func(t *testing.T) {
			lexer := tokens.NewLexer(test.input)
			go lexer.Run()
			toks := tokenSlice(lexer.Tokens)

			assert := assert.New(t)
			assert.Equal(len(test.expected), len(toks))
			actual := []tok{}
			for _, token := range toks {
				actual = append(actual, tok{token.Type, token.Val})
			}
			assert.Equal(test.expected, actual)
		})
	}
}

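// streamResult walks a stream to its end, collecting each token as a tok.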
func streamResult(s *tokens.Stream) []tok {
	out := []tok{}
	for !s.End() {
		token := s.Current()
		out = append(out, tok{token.Type, token.Val})
		s.Next()
	}
	return out
}

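// asStreamResult filters an expected token list down to what a stream should
// yield: whitespace and EOF tokens are dropped, and the list stops at the
// first error token, which is reported through the returned bool.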
func asStreamResult(toks []tok) ([]tok, bool) {
	out := []tok{}
	isError := false
	for _, token := range toks {
		if token.typ == tokens.Error {
			isError = true
			break
		}
		if token.typ != tokens.Whitespace && token.typ != tokens.EOF {
			out = append(out, token)
		}
	}
	return out, isError
}

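// TestLex checks the stream returned by tokens.Lex against the filtered
// expectations.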
func TestLex(t *testing.T) {
	for _, lc := range lexerCases {
		test := lc
		t.Run(test.name, func(t *testing.T) {
			stream := tokens.Lex(test.input)
			expected, _ := asStreamResult(test.expected)

			actual := streamResult(stream)

			assert := assert.New(t)
			assert.Equal(len(expected), len(actual))
			assert.Equal(expected, actual)
		})
	}
}

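// TestStreamSlice builds a stream from an already-collected token slice and
// checks that it yields the same filtered expectations.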
func TestStreamSlice(t *testing.T) {
	for _, lc := range lexerCases {
		test := lc
		t.Run(test.name, func(t *testing.T) {
			lexer := tokens.NewLexer(test.input)
			go lexer.Run()
			toks := tokenSlice(lexer.Tokens)

			stream := tokens.NewStream(toks)
			expected, _ := asStreamResult(test.expected)

			actual := streamResult(stream)

			assert := assert.New(t)
			assert.Equal(len(expected), len(actual))
			assert.Equal(expected, actual)
		})
	}
}

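// positionsCase spans several lines to exercise token offset, line and column
// tracking.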
const positionsCase = `Hello
{#
    Multiline comment
#}
World
`

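// TestLexerPosition verifies the offset, line and column recorded on each
// token.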
func TestLexerPosition(t *testing.T) {
	assert := assert.New(t)

	lexer := tokens.NewLexer(positionsCase)
	go lexer.Run()
	toks := tokenSlice(lexer.Tokens)
	assert.Equal([]*tokens.Token{
		&tokens.Token{tokens.Data, "Hello\n", 0, 1, 1},
		&tokens.Token{tokens.CommentBegin, "{#", 6, 2, 1},
		&tokens.Token{tokens.Data, "\n    Multiline comment\n", 8, 2, 3},
		&tokens.Token{tokens.CommentEnd, "#}", 31, 4, 1},
		&tokens.Token{tokens.Data, "\nWorld\n", 33, 4, 3},
		&tokens.Token{tokens.EOF, "", 40, 6, 1},
	}, toks)
}