...

Source file src/github.com/noirbizarre/gonja/parser/parser.go

Documentation: github.com/noirbizarre/gonja/parser

     1  package parser
     2  
     3  import (
     4  	"fmt"
     5  	"strings"
     6  
     7  	"github.com/noirbizarre/gonja/config"
     8  	"github.com/noirbizarre/gonja/nodes"
     9  	"github.com/noirbizarre/gonja/tokens"
    10  )
    11  
// The parser provides you a comprehensive and easy tool to
// work with the template document and arguments provided by
// the user for your custom tag.
//
// The parser works on a token list which will be provided by gonja.
// A token is a unit you can work with. Tokens are either of type identifier,
// string, number, keyword, HTML or symbol.
//
// (See Token's documentation for more about tokens)
type Parser struct {
	Name   string         // Template name, used when reporting errors.
	Stream *tokens.Stream // Token stream this parser consumes.
	Config *config.Config // Parser/lexer configuration.

	Template       *nodes.Template            // NOTE(review): presumably the template being built — not written in this chunk; confirm elsewhere.
	Statements     map[string]StatementParser // Statement ("tag") parsers, keyed by tag name.
	Level          int8                       // Nesting level — NOTE(review): not read in this chunk; verify its use elsewhere.
	TemplateParser TemplateParser             // Hook for parsing nested/related templates (e.g. includes) — confirm against callers.
}
    31  
    32  // Creates a new parser to parse tokens.
    33  // Used inside gonja to parse documents and to provide an easy-to-use
    34  // parser for tag authors
    35  // func NewParser(name string, tokens []*tokens.Token) *Parser {
    36  // 	p := &Parser{
    37  // 		name:          name,
    38  // 		tokens:        tokens,
    39  // 		template:      template,
    40  // 		// bannedStmts:   make(map[string]bool),
    41  // 		// bannedFilters: make(map[string]bool),
    42  // 	}
    43  // 	if len(tokens) > 0 {
    44  // 		p.lastToken = tokens[len(tokens)-1]
    45  // 	}
    46  // 	return p
    47  // }
    48  
    49  func NewParser(name string, cfg *config.Config, stream *tokens.Stream) *Parser {
    50  	return &Parser{
    51  		Name:   name,
    52  		Stream: stream,
    53  		Config: cfg,
    54  	}
    55  }
    56  
    57  func Parse(input string) (*nodes.Template, error) {
    58  	stream := tokens.Lex(input)
    59  	p := NewParser("parser", config.DefaultConfig, stream)
    60  	return p.Parse()
    61  }
    62  
    63  func (p *Parser) Parse() (*nodes.Template, error) {
    64  	// for p.state = parseProg; p.state != nil; {
    65  	// 	p.state = p.state(p)
    66  	// }
    67  
    68  	// lex everything
    69  	// t := p.Lexer.NextItem()
    70  	// for ; t.Typ != lex.EOF; t = p.Lexer.NextItem() {
    71  	// 	p.Items = append(p.Items, t)
    72  	// }
    73  	// p.Items = append(p.Items, t)
    74  
    75  	// tokens := []*l.Token{}
    76  	// for token := range p.Tokens {
    77  	// 	p.tokens = append(p.tokens, token)
    78  	// }
    79  
    80  	return p.ParseTemplate()
    81  }
    82  
    83  // Consume one token. It will be gone forever.
    84  func (p *Parser) Consume() {
    85  	p.Stream.Next()
    86  }
    87  
    88  // // Consume N tokens. They will be gone forever.
    89  // func (p *Parser) ConsumeN(count int) {
    90  // 	p.idx += count
    91  // }
    92  
    93  // Current returns the current token.
    94  func (p *Parser) Current() *tokens.Token {
    95  	return p.Stream.Current()
    96  }
    97  
    98  // Next returns and consume the current token
    99  func (p *Parser) Next() *tokens.Token {
   100  	// t := p.Stream.Next()
   101  	// p.Consume()
   102  	// return t
   103  	return p.Stream.Next()
   104  }
   105  
   106  func (p *Parser) End() bool {
   107  	return p.Stream.End()
   108  }
   109  
   110  // Match returns the CURRENT token if the given type matches.
   111  // Consumes this token on success.
   112  func (p *Parser) Match(types ...tokens.Type) *tokens.Token {
   113  	tok := p.Stream.Current()
   114  	for _, t := range types {
   115  		if tok.Type == t {
   116  			p.Stream.Next()
   117  			return tok
   118  		}
   119  	}
   120  	return nil
   121  }
   122  
   123  func (p *Parser) MatchName(names ...string) *tokens.Token {
   124  	t := p.Peek(tokens.Name)
   125  	if t != nil {
   126  		for _, name := range names {
   127  			if t.Val == name {
   128  				return p.Pop()
   129  			}
   130  		}
   131  	}
   132  	// if t != nil && t.Val == name { return p.Pop() }
   133  	return nil
   134  }
   135  
   136  // Pop returns the current token and advance to the next
   137  func (p *Parser) Pop() *tokens.Token {
   138  	t := p.Stream.Current()
   139  	p.Stream.Next()
   140  	return t
   141  }
   142  
   143  // Peek returns the next token without consuming the current
   144  // if it matches one of the given types
   145  func (p *Parser) Peek(types ...tokens.Type) *tokens.Token {
   146  	tok := p.Stream.Current()
   147  	for _, t := range types {
   148  		if tok.Type == t {
   149  			return tok
   150  		}
   151  	}
   152  	return nil
   153  }
   154  
   155  func (p *Parser) PeekName(names ...string) *tokens.Token {
   156  	t := p.Peek(tokens.Name)
   157  	if t != nil {
   158  		for _, name := range names {
   159  			if t.Val == name {
   160  				return t
   161  			}
   162  		}
   163  	}
   164  	// if t != nil && t.Val == name { return t }
   165  	return nil
   166  }
   167  
// WrapUntil wraps all nodes between starting tag and "{% endtag %}" and provides
// one simple interface to execute the wrapped nodes.
// It returns a parser to process provided arguments to the tag.
//
// The returned sub-parser operates on the tokens found between the end-tag
// name and its closing '%}' (captured in args below). On EOF without any of
// the given end tags, an error is returned.
func (p *Parser) WrapUntil(names ...string) (*nodes.Wrapper, *Parser, error) {
	wrapper := &nodes.Wrapper{
		Location: p.Current(),
		Trim:     &nodes.Trim{},
	}

	// Tokens between the end-tag name and '%}' — handed to the sub-parser.
	var args []*tokens.Token

	for !p.Stream.End() {
		// New tag, check whether we have to stop wrapping here
		if begin := p.Match(tokens.BlockBegin); begin != nil {
			ident := p.Peek(tokens.Name)

			if ident != nil {
				// We've found a (!) end-tag

				found := false
				for _, n := range names {
					if ident.Val == n {
						found = true
						break
					}
				}

				// We only process the tag if we've found an end tag
				if found {
					// Okay, endtag found.
					p.Consume() // '{%' tagname
					// Whitespace-control markers on the opening delimiter:
					// a trailing '-' or '+' on the BlockBegin token value
					// (presumably "{%-" / "{%+" — confirm with the lexer).
					wrapper.Trim.Left = begin.Val[len(begin.Val)-1] == '-'
					wrapper.LStrip = begin.Val[len(begin.Val)-1] == '+'

					// Collect everything up to '%}' as tag arguments.
					for {
						if end := p.Match(tokens.BlockEnd); end != nil {
							// Okay, end the wrapping here
							wrapper.EndTag = ident.Val
							// Leading '-' on the closing delimiter trims to the right.
							wrapper.Trim.Right = end.Val[0] == '-'
							stream := tokens.NewStream(args)
							return wrapper, NewParser(p.Name, p.Config, stream), nil
						}
						t := p.Next()
						// p.Consume()
						if t == nil {
							return nil, nil, p.Error("Unexpected EOF.", p.Current())
						}
						args = append(args, t)
					}
				}
			}
			// Not one of our end tags: un-consume the BlockBegin so the
			// element parser below sees the full tag.
			p.Stream.Backup()
		}

		// Otherwise process next element to be wrapped
		node, err := p.parseDocElement()
		if err != nil {
			return nil, nil, err
		}
		wrapper.Nodes = append(wrapper.Nodes, node)
	}

	return nil, nil, p.Error(fmt.Sprintf("Unexpected EOF, expected tag %s.", strings.Join(names, " or ")),
		p.Current())
}
   233  
   234  // Skips all nodes between starting tag and "{% endtag %}"
   235  func (p *Parser) SkipUntil(names ...string) error {
   236  	for !p.End() {
   237  		// New tag, check whether we have to stop wrapping here
   238  		if p.Match(tokens.BlockBegin) != nil {
   239  			ident := p.Peek(tokens.Name)
   240  
   241  			if ident != nil {
   242  				// We've found a (!) end-tag
   243  
   244  				found := false
   245  				for _, n := range names {
   246  					if ident.Val == n {
   247  						found = true
   248  						break
   249  					}
   250  				}
   251  
   252  				// We only process the tag if we've found an end tag
   253  				if found {
   254  					// Okay, endtag found.
   255  					p.Consume() // '{%' tagname
   256  
   257  					for {
   258  						if p.Match(tokens.BlockEnd) != nil {
   259  							// Done skipping, exit.
   260  							return nil
   261  						}
   262  					}
   263  				}
   264  			} else {
   265  				p.Stream.Backup()
   266  			}
   267  		}
   268  		t := p.Next()
   269  		if t == nil {
   270  			return p.Error("Unexpected EOF.", p.Current())
   271  		}
   272  	}
   273  
   274  	return p.Error(fmt.Sprintf("Unexpected EOF, expected tag %s.", strings.Join(names, " or ")), p.Current())
   275  }
   276  

View as plain text