Package tokens

import "github.com/noirbizarre/gonja/tokens"

Variables

Names maps token types to their human-readable names.

var Names = map[Type]string{
    Error:  "Error",
    Add:    "Add",
    Assign: "Assign",
    Colon:  "Colon",
    Comma:  "Comma",
    Div:    "Div",
    Dot:    "Dot",
    Eq:     "Eq",

    Floordiv: "Floordiv",
    Gt:       "Gt",
    Gteq:     "Gteq",
    Lbrace:   "Lbrace",
    Lbracket: "Lbracket",
    Lparen:   "Lparen",
    Lt:       "Lt",
    Lteq:     "Lteq",

    Mod:                "Mod",
    Mul:                "Mul",
    Ne:                 "Ne",
    Pipe:               "Pipe",
    Pow:                "Pow",
    Rbrace:             "Rbrace",
    Rbracket:           "Rbracket",
    Rparen:             "Rparen",
    Semicolon:          "Semicolon",
    Sub:                "Sub",
    Tilde:              "Tilde",
    Whitespace:         "Whitespace",
    Float:              "Float",
    Integer:            "Integer",
    Name:               "Name",
    String:             "String",
    Operator:           "Operator",
    BlockBegin:         "BlockBegin",
    BlockEnd:           "BlockEnd",
    VariableBegin:      "VariableBegin",
    VariableEnd:        "VariableEnd",
    RawBegin:           "RawBegin",
    RawEnd:             "RawEnd",
    CommentBegin:       "CommentBegin",
    CommentEnd:         "CommentEnd",
    Comment:            "Comment",
    LinestatementBegin: "LinestatementBegin",
    LinestatementEnd:   "LinestatementEnd",
    LinecommentBegin:   "LinecommentBegin",
    LinecommentEnd:     "LinecommentEnd",
    Linecomment:        "Linecomment",
    Data:               "Data",
    Initial:            "Initial",
    EOF:                "EOF",
}
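
For instance, the map can be used to turn a Type into its display name (a minimal sketch; both keys shown here come from the constant block below):

package main

import (
    "fmt"

    "github.com/noirbizarre/gonja/tokens"
)

func main() {
    fmt.Println(tokens.Names[tokens.Pipe])          // Pipe
    fmt.Println(tokens.Names[tokens.VariableBegin]) // VariableBegin
}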

func ReadablePosition

func ReadablePosition(pos int, input string) (int, int)
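
No doc string is attached to this helper. A minimal usage sketch, under the assumption that the returned pair is the line and column corresponding to a byte offset in the input:

package main

import (
    "fmt"

    "github.com/noirbizarre/gonja/tokens"
)

func main() {
    input := "Hello\n{{ name }}\n"

    // Offset 9 falls inside "{{ name }}" on the second line.
    // Assumption: the two return values are the line and the
    // column for that offset.
    line, col := tokens.ReadablePosition(9, input)
    fmt.Printf("offset 9 -> line %d, column %d\n", line, col)
}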

type Lexer

Lexer holds the state of the scanner.

type Lexer struct {
    Input string // the string being scanned.
    Start int    // start position of this item.
    Pos   int    // current position in the input.
    Width int    // width of last rune read from input.
    Line  int    // Current line in the input
    Col   int    // Current position in the line
    // Position Position // Current lexing position in the input
    Config *config.Config // The lexer configuration
    Tokens chan *Token    // channel of scanned tokens.

    RawStatements rawStmt
    // contains filtered or unexported fields
}

func NewLexer

func NewLexer(input string) *Lexer

NewLexer creates a new scanner for the input string.

func (*Lexer) Current

func (l *Lexer) Current() string

func (*Lexer) Position

func (l *Lexer) Position() *Position

Position returns the current position in the input.

func (*Lexer) Run

func (l *Lexer) Run()

Run lexes the input by executing state functions until the state is nil.
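
A minimal sketch of driving the lexer by hand with NewLexer and Run, assuming Run emits tokens on the Tokens channel and that the sequence ends with an EOF token; the Stream API below wraps this pattern:

package main

import (
    "fmt"

    "github.com/noirbizarre/gonja/tokens"
)

func main() {
    l := tokens.NewLexer("Hello {{ name }}!")

    // Run executes the state functions; running it in a goroutine
    // lets us consume tokens from the channel as they are emitted.
    go l.Run()

    for tok := range l.Tokens {
        fmt.Printf("%-13s %q\n", tokens.Names[tok.Type], tok.Val)
        if tok.Type == tokens.EOF { // assumed end-of-input marker
            break
        }
    }
}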

type Pos

type Pos interface {
    Pos() int
}

type Position

Position describes an arbitrary source position including the file, line, and column location. A Position is valid if the line number is > 0.

type Position struct {
    Filename string // filename, if any
    Offset   int    // offset, starting at 0
    Line     int    // line number, starting at 1
    Column   int    // column number, starting at 1 (byte count)
}

func (*Position) IsValid

func (pos *Position) IsValid() bool

IsValid reports whether the position is valid.

func (*Position) Pos

func (pos *Position) Pos() int

Pos returns the current offset, starting at 0.

func (Position) String

func (pos Position) String() string

String returns a string in one of several forms:

file:line:column    valid position with file name
file:line           valid position with file name but no column (column == 0)
line:column         valid position without file name
line                valid position without file name and no column (column == 0)
file                invalid position with file name
-                   invalid position without file name
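
Based on the forms listed above, a short sketch of what String produces for a few positions:

package main

import (
    "fmt"

    "github.com/noirbizarre/gonja/tokens"
)

func main() {
    withFile := tokens.Position{Filename: "index.html", Line: 3, Column: 14}
    noFile := tokens.Position{Line: 3, Column: 14}
    invalid := tokens.Position{}

    fmt.Println(withFile.String()) // index.html:3:14
    fmt.Println(noFile.String())   // 3:14
    fmt.Println(invalid.String())  // -
}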

type Stream

type Stream struct {
    // contains filtered or unexported fields
}

func Lex

func Lex(input string) *Stream

func NewStream

func NewStream(input interface{}) *Stream

func (*Stream) Backup

func (s *Stream) Backup()

func (*Stream) Current

func (s *Stream) Current() *Token

func (*Stream) EOF

func (s *Stream) EOF() bool

func (*Stream) End

func (s *Stream) End() bool

func (*Stream) IsError

func (s *Stream) IsError() bool

func (*Stream) Next

func (s *Stream) Next() *Token

func (*Stream) Peek

func (s *Stream) Peek() *Token
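
The Stream methods carry no doc strings here. A hedged sketch of the likely usage, assuming Lex tokenizes the input, Next advances and returns the next token, and End reports when the stream is exhausted:

package main

import (
    "fmt"

    "github.com/noirbizarre/gonja/tokens"
)

func main() {
    stream := tokens.Lex("{% if user %}Hi {{ user }}{% endif %}")

    // Walk the stream until it reports the end of input.
    for !stream.End() {
        tok := stream.Next()
        if tok == nil { // defensive: assumed exhaustion behaviour
            break
        }
        fmt.Printf("%-13s %q\n", tokens.Names[tok.Type], tok.Val)
    }
}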

type Token

Token represents a single unit produced by the lexer.

type Token struct {
    Type Type
    Val  string
    Pos  int
    Line int
    Col  int
}

func (Token) String

func (t Token) String() string

type TokenIterator

type TokenIterator interface {
    Next() *Token
}

func ChanIterator

func ChanIterator(input chan *Token) TokenIterator

func SliceIterator

func SliceIterator(input []*Token) TokenIterator
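
A small sketch of the iterator helpers, assuming SliceIterator simply walks the given slice and that iteration stops at an EOF token (or a nil return once the slice is exhausted):

package main

import (
    "fmt"

    "github.com/noirbizarre/gonja/tokens"
)

func main() {
    toks := []*tokens.Token{
        {Type: tokens.Name, Val: "user"},
        {Type: tokens.Pipe, Val: "|"},
        {Type: tokens.Name, Val: "upper"},
        {Type: tokens.EOF},
    }

    var it tokens.TokenIterator = tokens.SliceIterator(toks)
    for tok := it.Next(); tok != nil && tok.Type != tokens.EOF; tok = it.Next() {
        fmt.Println(tokens.Names[tok.Type], tok.Val)
    }
}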

type Type

Type identifies the type of a token.

type Type int

Known tokens

const (
    Error Type = iota
    Add
    Assign
    Colon
    Comma
    Div
    Dot
    Eq
    // EqEq
    Floordiv
    Gt
    Gteq
    Lbrace
    Lbracket
    Lparen
    Lt
    Lteq
    // Not
    // And
    // Or
    // Neq
    Mod
    Mul
    Ne
    Pipe
    Pow
    Rbrace
    Rbracket
    Rparen
    Semicolon
    Sub
    Tilde
    Whitespace
    Float
    Integer
    Name
    String
    Operator
    BlockBegin
    BlockEnd
    VariableBegin
    VariableEnd
    RawBegin
    RawEnd
    CommentBegin
    CommentEnd
    Comment
    LinestatementBegin
    LinestatementEnd
    LinecommentBegin
    LinecommentEnd
    Linecomment
    Data
    Initial
    EOF
)