Source file
src/go/parser/parser.go
1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
4
5 // Package parser implements a parser for Go source files. Input may be
6 // provided in a variety of forms (see the various Parse* functions); the
7 // output is an abstract syntax tree (AST) representing the Go source. The
8 // parser is invoked through one of the Parse* functions.
9 //
10 // The parser accepts a larger language than is syntactically permitted by
11 // the Go spec, for simplicity, and for improved robustness in the presence
12 // of syntax errors. For instance, in method declarations, the receiver is
13 // treated like an ordinary parameter list and thus may contain multiple
14 // entries where the spec permits exactly one. Consequently, the corresponding
15 // field in the AST (ast.FuncDecl.Recv) is not restricted to one entry.
16 package parser
17
18 import (
19 "fmt"
20 "go/ast"
21 "go/build/constraint"
22 "go/internal/typeparams"
23 "go/scanner"
24 "go/token"
25 "strings"
26 )
27
28
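// The parser structure holds the parser's internal state.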
29 type parser struct {
30 file *token.File
31 errors scanner.ErrorList
32 scanner scanner.Scanner
33
34
35 mode Mode
36 trace bool
37 indent int
38
39
40 comments []*ast.CommentGroup
41 leadComment *ast.CommentGroup
42 lineComment *ast.CommentGroup
43 top bool
44 goVersion string
45
46
47 pos token.Pos
48 tok token.Token
49 lit string
50
51
52
53
54
55 syncPos token.Pos
56 syncCnt int
57
58
59 exprLev int
60 inRhs bool
61
62 imports []*ast.ImportSpec
63
64
65
66 nestLev int
67 }
68
69 func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
70 p.file = fset.AddFile(filename, -1, len(src))
71 eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
72 p.scanner.Init(p.file, src, eh, scanner.ScanComments)
73
74 p.top = true
75 p.mode = mode
76 p.trace = mode&Trace != 0
77 p.next()
78 }
79
80
81
82
83 func (p *parser) printTrace(a ...any) {
84 const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
85 const n = len(dots)
86 pos := p.file.Position(p.pos)
87 fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
88 i := 2 * p.indent
89 for i > n {
90 fmt.Print(dots)
91 i -= n
92 }
93
94 fmt.Print(dots[0:i])
95 fmt.Println(a...)
96 }
97
98 func trace(p *parser, msg string) *parser {
99 p.printTrace(msg, "(")
100 p.indent++
101 return p
102 }
103
104
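// Usage pattern: defer un(trace(p, "..."))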
105 func un(p *parser) {
106 p.indent--
107 p.printTrace(")")
108 }
109
110
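// maxNestLev is the deepest we're willing to recurse.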
111 const maxNestLev int = 1e5
112
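// incNestLev is used to track nesting depth during parsing to prevent stack exhaustion.
// It is used along with decNestLev in a similar fashion to how un and trace are used.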
113 func incNestLev(p *parser) *parser {
114 p.nestLev++
115 if p.nestLev > maxNestLev {
116 p.error(p.pos, "exceeded max nesting depth")
117 panic(bailout{})
118 }
119 return p
120 }
121
122
123
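// decNestLev is used to track nesting depth during parsing to prevent stack exhaustion.
// It is used along with incNestLev in a similar fashion to how un and trace are used.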
124 func decNestLev(p *parser) {
125 p.nestLev--
126 }
127
128
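// Advance to the next token.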
129 func (p *parser) next0() {
130
131
132
133
134 if p.trace && p.pos.IsValid() {
135 s := p.tok.String()
136 switch {
137 case p.tok.IsLiteral():
138 p.printTrace(s, p.lit)
139 case p.tok.IsOperator(), p.tok.IsKeyword():
140 p.printTrace("\"" + s + "\"")
141 default:
142 p.printTrace(s)
143 }
144 }
145
146 for {
147 p.pos, p.tok, p.lit = p.scanner.Scan()
148 if p.tok == token.COMMENT {
149 if p.top && strings.HasPrefix(p.lit, "//go:build") {
150 if x, err := constraint.Parse(p.lit); err == nil {
151 p.goVersion = constraint.GoVersion(x)
152 }
153 }
154 if p.mode&ParseComments == 0 {
155 continue
156 }
157 } else {
158
159 p.top = false
160 }
161 break
162 }
163 }
164
165
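// Consume a comment and return it and the line on which it ends.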
166 func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
167
168
169 endline = p.file.Line(p.pos)
170 if p.lit[1] == '*' {
171
172 for i := 0; i < len(p.lit); i++ {
173 if p.lit[i] == '\n' {
174 endline++
175 }
176 }
177 }
178
179 comment = &ast.Comment{Slash: p.pos, Text: p.lit}
180 p.next0()
181
182 return
183 }
184
185
186
187
188
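// Consume a group of adjacent comments, add it to the parser's
// comments list, and return it together with the line at which
// the last comment in the group ends. A non-comment token or n
// empty lines terminate a comment group.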
189 func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
190 var list []*ast.Comment
191 endline = p.file.Line(p.pos)
192 for p.tok == token.COMMENT && p.file.Line(p.pos) <= endline+n {
193 var comment *ast.Comment
194 comment, endline = p.consumeComment()
195 list = append(list, comment)
196 }
197
198
199 comments = &ast.CommentGroup{List: list}
200 p.comments = append(p.comments, comments)
201
202 return
203 }
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
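// Advance to the next non-comment token. In the process, collect
// any comment groups encountered, and remember the last lead and
// line comments.
//
// A lead comment is a comment group that starts and ends in a
// line without any other tokens and that is followed by a non-comment
// token on the line immediately after the comment group.
//
// A line comment is a comment group that follows a non-comment
// token on the same line, and that has no tokens after it on the line
// where it ends.
//
// Lead and line comments may be considered documentation that is
// stored in the AST.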
219 func (p *parser) next() {
220 p.leadComment = nil
221 p.lineComment = nil
222 prev := p.pos
223 p.next0()
224
225 if p.tok == token.COMMENT {
226 var comment *ast.CommentGroup
227 var endline int
228
229 if p.file.Line(p.pos) == p.file.Line(prev) {
230
231
232 comment, endline = p.consumeCommentGroup(0)
233 if p.file.Line(p.pos) != endline || p.tok == token.SEMICOLON || p.tok == token.EOF {
234
235
236 p.lineComment = comment
237 }
238 }
239
240
241 endline = -1
242 for p.tok == token.COMMENT {
243 comment, endline = p.consumeCommentGroup(1)
244 }
245
246 if endline+1 == p.file.Line(p.pos) {
247
248
249 p.leadComment = comment
250 }
251 }
252 }
253
254
255
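// A bailout panic is raised to indicate early termination; pos and msg
// are only populated when bailing out of object resolution.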
256 type bailout struct {
257 pos token.Pos
258 msg string
259 }
260
261 func (p *parser) error(pos token.Pos, msg string) {
262 if p.trace {
263 defer un(trace(p, "error: "+msg))
264 }
265
266 epos := p.file.Position(pos)
267
268
269
270
271 if p.mode&AllErrors == 0 {
272 n := len(p.errors)
273 if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
274 return
275 }
276 if n > 10 {
277 panic(bailout{})
278 }
279 }
280
281 p.errors.Add(epos, msg)
282 }
283
284 func (p *parser) errorExpected(pos token.Pos, msg string) {
285 msg = "expected " + msg
286 if pos == p.pos {
287
288
289 switch {
290 case p.tok == token.SEMICOLON && p.lit == "\n":
291 msg += ", found newline"
292 case p.tok.IsLiteral():
293
294 msg += ", found " + p.lit
295 default:
296 msg += ", found '" + p.tok.String() + "'"
297 }
298 }
299 p.error(pos, msg)
300 }
301
302 func (p *parser) expect(tok token.Token) token.Pos {
303 pos := p.pos
304 if p.tok != tok {
305 p.errorExpected(pos, "'"+tok.String()+"'")
306 }
307 p.next()
308 return pos
309 }
310
311
312
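// expect2 is like expect, but it returns an invalid position
// if the expected token is not found.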
313 func (p *parser) expect2(tok token.Token) (pos token.Pos) {
314 if p.tok == tok {
315 pos = p.pos
316 } else {
317 p.errorExpected(p.pos, "'"+tok.String()+"'")
318 }
319 p.next()
320 return
321 }
322
323
324
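// expectClosing is like expect but provides a better error message
// for the common case of a missing comma before a newline.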
325 func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
326 if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
327 p.error(p.pos, "missing ',' before newline in "+context)
328 p.next()
329 }
330 return p.expect(tok)
331 }
332
333
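// expectSemi consumes a semicolon and returns the applicable line comment.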
334 func (p *parser) expectSemi() (comment *ast.CommentGroup) {
335
336 if p.tok != token.RPAREN && p.tok != token.RBRACE {
337 switch p.tok {
338 case token.COMMA:
339
340 p.errorExpected(p.pos, "';'")
341 fallthrough
342 case token.SEMICOLON:
343 if p.lit == ";" {
344
345 p.next()
346 comment = p.lineComment
347 } else {
348
349 comment = p.lineComment
350 p.next()
351 }
352 return comment
353 default:
354 p.errorExpected(p.pos, "';'")
355 p.advance(stmtStart)
356 }
357 }
358 return nil
359 }
360
361 func (p *parser) atComma(context string, follow token.Token) bool {
362 if p.tok == token.COMMA {
363 return true
364 }
365 if p.tok != follow {
366 msg := "missing ','"
367 if p.tok == token.SEMICOLON && p.lit == "\n" {
368 msg += " before newline"
369 }
370 p.error(p.pos, msg+" in "+context)
371 return true
372 }
373 return false
374 }
375
376 func assert(cond bool, msg string) {
377 if !cond {
378 panic("go/parser internal error: " + msg)
379 }
380 }
381
382
383
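// advance consumes tokens until the current token p.tok
// is in the 'to' set, or token.EOF. For error recovery.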
384 func (p *parser) advance(to map[token.Token]bool) {
385 for ; p.tok != token.EOF; p.next() {
386 if to[p.tok] {
387
388
389
390
391
392
393
394 if p.pos == p.syncPos && p.syncCnt < 10 {
395 p.syncCnt++
396 return
397 }
398 if p.pos > p.syncPos {
399 p.syncPos = p.pos
400 p.syncCnt = 0
401 return
402 }
403
404
405
406
407
408 }
409 }
410 }
411
412 var stmtStart = map[token.Token]bool{
413 token.BREAK: true,
414 token.CONST: true,
415 token.CONTINUE: true,
416 token.DEFER: true,
417 token.FALLTHROUGH: true,
418 token.FOR: true,
419 token.GO: true,
420 token.GOTO: true,
421 token.IF: true,
422 token.RETURN: true,
423 token.SELECT: true,
424 token.SWITCH: true,
425 token.TYPE: true,
426 token.VAR: true,
427 }
428
429 var declStart = map[token.Token]bool{
430 token.IMPORT: true,
431 token.CONST: true,
432 token.TYPE: true,
433 token.VAR: true,
434 }
435
436 var exprEnd = map[token.Token]bool{
437 token.COMMA: true,
438 token.COLON: true,
439 token.SEMICOLON: true,
440 token.RPAREN: true,
441 token.RBRACK: true,
442 token.RBRACE: true,
443 }
444
445
446
447
448
449
450
451
452
453
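// safePos returns a valid file position for a given position: if pos
// is valid to begin with, safePos returns pos. If pos is out-of-range,
// safePos returns the EOF position.
//
// This is a hack to work around "artificial" end positions in the AST
// which are computed by adding 1 to (presumably valid) token positions.
// If the token positions are invalid due to parse errors, the resulting
// end position may be past the file's EOF position, which would lead to
// panics if used later on.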
454 func (p *parser) safePos(pos token.Pos) (res token.Pos) {
455 defer func() {
456 if recover() != nil {
457 res = token.Pos(p.file.Base() + p.file.Size())
458 }
459 }()
460 _ = p.file.Offset(pos)
461 return pos
462 }
463
464
465
466
467 func (p *parser) parseIdent() *ast.Ident {
468 pos := p.pos
469 name := "_"
470 if p.tok == token.IDENT {
471 name = p.lit
472 p.next()
473 } else {
474 p.expect(token.IDENT)
475 }
476 return &ast.Ident{NamePos: pos, Name: name}
477 }
478
479 func (p *parser) parseIdentList() (list []*ast.Ident) {
480 if p.trace {
481 defer un(trace(p, "IdentList"))
482 }
483
484 list = append(list, p.parseIdent())
485 for p.tok == token.COMMA {
486 p.next()
487 list = append(list, p.parseIdent())
488 }
489
490 return
491 }
492
493
494
495
496
497 func (p *parser) parseExprList() (list []ast.Expr) {
498 if p.trace {
499 defer un(trace(p, "ExpressionList"))
500 }
501
502 list = append(list, p.parseExpr())
503 for p.tok == token.COMMA {
504 p.next()
505 list = append(list, p.parseExpr())
506 }
507
508 return
509 }
510
511 func (p *parser) parseList(inRhs bool) []ast.Expr {
512 old := p.inRhs
513 p.inRhs = inRhs
514 list := p.parseExprList()
515 p.inRhs = old
516 return list
517 }
518
519
520
521
522 func (p *parser) parseType() ast.Expr {
523 if p.trace {
524 defer un(trace(p, "Type"))
525 }
526
527 typ := p.tryIdentOrType()
528
529 if typ == nil {
530 pos := p.pos
531 p.errorExpected(pos, "type")
532 p.advance(exprEnd)
533 return &ast.BadExpr{From: pos, To: p.pos}
534 }
535
536 return typ
537 }
538
539 func (p *parser) parseQualifiedIdent(ident *ast.Ident) ast.Expr {
540 if p.trace {
541 defer un(trace(p, "QualifiedIdent"))
542 }
543
544 typ := p.parseTypeName(ident)
545 if p.tok == token.LBRACK {
546 typ = p.parseTypeInstance(typ)
547 }
548
549 return typ
550 }
551
552
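// parseTypeName parses a (possibly qualified) type name; if the result
// is an identifier, it is not resolved.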
553 func (p *parser) parseTypeName(ident *ast.Ident) ast.Expr {
554 if p.trace {
555 defer un(trace(p, "TypeName"))
556 }
557
558 if ident == nil {
559 ident = p.parseIdent()
560 }
561
562 if p.tok == token.PERIOD {
563
564 p.next()
565 sel := p.parseIdent()
566 return &ast.SelectorExpr{X: ident, Sel: sel}
567 }
568
569 return ident
570 }
571
572
573
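// parseArrayType parses an array or slice type; "[" has already been
// consumed, and lbrack is its position. If len != nil it is the already
// consumed array length.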
574 func (p *parser) parseArrayType(lbrack token.Pos, len ast.Expr) *ast.ArrayType {
575 if p.trace {
576 defer un(trace(p, "ArrayType"))
577 }
578
579 if len == nil {
580 p.exprLev++
581
582 if p.tok == token.ELLIPSIS {
583 len = &ast.Ellipsis{Ellipsis: p.pos}
584 p.next()
585 } else if p.tok != token.RBRACK {
586 len = p.parseRhs()
587 }
588 p.exprLev--
589 }
590 if p.tok == token.COMMA {
591
592
593
594 p.error(p.pos, "unexpected comma; expecting ]")
595 p.next()
596 }
597 p.expect(token.RBRACK)
598 elt := p.parseType()
599 return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
600 }
601
602 func (p *parser) parseArrayFieldOrTypeInstance(x *ast.Ident) (*ast.Ident, ast.Expr) {
603 if p.trace {
604 defer un(trace(p, "ArrayFieldOrTypeInstance"))
605 }
606
607 lbrack := p.expect(token.LBRACK)
608 trailingComma := token.NoPos
609 var args []ast.Expr
610 if p.tok != token.RBRACK {
611 p.exprLev++
612 args = append(args, p.parseRhs())
613 for p.tok == token.COMMA {
614 comma := p.pos
615 p.next()
616 if p.tok == token.RBRACK {
617 trailingComma = comma
618 break
619 }
620 args = append(args, p.parseRhs())
621 }
622 p.exprLev--
623 }
624 rbrack := p.expect(token.RBRACK)
625
626 if len(args) == 0 {
627
628 elt := p.parseType()
629 return x, &ast.ArrayType{Lbrack: lbrack, Elt: elt}
630 }
631
632
633 if len(args) == 1 {
634 elt := p.tryIdentOrType()
635 if elt != nil {
636
637 if trailingComma.IsValid() {
638
639 p.error(trailingComma, "unexpected comma; expecting ]")
640 }
641 return x, &ast.ArrayType{Lbrack: lbrack, Len: args[0], Elt: elt}
642 }
643 }
644
645
646 return nil, typeparams.PackIndexExpr(x, lbrack, args, rbrack)
647 }
648
649 func (p *parser) parseFieldDecl() *ast.Field {
650 if p.trace {
651 defer un(trace(p, "FieldDecl"))
652 }
653
654 doc := p.leadComment
655
656 var names []*ast.Ident
657 var typ ast.Expr
658 switch p.tok {
659 case token.IDENT:
660 name := p.parseIdent()
661 if p.tok == token.PERIOD || p.tok == token.STRING || p.tok == token.SEMICOLON || p.tok == token.RBRACE {
662
663 typ = name
664 if p.tok == token.PERIOD {
665 typ = p.parseQualifiedIdent(name)
666 }
667 } else {
668
669 names = []*ast.Ident{name}
670 for p.tok == token.COMMA {
671 p.next()
672 names = append(names, p.parseIdent())
673 }
674
675
676 if len(names) == 1 && p.tok == token.LBRACK {
677 name, typ = p.parseArrayFieldOrTypeInstance(name)
678 if name == nil {
679 names = nil
680 }
681 } else {
682
683 typ = p.parseType()
684 }
685 }
686 case token.MUL:
687 star := p.pos
688 p.next()
689 if p.tok == token.LPAREN {
690
691 p.error(p.pos, "cannot parenthesize embedded type")
692 p.next()
693 typ = p.parseQualifiedIdent(nil)
694
695 if p.tok == token.RPAREN {
696 p.next()
697 }
698 } else {
699
700 typ = p.parseQualifiedIdent(nil)
701 }
702 typ = &ast.StarExpr{Star: star, X: typ}
703
704 case token.LPAREN:
705 p.error(p.pos, "cannot parenthesize embedded type")
706 p.next()
707 if p.tok == token.MUL {
708
709 star := p.pos
710 p.next()
711 typ = &ast.StarExpr{Star: star, X: p.parseQualifiedIdent(nil)}
712 } else {
713
714 typ = p.parseQualifiedIdent(nil)
715 }
716
717 if p.tok == token.RPAREN {
718 p.next()
719 }
720
721 default:
722 pos := p.pos
723 p.errorExpected(pos, "field name or embedded type")
724 p.advance(exprEnd)
725 typ = &ast.BadExpr{From: pos, To: p.pos}
726 }
727
728 var tag *ast.BasicLit
729 if p.tok == token.STRING {
730 tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
731 p.next()
732 }
733
734 comment := p.expectSemi()
735
736 field := &ast.Field{Doc: doc, Names: names, Type: typ, Tag: tag, Comment: comment}
737 return field
738 }
739
740 func (p *parser) parseStructType() *ast.StructType {
741 if p.trace {
742 defer un(trace(p, "StructType"))
743 }
744
745 pos := p.expect(token.STRUCT)
746 lbrace := p.expect(token.LBRACE)
747 var list []*ast.Field
748 for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
749
750
751
752 list = append(list, p.parseFieldDecl())
753 }
754 rbrace := p.expect(token.RBRACE)
755
756 return &ast.StructType{
757 Struct: pos,
758 Fields: &ast.FieldList{
759 Opening: lbrace,
760 List: list,
761 Closing: rbrace,
762 },
763 }
764 }
765
766 func (p *parser) parsePointerType() *ast.StarExpr {
767 if p.trace {
768 defer un(trace(p, "PointerType"))
769 }
770
771 star := p.expect(token.MUL)
772 base := p.parseType()
773
774 return &ast.StarExpr{Star: star, X: base}
775 }
776
777 func (p *parser) parseDotsType() *ast.Ellipsis {
778 if p.trace {
779 defer un(trace(p, "DotsType"))
780 }
781
782 pos := p.expect(token.ELLIPSIS)
783 elt := p.parseType()
784
785 return &ast.Ellipsis{Ellipsis: pos, Elt: elt}
786 }
787
788 type field struct {
789 name *ast.Ident
790 typ ast.Expr
791 }
792
793 func (p *parser) parseParamDecl(name *ast.Ident, typeSetsOK bool) (f field) {
794
795
796 if p.trace {
797 defer un(trace(p, "ParamDeclOrNil"))
798 }
799
800 ptok := p.tok
801 if name != nil {
802 p.tok = token.IDENT
803 } else if typeSetsOK && p.tok == token.TILDE {
804
805 return field{nil, p.embeddedElem(nil)}
806 }
807
808 switch p.tok {
809 case token.IDENT:
810
811 if name != nil {
812 f.name = name
813 p.tok = ptok
814 } else {
815 f.name = p.parseIdent()
816 }
817 switch p.tok {
818 case token.IDENT, token.MUL, token.ARROW, token.FUNC, token.CHAN, token.MAP, token.STRUCT, token.INTERFACE, token.LPAREN:
819
820 f.typ = p.parseType()
821
822 case token.LBRACK:
823
824 f.name, f.typ = p.parseArrayFieldOrTypeInstance(f.name)
825
826 case token.ELLIPSIS:
827
828 f.typ = p.parseDotsType()
829 return
830
831 case token.PERIOD:
832
833 f.typ = p.parseQualifiedIdent(f.name)
834 f.name = nil
835
836 case token.TILDE:
837 if typeSetsOK {
838 f.typ = p.embeddedElem(nil)
839 return
840 }
841
842 case token.OR:
843 if typeSetsOK {
844
845 f.typ = p.embeddedElem(f.name)
846 f.name = nil
847 return
848 }
849 }
850
851 case token.MUL, token.ARROW, token.FUNC, token.LBRACK, token.CHAN, token.MAP, token.STRUCT, token.INTERFACE, token.LPAREN:
852
853 f.typ = p.parseType()
854
855 case token.ELLIPSIS:
856
857
858 f.typ = p.parseDotsType()
859 return
860
861 default:
862
863
864 p.errorExpected(p.pos, "')'")
865 p.advance(exprEnd)
866 }
867
868
869 if typeSetsOK && p.tok == token.OR && f.typ != nil {
870 f.typ = p.embeddedElem(f.typ)
871 }
872
873 return
874 }
875
876 func (p *parser) parseParameterList(name0 *ast.Ident, typ0 ast.Expr, closing token.Token) (params []*ast.Field) {
877 if p.trace {
878 defer un(trace(p, "ParameterList"))
879 }
880
881
882 tparams := closing == token.RBRACK
883
884 pos0 := p.pos
885 if name0 != nil {
886 pos0 = name0.Pos()
887 } else if typ0 != nil {
888 pos0 = typ0.Pos()
889 }
890
891
892
893
894
895
896
897 var list []field
898 var named int
899 var typed int
900
901 for name0 != nil || p.tok != closing && p.tok != token.EOF {
902 var par field
903 if typ0 != nil {
904 if tparams {
905 typ0 = p.embeddedElem(typ0)
906 }
907 par = field{name0, typ0}
908 } else {
909 par = p.parseParamDecl(name0, tparams)
910 }
911 name0 = nil
912 typ0 = nil
913 if par.name != nil || par.typ != nil {
914 list = append(list, par)
915 if par.name != nil && par.typ != nil {
916 named++
917 }
918 if par.typ != nil {
919 typed++
920 }
921 }
922 if !p.atComma("parameter list", closing) {
923 break
924 }
925 p.next()
926 }
927
928 if len(list) == 0 {
929 return
930 }
931
932
933 if named == 0 {
934
935 for i := 0; i < len(list); i++ {
936 par := &list[i]
937 if typ := par.name; typ != nil {
938 par.typ = typ
939 par.name = nil
940 }
941 }
942 if tparams {
943
944
945 var errPos token.Pos
946 var msg string
947 if named == typed {
948 errPos = p.pos
949 msg = "missing type constraint"
950 } else {
951 errPos = pos0
952 msg = "missing type parameter name"
953 if len(list) == 1 {
954 msg += " or invalid array length"
955 }
956 }
957 p.error(errPos, msg)
958 }
959 } else if named != len(list) {
960
961 var errPos token.Pos
962 var typ ast.Expr
963 for i := len(list) - 1; i >= 0; i-- {
964 if par := &list[i]; par.typ != nil {
965 typ = par.typ
966 if par.name == nil {
967 errPos = typ.Pos()
968 n := ast.NewIdent("_")
969 n.NamePos = errPos
970 par.name = n
971 }
972 } else if typ != nil {
973 par.typ = typ
974 } else {
975
976 errPos = par.name.Pos()
977 par.typ = &ast.BadExpr{From: errPos, To: p.pos}
978 }
979 }
980 if errPos.IsValid() {
981 var msg string
982 if tparams {
983
984
985
986
987
988
989 if named == typed {
990 errPos = p.pos
991 msg = "missing type constraint"
992 } else {
993 msg = "missing type parameter name"
994
995 if len(list) == 1 {
996 msg += " or invalid array length"
997 }
998 }
999 } else {
1000 msg = "mixed named and unnamed parameters"
1001 }
1002 p.error(errPos, msg)
1003 }
1004 }
1005
1006
1007
1008 if named == 0 {
1009
1010 for _, par := range list {
1011 assert(par.typ != nil, "nil type in unnamed parameter list")
1012 params = append(params, &ast.Field{Type: par.typ})
1013 }
1014 return
1015 }
1016
1017
1018
1019 var names []*ast.Ident
1020 var typ ast.Expr
1021 addParams := func() {
1022 assert(typ != nil, "nil type in named parameter list")
1023 field := &ast.Field{Names: names, Type: typ}
1024 params = append(params, field)
1025 names = nil
1026 }
1027 for _, par := range list {
1028 if par.typ != typ {
1029 if len(names) > 0 {
1030 addParams()
1031 }
1032 typ = par.typ
1033 }
1034 names = append(names, par.name)
1035 }
1036 if len(names) > 0 {
1037 addParams()
1038 }
1039 return
1040 }
1041
1042 func (p *parser) parseParameters(acceptTParams bool) (tparams, params *ast.FieldList) {
1043 if p.trace {
1044 defer un(trace(p, "Parameters"))
1045 }
1046
1047 if acceptTParams && p.tok == token.LBRACK {
1048 opening := p.pos
1049 p.next()
1050
1051 list := p.parseParameterList(nil, nil, token.RBRACK)
1052 rbrack := p.expect(token.RBRACK)
1053 tparams = &ast.FieldList{Opening: opening, List: list, Closing: rbrack}
1054
1055 if tparams.NumFields() == 0 {
1056 p.error(tparams.Closing, "empty type parameter list")
1057 tparams = nil
1058 }
1059 }
1060
1061 opening := p.expect(token.LPAREN)
1062
1063 var fields []*ast.Field
1064 if p.tok != token.RPAREN {
1065 fields = p.parseParameterList(nil, nil, token.RPAREN)
1066 }
1067
1068 rparen := p.expect(token.RPAREN)
1069 params = &ast.FieldList{Opening: opening, List: fields, Closing: rparen}
1070
1071 return
1072 }
1073
1074 func (p *parser) parseResult() *ast.FieldList {
1075 if p.trace {
1076 defer un(trace(p, "Result"))
1077 }
1078
1079 if p.tok == token.LPAREN {
1080 _, results := p.parseParameters(false)
1081 return results
1082 }
1083
1084 typ := p.tryIdentOrType()
1085 if typ != nil {
1086 list := make([]*ast.Field, 1)
1087 list[0] = &ast.Field{Type: typ}
1088 return &ast.FieldList{List: list}
1089 }
1090
1091 return nil
1092 }
1093
1094 func (p *parser) parseFuncType() *ast.FuncType {
1095 if p.trace {
1096 defer un(trace(p, "FuncType"))
1097 }
1098
1099 pos := p.expect(token.FUNC)
1100 tparams, params := p.parseParameters(true)
1101 if tparams != nil {
1102 p.error(tparams.Pos(), "function type must have no type parameters")
1103 }
1104 results := p.parseResult()
1105
1106 return &ast.FuncType{Func: pos, Params: params, Results: results}
1107 }
1108
1109 func (p *parser) parseMethodSpec() *ast.Field {
1110 if p.trace {
1111 defer un(trace(p, "MethodSpec"))
1112 }
1113
1114 doc := p.leadComment
1115 var idents []*ast.Ident
1116 var typ ast.Expr
1117 x := p.parseTypeName(nil)
1118 if ident, _ := x.(*ast.Ident); ident != nil {
1119 switch {
1120 case p.tok == token.LBRACK:
1121
1122 lbrack := p.pos
1123 p.next()
1124 p.exprLev++
1125 x := p.parseExpr()
1126 p.exprLev--
1127 if name0, _ := x.(*ast.Ident); name0 != nil && p.tok != token.COMMA && p.tok != token.RBRACK {
1128
1129
1130
1131
1132 _ = p.parseParameterList(name0, nil, token.RBRACK)
1133 _ = p.expect(token.RBRACK)
1134 p.error(lbrack, "interface method must have no type parameters")
1135
1136
1137 _, params := p.parseParameters(false)
1138 results := p.parseResult()
1139 idents = []*ast.Ident{ident}
1140 typ = &ast.FuncType{
1141 Func: token.NoPos,
1142 Params: params,
1143 Results: results,
1144 }
1145 } else {
1146
1147
1148 list := []ast.Expr{x}
1149 if p.atComma("type argument list", token.RBRACK) {
1150 p.exprLev++
1151 p.next()
1152 for p.tok != token.RBRACK && p.tok != token.EOF {
1153 list = append(list, p.parseType())
1154 if !p.atComma("type argument list", token.RBRACK) {
1155 break
1156 }
1157 p.next()
1158 }
1159 p.exprLev--
1160 }
1161 rbrack := p.expectClosing(token.RBRACK, "type argument list")
1162 typ = typeparams.PackIndexExpr(ident, lbrack, list, rbrack)
1163 }
1164 case p.tok == token.LPAREN:
1165
1166
1167 _, params := p.parseParameters(false)
1168 results := p.parseResult()
1169 idents = []*ast.Ident{ident}
1170 typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
1171 default:
1172
1173 typ = x
1174 }
1175 } else {
1176
1177 typ = x
1178 if p.tok == token.LBRACK {
1179
1180 typ = p.parseTypeInstance(typ)
1181 }
1182 }
1183
1184
1185
1186
1187
1188 return &ast.Field{Doc: doc, Names: idents, Type: typ}
1189 }
1190
1191 func (p *parser) embeddedElem(x ast.Expr) ast.Expr {
1192 if p.trace {
1193 defer un(trace(p, "EmbeddedElem"))
1194 }
1195 if x == nil {
1196 x = p.embeddedTerm()
1197 }
1198 for p.tok == token.OR {
1199 t := new(ast.BinaryExpr)
1200 t.OpPos = p.pos
1201 t.Op = token.OR
1202 p.next()
1203 t.X = x
1204 t.Y = p.embeddedTerm()
1205 x = t
1206 }
1207 return x
1208 }
1209
1210 func (p *parser) embeddedTerm() ast.Expr {
1211 if p.trace {
1212 defer un(trace(p, "EmbeddedTerm"))
1213 }
1214 if p.tok == token.TILDE {
1215 t := new(ast.UnaryExpr)
1216 t.OpPos = p.pos
1217 t.Op = token.TILDE
1218 p.next()
1219 t.X = p.parseType()
1220 return t
1221 }
1222
1223 t := p.tryIdentOrType()
1224 if t == nil {
1225 pos := p.pos
1226 p.errorExpected(pos, "~ term or type")
1227 p.advance(exprEnd)
1228 return &ast.BadExpr{From: pos, To: p.pos}
1229 }
1230
1231 return t
1232 }
1233
1234 func (p *parser) parseInterfaceType() *ast.InterfaceType {
1235 if p.trace {
1236 defer un(trace(p, "InterfaceType"))
1237 }
1238
1239 pos := p.expect(token.INTERFACE)
1240 lbrace := p.expect(token.LBRACE)
1241
1242 var list []*ast.Field
1243
1244 parseElements:
1245 for {
1246 switch {
1247 case p.tok == token.IDENT:
1248 f := p.parseMethodSpec()
1249 if f.Names == nil {
1250 f.Type = p.embeddedElem(f.Type)
1251 }
1252 f.Comment = p.expectSemi()
1253 list = append(list, f)
1254 case p.tok == token.TILDE:
1255 typ := p.embeddedElem(nil)
1256 comment := p.expectSemi()
1257 list = append(list, &ast.Field{Type: typ, Comment: comment})
1258 default:
1259 if t := p.tryIdentOrType(); t != nil {
1260 typ := p.embeddedElem(t)
1261 comment := p.expectSemi()
1262 list = append(list, &ast.Field{Type: typ, Comment: comment})
1263 } else {
1264 break parseElements
1265 }
1266 }
1267 }
1268
1269
1270
1271 rbrace := p.expect(token.RBRACE)
1272
1273 return &ast.InterfaceType{
1274 Interface: pos,
1275 Methods: &ast.FieldList{
1276 Opening: lbrace,
1277 List: list,
1278 Closing: rbrace,
1279 },
1280 }
1281 }
1282
1283 func (p *parser) parseMapType() *ast.MapType {
1284 if p.trace {
1285 defer un(trace(p, "MapType"))
1286 }
1287
1288 pos := p.expect(token.MAP)
1289 p.expect(token.LBRACK)
1290 key := p.parseType()
1291 p.expect(token.RBRACK)
1292 value := p.parseType()
1293
1294 return &ast.MapType{Map: pos, Key: key, Value: value}
1295 }
1296
1297 func (p *parser) parseChanType() *ast.ChanType {
1298 if p.trace {
1299 defer un(trace(p, "ChanType"))
1300 }
1301
1302 pos := p.pos
1303 dir := ast.SEND | ast.RECV
1304 var arrow token.Pos
1305 if p.tok == token.CHAN {
1306 p.next()
1307 if p.tok == token.ARROW {
1308 arrow = p.pos
1309 p.next()
1310 dir = ast.SEND
1311 }
1312 } else {
1313 arrow = p.expect(token.ARROW)
1314 p.expect(token.CHAN)
1315 dir = ast.RECV
1316 }
1317 value := p.parseType()
1318
1319 return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
1320 }
1321
1322 func (p *parser) parseTypeInstance(typ ast.Expr) ast.Expr {
1323 if p.trace {
1324 defer un(trace(p, "TypeInstance"))
1325 }
1326
1327 opening := p.expect(token.LBRACK)
1328 p.exprLev++
1329 var list []ast.Expr
1330 for p.tok != token.RBRACK && p.tok != token.EOF {
1331 list = append(list, p.parseType())
1332 if !p.atComma("type argument list", token.RBRACK) {
1333 break
1334 }
1335 p.next()
1336 }
1337 p.exprLev--
1338
1339 closing := p.expectClosing(token.RBRACK, "type argument list")
1340
1341 if len(list) == 0 {
1342 p.errorExpected(closing, "type argument list")
1343 return &ast.IndexExpr{
1344 X: typ,
1345 Lbrack: opening,
1346 Index: &ast.BadExpr{From: opening + 1, To: closing},
1347 Rbrack: closing,
1348 }
1349 }
1350
1351 return typeparams.PackIndexExpr(typ, opening, list, closing)
1352 }
1353
1354 func (p *parser) tryIdentOrType() ast.Expr {
1355 defer decNestLev(incNestLev(p))
1356
1357 switch p.tok {
1358 case token.IDENT:
1359 typ := p.parseTypeName(nil)
1360 if p.tok == token.LBRACK {
1361 typ = p.parseTypeInstance(typ)
1362 }
1363 return typ
1364 case token.LBRACK:
1365 lbrack := p.expect(token.LBRACK)
1366 return p.parseArrayType(lbrack, nil)
1367 case token.STRUCT:
1368 return p.parseStructType()
1369 case token.MUL:
1370 return p.parsePointerType()
1371 case token.FUNC:
1372 return p.parseFuncType()
1373 case token.INTERFACE:
1374 return p.parseInterfaceType()
1375 case token.MAP:
1376 return p.parseMapType()
1377 case token.CHAN, token.ARROW:
1378 return p.parseChanType()
1379 case token.LPAREN:
1380 lparen := p.pos
1381 p.next()
1382 typ := p.parseType()
1383 rparen := p.expect(token.RPAREN)
1384 return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
1385 }
1386
1387
1388 return nil
1389 }
1390
1391
1392
1393
1394 func (p *parser) parseStmtList() (list []ast.Stmt) {
1395 if p.trace {
1396 defer un(trace(p, "StatementList"))
1397 }
1398
1399 for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
1400 list = append(list, p.parseStmt())
1401 }
1402
1403 return
1404 }
1405
1406 func (p *parser) parseBody() *ast.BlockStmt {
1407 if p.trace {
1408 defer un(trace(p, "Body"))
1409 }
1410
1411 lbrace := p.expect(token.LBRACE)
1412 list := p.parseStmtList()
1413 rbrace := p.expect2(token.RBRACE)
1414
1415 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1416 }
1417
1418 func (p *parser) parseBlockStmt() *ast.BlockStmt {
1419 if p.trace {
1420 defer un(trace(p, "BlockStmt"))
1421 }
1422
1423 lbrace := p.expect(token.LBRACE)
1424 list := p.parseStmtList()
1425 rbrace := p.expect2(token.RBRACE)
1426
1427 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1428 }
1429
1430
1431
1432
1433 func (p *parser) parseFuncTypeOrLit() ast.Expr {
1434 if p.trace {
1435 defer un(trace(p, "FuncTypeOrLit"))
1436 }
1437
1438 typ := p.parseFuncType()
1439 if p.tok != token.LBRACE {
1440
1441 return typ
1442 }
1443
1444 p.exprLev++
1445 body := p.parseBody()
1446 p.exprLev--
1447
1448 return &ast.FuncLit{Type: typ, Body: body}
1449 }
1450
1451
1452
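// parseOperand may return an expression or a raw type (incl. array
// types of the form [...]T). Callers must verify the result.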
1453 func (p *parser) parseOperand() ast.Expr {
1454 if p.trace {
1455 defer un(trace(p, "Operand"))
1456 }
1457
1458 switch p.tok {
1459 case token.IDENT:
1460 x := p.parseIdent()
1461 return x
1462
1463 case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
1464 x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
1465 p.next()
1466 return x
1467
1468 case token.LPAREN:
1469 lparen := p.pos
1470 p.next()
1471 p.exprLev++
1472 x := p.parseRhs()
1473 p.exprLev--
1474 rparen := p.expect(token.RPAREN)
1475 return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}
1476
1477 case token.FUNC:
1478 return p.parseFuncTypeOrLit()
1479 }
1480
1481 if typ := p.tryIdentOrType(); typ != nil {
1482
1483 _, isIdent := typ.(*ast.Ident)
1484 assert(!isIdent, "type cannot be identifier")
1485 return typ
1486 }
1487
1488
1489 pos := p.pos
1490 p.errorExpected(pos, "operand")
1491 p.advance(stmtStart)
1492 return &ast.BadExpr{From: pos, To: p.pos}
1493 }
1494
1495 func (p *parser) parseSelector(x ast.Expr) ast.Expr {
1496 if p.trace {
1497 defer un(trace(p, "Selector"))
1498 }
1499
1500 sel := p.parseIdent()
1501
1502 return &ast.SelectorExpr{X: x, Sel: sel}
1503 }
1504
1505 func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
1506 if p.trace {
1507 defer un(trace(p, "TypeAssertion"))
1508 }
1509
1510 lparen := p.expect(token.LPAREN)
1511 var typ ast.Expr
1512 if p.tok == token.TYPE {
1513
1514 p.next()
1515 } else {
1516 typ = p.parseType()
1517 }
1518 rparen := p.expect(token.RPAREN)
1519
1520 return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen}
1521 }
1522
1523 func (p *parser) parseIndexOrSliceOrInstance(x ast.Expr) ast.Expr {
1524 if p.trace {
1525 defer un(trace(p, "parseIndexOrSliceOrInstance"))
1526 }
1527
1528 lbrack := p.expect(token.LBRACK)
1529 if p.tok == token.RBRACK {
1530
1531
1532 p.errorExpected(p.pos, "operand")
1533 rbrack := p.pos
1534 p.next()
1535 return &ast.IndexExpr{
1536 X: x,
1537 Lbrack: lbrack,
1538 Index: &ast.BadExpr{From: rbrack, To: rbrack},
1539 Rbrack: rbrack,
1540 }
1541 }
1542 p.exprLev++
1543
1544 const N = 3
1545 var args []ast.Expr
1546 var index [N]ast.Expr
1547 var colons [N - 1]token.Pos
1548 if p.tok != token.COLON {
1549
1550
1551 index[0] = p.parseRhs()
1552 }
1553 ncolons := 0
1554 switch p.tok {
1555 case token.COLON:
1556
1557 for p.tok == token.COLON && ncolons < len(colons) {
1558 colons[ncolons] = p.pos
1559 ncolons++
1560 p.next()
1561 if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF {
1562 index[ncolons] = p.parseRhs()
1563 }
1564 }
1565 case token.COMMA:
1566
1567 args = append(args, index[0])
1568 for p.tok == token.COMMA {
1569 p.next()
1570 if p.tok != token.RBRACK && p.tok != token.EOF {
1571 args = append(args, p.parseType())
1572 }
1573 }
1574 }
1575
1576 p.exprLev--
1577 rbrack := p.expect(token.RBRACK)
1578
1579 if ncolons > 0 {
1580
1581 slice3 := false
1582 if ncolons == 2 {
1583 slice3 = true
1584
1585
1586 if index[1] == nil {
1587 p.error(colons[0], "middle index required in 3-index slice")
1588 index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]}
1589 }
1590 if index[2] == nil {
1591 p.error(colons[1], "final index required in 3-index slice")
1592 index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack}
1593 }
1594 }
1595 return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack}
1596 }
1597
1598 if len(args) == 0 {
1599
1600 return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack}
1601 }
1602
1603
1604 return typeparams.PackIndexExpr(x, lbrack, args, rbrack)
1605 }
1606
1607 func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
1608 if p.trace {
1609 defer un(trace(p, "CallOrConversion"))
1610 }
1611
1612 lparen := p.expect(token.LPAREN)
1613 p.exprLev++
1614 var list []ast.Expr
1615 var ellipsis token.Pos
1616 for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
1617 list = append(list, p.parseRhs())
1618 if p.tok == token.ELLIPSIS {
1619 ellipsis = p.pos
1620 p.next()
1621 }
1622 if !p.atComma("argument list", token.RPAREN) {
1623 break
1624 }
1625 p.next()
1626 }
1627 p.exprLev--
1628 rparen := p.expectClosing(token.RPAREN, "argument list")
1629
1630 return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
1631 }
1632
1633 func (p *parser) parseValue() ast.Expr {
1634 if p.trace {
1635 defer un(trace(p, "Element"))
1636 }
1637
1638 if p.tok == token.LBRACE {
1639 return p.parseLiteralValue(nil)
1640 }
1641
1642 x := p.parseExpr()
1643
1644 return x
1645 }
1646
1647 func (p *parser) parseElement() ast.Expr {
1648 if p.trace {
1649 defer un(trace(p, "Element"))
1650 }
1651
1652 x := p.parseValue()
1653 if p.tok == token.COLON {
1654 colon := p.pos
1655 p.next()
1656 x = &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseValue()}
1657 }
1658
1659 return x
1660 }
1661
1662 func (p *parser) parseElementList() (list []ast.Expr) {
1663 if p.trace {
1664 defer un(trace(p, "ElementList"))
1665 }
1666
1667 for p.tok != token.RBRACE && p.tok != token.EOF {
1668 list = append(list, p.parseElement())
1669 if !p.atComma("composite literal", token.RBRACE) {
1670 break
1671 }
1672 p.next()
1673 }
1674
1675 return
1676 }
1677
1678 func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
1679 if p.trace {
1680 defer un(trace(p, "LiteralValue"))
1681 }
1682
1683 lbrace := p.expect(token.LBRACE)
1684 var elts []ast.Expr
1685 p.exprLev++
1686 if p.tok != token.RBRACE {
1687 elts = p.parseElementList()
1688 }
1689 p.exprLev--
1690 rbrace := p.expectClosing(token.RBRACE, "composite literal")
1691 return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
1692 }
1693
1694 func (p *parser) parsePrimaryExpr(x ast.Expr) ast.Expr {
1695 if p.trace {
1696 defer un(trace(p, "PrimaryExpr"))
1697 }
1698
1699 if x == nil {
1700 x = p.parseOperand()
1701 }
1702
1703
1704
1705 var n int
1706 defer func() { p.nestLev -= n }()
1707 for n = 1; ; n++ {
1708 incNestLev(p)
1709 switch p.tok {
1710 case token.PERIOD:
1711 p.next()
1712 switch p.tok {
1713 case token.IDENT:
1714 x = p.parseSelector(x)
1715 case token.LPAREN:
1716 x = p.parseTypeAssertion(x)
1717 default:
1718 pos := p.pos
1719 p.errorExpected(pos, "selector or type assertion")
1720
1721
1722
1723
1724
1725 if p.tok != token.RBRACE {
1726 p.next()
1727 }
1728 sel := &ast.Ident{NamePos: pos, Name: "_"}
1729 x = &ast.SelectorExpr{X: x, Sel: sel}
1730 }
1731 case token.LBRACK:
1732 x = p.parseIndexOrSliceOrInstance(x)
1733 case token.LPAREN:
1734 x = p.parseCallOrConversion(x)
1735 case token.LBRACE:
1736
1737
1738 t := ast.Unparen(x)
1739
1740 switch t.(type) {
1741 case *ast.BadExpr, *ast.Ident, *ast.SelectorExpr:
1742 if p.exprLev < 0 {
1743 return x
1744 }
1745
1746 case *ast.IndexExpr, *ast.IndexListExpr:
1747 if p.exprLev < 0 {
1748 return x
1749 }
1750
1751 case *ast.ArrayType, *ast.StructType, *ast.MapType:
1752
1753 default:
1754 return x
1755 }
1756 if t != x {
1757 p.error(t.Pos(), "cannot parenthesize type in composite literal")
1758
1759 }
1760 x = p.parseLiteralValue(x)
1761 default:
1762 return x
1763 }
1764 }
1765 }
1766
1767 func (p *parser) parseUnaryExpr() ast.Expr {
1768 defer decNestLev(incNestLev(p))
1769
1770 if p.trace {
1771 defer un(trace(p, "UnaryExpr"))
1772 }
1773
1774 switch p.tok {
1775 case token.ADD, token.SUB, token.NOT, token.XOR, token.AND, token.TILDE:
1776 pos, op := p.pos, p.tok
1777 p.next()
1778 x := p.parseUnaryExpr()
1779 return &ast.UnaryExpr{OpPos: pos, Op: op, X: x}
1780
1781 case token.ARROW:
1782
1783 arrow := p.pos
1784 p.next()
1785
1786
1787
1788
1789
1790
1791
1792
1793
1794
1795
1796
1797
1798
1799
1800 x := p.parseUnaryExpr()
1801
1802
1803 if typ, ok := x.(*ast.ChanType); ok {
1804
1805
1806
1807 dir := ast.SEND
1808 for ok && dir == ast.SEND {
1809 if typ.Dir == ast.RECV {
1810
1811 p.errorExpected(typ.Arrow, "'chan'")
1812 }
1813 arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
1814 dir, typ.Dir = typ.Dir, ast.RECV
1815 typ, ok = typ.Value.(*ast.ChanType)
1816 }
1817 if dir == ast.SEND {
1818 p.errorExpected(arrow, "channel type")
1819 }
1820
1821 return x
1822 }
1823
1824
1825 return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: x}
1826
1827 case token.MUL:
1828
1829 pos := p.pos
1830 p.next()
1831 x := p.parseUnaryExpr()
1832 return &ast.StarExpr{Star: pos, X: x}
1833 }
1834
1835 return p.parsePrimaryExpr(nil)
1836 }
1837
1838 func (p *parser) tokPrec() (token.Token, int) {
1839 tok := p.tok
1840 if p.inRhs && tok == token.ASSIGN {
1841 tok = token.EQL
1842 }
1843 return tok, tok.Precedence()
1844 }
1845
1846
1847
1848
1849
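// parseBinaryExpr parses a (possibly) binary expression.
// If x is non-nil, it is used as the left operand.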
1850 func (p *parser) parseBinaryExpr(x ast.Expr, prec1 int) ast.Expr {
1851 if p.trace {
1852 defer un(trace(p, "BinaryExpr"))
1853 }
1854
1855 if x == nil {
1856 x = p.parseUnaryExpr()
1857 }
1858
1859
1860
1861 var n int
1862 defer func() { p.nestLev -= n }()
1863 for n = 1; ; n++ {
1864 incNestLev(p)
1865 op, oprec := p.tokPrec()
1866 if oprec < prec1 {
1867 return x
1868 }
1869 pos := p.expect(op)
1870 y := p.parseBinaryExpr(nil, oprec+1)
1871 x = &ast.BinaryExpr{X: x, OpPos: pos, Op: op, Y: y}
1872 }
1873 }
1874
1875
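// parseExpr parses an expression; the result may be a type or even a
// raw type ([...]int).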
1876 func (p *parser) parseExpr() ast.Expr {
1877 if p.trace {
1878 defer un(trace(p, "Expression"))
1879 }
1880
1881 return p.parseBinaryExpr(nil, token.LowestPrec+1)
1882 }
1883
1884 func (p *parser) parseRhs() ast.Expr {
1885 old := p.inRhs
1886 p.inRhs = true
1887 x := p.parseExpr()
1888 p.inRhs = old
1889 return x
1890 }
1891
1892
1893
1894
1895
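// Parsing modes for parseSimpleStmt.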
1896 const (
1897 basic = iota
1898 labelOk
1899 rangeOk
1900 )
1901
1902
1903
1904
1905
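// parseSimpleStmt returns true as 2nd result if it parsed the assignment
// of a range clause (with mode == rangeOk). The returned statement is an
// assignment with a right-hand side that is a single unary expression of
// the form "range x". No guarantees are given for the left-hand side.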
1906 func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
1907 if p.trace {
1908 defer un(trace(p, "SimpleStmt"))
1909 }
1910
1911 x := p.parseList(false)
1912
1913 switch p.tok {
1914 case
1915 token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
1916 token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
1917 token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
1918 token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
1919
1920 pos, tok := p.pos, p.tok
1921 p.next()
1922 var y []ast.Expr
1923 isRange := false
1924 if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
1925 pos := p.pos
1926 p.next()
1927 y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
1928 isRange = true
1929 } else {
1930 y = p.parseList(true)
1931 }
1932 return &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}, isRange
1933 }
1934
1935 if len(x) > 1 {
1936 p.errorExpected(x[0].Pos(), "1 expression")
1937
1938 }
1939
1940 switch p.tok {
1941 case token.COLON:
1942
1943 colon := p.pos
1944 p.next()
1945 if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent {
1946
1947
1948
1949 stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
1950 return stmt, false
1951 }
1952
1953
1954
1955
1956
1957
1958 p.error(colon, "illegal label declaration")
1959 return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false
1960
1961 case token.ARROW:
1962
1963 arrow := p.pos
1964 p.next()
1965 y := p.parseRhs()
1966 return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false
1967
1968 case token.INC, token.DEC:
1969
1970 s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
1971 p.next()
1972 return s, false
1973 }
1974
1975
1976 return &ast.ExprStmt{X: x[0]}, false
1977 }
1978
1979 func (p *parser) parseCallExpr(callType string) *ast.CallExpr {
1980 x := p.parseRhs()
1981 if t := ast.Unparen(x); t != x {
1982 p.error(x.Pos(), fmt.Sprintf("expression in %s must not be parenthesized", callType))
1983 x = t
1984 }
1985 if call, isCall := x.(*ast.CallExpr); isCall {
1986 return call
1987 }
1988 if _, isBad := x.(*ast.BadExpr); !isBad {
1989
1990 p.error(p.safePos(x.End()), fmt.Sprintf("expression in %s must be function call", callType))
1991 }
1992 return nil
1993 }
1994
1995 func (p *parser) parseGoStmt() ast.Stmt {
1996 if p.trace {
1997 defer un(trace(p, "GoStmt"))
1998 }
1999
2000 pos := p.expect(token.GO)
2001 call := p.parseCallExpr("go")
2002 p.expectSemi()
2003 if call == nil {
2004 return &ast.BadStmt{From: pos, To: pos + 2}
2005 }
2006
2007 return &ast.GoStmt{Go: pos, Call: call}
2008 }
2009
2010 func (p *parser) parseDeferStmt() ast.Stmt {
2011 if p.trace {
2012 defer un(trace(p, "DeferStmt"))
2013 }
2014
2015 pos := p.expect(token.DEFER)
2016 call := p.parseCallExpr("defer")
2017 p.expectSemi()
2018 if call == nil {
2019 return &ast.BadStmt{From: pos, To: pos + 5}
2020 }
2021
2022 return &ast.DeferStmt{Defer: pos, Call: call}
2023 }
2024
2025 func (p *parser) parseReturnStmt() *ast.ReturnStmt {
2026 if p.trace {
2027 defer un(trace(p, "ReturnStmt"))
2028 }
2029
2030 pos := p.pos
2031 p.expect(token.RETURN)
2032 var x []ast.Expr
2033 if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
2034 x = p.parseList(true)
2035 }
2036 p.expectSemi()
2037
2038 return &ast.ReturnStmt{Return: pos, Results: x}
2039 }
2040
2041 func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
2042 if p.trace {
2043 defer un(trace(p, "BranchStmt"))
2044 }
2045
2046 pos := p.expect(tok)
2047 var label *ast.Ident
2048 if tok != token.FALLTHROUGH && p.tok == token.IDENT {
2049 label = p.parseIdent()
2050 }
2051 p.expectSemi()
2052
2053 return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
2054 }
2055
2056 func (p *parser) makeExpr(s ast.Stmt, want string) ast.Expr {
2057 if s == nil {
2058 return nil
2059 }
2060 if es, isExpr := s.(*ast.ExprStmt); isExpr {
2061 return es.X
2062 }
2063 found := "simple statement"
2064 if _, isAss := s.(*ast.AssignStmt); isAss {
2065 found = "assignment"
2066 }
2067 p.error(s.Pos(), fmt.Sprintf("expected %s, found %s (missing parentheses around composite literal?)", want, found))
2068 return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())}
2069 }
2070
2071
2072
2073
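// parseIfHeader parses the optional init statement and the condition of
// an if statement.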
2074 func (p *parser) parseIfHeader() (init ast.Stmt, cond ast.Expr) {
2075 if p.tok == token.LBRACE {
2076 p.error(p.pos, "missing condition in if statement")
2077 cond = &ast.BadExpr{From: p.pos, To: p.pos}
2078 return
2079 }
2080
2081
2082 prevLev := p.exprLev
2083 p.exprLev = -1
2084
2085 if p.tok != token.SEMICOLON {
2086
2087 if p.tok == token.VAR {
2088 p.next()
2089 p.error(p.pos, "var declaration not allowed in if initializer")
2090 }
2091 init, _ = p.parseSimpleStmt(basic)
2092 }
2093
2094 var condStmt ast.Stmt
2095 var semi struct {
2096 pos token.Pos
2097 lit string
2098 }
2099 if p.tok != token.LBRACE {
2100 if p.tok == token.SEMICOLON {
2101 semi.pos = p.pos
2102 semi.lit = p.lit
2103 p.next()
2104 } else {
2105 p.expect(token.SEMICOLON)
2106 }
2107 if p.tok != token.LBRACE {
2108 condStmt, _ = p.parseSimpleStmt(basic)
2109 }
2110 } else {
2111 condStmt = init
2112 init = nil
2113 }
2114
2115 if condStmt != nil {
2116 cond = p.makeExpr(condStmt, "boolean expression")
2117 } else if semi.pos.IsValid() {
2118 if semi.lit == "\n" {
2119 p.error(semi.pos, "unexpected newline, expecting { after if clause")
2120 } else {
2121 p.error(semi.pos, "missing condition in if statement")
2122 }
2123 }
2124
2125
2126 if cond == nil {
2127 cond = &ast.BadExpr{From: p.pos, To: p.pos}
2128 }
2129
2130 p.exprLev = prevLev
2131 return
2132 }
2133
2134 func (p *parser) parseIfStmt() *ast.IfStmt {
2135 defer decNestLev(incNestLev(p))
2136
2137 if p.trace {
2138 defer un(trace(p, "IfStmt"))
2139 }
2140
2141 pos := p.expect(token.IF)
2142
2143 init, cond := p.parseIfHeader()
2144 body := p.parseBlockStmt()
2145
2146 var else_ ast.Stmt
2147 if p.tok == token.ELSE {
2148 p.next()
2149 switch p.tok {
2150 case token.IF:
2151 else_ = p.parseIfStmt()
2152 case token.LBRACE:
2153 else_ = p.parseBlockStmt()
2154 p.expectSemi()
2155 default:
2156 p.errorExpected(p.pos, "if statement or block")
2157 else_ = &ast.BadStmt{From: p.pos, To: p.pos}
2158 }
2159 } else {
2160 p.expectSemi()
2161 }
2162
2163 return &ast.IfStmt{If: pos, Init: init, Cond: cond, Body: body, Else: else_}
2164 }
2165
2166 func (p *parser) parseCaseClause() *ast.CaseClause {
2167 if p.trace {
2168 defer un(trace(p, "CaseClause"))
2169 }
2170
2171 pos := p.pos
2172 var list []ast.Expr
2173 if p.tok == token.CASE {
2174 p.next()
2175 list = p.parseList(true)
2176 } else {
2177 p.expect(token.DEFAULT)
2178 }
2179
2180 colon := p.expect(token.COLON)
2181 body := p.parseStmtList()
2182
2183 return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
2184 }
2185
2186 func isTypeSwitchAssert(x ast.Expr) bool {
2187 a, ok := x.(*ast.TypeAssertExpr)
2188 return ok && a.Type == nil
2189 }
2190
2191 func (p *parser) isTypeSwitchGuard(s ast.Stmt) bool {
2192 switch t := s.(type) {
2193 case *ast.ExprStmt:
2194
2195 return isTypeSwitchAssert(t.X)
2196 case *ast.AssignStmt:
2197
2198 if len(t.Lhs) == 1 && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0]) {
2199 switch t.Tok {
2200 case token.ASSIGN:
2201
2202 p.error(t.TokPos, "expected ':=', found '='")
2203 fallthrough
2204 case token.DEFINE:
2205 return true
2206 }
2207 }
2208 }
2209 return false
2210 }
2211
2212 func (p *parser) parseSwitchStmt() ast.Stmt {
2213 if p.trace {
2214 defer un(trace(p, "SwitchStmt"))
2215 }
2216
2217 pos := p.expect(token.SWITCH)
2218
2219 var s1, s2 ast.Stmt
2220 if p.tok != token.LBRACE {
2221 prevLev := p.exprLev
2222 p.exprLev = -1
2223 if p.tok != token.SEMICOLON {
2224 s2, _ = p.parseSimpleStmt(basic)
2225 }
2226 if p.tok == token.SEMICOLON {
2227 p.next()
2228 s1 = s2
2229 s2 = nil
2230 if p.tok != token.LBRACE {
2231
2232
2233
2234
2235
2236
2237
2238
2239
2240
2241
2242
2243 s2, _ = p.parseSimpleStmt(basic)
2244 }
2245 }
2246 p.exprLev = prevLev
2247 }
2248
2249 typeSwitch := p.isTypeSwitchGuard(s2)
2250 lbrace := p.expect(token.LBRACE)
2251 var list []ast.Stmt
2252 for p.tok == token.CASE || p.tok == token.DEFAULT {
2253 list = append(list, p.parseCaseClause())
2254 }
2255 rbrace := p.expect(token.RBRACE)
2256 p.expectSemi()
2257 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
2258
2259 if typeSwitch {
2260 return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
2261 }
2262
2263 return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body}
2264 }
2265
2266 func (p *parser) parseCommClause() *ast.CommClause {
2267 if p.trace {
2268 defer un(trace(p, "CommClause"))
2269 }
2270
2271 pos := p.pos
2272 var comm ast.Stmt
2273 if p.tok == token.CASE {
2274 p.next()
2275 lhs := p.parseList(false)
2276 if p.tok == token.ARROW {
2277
2278 if len(lhs) > 1 {
2279 p.errorExpected(lhs[0].Pos(), "1 expression")
2280
2281 }
2282 arrow := p.pos
2283 p.next()
2284 rhs := p.parseRhs()
2285 comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
2286 } else {
2287
2288 if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
2289
2290 if len(lhs) > 2 {
2291 p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
2292
2293 lhs = lhs[0:2]
2294 }
2295 pos := p.pos
2296 p.next()
2297 rhs := p.parseRhs()
2298 comm = &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
2299 } else {
2300
2301 if len(lhs) > 1 {
2302 p.errorExpected(lhs[0].Pos(), "1 expression")
2303
2304 }
2305 comm = &ast.ExprStmt{X: lhs[0]}
2306 }
2307 }
2308 } else {
2309 p.expect(token.DEFAULT)
2310 }
2311
2312 colon := p.expect(token.COLON)
2313 body := p.parseStmtList()
2314
2315 return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
2316 }
2317
2318 func (p *parser) parseSelectStmt() *ast.SelectStmt {
2319 if p.trace {
2320 defer un(trace(p, "SelectStmt"))
2321 }
2322
2323 pos := p.expect(token.SELECT)
2324 lbrace := p.expect(token.LBRACE)
2325 var list []ast.Stmt
2326 for p.tok == token.CASE || p.tok == token.DEFAULT {
2327 list = append(list, p.parseCommClause())
2328 }
2329 rbrace := p.expect(token.RBRACE)
2330 p.expectSemi()
2331 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
2332
2333 return &ast.SelectStmt{Select: pos, Body: body}
2334 }
2335
2336 func (p *parser) parseForStmt() ast.Stmt {
2337 if p.trace {
2338 defer un(trace(p, "ForStmt"))
2339 }
2340
2341 pos := p.expect(token.FOR)
2342
2343 var s1, s2, s3 ast.Stmt
2344 var isRange bool
2345 if p.tok != token.LBRACE {
2346 prevLev := p.exprLev
2347 p.exprLev = -1
2348 if p.tok != token.SEMICOLON {
2349 if p.tok == token.RANGE {
2350
2351 pos := p.pos
2352 p.next()
2353 y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
2354 s2 = &ast.AssignStmt{Rhs: y}
2355 isRange = true
2356 } else {
2357 s2, isRange = p.parseSimpleStmt(rangeOk)
2358 }
2359 }
2360 if !isRange && p.tok == token.SEMICOLON {
2361 p.next()
2362 s1 = s2
2363 s2 = nil
2364 if p.tok != token.SEMICOLON {
2365 s2, _ = p.parseSimpleStmt(basic)
2366 }
2367 p.expectSemi()
2368 if p.tok != token.LBRACE {
2369 s3, _ = p.parseSimpleStmt(basic)
2370 }
2371 }
2372 p.exprLev = prevLev
2373 }
2374
2375 body := p.parseBlockStmt()
2376 p.expectSemi()
2377
2378 if isRange {
2379 as := s2.(*ast.AssignStmt)
2380
2381 var key, value ast.Expr
2382 switch len(as.Lhs) {
2383 case 0:
2384
2385 case 1:
2386 key = as.Lhs[0]
2387 case 2:
2388 key, value = as.Lhs[0], as.Lhs[1]
2389 default:
2390 p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions")
2391 return &ast.BadStmt{From: pos, To: p.safePos(body.End())}
2392 }
2393
2394
2395 x := as.Rhs[0].(*ast.UnaryExpr).X
2396 return &ast.RangeStmt{
2397 For: pos,
2398 Key: key,
2399 Value: value,
2400 TokPos: as.TokPos,
2401 Tok: as.Tok,
2402 Range: as.Rhs[0].Pos(),
2403 X: x,
2404 Body: body,
2405 }
2406 }
2407
2408
2409 return &ast.ForStmt{
2410 For: pos,
2411 Init: s1,
2412 Cond: p.makeExpr(s2, "boolean or range expression"),
2413 Post: s3,
2414 Body: body,
2415 }
2416 }
2417
2418 func (p *parser) parseStmt() (s ast.Stmt) {
2419 defer decNestLev(incNestLev(p))
2420
2421 if p.trace {
2422 defer un(trace(p, "Statement"))
2423 }
2424
2425 switch p.tok {
2426 case token.CONST, token.TYPE, token.VAR:
2427 s = &ast.DeclStmt{Decl: p.parseDecl(stmtStart)}
2428 case
2429
2430 token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN,
2431 token.LBRACK, token.STRUCT, token.MAP, token.CHAN, token.INTERFACE,
2432 token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT:
2433 s, _ = p.parseSimpleStmt(labelOk)
2434
2435
2436
2437 if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
2438 p.expectSemi()
2439 }
2440 case token.GO:
2441 s = p.parseGoStmt()
2442 case token.DEFER:
2443 s = p.parseDeferStmt()
2444 case token.RETURN:
2445 s = p.parseReturnStmt()
2446 case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
2447 s = p.parseBranchStmt(p.tok)
2448 case token.LBRACE:
2449 s = p.parseBlockStmt()
2450 p.expectSemi()
2451 case token.IF:
2452 s = p.parseIfStmt()
2453 case token.SWITCH:
2454 s = p.parseSwitchStmt()
2455 case token.SELECT:
2456 s = p.parseSelectStmt()
2457 case token.FOR:
2458 s = p.parseForStmt()
2459 case token.SEMICOLON:
2460
2461
2462
2463 s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: p.lit == "\n"}
2464 p.next()
2465 case token.RBRACE:
2466
2467 s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: true}
2468 default:
2469
2470 pos := p.pos
2471 p.errorExpected(pos, "statement")
2472 p.advance(stmtStart)
2473 s = &ast.BadStmt{From: pos, To: p.pos}
2474 }
2475
2476 return
2477 }
2478
2479
2480
2481
2482 type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
2483
2484 func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
2485 if p.trace {
2486 defer un(trace(p, "ImportSpec"))
2487 }
2488
2489 var ident *ast.Ident
2490 switch p.tok {
2491 case token.IDENT:
2492 ident = p.parseIdent()
2493 case token.PERIOD:
2494 ident = &ast.Ident{NamePos: p.pos, Name: "."}
2495 p.next()
2496 }
2497
2498 pos := p.pos
2499 var path string
2500 if p.tok == token.STRING {
2501 path = p.lit
2502 p.next()
2503 } else if p.tok.IsLiteral() {
2504 p.error(pos, "import path must be a string")
2505 p.next()
2506 } else {
2507 p.error(pos, "missing import path")
2508 p.advance(exprEnd)
2509 }
2510 comment := p.expectSemi()
2511
2512
2513 spec := &ast.ImportSpec{
2514 Doc: doc,
2515 Name: ident,
2516 Path: &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path},
2517 Comment: comment,
2518 }
2519 p.imports = append(p.imports, spec)
2520
2521 return spec
2522 }
2523
2524 func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
2525 if p.trace {
2526 defer un(trace(p, keyword.String()+"Spec"))
2527 }
2528
2529 idents := p.parseIdentList()
2530 var typ ast.Expr
2531 var values []ast.Expr
2532 switch keyword {
2533 case token.CONST:
2534
2535 if p.tok != token.EOF && p.tok != token.SEMICOLON && p.tok != token.RPAREN {
2536 typ = p.tryIdentOrType()
2537 if p.tok == token.ASSIGN {
2538 p.next()
2539 values = p.parseList(true)
2540 }
2541 }
2542 case token.VAR:
2543 if p.tok != token.ASSIGN {
2544 typ = p.parseType()
2545 }
2546 if p.tok == token.ASSIGN {
2547 p.next()
2548 values = p.parseList(true)
2549 }
2550 default:
2551 panic("unreachable")
2552 }
2553 comment := p.expectSemi()
2554
2555 spec := &ast.ValueSpec{
2556 Doc: doc,
2557 Names: idents,
2558 Type: typ,
2559 Values: values,
2560 Comment: comment,
2561 }
2562 return spec
2563 }
2564
2565 func (p *parser) parseGenericType(spec *ast.TypeSpec, openPos token.Pos, name0 *ast.Ident, typ0 ast.Expr) {
2566 if p.trace {
2567 defer un(trace(p, "parseGenericType"))
2568 }
2569
2570 list := p.parseParameterList(name0, typ0, token.RBRACK)
2571 closePos := p.expect(token.RBRACK)
2572 spec.TypeParams = &ast.FieldList{Opening: openPos, List: list, Closing: closePos}
2573
2574
2575 if p.tok == token.ASSIGN {
2576
2577 spec.Assign = p.pos
2578 p.next()
2579 }
2580 spec.Type = p.parseType()
2581 }
2582
2583 func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
2584 if p.trace {
2585 defer un(trace(p, "TypeSpec"))
2586 }
2587
2588 name := p.parseIdent()
2589 spec := &ast.TypeSpec{Doc: doc, Name: name}
2590
2591 if p.tok == token.LBRACK {
2592
2593
2594 lbrack := p.pos
2595 p.next()
2596 if p.tok == token.IDENT {
2597
2598
2599
2600
2601
2602
2603
2604
2605
2606
2607
2608
2609
2610
2611
2612 var x ast.Expr = p.parseIdent()
2613 if p.tok != token.LBRACK {
2614
2615
2616
2617 p.exprLev++
2618 lhs := p.parsePrimaryExpr(x)
2619 x = p.parseBinaryExpr(lhs, token.LowestPrec+1)
2620 p.exprLev--
2621 }
2622
2623
2624
2625
2626
2627
2628
2629 if pname, ptype := extractName(x, p.tok == token.COMMA); pname != nil && (ptype != nil || p.tok != token.RBRACK) {
2630
2631
2632
2633 p.parseGenericType(spec, lbrack, pname, ptype)
2634 } else {
2635
2636
2637 spec.Type = p.parseArrayType(lbrack, x)
2638 }
2639 } else {
2640
2641 spec.Type = p.parseArrayType(lbrack, nil)
2642 }
2643 } else {
2644
2645 if p.tok == token.ASSIGN {
2646
2647 spec.Assign = p.pos
2648 p.next()
2649 }
2650 spec.Type = p.parseType()
2651 }
2652
2653 spec.Comment = p.expectSemi()
2654
2655 return spec
2656 }
2657
2658
2659
2660
2661
2662
2663
2664
2665
2666
2667
2668
2669
2670
2671
2672
2673
2674
2675
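// extractName splits the expression x into (name, expr) if syntactically
// x can be written as name expr. The split only happens if expr is a type
// element (per the isTypeElem predicate) or if force is set.
// If x is just a name, the result is (name, nil). If the split succeeds,
// the result is (name, expr). Otherwise the result is (nil, x).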
2676 func extractName(x ast.Expr, force bool) (*ast.Ident, ast.Expr) {
2677 switch x := x.(type) {
2678 case *ast.Ident:
2679 return x, nil
2680 case *ast.BinaryExpr:
2681 switch x.Op {
2682 case token.MUL:
2683 if name, _ := x.X.(*ast.Ident); name != nil && (force || isTypeElem(x.Y)) {
2684
2685 return name, &ast.StarExpr{Star: x.OpPos, X: x.Y}
2686 }
2687 case token.OR:
2688 if name, lhs := extractName(x.X, force || isTypeElem(x.Y)); name != nil && lhs != nil {
2689
2690 op := *x
2691 op.X = lhs
2692 return name, &op
2693 }
2694 }
2695 case *ast.CallExpr:
2696 if name, _ := x.Fun.(*ast.Ident); name != nil {
2697 if len(x.Args) == 1 && x.Ellipsis == token.NoPos && (force || isTypeElem(x.Args[0])) {
2698
2699 return name, x.Args[0]
2700 }
2701 }
2702 }
2703 return nil, x
2704 }
2705
2706
2707
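// isTypeElem reports whether x is a (possibly parenthesized) type element
// expression. The result is false if x could be either a type element or
// an ordinary (value) expression.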
2708 func isTypeElem(x ast.Expr) bool {
2709 switch x := x.(type) {
2710 case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType:
2711 return true
2712 case *ast.BinaryExpr:
2713 return isTypeElem(x.X) || isTypeElem(x.Y)
2714 case *ast.UnaryExpr:
2715 return x.Op == token.TILDE
2716 case *ast.ParenExpr:
2717 return isTypeElem(x.X)
2718 }
2719 return false
2720 }
2721
2722 func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
2723 if p.trace {
2724 defer un(trace(p, "GenDecl("+keyword.String()+")"))
2725 }
2726
2727 doc := p.leadComment
2728 pos := p.expect(keyword)
2729 var lparen, rparen token.Pos
2730 var list []ast.Spec
2731 if p.tok == token.LPAREN {
2732 lparen = p.pos
2733 p.next()
2734 for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
2735 list = append(list, f(p.leadComment, keyword, iota))
2736 }
2737 rparen = p.expect(token.RPAREN)
2738 p.expectSemi()
2739 } else {
2740 list = append(list, f(nil, keyword, 0))
2741 }
2742
2743 return &ast.GenDecl{
2744 Doc: doc,
2745 TokPos: pos,
2746 Tok: keyword,
2747 Lparen: lparen,
2748 Specs: list,
2749 Rparen: rparen,
2750 }
2751 }
2752
2753 func (p *parser) parseFuncDecl() *ast.FuncDecl {
2754 if p.trace {
2755 defer un(trace(p, "FunctionDecl"))
2756 }
2757
2758 doc := p.leadComment
2759 pos := p.expect(token.FUNC)
2760
2761 var recv *ast.FieldList
2762 if p.tok == token.LPAREN {
2763 _, recv = p.parseParameters(false)
2764 }
2765
2766 ident := p.parseIdent()
2767
2768 tparams, params := p.parseParameters(true)
2769 if recv != nil && tparams != nil {
2770
2771
2772 p.error(tparams.Opening, "method must have no type parameters")
2773 tparams = nil
2774 }
2775 results := p.parseResult()
2776
2777 var body *ast.BlockStmt
2778 switch p.tok {
2779 case token.LBRACE:
2780 body = p.parseBody()
2781 p.expectSemi()
2782 case token.SEMICOLON:
2783 p.next()
2784 if p.tok == token.LBRACE {
2785
2786 p.error(p.pos, "unexpected semicolon or newline before {")
2787 body = p.parseBody()
2788 p.expectSemi()
2789 }
2790 default:
2791 p.expectSemi()
2792 }
2793
2794 decl := &ast.FuncDecl{
2795 Doc: doc,
2796 Recv: recv,
2797 Name: ident,
2798 Type: &ast.FuncType{
2799 Func: pos,
2800 TypeParams: tparams,
2801 Params: params,
2802 Results: results,
2803 },
2804 Body: body,
2805 }
2806 return decl
2807 }
2808
2809 func (p *parser) parseDecl(sync map[token.Token]bool) ast.Decl {
2810 if p.trace {
2811 defer un(trace(p, "Declaration"))
2812 }
2813
2814 var f parseSpecFunction
2815 switch p.tok {
2816 case token.IMPORT:
2817 f = p.parseImportSpec
2818
2819 case token.CONST, token.VAR:
2820 f = p.parseValueSpec
2821
2822 case token.TYPE:
2823 f = p.parseTypeSpec
2824
2825 case token.FUNC:
2826 return p.parseFuncDecl()
2827
2828 default:
2829 pos := p.pos
2830 p.errorExpected(pos, "declaration")
2831 p.advance(sync)
2832 return &ast.BadDecl{From: pos, To: p.pos}
2833 }
2834
2835 return p.parseGenDecl(p.tok, f)
2836 }
2837
2838
2839
2840
2841 func (p *parser) parseFile() *ast.File {
2842 if p.trace {
2843 defer un(trace(p, "File"))
2844 }
2845
2846
2847
2848 if p.errors.Len() != 0 {
2849 return nil
2850 }
2851
2852
2853 doc := p.leadComment
2854 pos := p.expect(token.PACKAGE)
2855
2856
2857 ident := p.parseIdent()
2858 if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
2859 p.error(p.pos, "invalid package name _")
2860 }
2861 p.expectSemi()
2862
2863
2864
2865 if p.errors.Len() != 0 {
2866 return nil
2867 }
2868
2869 var decls []ast.Decl
2870 if p.mode&PackageClauseOnly == 0 {
2871
2872 for p.tok == token.IMPORT {
2873 decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
2874 }
2875
2876 if p.mode&ImportsOnly == 0 {
2877
2878 prev := token.IMPORT
2879 for p.tok != token.EOF {
2880
2881 if p.tok == token.IMPORT && prev != token.IMPORT {
2882 p.error(p.pos, "imports must appear before other declarations")
2883 }
2884 prev = p.tok
2885
2886 decls = append(decls, p.parseDecl(declStart))
2887 }
2888 }
2889 }
2890
2891 f := &ast.File{
2892 Doc: doc,
2893 Package: pos,
2894 Name: ident,
2895 Decls: decls,
2896 FileStart: token.Pos(p.file.Base()),
2897 FileEnd: token.Pos(p.file.Base() + p.file.Size()),
2898 Imports: p.imports,
2899 Comments: p.comments,
2900 GoVersion: p.goVersion,
2901 }
2902 var declErr func(token.Pos, string)
2903 if p.mode&DeclarationErrors != 0 {
2904 declErr = p.error
2905 }
2906 if p.mode&SkipObjectResolution == 0 {
2907 resolveFile(f, p.file, declErr)
2908 }
2909
2910 return f
2911 }
2912