Source file: src/go/parser/parser.go
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package parser implements a parser for Go source files: it reads Go
// source text and produces the corresponding *ast.File syntax tree.
16 package parser
17
18 import (
19 "fmt"
20 "go/ast"
21 "go/build/constraint"
22 "go/internal/typeparams"
23 "go/scanner"
24 "go/token"
25 "strings"
26 )
27
28
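// The parser structure holds the parser's internal state.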
29 type parser struct {
30 file *token.File
31 errors scanner.ErrorList
32 scanner scanner.Scanner
33
34
35 mode Mode
36 trace bool
37 indent int
38
39
40 comments []*ast.CommentGroup
41 leadComment *ast.CommentGroup
42 lineComment *ast.CommentGroup
43 top bool
44 goVersion string
45
46
47 pos token.Pos
48 tok token.Token
49 lit string
50
51
52
53
54
55 syncPos token.Pos
56 syncCnt int
57
58
59 exprLev int
60 inRhs bool
61
62 imports []*ast.ImportSpec
63
64
65
66 nestLev int
67 }
68
69 func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
70 p.file = fset.AddFile(filename, -1, len(src))
71 eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
72 p.scanner.Init(p.file, src, eh, scanner.ScanComments)
73
74 p.top = true
75 p.mode = mode
76 p.trace = mode&Trace != 0
77 p.next()
78 }
79
80
81
82
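// printTrace prints a single line of tracing output, prefixed with the
// current source position and indented by 2*p.indent dots.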
83 func (p *parser) printTrace(a ...any) {
84 const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
85 const n = len(dots)
86 pos := p.file.Position(p.pos)
87 fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
88 i := 2 * p.indent
89 for i > n {
90 fmt.Print(dots)
91 i -= n
92 }
93
94 fmt.Print(dots[0:i])
95 fmt.Println(a...)
96 }
97
98 func trace(p *parser, msg string) *parser {
99 p.printTrace(msg, "(")
100 p.indent++
101 return p
102 }
103
104
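// un is the counterpart of trace. Usage pattern: defer un(trace(p, "...")).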
105 func un(p *parser) {
106 p.indent--
107 p.printTrace(")")
108 }
109
110
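// maxNestLev is the deepest we're willing to recurse.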
111 const maxNestLev int = 1e5
112
113 func incNestLev(p *parser) *parser {
114 p.nestLev++
115 if p.nestLev > maxNestLev {
116 p.error(p.pos, "exceeded max nesting depth")
117 panic(bailout{})
118 }
119 return p
120 }
121
122
123
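// decNestLev is used along with incNestLev to track nesting depth during
// parsing to prevent stack exhaustion, in a similar fashion to how un and
// trace are used.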
124 func decNestLev(p *parser) {
125 p.nestLev--
126 }
127
128
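// Advance to the next token.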
129 func (p *parser) next0() {
130
131
132
133
134 if p.trace && p.pos.IsValid() {
135 s := p.tok.String()
136 switch {
137 case p.tok.IsLiteral():
138 p.printTrace(s, p.lit)
139 case p.tok.IsOperator(), p.tok.IsKeyword():
140 p.printTrace("\"" + s + "\"")
141 default:
142 p.printTrace(s)
143 }
144 }
145
146 for {
147 p.pos, p.tok, p.lit = p.scanner.Scan()
148 if p.tok == token.COMMENT {
149 if p.top && strings.HasPrefix(p.lit, "//go:build") {
150 if x, err := constraint.Parse(p.lit); err == nil {
151 p.goVersion = constraint.GoVersion(x)
152 }
153 }
154 if p.mode&ParseComments == 0 {
155 continue
156 }
157 } else {
158
159 p.top = false
160 }
161 break
162 }
163 }
164
165
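// Consume a comment and return it and the line on which it ends.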
166 func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
167
168
169 endline = p.file.Line(p.pos)
170 if p.lit[1] == '*' {
171
172 for i := 0; i < len(p.lit); i++ {
173 if p.lit[i] == '\n' {
174 endline++
175 }
176 }
177 }
178
179 comment = &ast.Comment{Slash: p.pos, Text: p.lit}
180 p.next0()
181
182 return
183 }
184
185
186
187
188
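// Consume a group of adjacent comments, add it to the parser's
// comments list, and return it together with the line at which
// the last comment in the group ends. A non-comment token or n
// empty lines terminate a comment group.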
189 func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
190 var list []*ast.Comment
191 endline = p.file.Line(p.pos)
192 for p.tok == token.COMMENT && p.file.Line(p.pos) <= endline+n {
193 var comment *ast.Comment
194 comment, endline = p.consumeComment()
195 list = append(list, comment)
196 }
197
198
199 comments = &ast.CommentGroup{List: list}
200 p.comments = append(p.comments, comments)
201
202 return
203 }
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
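// Advance to the next non-comment token. In the process, collect
// any comment groups encountered, and remember the last lead and
// line comments.
//
// A lead comment is a comment group that starts and ends on lines
// without any other tokens and that is followed by a non-comment
// token on the line immediately after the comment group.
//
// A line comment is a comment group that follows a non-comment
// token on the same line and that has no tokens after it on the
// line where it ends.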
219 func (p *parser) next() {
220 p.leadComment = nil
221 p.lineComment = nil
222 prev := p.pos
223 p.next0()
224
225 if p.tok == token.COMMENT {
226 var comment *ast.CommentGroup
227 var endline int
228
229 if p.file.Line(p.pos) == p.file.Line(prev) {
230
231
232 comment, endline = p.consumeCommentGroup(0)
233 if p.file.Line(p.pos) != endline || p.tok == token.SEMICOLON || p.tok == token.EOF {
234
235
236 p.lineComment = comment
237 }
238 }
239
240
241 endline = -1
242 for p.tok == token.COMMENT {
243 comment, endline = p.consumeCommentGroup(1)
244 }
245
246 if endline+1 == p.file.Line(p.pos) {
247
248
249 p.leadComment = comment
250 }
251 }
252 }
253
254
255
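// A bailout panic is raised to indicate early termination of parsing,
// optionally carrying a position and message.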
256 type bailout struct {
257 pos token.Pos
258 msg string
259 }
260
261 func (p *parser) error(pos token.Pos, msg string) {
262 if p.trace {
263 defer un(trace(p, "error: "+msg))
264 }
265
266 epos := p.file.Position(pos)
267
268
269
270
271 if p.mode&AllErrors == 0 {
272 n := len(p.errors)
273 if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
274 return
275 }
276 if n > 10 {
277 panic(bailout{})
278 }
279 }
280
281 p.errors.Add(epos, msg)
282 }
283
284 func (p *parser) errorExpected(pos token.Pos, msg string) {
285 msg = "expected " + msg
286 if pos == p.pos {
287
288
289 switch {
290 case p.tok == token.SEMICOLON && p.lit == "\n":
291 msg += ", found newline"
292 case p.tok.IsLiteral():
293
294 msg += ", found " + p.lit
295 default:
296 msg += ", found '" + p.tok.String() + "'"
297 }
298 }
299 p.error(pos, msg)
300 }
301
302 func (p *parser) expect(tok token.Token) token.Pos {
303 pos := p.pos
304 if p.tok != tok {
305 p.errorExpected(pos, "'"+tok.String()+"'")
306 }
307 p.next()
308 return pos
309 }
310
311
312
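// expect2 is like expect, but it returns an invalid position
// if the expected token is not found.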
313 func (p *parser) expect2(tok token.Token) (pos token.Pos) {
314 if p.tok == tok {
315 pos = p.pos
316 } else {
317 p.errorExpected(p.pos, "'"+tok.String()+"'")
318 }
319 p.next()
320 return
321 }
322
323
324
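// expectClosing is like expect but provides a better error message
// for the common case of a missing comma before a newline.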
325 func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
326 if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
327 p.error(p.pos, "missing ',' before newline in "+context)
328 p.next()
329 }
330 return p.expect(tok)
331 }
332
333
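// expectSemi consumes a semicolon (explicit or automatically inserted)
// and returns the applicable line comment.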
334 func (p *parser) expectSemi() (comment *ast.CommentGroup) {
335
336 if p.tok != token.RPAREN && p.tok != token.RBRACE {
337 switch p.tok {
338 case token.COMMA:
339
340 p.errorExpected(p.pos, "';'")
341 fallthrough
342 case token.SEMICOLON:
343 if p.lit == ";" {
344
345 p.next()
346 comment = p.lineComment
347 } else {
348
349 comment = p.lineComment
350 p.next()
351 }
352 return comment
353 default:
354 p.errorExpected(p.pos, "';'")
355 p.advance(stmtStart)
356 }
357 }
358 return nil
359 }
360
361 func (p *parser) atComma(context string, follow token.Token) bool {
362 if p.tok == token.COMMA {
363 return true
364 }
365 if p.tok != follow {
366 msg := "missing ','"
367 if p.tok == token.SEMICOLON && p.lit == "\n" {
368 msg += " before newline"
369 }
370 p.error(p.pos, msg+" in "+context)
371 return true
372 }
373 return false
374 }
375
376 func assert(cond bool, msg string) {
377 if !cond {
378 panic("go/parser internal error: " + msg)
379 }
380 }
381
382
383
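// advance consumes tokens until the current token p.tok
// is in the 'to' set, or token.EOF. For error recovery.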
384 func (p *parser) advance(to map[token.Token]bool) {
385 for ; p.tok != token.EOF; p.next() {
386 if to[p.tok] {
387
388
389
390
391
392
393
394 if p.pos == p.syncPos && p.syncCnt < 10 {
395 p.syncCnt++
396 return
397 }
398 if p.pos > p.syncPos {
399 p.syncPos = p.pos
400 p.syncCnt = 0
401 return
402 }
403
404
405
406
407
408 }
409 }
410 }
411
412 var stmtStart = map[token.Token]bool{
413 token.BREAK: true,
414 token.CONST: true,
415 token.CONTINUE: true,
416 token.DEFER: true,
417 token.FALLTHROUGH: true,
418 token.FOR: true,
419 token.GO: true,
420 token.GOTO: true,
421 token.IF: true,
422 token.RETURN: true,
423 token.SELECT: true,
424 token.SWITCH: true,
425 token.TYPE: true,
426 token.VAR: true,
427 }
428
429 var declStart = map[token.Token]bool{
430 token.IMPORT: true,
431 token.CONST: true,
432 token.TYPE: true,
433 token.VAR: true,
434 }
435
436 var exprEnd = map[token.Token]bool{
437 token.COMMA: true,
438 token.COLON: true,
439 token.SEMICOLON: true,
440 token.RPAREN: true,
441 token.RBRACK: true,
442 token.RBRACE: true,
443 }
444
445
446
447
448
449
450
451
452
453
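// safePos returns a valid file position for a given position: If pos
// is valid to begin with, safePos returns pos. If pos is out-of-range,
// safePos returns the EOF position.
//
// This is a hack to work around "artificial" end positions in the AST
// which are computed by adding 1 to (presumably valid) token positions.
// If the token positions are invalid due to parse errors, the resulting
// end position may be past the file's EOF position, which would lead to
// panics if used later on.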
454 func (p *parser) safePos(pos token.Pos) (res token.Pos) {
455 defer func() {
456 if recover() != nil {
457 res = token.Pos(p.file.Base() + p.file.Size())
458 }
459 }()
460 _ = p.file.Offset(pos)
461 return pos
462 }
463
464
465
466
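// parseIdent parses an identifier. If the current token is not an IDENT,
// it reports the error and returns a placeholder "_" identifier so that
// parsing can continue.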
467 func (p *parser) parseIdent() *ast.Ident {
468 pos := p.pos
469 name := "_"
470 if p.tok == token.IDENT {
471 name = p.lit
472 p.next()
473 } else {
474 p.expect(token.IDENT)
475 }
476 return &ast.Ident{NamePos: pos, Name: name}
477 }
478
479 func (p *parser) parseIdentList() (list []*ast.Ident) {
480 if p.trace {
481 defer un(trace(p, "IdentList"))
482 }
483
484 list = append(list, p.parseIdent())
485 for p.tok == token.COMMA {
486 p.next()
487 list = append(list, p.parseIdent())
488 }
489
490 return
491 }
492
493
494
495
496
497 func (p *parser) parseExprList() (list []ast.Expr) {
498 if p.trace {
499 defer un(trace(p, "ExpressionList"))
500 }
501
502 list = append(list, p.parseExpr())
503 for p.tok == token.COMMA {
504 p.next()
505 list = append(list, p.parseExpr())
506 }
507
508 return
509 }
510
511 func (p *parser) parseList(inRhs bool) []ast.Expr {
512 old := p.inRhs
513 p.inRhs = inRhs
514 list := p.parseExprList()
515 p.inRhs = old
516 return list
517 }
518
519
520
521
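// parseType parses a type. If no type is found, it reports an error,
// advances to the next safe token, and returns a BadExpr.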
522 func (p *parser) parseType() ast.Expr {
523 if p.trace {
524 defer un(trace(p, "Type"))
525 }
526
527 typ := p.tryIdentOrType()
528
529 if typ == nil {
530 pos := p.pos
531 p.errorExpected(pos, "type")
532 p.advance(exprEnd)
533 return &ast.BadExpr{From: pos, To: p.pos}
534 }
535
536 return typ
537 }
538
539 func (p *parser) parseQualifiedIdent(ident *ast.Ident) ast.Expr {
540 if p.trace {
541 defer un(trace(p, "QualifiedIdent"))
542 }
543
544 typ := p.parseTypeName(ident)
545 if p.tok == token.LBRACK {
546 typ = p.parseTypeInstance(typ)
547 }
548
549 return typ
550 }
551
552
553 func (p *parser) parseTypeName(ident *ast.Ident) ast.Expr {
554 if p.trace {
555 defer un(trace(p, "TypeName"))
556 }
557
558 if ident == nil {
559 ident = p.parseIdent()
560 }
561
562 if p.tok == token.PERIOD {
563
564 p.next()
565 sel := p.parseIdent()
566 return &ast.SelectorExpr{X: ident, Sel: sel}
567 }
568
569 return ident
570 }
571
572
573
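// parseArrayType parses an array or slice type. "[" has already been
// consumed, and lbrack is its position; if len != nil it is the
// already-consumed array length.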
574 func (p *parser) parseArrayType(lbrack token.Pos, len ast.Expr) *ast.ArrayType {
575 if p.trace {
576 defer un(trace(p, "ArrayType"))
577 }
578
579 if len == nil {
580 p.exprLev++
581
582 if p.tok == token.ELLIPSIS {
583 len = &ast.Ellipsis{Ellipsis: p.pos}
584 p.next()
585 } else if p.tok != token.RBRACK {
586 len = p.parseRhs()
587 }
588 p.exprLev--
589 }
590 if p.tok == token.COMMA {
591
592
593
594 p.error(p.pos, "unexpected comma; expecting ]")
595 p.next()
596 }
597 p.expect(token.RBRACK)
598 elt := p.parseType()
599 return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
600 }
601
602 func (p *parser) parseArrayFieldOrTypeInstance(x *ast.Ident) (*ast.Ident, ast.Expr) {
603 if p.trace {
604 defer un(trace(p, "ArrayFieldOrTypeInstance"))
605 }
606
607 lbrack := p.expect(token.LBRACK)
608 trailingComma := token.NoPos
609 var args []ast.Expr
610 if p.tok != token.RBRACK {
611 p.exprLev++
612 args = append(args, p.parseRhs())
613 for p.tok == token.COMMA {
614 comma := p.pos
615 p.next()
616 if p.tok == token.RBRACK {
617 trailingComma = comma
618 break
619 }
620 args = append(args, p.parseRhs())
621 }
622 p.exprLev--
623 }
624 rbrack := p.expect(token.RBRACK)
625
626 if len(args) == 0 {
627
628 elt := p.parseType()
629 return x, &ast.ArrayType{Lbrack: lbrack, Elt: elt}
630 }
631
632
633 if len(args) == 1 {
634 elt := p.tryIdentOrType()
635 if elt != nil {
636
637 if trailingComma.IsValid() {
638
639 p.error(trailingComma, "unexpected comma; expecting ]")
640 }
641 return x, &ast.ArrayType{Lbrack: lbrack, Len: args[0], Elt: elt}
642 }
643 }
644
645
646 return nil, typeparams.PackIndexExpr(x, lbrack, args, rbrack)
647 }
648
649 func (p *parser) parseFieldDecl() *ast.Field {
650 if p.trace {
651 defer un(trace(p, "FieldDecl"))
652 }
653
654 doc := p.leadComment
655
656 var names []*ast.Ident
657 var typ ast.Expr
658 switch p.tok {
659 case token.IDENT:
660 name := p.parseIdent()
661 if p.tok == token.PERIOD || p.tok == token.STRING || p.tok == token.SEMICOLON || p.tok == token.RBRACE {
662
663 typ = name
664 if p.tok == token.PERIOD {
665 typ = p.parseQualifiedIdent(name)
666 }
667 } else {
668
669 names = []*ast.Ident{name}
670 for p.tok == token.COMMA {
671 p.next()
672 names = append(names, p.parseIdent())
673 }
674
675
676 if len(names) == 1 && p.tok == token.LBRACK {
677 name, typ = p.parseArrayFieldOrTypeInstance(name)
678 if name == nil {
679 names = nil
680 }
681 } else {
682
683 typ = p.parseType()
684 }
685 }
686 case token.MUL:
687 star := p.pos
688 p.next()
689 if p.tok == token.LPAREN {
690
691 p.error(p.pos, "cannot parenthesize embedded type")
692 p.next()
693 typ = p.parseQualifiedIdent(nil)
694
695 if p.tok == token.RPAREN {
696 p.next()
697 }
698 } else {
699
700 typ = p.parseQualifiedIdent(nil)
701 }
702 typ = &ast.StarExpr{Star: star, X: typ}
703
704 case token.LPAREN:
705 p.error(p.pos, "cannot parenthesize embedded type")
706 p.next()
707 if p.tok == token.MUL {
708
709 star := p.pos
710 p.next()
711 typ = &ast.StarExpr{Star: star, X: p.parseQualifiedIdent(nil)}
712 } else {
713
714 typ = p.parseQualifiedIdent(nil)
715 }
716
717 if p.tok == token.RPAREN {
718 p.next()
719 }
720
721 default:
722 pos := p.pos
723 p.errorExpected(pos, "field name or embedded type")
724 p.advance(exprEnd)
725 typ = &ast.BadExpr{From: pos, To: p.pos}
726 }
727
728 var tag *ast.BasicLit
729 if p.tok == token.STRING {
730 tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
731 p.next()
732 }
733
734 comment := p.expectSemi()
735
736 field := &ast.Field{Doc: doc, Names: names, Type: typ, Tag: tag, Comment: comment}
737 return field
738 }
739
740 func (p *parser) parseStructType() *ast.StructType {
741 if p.trace {
742 defer un(trace(p, "StructType"))
743 }
744
745 pos := p.expect(token.STRUCT)
746 lbrace := p.expect(token.LBRACE)
747 var list []*ast.Field
748 for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
749
750
751
752 list = append(list, p.parseFieldDecl())
753 }
754 rbrace := p.expect(token.RBRACE)
755
756 return &ast.StructType{
757 Struct: pos,
758 Fields: &ast.FieldList{
759 Opening: lbrace,
760 List: list,
761 Closing: rbrace,
762 },
763 }
764 }
765
766 func (p *parser) parsePointerType() *ast.StarExpr {
767 if p.trace {
768 defer un(trace(p, "PointerType"))
769 }
770
771 star := p.expect(token.MUL)
772 base := p.parseType()
773
774 return &ast.StarExpr{Star: star, X: base}
775 }
776
777 func (p *parser) parseDotsType() *ast.Ellipsis {
778 if p.trace {
779 defer un(trace(p, "DotsType"))
780 }
781
782 pos := p.expect(token.ELLIPSIS)
783 elt := p.parseType()
784
785 return &ast.Ellipsis{Ellipsis: pos, Elt: elt}
786 }
787
788 type field struct {
789 name *ast.Ident
790 typ ast.Expr
791 }
792
793 func (p *parser) parseParamDecl(name *ast.Ident, typeSetsOK bool) (f field) {
794
795
796 if p.trace {
797 defer un(trace(p, "ParamDeclOrNil"))
798 }
799
800 ptok := p.tok
801 if name != nil {
802 p.tok = token.IDENT
803 } else if typeSetsOK && p.tok == token.TILDE {
804
805 return field{nil, p.embeddedElem(nil)}
806 }
807
808 switch p.tok {
809 case token.IDENT:
810
811 if name != nil {
812 f.name = name
813 p.tok = ptok
814 } else {
815 f.name = p.parseIdent()
816 }
817 switch p.tok {
818 case token.IDENT, token.MUL, token.ARROW, token.FUNC, token.CHAN, token.MAP, token.STRUCT, token.INTERFACE, token.LPAREN:
819
820 f.typ = p.parseType()
821
822 case token.LBRACK:
823
824 f.name, f.typ = p.parseArrayFieldOrTypeInstance(f.name)
825
826 case token.ELLIPSIS:
827
828 f.typ = p.parseDotsType()
829 return
830
831 case token.PERIOD:
832
833 f.typ = p.parseQualifiedIdent(f.name)
834 f.name = nil
835
836 case token.TILDE:
837 if typeSetsOK {
838 f.typ = p.embeddedElem(nil)
839 return
840 }
841
842 case token.OR:
843 if typeSetsOK {
844
845 f.typ = p.embeddedElem(f.name)
846 f.name = nil
847 return
848 }
849 }
850
851 case token.MUL, token.ARROW, token.FUNC, token.LBRACK, token.CHAN, token.MAP, token.STRUCT, token.INTERFACE, token.LPAREN:
852
853 f.typ = p.parseType()
854
855 case token.ELLIPSIS:
856
857
858 f.typ = p.parseDotsType()
859 return
860
861 default:
862
863
864 p.errorExpected(p.pos, "')'")
865 p.advance(exprEnd)
866 }
867
868
869 if typeSetsOK && p.tok == token.OR && f.typ != nil {
870 f.typ = p.embeddedElem(f.typ)
871 }
872
873 return
874 }
875
876 func (p *parser) parseParameterList(name0 *ast.Ident, typ0 ast.Expr, closing token.Token) (params []*ast.Field) {
877 if p.trace {
878 defer un(trace(p, "ParameterList"))
879 }
880
881
882 tparams := closing == token.RBRACK
883
884 typeSetsOK := tparams
885
886 pos := p.pos
887 if name0 != nil {
888 pos = name0.Pos()
889 }
890
891 var list []field
892 var named int
893
894 for name0 != nil || p.tok != closing && p.tok != token.EOF {
895 var par field
896 if typ0 != nil {
897 if typeSetsOK {
898 typ0 = p.embeddedElem(typ0)
899 }
900 par = field{name0, typ0}
901 } else {
902 par = p.parseParamDecl(name0, typeSetsOK)
903 }
904 name0 = nil
905 typ0 = nil
906 if par.name != nil || par.typ != nil {
907 list = append(list, par)
908 if par.name != nil && par.typ != nil {
909 named++
910 }
911 }
912 if !p.atComma("parameter list", closing) {
913 break
914 }
915 p.next()
916 }
917
918 if len(list) == 0 {
919 return
920 }
921
922
923
924
925
926 if named == 0 {
927
928 for i := 0; i < len(list); i++ {
929 par := &list[i]
930 if typ := par.name; typ != nil {
931 par.typ = typ
932 par.name = nil
933 }
934 }
935 if tparams {
936 p.error(pos, "type parameters must be named")
937 }
938 } else if named != len(list) {
939
940 ok := true
941 var typ ast.Expr
942 missingName := pos
943 for i := len(list) - 1; i >= 0; i-- {
944 if par := &list[i]; par.typ != nil {
945 typ = par.typ
946 if par.name == nil {
947 ok = false
948 missingName = par.typ.Pos()
949 n := ast.NewIdent("_")
950 n.NamePos = typ.Pos()
951 par.name = n
952 }
953 } else if typ != nil {
954 par.typ = typ
955 } else {
956
957 ok = false
958 missingName = par.name.Pos()
959 par.typ = &ast.BadExpr{From: par.name.Pos(), To: p.pos}
960 }
961 }
962 if !ok {
963 if tparams {
964 p.error(missingName, "type parameters must be named")
965 } else {
966 p.error(pos, "mixed named and unnamed parameters")
967 }
968 }
969 }
970
971
972 if named == 0 {
973
974 for _, par := range list {
975 assert(par.typ != nil, "nil type in unnamed parameter list")
976 params = append(params, &ast.Field{Type: par.typ})
977 }
978 return
979 }
980
981
982 var names []*ast.Ident
983 var typ ast.Expr
984 addParams := func() {
985 assert(typ != nil, "nil type in named parameter list")
986 field := &ast.Field{Names: names, Type: typ}
987 params = append(params, field)
988 names = nil
989 }
990 for _, par := range list {
991 if par.typ != typ {
992 if len(names) > 0 {
993 addParams()
994 }
995 typ = par.typ
996 }
997 names = append(names, par.name)
998 }
999 if len(names) > 0 {
1000 addParams()
1001 }
1002 return
1003 }
1004
1005 func (p *parser) parseParameters(acceptTParams bool) (tparams, params *ast.FieldList) {
1006 if p.trace {
1007 defer un(trace(p, "Parameters"))
1008 }
1009
1010 if acceptTParams && p.tok == token.LBRACK {
1011 opening := p.pos
1012 p.next()
1013
1014 list := p.parseParameterList(nil, nil, token.RBRACK)
1015 rbrack := p.expect(token.RBRACK)
1016 tparams = &ast.FieldList{Opening: opening, List: list, Closing: rbrack}
1017
1018 if tparams.NumFields() == 0 {
1019 p.error(tparams.Closing, "empty type parameter list")
1020 tparams = nil
1021 }
1022 }
1023
1024 opening := p.expect(token.LPAREN)
1025
1026 var fields []*ast.Field
1027 if p.tok != token.RPAREN {
1028 fields = p.parseParameterList(nil, nil, token.RPAREN)
1029 }
1030
1031 rparen := p.expect(token.RPAREN)
1032 params = &ast.FieldList{Opening: opening, List: fields, Closing: rparen}
1033
1034 return
1035 }
1036
1037 func (p *parser) parseResult() *ast.FieldList {
1038 if p.trace {
1039 defer un(trace(p, "Result"))
1040 }
1041
1042 if p.tok == token.LPAREN {
1043 _, results := p.parseParameters(false)
1044 return results
1045 }
1046
1047 typ := p.tryIdentOrType()
1048 if typ != nil {
1049 list := make([]*ast.Field, 1)
1050 list[0] = &ast.Field{Type: typ}
1051 return &ast.FieldList{List: list}
1052 }
1053
1054 return nil
1055 }
1056
1057 func (p *parser) parseFuncType() *ast.FuncType {
1058 if p.trace {
1059 defer un(trace(p, "FuncType"))
1060 }
1061
1062 pos := p.expect(token.FUNC)
1063 tparams, params := p.parseParameters(true)
1064 if tparams != nil {
1065 p.error(tparams.Pos(), "function type must have no type parameters")
1066 }
1067 results := p.parseResult()
1068
1069 return &ast.FuncType{Func: pos, Params: params, Results: results}
1070 }
1071
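// parseMethodSpec parses a single interface element: a method signature,
// an embedded (possibly instantiated) type, or a generic method
// declaration, which is reported as an error.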
1072 func (p *parser) parseMethodSpec() *ast.Field {
1073 if p.trace {
1074 defer un(trace(p, "MethodSpec"))
1075 }
1076
1077 doc := p.leadComment
1078 var idents []*ast.Ident
1079 var typ ast.Expr
1080 x := p.parseTypeName(nil)
1081 if ident, _ := x.(*ast.Ident); ident != nil {
1082 switch {
1083 case p.tok == token.LBRACK:
1084
1085 lbrack := p.pos
1086 p.next()
1087 p.exprLev++
1088 x := p.parseExpr()
1089 p.exprLev--
1090 if name0, _ := x.(*ast.Ident); name0 != nil && p.tok != token.COMMA && p.tok != token.RBRACK {
1091
1092
1093
1094
1095 _ = p.parseParameterList(name0, nil, token.RBRACK)
1096 _ = p.expect(token.RBRACK)
1097 p.error(lbrack, "interface method must have no type parameters")
1098
1099
1100 _, params := p.parseParameters(false)
1101 results := p.parseResult()
1102 idents = []*ast.Ident{ident}
1103 typ = &ast.FuncType{
1104 Func: token.NoPos,
1105 Params: params,
1106 Results: results,
1107 }
1108 } else {
1109
1110
1111 list := []ast.Expr{x}
1112 if p.atComma("type argument list", token.RBRACK) {
1113 p.exprLev++
1114 p.next()
1115 for p.tok != token.RBRACK && p.tok != token.EOF {
1116 list = append(list, p.parseType())
1117 if !p.atComma("type argument list", token.RBRACK) {
1118 break
1119 }
1120 p.next()
1121 }
1122 p.exprLev--
1123 }
1124 rbrack := p.expectClosing(token.RBRACK, "type argument list")
1125 typ = typeparams.PackIndexExpr(ident, lbrack, list, rbrack)
1126 }
1127 case p.tok == token.LPAREN:
1128
1129
1130 _, params := p.parseParameters(false)
1131 results := p.parseResult()
1132 idents = []*ast.Ident{ident}
1133 typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
1134 default:
1135
1136 typ = x
1137 }
1138 } else {
1139
1140 typ = x
1141 if p.tok == token.LBRACK {
1142
1143 typ = p.parseTypeInstance(typ)
1144 }
1145 }
1146
1147
1148
1149
1150
1151 return &ast.Field{Doc: doc, Names: idents, Type: typ}
1152 }
1153
1154 func (p *parser) embeddedElem(x ast.Expr) ast.Expr {
1155 if p.trace {
1156 defer un(trace(p, "EmbeddedElem"))
1157 }
1158 if x == nil {
1159 x = p.embeddedTerm()
1160 }
1161 for p.tok == token.OR {
1162 t := new(ast.BinaryExpr)
1163 t.OpPos = p.pos
1164 t.Op = token.OR
1165 p.next()
1166 t.X = x
1167 t.Y = p.embeddedTerm()
1168 x = t
1169 }
1170 return x
1171 }
1172
1173 func (p *parser) embeddedTerm() ast.Expr {
1174 if p.trace {
1175 defer un(trace(p, "EmbeddedTerm"))
1176 }
1177 if p.tok == token.TILDE {
1178 t := new(ast.UnaryExpr)
1179 t.OpPos = p.pos
1180 t.Op = token.TILDE
1181 p.next()
1182 t.X = p.parseType()
1183 return t
1184 }
1185
1186 t := p.tryIdentOrType()
1187 if t == nil {
1188 pos := p.pos
1189 p.errorExpected(pos, "~ term or type")
1190 p.advance(exprEnd)
1191 return &ast.BadExpr{From: pos, To: p.pos}
1192 }
1193
1194 return t
1195 }
1196
1197 func (p *parser) parseInterfaceType() *ast.InterfaceType {
1198 if p.trace {
1199 defer un(trace(p, "InterfaceType"))
1200 }
1201
1202 pos := p.expect(token.INTERFACE)
1203 lbrace := p.expect(token.LBRACE)
1204
1205 var list []*ast.Field
1206
1207 parseElements:
1208 for {
1209 switch {
1210 case p.tok == token.IDENT:
1211 f := p.parseMethodSpec()
1212 if f.Names == nil {
1213 f.Type = p.embeddedElem(f.Type)
1214 }
1215 f.Comment = p.expectSemi()
1216 list = append(list, f)
1217 case p.tok == token.TILDE:
1218 typ := p.embeddedElem(nil)
1219 comment := p.expectSemi()
1220 list = append(list, &ast.Field{Type: typ, Comment: comment})
1221 default:
1222 if t := p.tryIdentOrType(); t != nil {
1223 typ := p.embeddedElem(t)
1224 comment := p.expectSemi()
1225 list = append(list, &ast.Field{Type: typ, Comment: comment})
1226 } else {
1227 break parseElements
1228 }
1229 }
1230 }
1231
1232
1233
1234 rbrace := p.expect(token.RBRACE)
1235
1236 return &ast.InterfaceType{
1237 Interface: pos,
1238 Methods: &ast.FieldList{
1239 Opening: lbrace,
1240 List: list,
1241 Closing: rbrace,
1242 },
1243 }
1244 }
1245
1246 func (p *parser) parseMapType() *ast.MapType {
1247 if p.trace {
1248 defer un(trace(p, "MapType"))
1249 }
1250
1251 pos := p.expect(token.MAP)
1252 p.expect(token.LBRACK)
1253 key := p.parseType()
1254 p.expect(token.RBRACK)
1255 value := p.parseType()
1256
1257 return &ast.MapType{Map: pos, Key: key, Value: value}
1258 }
1259
1260 func (p *parser) parseChanType() *ast.ChanType {
1261 if p.trace {
1262 defer un(trace(p, "ChanType"))
1263 }
1264
1265 pos := p.pos
1266 dir := ast.SEND | ast.RECV
1267 var arrow token.Pos
1268 if p.tok == token.CHAN {
1269 p.next()
1270 if p.tok == token.ARROW {
1271 arrow = p.pos
1272 p.next()
1273 dir = ast.SEND
1274 }
1275 } else {
1276 arrow = p.expect(token.ARROW)
1277 p.expect(token.CHAN)
1278 dir = ast.RECV
1279 }
1280 value := p.parseType()
1281
1282 return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
1283 }
1284
1285 func (p *parser) parseTypeInstance(typ ast.Expr) ast.Expr {
1286 if p.trace {
1287 defer un(trace(p, "TypeInstance"))
1288 }
1289
1290 opening := p.expect(token.LBRACK)
1291 p.exprLev++
1292 var list []ast.Expr
1293 for p.tok != token.RBRACK && p.tok != token.EOF {
1294 list = append(list, p.parseType())
1295 if !p.atComma("type argument list", token.RBRACK) {
1296 break
1297 }
1298 p.next()
1299 }
1300 p.exprLev--
1301
1302 closing := p.expectClosing(token.RBRACK, "type argument list")
1303
1304 if len(list) == 0 {
1305 p.errorExpected(closing, "type argument list")
1306 return &ast.IndexExpr{
1307 X: typ,
1308 Lbrack: opening,
1309 Index: &ast.BadExpr{From: opening + 1, To: closing},
1310 Rbrack: closing,
1311 }
1312 }
1313
1314 return typeparams.PackIndexExpr(typ, opening, list, closing)
1315 }
1316
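// tryIdentOrType parses a type if the current token can start one and
// returns it; otherwise it returns nil and reports no error.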
1317 func (p *parser) tryIdentOrType() ast.Expr {
1318 defer decNestLev(incNestLev(p))
1319
1320 switch p.tok {
1321 case token.IDENT:
1322 typ := p.parseTypeName(nil)
1323 if p.tok == token.LBRACK {
1324 typ = p.parseTypeInstance(typ)
1325 }
1326 return typ
1327 case token.LBRACK:
1328 lbrack := p.expect(token.LBRACK)
1329 return p.parseArrayType(lbrack, nil)
1330 case token.STRUCT:
1331 return p.parseStructType()
1332 case token.MUL:
1333 return p.parsePointerType()
1334 case token.FUNC:
1335 return p.parseFuncType()
1336 case token.INTERFACE:
1337 return p.parseInterfaceType()
1338 case token.MAP:
1339 return p.parseMapType()
1340 case token.CHAN, token.ARROW:
1341 return p.parseChanType()
1342 case token.LPAREN:
1343 lparen := p.pos
1344 p.next()
1345 typ := p.parseType()
1346 rparen := p.expect(token.RPAREN)
1347 return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
1348 }
1349
1350
1351 return nil
1352 }
1353
1354
1355
1356
1357 func (p *parser) parseStmtList() (list []ast.Stmt) {
1358 if p.trace {
1359 defer un(trace(p, "StatementList"))
1360 }
1361
1362 for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
1363 list = append(list, p.parseStmt())
1364 }
1365
1366 return
1367 }
1368
1369 func (p *parser) parseBody() *ast.BlockStmt {
1370 if p.trace {
1371 defer un(trace(p, "Body"))
1372 }
1373
1374 lbrace := p.expect(token.LBRACE)
1375 list := p.parseStmtList()
1376 rbrace := p.expect2(token.RBRACE)
1377
1378 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1379 }
1380
1381 func (p *parser) parseBlockStmt() *ast.BlockStmt {
1382 if p.trace {
1383 defer un(trace(p, "BlockStmt"))
1384 }
1385
1386 lbrace := p.expect(token.LBRACE)
1387 list := p.parseStmtList()
1388 rbrace := p.expect2(token.RBRACE)
1389
1390 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1391 }
1392
1393
1394
1395
1396 func (p *parser) parseFuncTypeOrLit() ast.Expr {
1397 if p.trace {
1398 defer un(trace(p, "FuncTypeOrLit"))
1399 }
1400
1401 typ := p.parseFuncType()
1402 if p.tok != token.LBRACE {
1403
1404 return typ
1405 }
1406
1407 p.exprLev++
1408 body := p.parseBody()
1409 p.exprLev--
1410
1411 return &ast.FuncLit{Type: typ, Body: body}
1412 }
1413
1414
1415
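// parseOperand may return an expression or a raw type (incl. array
// types of the form [...]T). Callers must verify the result.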
1416 func (p *parser) parseOperand() ast.Expr {
1417 if p.trace {
1418 defer un(trace(p, "Operand"))
1419 }
1420
1421 switch p.tok {
1422 case token.IDENT:
1423 x := p.parseIdent()
1424 return x
1425
1426 case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
1427 x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
1428 p.next()
1429 return x
1430
1431 case token.LPAREN:
1432 lparen := p.pos
1433 p.next()
1434 p.exprLev++
1435 x := p.parseRhs()
1436 p.exprLev--
1437 rparen := p.expect(token.RPAREN)
1438 return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}
1439
1440 case token.FUNC:
1441 return p.parseFuncTypeOrLit()
1442 }
1443
1444 if typ := p.tryIdentOrType(); typ != nil {
1445
1446 _, isIdent := typ.(*ast.Ident)
1447 assert(!isIdent, "type cannot be identifier")
1448 return typ
1449 }
1450
1451
1452 pos := p.pos
1453 p.errorExpected(pos, "operand")
1454 p.advance(stmtStart)
1455 return &ast.BadExpr{From: pos, To: p.pos}
1456 }
1457
1458 func (p *parser) parseSelector(x ast.Expr) ast.Expr {
1459 if p.trace {
1460 defer un(trace(p, "Selector"))
1461 }
1462
1463 sel := p.parseIdent()
1464
1465 return &ast.SelectorExpr{X: x, Sel: sel}
1466 }
1467
1468 func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
1469 if p.trace {
1470 defer un(trace(p, "TypeAssertion"))
1471 }
1472
1473 lparen := p.expect(token.LPAREN)
1474 var typ ast.Expr
1475 if p.tok == token.TYPE {
1476
1477 p.next()
1478 } else {
1479 typ = p.parseType()
1480 }
1481 rparen := p.expect(token.RPAREN)
1482
1483 return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen}
1484 }
1485
1486 func (p *parser) parseIndexOrSliceOrInstance(x ast.Expr) ast.Expr {
1487 if p.trace {
1488 defer un(trace(p, "parseIndexOrSliceOrInstance"))
1489 }
1490
1491 lbrack := p.expect(token.LBRACK)
1492 if p.tok == token.RBRACK {
1493
1494
1495 p.errorExpected(p.pos, "operand")
1496 rbrack := p.pos
1497 p.next()
1498 return &ast.IndexExpr{
1499 X: x,
1500 Lbrack: lbrack,
1501 Index: &ast.BadExpr{From: rbrack, To: rbrack},
1502 Rbrack: rbrack,
1503 }
1504 }
1505 p.exprLev++
1506
1507 const N = 3
1508 var args []ast.Expr
1509 var index [N]ast.Expr
1510 var colons [N - 1]token.Pos
1511 if p.tok != token.COLON {
1512
1513
1514 index[0] = p.parseRhs()
1515 }
1516 ncolons := 0
1517 switch p.tok {
1518 case token.COLON:
1519
1520 for p.tok == token.COLON && ncolons < len(colons) {
1521 colons[ncolons] = p.pos
1522 ncolons++
1523 p.next()
1524 if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF {
1525 index[ncolons] = p.parseRhs()
1526 }
1527 }
1528 case token.COMMA:
1529
1530 args = append(args, index[0])
1531 for p.tok == token.COMMA {
1532 p.next()
1533 if p.tok != token.RBRACK && p.tok != token.EOF {
1534 args = append(args, p.parseType())
1535 }
1536 }
1537 }
1538
1539 p.exprLev--
1540 rbrack := p.expect(token.RBRACK)
1541
1542 if ncolons > 0 {
1543
1544 slice3 := false
1545 if ncolons == 2 {
1546 slice3 = true
1547
1548
1549 if index[1] == nil {
1550 p.error(colons[0], "middle index required in 3-index slice")
1551 index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]}
1552 }
1553 if index[2] == nil {
1554 p.error(colons[1], "final index required in 3-index slice")
1555 index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack}
1556 }
1557 }
1558 return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack}
1559 }
1560
1561 if len(args) == 0 {
1562
1563 return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack}
1564 }
1565
1566
1567 return typeparams.PackIndexExpr(x, lbrack, args, rbrack)
1568 }
1569
1570 func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
1571 if p.trace {
1572 defer un(trace(p, "CallOrConversion"))
1573 }
1574
1575 lparen := p.expect(token.LPAREN)
1576 p.exprLev++
1577 var list []ast.Expr
1578 var ellipsis token.Pos
1579 for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
1580 list = append(list, p.parseRhs())
1581 if p.tok == token.ELLIPSIS {
1582 ellipsis = p.pos
1583 p.next()
1584 }
1585 if !p.atComma("argument list", token.RPAREN) {
1586 break
1587 }
1588 p.next()
1589 }
1590 p.exprLev--
1591 rparen := p.expectClosing(token.RPAREN, "argument list")
1592
1593 return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
1594 }
1595
1596 func (p *parser) parseValue() ast.Expr {
1597 if p.trace {
1598 defer un(trace(p, "Element"))
1599 }
1600
1601 if p.tok == token.LBRACE {
1602 return p.parseLiteralValue(nil)
1603 }
1604
1605 x := p.parseExpr()
1606
1607 return x
1608 }
1609
1610 func (p *parser) parseElement() ast.Expr {
1611 if p.trace {
1612 defer un(trace(p, "Element"))
1613 }
1614
1615 x := p.parseValue()
1616 if p.tok == token.COLON {
1617 colon := p.pos
1618 p.next()
1619 x = &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseValue()}
1620 }
1621
1622 return x
1623 }
1624
1625 func (p *parser) parseElementList() (list []ast.Expr) {
1626 if p.trace {
1627 defer un(trace(p, "ElementList"))
1628 }
1629
1630 for p.tok != token.RBRACE && p.tok != token.EOF {
1631 list = append(list, p.parseElement())
1632 if !p.atComma("composite literal", token.RBRACE) {
1633 break
1634 }
1635 p.next()
1636 }
1637
1638 return
1639 }
1640
1641 func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
1642 if p.trace {
1643 defer un(trace(p, "LiteralValue"))
1644 }
1645
1646 lbrace := p.expect(token.LBRACE)
1647 var elts []ast.Expr
1648 p.exprLev++
1649 if p.tok != token.RBRACE {
1650 elts = p.parseElementList()
1651 }
1652 p.exprLev--
1653 rbrace := p.expectClosing(token.RBRACE, "composite literal")
1654 return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
1655 }
1656
1657
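// unparen removes all enclosing parentheses around an expression.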
1658 func unparen(x ast.Expr) ast.Expr {
1659 if p, isParen := x.(*ast.ParenExpr); isParen {
1660 x = unparen(p.X)
1661 }
1662 return x
1663 }
1664
1665 func (p *parser) parsePrimaryExpr(x ast.Expr) ast.Expr {
1666 if p.trace {
1667 defer un(trace(p, "PrimaryExpr"))
1668 }
1669
1670 if x == nil {
1671 x = p.parseOperand()
1672 }
1673
1674
1675
1676 var n int
1677 defer func() { p.nestLev -= n }()
1678 for n = 1; ; n++ {
1679 incNestLev(p)
1680 switch p.tok {
1681 case token.PERIOD:
1682 p.next()
1683 switch p.tok {
1684 case token.IDENT:
1685 x = p.parseSelector(x)
1686 case token.LPAREN:
1687 x = p.parseTypeAssertion(x)
1688 default:
1689 pos := p.pos
1690 p.errorExpected(pos, "selector or type assertion")
1691
1692
1693
1694
1695
1696 if p.tok != token.RBRACE {
1697 p.next()
1698 }
1699 sel := &ast.Ident{NamePos: pos, Name: "_"}
1700 x = &ast.SelectorExpr{X: x, Sel: sel}
1701 }
1702 case token.LBRACK:
1703 x = p.parseIndexOrSliceOrInstance(x)
1704 case token.LPAREN:
1705 x = p.parseCallOrConversion(x)
1706 case token.LBRACE:
1707
1708
1709 t := unparen(x)
1710
1711 switch t.(type) {
1712 case *ast.BadExpr, *ast.Ident, *ast.SelectorExpr:
1713 if p.exprLev < 0 {
1714 return x
1715 }
1716
1717 case *ast.IndexExpr, *ast.IndexListExpr:
1718 if p.exprLev < 0 {
1719 return x
1720 }
1721
1722 case *ast.ArrayType, *ast.StructType, *ast.MapType:
1723
1724 default:
1725 return x
1726 }
1727 if t != x {
1728 p.error(t.Pos(), "cannot parenthesize type in composite literal")
1729
1730 }
1731 x = p.parseLiteralValue(x)
1732 default:
1733 return x
1734 }
1735 }
1736 }
1737
1738 func (p *parser) parseUnaryExpr() ast.Expr {
1739 defer decNestLev(incNestLev(p))
1740
1741 if p.trace {
1742 defer un(trace(p, "UnaryExpr"))
1743 }
1744
1745 switch p.tok {
1746 case token.ADD, token.SUB, token.NOT, token.XOR, token.AND, token.TILDE:
1747 pos, op := p.pos, p.tok
1748 p.next()
1749 x := p.parseUnaryExpr()
1750 return &ast.UnaryExpr{OpPos: pos, Op: op, X: x}
1751
1752 case token.ARROW:
1753
1754 arrow := p.pos
1755 p.next()
1756
1757
1758
1759
1760
1761
1762
1763
1764
1765
1766
1767
1768
1769
1770
1771 x := p.parseUnaryExpr()
1772
1773
1774 if typ, ok := x.(*ast.ChanType); ok {
1775
1776
1777
1778 dir := ast.SEND
1779 for ok && dir == ast.SEND {
1780 if typ.Dir == ast.RECV {
1781
1782 p.errorExpected(typ.Arrow, "'chan'")
1783 }
1784 arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
1785 dir, typ.Dir = typ.Dir, ast.RECV
1786 typ, ok = typ.Value.(*ast.ChanType)
1787 }
1788 if dir == ast.SEND {
1789 p.errorExpected(arrow, "channel type")
1790 }
1791
1792 return x
1793 }
1794
1795
1796 return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: x}
1797
1798 case token.MUL:
1799
1800 pos := p.pos
1801 p.next()
1802 x := p.parseUnaryExpr()
1803 return &ast.StarExpr{Star: pos, X: x}
1804 }
1805
1806 return p.parsePrimaryExpr(nil)
1807 }
1808
1809 func (p *parser) tokPrec() (token.Token, int) {
1810 tok := p.tok
1811 if p.inRhs && tok == token.ASSIGN {
1812 tok = token.EQL
1813 }
1814 return tok, tok.Precedence()
1815 }
1816
1817
1818
1819
1820
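// parseBinaryExpr parses a (possibly) binary expression. If x is non-nil,
// it is used as the left operand; prec1 is the minimum operator precedence
// that will be consumed.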
1821 func (p *parser) parseBinaryExpr(x ast.Expr, prec1 int) ast.Expr {
1822 if p.trace {
1823 defer un(trace(p, "BinaryExpr"))
1824 }
1825
1826 if x == nil {
1827 x = p.parseUnaryExpr()
1828 }
1829
1830
1831
1832 var n int
1833 defer func() { p.nestLev -= n }()
1834 for n = 1; ; n++ {
1835 incNestLev(p)
1836 op, oprec := p.tokPrec()
1837 if oprec < prec1 {
1838 return x
1839 }
1840 pos := p.expect(op)
1841 y := p.parseBinaryExpr(nil, oprec+1)
1842 x = &ast.BinaryExpr{X: x, OpPos: pos, Op: op, Y: y}
1843 }
1844 }
1845
1846
1847 func (p *parser) parseExpr() ast.Expr {
1848 if p.trace {
1849 defer un(trace(p, "Expression"))
1850 }
1851
1852 return p.parseBinaryExpr(nil, token.LowestPrec+1)
1853 }
1854
1855 func (p *parser) parseRhs() ast.Expr {
1856 old := p.inRhs
1857 p.inRhs = true
1858 x := p.parseExpr()
1859 p.inRhs = old
1860 return x
1861 }
1862
1863
1864
1865
1866
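// Parsing modes for parseSimpleStmt.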
1867 const (
1868 basic = iota
1869 labelOk
1870 rangeOk
1871 )
1872
1873
1874
1875
1876
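// parseSimpleStmt returns true as its second result if it parsed the
// assignment of a range clause (with mode == rangeOk). The returned
// statement is then an assignment whose right-hand side is a single
// unary expression of the form "range x".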
1877 func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
1878 if p.trace {
1879 defer un(trace(p, "SimpleStmt"))
1880 }
1881
1882 x := p.parseList(false)
1883
1884 switch p.tok {
1885 case
1886 token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
1887 token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
1888 token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
1889 token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
1890
1891 pos, tok := p.pos, p.tok
1892 p.next()
1893 var y []ast.Expr
1894 isRange := false
1895 if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
1896 pos := p.pos
1897 p.next()
1898 y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
1899 isRange = true
1900 } else {
1901 y = p.parseList(true)
1902 }
1903 return &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}, isRange
1904 }
1905
1906 if len(x) > 1 {
1907 p.errorExpected(x[0].Pos(), "1 expression")
1908
1909 }
1910
1911 switch p.tok {
1912 case token.COLON:
1913
1914 colon := p.pos
1915 p.next()
1916 if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent {
1917
1918
1919
1920 stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
1921 return stmt, false
1922 }
1923
1924
1925
1926
1927
1928
1929 p.error(colon, "illegal label declaration")
1930 return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false
1931
1932 case token.ARROW:
1933
1934 arrow := p.pos
1935 p.next()
1936 y := p.parseRhs()
1937 return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false
1938
1939 case token.INC, token.DEC:
1940
1941 s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
1942 p.next()
1943 return s, false
1944 }
1945
1946
1947 return &ast.ExprStmt{X: x[0]}, false
1948 }
1949
1950 func (p *parser) parseCallExpr(callType string) *ast.CallExpr {
1951 x := p.parseRhs()
1952 if t := unparen(x); t != x {
1953 p.error(x.Pos(), fmt.Sprintf("expression in %s must not be parenthesized", callType))
1954 x = t
1955 }
1956 if call, isCall := x.(*ast.CallExpr); isCall {
1957 return call
1958 }
1959 if _, isBad := x.(*ast.BadExpr); !isBad {
1960
1961 p.error(p.safePos(x.End()), fmt.Sprintf("expression in %s must be function call", callType))
1962 }
1963 return nil
1964 }
1965
1966 func (p *parser) parseGoStmt() ast.Stmt {
1967 if p.trace {
1968 defer un(trace(p, "GoStmt"))
1969 }
1970
1971 pos := p.expect(token.GO)
1972 call := p.parseCallExpr("go")
1973 p.expectSemi()
1974 if call == nil {
1975 return &ast.BadStmt{From: pos, To: pos + 2}
1976 }
1977
1978 return &ast.GoStmt{Go: pos, Call: call}
1979 }
1980
1981 func (p *parser) parseDeferStmt() ast.Stmt {
1982 if p.trace {
1983 defer un(trace(p, "DeferStmt"))
1984 }
1985
1986 pos := p.expect(token.DEFER)
1987 call := p.parseCallExpr("defer")
1988 p.expectSemi()
1989 if call == nil {
1990 return &ast.BadStmt{From: pos, To: pos + 5}
1991 }
1992
1993 return &ast.DeferStmt{Defer: pos, Call: call}
1994 }
1995
1996 func (p *parser) parseReturnStmt() *ast.ReturnStmt {
1997 if p.trace {
1998 defer un(trace(p, "ReturnStmt"))
1999 }
2000
2001 pos := p.pos
2002 p.expect(token.RETURN)
2003 var x []ast.Expr
2004 if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
2005 x = p.parseList(true)
2006 }
2007 p.expectSemi()
2008
2009 return &ast.ReturnStmt{Return: pos, Results: x}
2010 }
2011
2012 func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
2013 if p.trace {
2014 defer un(trace(p, "BranchStmt"))
2015 }
2016
2017 pos := p.expect(tok)
2018 var label *ast.Ident
2019 if tok != token.FALLTHROUGH && p.tok == token.IDENT {
2020 label = p.parseIdent()
2021 }
2022 p.expectSemi()
2023
2024 return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
2025 }
2026
2027 func (p *parser) makeExpr(s ast.Stmt, want string) ast.Expr {
2028 if s == nil {
2029 return nil
2030 }
2031 if es, isExpr := s.(*ast.ExprStmt); isExpr {
2032 return es.X
2033 }
2034 found := "simple statement"
2035 if _, isAss := s.(*ast.AssignStmt); isAss {
2036 found = "assignment"
2037 }
2038 p.error(s.Pos(), fmt.Sprintf("expected %s, found %s (missing parentheses around composite literal?)", want, found))
2039 return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())}
2040 }
2041
2042
2043
2044
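// parseIfHeader parses the optional init statement and the condition of
// an if statement, stopping at the opening '{' of the body.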
2045 func (p *parser) parseIfHeader() (init ast.Stmt, cond ast.Expr) {
2046 if p.tok == token.LBRACE {
2047 p.error(p.pos, "missing condition in if statement")
2048 cond = &ast.BadExpr{From: p.pos, To: p.pos}
2049 return
2050 }
2051
2052
2053 prevLev := p.exprLev
2054 p.exprLev = -1
2055
2056 if p.tok != token.SEMICOLON {
2057
2058 if p.tok == token.VAR {
2059 p.next()
2060 p.error(p.pos, "var declaration not allowed in if initializer")
2061 }
2062 init, _ = p.parseSimpleStmt(basic)
2063 }
2064
2065 var condStmt ast.Stmt
2066 var semi struct {
2067 pos token.Pos
2068 lit string
2069 }
2070 if p.tok != token.LBRACE {
2071 if p.tok == token.SEMICOLON {
2072 semi.pos = p.pos
2073 semi.lit = p.lit
2074 p.next()
2075 } else {
2076 p.expect(token.SEMICOLON)
2077 }
2078 if p.tok != token.LBRACE {
2079 condStmt, _ = p.parseSimpleStmt(basic)
2080 }
2081 } else {
2082 condStmt = init
2083 init = nil
2084 }
2085
2086 if condStmt != nil {
2087 cond = p.makeExpr(condStmt, "boolean expression")
2088 } else if semi.pos.IsValid() {
2089 if semi.lit == "\n" {
2090 p.error(semi.pos, "unexpected newline, expecting { after if clause")
2091 } else {
2092 p.error(semi.pos, "missing condition in if statement")
2093 }
2094 }
2095
2096
2097 if cond == nil {
2098 cond = &ast.BadExpr{From: p.pos, To: p.pos}
2099 }
2100
2101 p.exprLev = prevLev
2102 return
2103 }
2104
2105 func (p *parser) parseIfStmt() *ast.IfStmt {
2106 defer decNestLev(incNestLev(p))
2107
2108 if p.trace {
2109 defer un(trace(p, "IfStmt"))
2110 }
2111
2112 pos := p.expect(token.IF)
2113
2114 init, cond := p.parseIfHeader()
2115 body := p.parseBlockStmt()
2116
2117 var else_ ast.Stmt
2118 if p.tok == token.ELSE {
2119 p.next()
2120 switch p.tok {
2121 case token.IF:
2122 else_ = p.parseIfStmt()
2123 case token.LBRACE:
2124 else_ = p.parseBlockStmt()
2125 p.expectSemi()
2126 default:
2127 p.errorExpected(p.pos, "if statement or block")
2128 else_ = &ast.BadStmt{From: p.pos, To: p.pos}
2129 }
2130 } else {
2131 p.expectSemi()
2132 }
2133
2134 return &ast.IfStmt{If: pos, Init: init, Cond: cond, Body: body, Else: else_}
2135 }
2136
2137 func (p *parser) parseCaseClause() *ast.CaseClause {
2138 if p.trace {
2139 defer un(trace(p, "CaseClause"))
2140 }
2141
2142 pos := p.pos
2143 var list []ast.Expr
2144 if p.tok == token.CASE {
2145 p.next()
2146 list = p.parseList(true)
2147 } else {
2148 p.expect(token.DEFAULT)
2149 }
2150
2151 colon := p.expect(token.COLON)
2152 body := p.parseStmtList()
2153
2154 return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
2155 }
2156
2157 func isTypeSwitchAssert(x ast.Expr) bool {
2158 a, ok := x.(*ast.TypeAssertExpr)
2159 return ok && a.Type == nil
2160 }
2161
2162 func (p *parser) isTypeSwitchGuard(s ast.Stmt) bool {
2163 switch t := s.(type) {
2164 case *ast.ExprStmt:
2165
2166 return isTypeSwitchAssert(t.X)
2167 case *ast.AssignStmt:
2168
2169 if len(t.Lhs) == 1 && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0]) {
2170 switch t.Tok {
2171 case token.ASSIGN:
2172
2173 p.error(t.TokPos, "expected ':=', found '='")
2174 fallthrough
2175 case token.DEFINE:
2176 return true
2177 }
2178 }
2179 }
2180 return false
2181 }
2182
2183 func (p *parser) parseSwitchStmt() ast.Stmt {
2184 if p.trace {
2185 defer un(trace(p, "SwitchStmt"))
2186 }
2187
2188 pos := p.expect(token.SWITCH)
2189
2190 var s1, s2 ast.Stmt
2191 if p.tok != token.LBRACE {
2192 prevLev := p.exprLev
2193 p.exprLev = -1
2194 if p.tok != token.SEMICOLON {
2195 s2, _ = p.parseSimpleStmt(basic)
2196 }
2197 if p.tok == token.SEMICOLON {
2198 p.next()
2199 s1 = s2
2200 s2 = nil
2201 if p.tok != token.LBRACE {
2202
2203
2204
2205
2206
2207
2208
2209
2210
2211
2212
2213
2214 s2, _ = p.parseSimpleStmt(basic)
2215 }
2216 }
2217 p.exprLev = prevLev
2218 }
2219
2220 typeSwitch := p.isTypeSwitchGuard(s2)
2221 lbrace := p.expect(token.LBRACE)
2222 var list []ast.Stmt
2223 for p.tok == token.CASE || p.tok == token.DEFAULT {
2224 list = append(list, p.parseCaseClause())
2225 }
2226 rbrace := p.expect(token.RBRACE)
2227 p.expectSemi()
2228 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
2229
2230 if typeSwitch {
2231 return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
2232 }
2233
2234 return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body}
2235 }
2236
2237 func (p *parser) parseCommClause() *ast.CommClause {
2238 if p.trace {
2239 defer un(trace(p, "CommClause"))
2240 }
2241
2242 pos := p.pos
2243 var comm ast.Stmt
2244 if p.tok == token.CASE {
2245 p.next()
2246 lhs := p.parseList(false)
2247 if p.tok == token.ARROW {
2248
2249 if len(lhs) > 1 {
2250 p.errorExpected(lhs[0].Pos(), "1 expression")
2251
2252 }
2253 arrow := p.pos
2254 p.next()
2255 rhs := p.parseRhs()
2256 comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
2257 } else {
2258
2259 if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
2260
2261 if len(lhs) > 2 {
2262 p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
2263
2264 lhs = lhs[0:2]
2265 }
2266 pos := p.pos
2267 p.next()
2268 rhs := p.parseRhs()
2269 comm = &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
2270 } else {
2271
2272 if len(lhs) > 1 {
2273 p.errorExpected(lhs[0].Pos(), "1 expression")
2274
2275 }
2276 comm = &ast.ExprStmt{X: lhs[0]}
2277 }
2278 }
2279 } else {
2280 p.expect(token.DEFAULT)
2281 }
2282
2283 colon := p.expect(token.COLON)
2284 body := p.parseStmtList()
2285
2286 return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
2287 }
2288
2289 func (p *parser) parseSelectStmt() *ast.SelectStmt {
2290 if p.trace {
2291 defer un(trace(p, "SelectStmt"))
2292 }
2293
2294 pos := p.expect(token.SELECT)
2295 lbrace := p.expect(token.LBRACE)
2296 var list []ast.Stmt
2297 for p.tok == token.CASE || p.tok == token.DEFAULT {
2298 list = append(list, p.parseCommClause())
2299 }
2300 rbrace := p.expect(token.RBRACE)
2301 p.expectSemi()
2302 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
2303
2304 return &ast.SelectStmt{Select: pos, Body: body}
2305 }
2306
2307 func (p *parser) parseForStmt() ast.Stmt {
2308 if p.trace {
2309 defer un(trace(p, "ForStmt"))
2310 }
2311
2312 pos := p.expect(token.FOR)
2313
2314 var s1, s2, s3 ast.Stmt
2315 var isRange bool
2316 if p.tok != token.LBRACE {
2317 prevLev := p.exprLev
2318 p.exprLev = -1
2319 if p.tok != token.SEMICOLON {
2320 if p.tok == token.RANGE {
2321
2322 pos := p.pos
2323 p.next()
2324 y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
2325 s2 = &ast.AssignStmt{Rhs: y}
2326 isRange = true
2327 } else {
2328 s2, isRange = p.parseSimpleStmt(rangeOk)
2329 }
2330 }
2331 if !isRange && p.tok == token.SEMICOLON {
2332 p.next()
2333 s1 = s2
2334 s2 = nil
2335 if p.tok != token.SEMICOLON {
2336 s2, _ = p.parseSimpleStmt(basic)
2337 }
2338 p.expectSemi()
2339 if p.tok != token.LBRACE {
2340 s3, _ = p.parseSimpleStmt(basic)
2341 }
2342 }
2343 p.exprLev = prevLev
2344 }
2345
2346 body := p.parseBlockStmt()
2347 p.expectSemi()
2348
2349 if isRange {
2350 as := s2.(*ast.AssignStmt)
2351
2352 var key, value ast.Expr
2353 switch len(as.Lhs) {
2354 case 0:
2355
2356 case 1:
2357 key = as.Lhs[0]
2358 case 2:
2359 key, value = as.Lhs[0], as.Lhs[1]
2360 default:
2361 p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions")
2362 return &ast.BadStmt{From: pos, To: p.safePos(body.End())}
2363 }
2364
2365
2366 x := as.Rhs[0].(*ast.UnaryExpr).X
2367 return &ast.RangeStmt{
2368 For: pos,
2369 Key: key,
2370 Value: value,
2371 TokPos: as.TokPos,
2372 Tok: as.Tok,
2373 Range: as.Rhs[0].Pos(),
2374 X: x,
2375 Body: body,
2376 }
2377 }
2378
2379
2380 return &ast.ForStmt{
2381 For: pos,
2382 Init: s1,
2383 Cond: p.makeExpr(s2, "boolean or range expression"),
2384 Post: s3,
2385 Body: body,
2386 }
2387 }
2388
2389 func (p *parser) parseStmt() (s ast.Stmt) {
2390 defer decNestLev(incNestLev(p))
2391
2392 if p.trace {
2393 defer un(trace(p, "Statement"))
2394 }
2395
2396 switch p.tok {
2397 case token.CONST, token.TYPE, token.VAR:
2398 s = &ast.DeclStmt{Decl: p.parseDecl(stmtStart)}
2399 case
2400
2401 token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN,
2402 token.LBRACK, token.STRUCT, token.MAP, token.CHAN, token.INTERFACE,
2403 token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT:
2404 s, _ = p.parseSimpleStmt(labelOk)
2405
2406
2407
2408 if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
2409 p.expectSemi()
2410 }
2411 case token.GO:
2412 s = p.parseGoStmt()
2413 case token.DEFER:
2414 s = p.parseDeferStmt()
2415 case token.RETURN:
2416 s = p.parseReturnStmt()
2417 case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
2418 s = p.parseBranchStmt(p.tok)
2419 case token.LBRACE:
2420 s = p.parseBlockStmt()
2421 p.expectSemi()
2422 case token.IF:
2423 s = p.parseIfStmt()
2424 case token.SWITCH:
2425 s = p.parseSwitchStmt()
2426 case token.SELECT:
2427 s = p.parseSelectStmt()
2428 case token.FOR:
2429 s = p.parseForStmt()
2430 case token.SEMICOLON:
2431
2432
2433
2434 s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: p.lit == "\n"}
2435 p.next()
2436 case token.RBRACE:
2437
2438 s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: true}
2439 default:
2440
2441 pos := p.pos
2442 p.errorExpected(pos, "statement")
2443 p.advance(stmtStart)
2444 s = &ast.BadStmt{From: pos, To: p.pos}
2445 }
2446
2447 return
2448 }
2449
2450
2451
2452
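// A parseSpecFunction parses a single declaration spec (import, constant,
// type, or variable); iota is the index of the spec within the surrounding
// declaration group.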
2453 type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
2454
2455 func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
2456 if p.trace {
2457 defer un(trace(p, "ImportSpec"))
2458 }
2459
2460 var ident *ast.Ident
2461 switch p.tok {
2462 case token.IDENT:
2463 ident = p.parseIdent()
2464 case token.PERIOD:
2465 ident = &ast.Ident{NamePos: p.pos, Name: "."}
2466 p.next()
2467 }
2468
2469 pos := p.pos
2470 var path string
2471 if p.tok == token.STRING {
2472 path = p.lit
2473 p.next()
2474 } else if p.tok.IsLiteral() {
2475 p.error(pos, "import path must be a string")
2476 p.next()
2477 } else {
2478 p.error(pos, "missing import path")
2479 p.advance(exprEnd)
2480 }
2481 comment := p.expectSemi()
2482
2483
2484 spec := &ast.ImportSpec{
2485 Doc: doc,
2486 Name: ident,
2487 Path: &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path},
2488 Comment: comment,
2489 }
2490 p.imports = append(p.imports, spec)
2491
2492 return spec
2493 }
2494
2495 func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
2496 if p.trace {
2497 defer un(trace(p, keyword.String()+"Spec"))
2498 }
2499
2500 idents := p.parseIdentList()
2501 var typ ast.Expr
2502 var values []ast.Expr
2503 switch keyword {
2504 case token.CONST:
2505
2506 if p.tok != token.EOF && p.tok != token.SEMICOLON && p.tok != token.RPAREN {
2507 typ = p.tryIdentOrType()
2508 if p.tok == token.ASSIGN {
2509 p.next()
2510 values = p.parseList(true)
2511 }
2512 }
2513 case token.VAR:
2514 if p.tok != token.ASSIGN {
2515 typ = p.parseType()
2516 }
2517 if p.tok == token.ASSIGN {
2518 p.next()
2519 values = p.parseList(true)
2520 }
2521 default:
2522 panic("unreachable")
2523 }
2524 comment := p.expectSemi()
2525
2526 spec := &ast.ValueSpec{
2527 Doc: doc,
2528 Names: idents,
2529 Type: typ,
2530 Values: values,
2531 Comment: comment,
2532 }
2533 return spec
2534 }
2535
2536 func (p *parser) parseGenericType(spec *ast.TypeSpec, openPos token.Pos, name0 *ast.Ident, typ0 ast.Expr) {
2537 if p.trace {
2538 defer un(trace(p, "parseGenericType"))
2539 }
2540
2541 list := p.parseParameterList(name0, typ0, token.RBRACK)
2542 closePos := p.expect(token.RBRACK)
2543 spec.TypeParams = &ast.FieldList{Opening: openPos, List: list, Closing: closePos}
2544
2545
2546 if p.tok == token.ASSIGN {
2547
2548 spec.Assign = p.pos
2549 p.next()
2550 }
2551 spec.Type = p.parseType()
2552 }
2553
2554 func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
2555 if p.trace {
2556 defer un(trace(p, "TypeSpec"))
2557 }
2558
2559 name := p.parseIdent()
2560 spec := &ast.TypeSpec{Doc: doc, Name: name}
2561
2562 if p.tok == token.LBRACK {
2563
2564
2565 lbrack := p.pos
2566 p.next()
2567 if p.tok == token.IDENT {
2568
2569
2570
2571
2572
2573
2574
2575
2576
2577
2578
2579
2580
2581
2582
2583 var x ast.Expr = p.parseIdent()
2584 if p.tok != token.LBRACK {
2585
2586
2587
2588 p.exprLev++
2589 lhs := p.parsePrimaryExpr(x)
2590 x = p.parseBinaryExpr(lhs, token.LowestPrec+1)
2591 p.exprLev--
2592 }
2593
2594
2595
2596
2597
2598
2599
2600 if pname, ptype := extractName(x, p.tok == token.COMMA); pname != nil && (ptype != nil || p.tok != token.RBRACK) {
2601
2602
2603
2604 p.parseGenericType(spec, lbrack, pname, ptype)
2605 } else {
2606
2607
2608 spec.Type = p.parseArrayType(lbrack, x)
2609 }
2610 } else {
2611
2612 spec.Type = p.parseArrayType(lbrack, nil)
2613 }
2614 } else {
2615
2616 if p.tok == token.ASSIGN {
2617
2618 spec.Assign = p.pos
2619 p.next()
2620 }
2621 spec.Type = p.parseType()
2622 }
2623
2624 spec.Comment = p.expectSemi()
2625
2626 return spec
2627 }
2628
2629
2630
2631
2632
2633
2634
2635
2636
2637
2638
2639
2640
2641
2642
2643
2644
2645
2646
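// extractName splits the expression x into (name, expr) if syntactically
// x can be written as "name expr" (for example, the type parameter
// declaration P *[]int). The split only happens if expr is a type element
// or if force is set. If x is just a name, the result is (name, nil);
// otherwise the result is (nil, x).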
2647 func extractName(x ast.Expr, force bool) (*ast.Ident, ast.Expr) {
2648 switch x := x.(type) {
2649 case *ast.Ident:
2650 return x, nil
2651 case *ast.BinaryExpr:
2652 switch x.Op {
2653 case token.MUL:
2654 if name, _ := x.X.(*ast.Ident); name != nil && (force || isTypeElem(x.Y)) {
2655
2656 return name, &ast.StarExpr{Star: x.OpPos, X: x.Y}
2657 }
2658 case token.OR:
2659 if name, lhs := extractName(x.X, force || isTypeElem(x.Y)); name != nil && lhs != nil {
2660
2661 op := *x
2662 op.X = lhs
2663 return name, &op
2664 }
2665 }
2666 case *ast.CallExpr:
2667 if name, _ := x.Fun.(*ast.Ident); name != nil {
2668 if len(x.Args) == 1 && x.Ellipsis == token.NoPos && (force || isTypeElem(x.Args[0])) {
2669
2670 return name, x.Args[0]
2671 }
2672 }
2673 }
2674 return nil, x
2675 }
2676
2677
2678
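// isTypeElem reports whether x is a (possibly parenthesized) type element
// expression. The result is false if x could be a type element OR an
// ordinary (value) expression.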
2679 func isTypeElem(x ast.Expr) bool {
2680 switch x := x.(type) {
2681 case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType:
2682 return true
2683 case *ast.BinaryExpr:
2684 return isTypeElem(x.X) || isTypeElem(x.Y)
2685 case *ast.UnaryExpr:
2686 return x.Op == token.TILDE
2687 case *ast.ParenExpr:
2688 return isTypeElem(x.X)
2689 }
2690 return false
2691 }
2692
2693 func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
2694 if p.trace {
2695 defer un(trace(p, "GenDecl("+keyword.String()+")"))
2696 }
2697
2698 doc := p.leadComment
2699 pos := p.expect(keyword)
2700 var lparen, rparen token.Pos
2701 var list []ast.Spec
2702 if p.tok == token.LPAREN {
2703 lparen = p.pos
2704 p.next()
2705 for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
2706 list = append(list, f(p.leadComment, keyword, iota))
2707 }
2708 rparen = p.expect(token.RPAREN)
2709 p.expectSemi()
2710 } else {
2711 list = append(list, f(nil, keyword, 0))
2712 }
2713
2714 return &ast.GenDecl{
2715 Doc: doc,
2716 TokPos: pos,
2717 Tok: keyword,
2718 Lparen: lparen,
2719 Specs: list,
2720 Rparen: rparen,
2721 }
2722 }
2723
2724 func (p *parser) parseFuncDecl() *ast.FuncDecl {
2725 if p.trace {
2726 defer un(trace(p, "FunctionDecl"))
2727 }
2728
2729 doc := p.leadComment
2730 pos := p.expect(token.FUNC)
2731
2732 var recv *ast.FieldList
2733 if p.tok == token.LPAREN {
2734 _, recv = p.parseParameters(false)
2735 }
2736
2737 ident := p.parseIdent()
2738
2739 tparams, params := p.parseParameters(true)
2740 if recv != nil && tparams != nil {
2741
2742
2743 p.error(tparams.Opening, "method must have no type parameters")
2744 tparams = nil
2745 }
2746 results := p.parseResult()
2747
2748 var body *ast.BlockStmt
2749 switch p.tok {
2750 case token.LBRACE:
2751 body = p.parseBody()
2752 p.expectSemi()
2753 case token.SEMICOLON:
2754 p.next()
2755 if p.tok == token.LBRACE {
2756
2757 p.error(p.pos, "unexpected semicolon or newline before {")
2758 body = p.parseBody()
2759 p.expectSemi()
2760 }
2761 default:
2762 p.expectSemi()
2763 }
2764
2765 decl := &ast.FuncDecl{
2766 Doc: doc,
2767 Recv: recv,
2768 Name: ident,
2769 Type: &ast.FuncType{
2770 Func: pos,
2771 TypeParams: tparams,
2772 Params: params,
2773 Results: results,
2774 },
2775 Body: body,
2776 }
2777 return decl
2778 }
2779
2780 func (p *parser) parseDecl(sync map[token.Token]bool) ast.Decl {
2781 if p.trace {
2782 defer un(trace(p, "Declaration"))
2783 }
2784
2785 var f parseSpecFunction
2786 switch p.tok {
2787 case token.IMPORT:
2788 f = p.parseImportSpec
2789
2790 case token.CONST, token.VAR:
2791 f = p.parseValueSpec
2792
2793 case token.TYPE:
2794 f = p.parseTypeSpec
2795
2796 case token.FUNC:
2797 return p.parseFuncDecl()
2798
2799 default:
2800 pos := p.pos
2801 p.errorExpected(pos, "declaration")
2802 p.advance(sync)
2803 return &ast.BadDecl{From: pos, To: p.pos}
2804 }
2805
2806 return p.parseGenDecl(p.tok, f)
2807 }
2808
2809
2810
2811
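// parseFile parses the remaining source and returns the resulting
// *ast.File. It returns nil if scanning produced errors or if the package
// clause is invalid, since the rest of the input cannot be parsed reliably
// in that case.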
2812 func (p *parser) parseFile() *ast.File {
2813 if p.trace {
2814 defer un(trace(p, "File"))
2815 }
2816
2817
2818
2819 if p.errors.Len() != 0 {
2820 return nil
2821 }
2822
2823
2824 doc := p.leadComment
2825 pos := p.expect(token.PACKAGE)
2826
2827
2828 ident := p.parseIdent()
2829 if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
2830 p.error(p.pos, "invalid package name _")
2831 }
2832 p.expectSemi()
2833
2834
2835
2836 if p.errors.Len() != 0 {
2837 return nil
2838 }
2839
2840 var decls []ast.Decl
2841 if p.mode&PackageClauseOnly == 0 {
2842
2843 for p.tok == token.IMPORT {
2844 decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
2845 }
2846
2847 if p.mode&ImportsOnly == 0 {
2848
2849 prev := token.IMPORT
2850 for p.tok != token.EOF {
2851
2852 if p.tok == token.IMPORT && prev != token.IMPORT {
2853 p.error(p.pos, "imports must appear before other declarations")
2854 }
2855 prev = p.tok
2856
2857 decls = append(decls, p.parseDecl(declStart))
2858 }
2859 }
2860 }
2861
2862 f := &ast.File{
2863 Doc: doc,
2864 Package: pos,
2865 Name: ident,
2866 Decls: decls,
2867 FileStart: token.Pos(p.file.Base()),
2868 FileEnd: token.Pos(p.file.Base() + p.file.Size()),
2869 Imports: p.imports,
2870 Comments: p.comments,
2871 GoVersion: p.goVersion,
2872 }
2873 var declErr func(token.Pos, string)
2874 if p.mode&DeclarationErrors != 0 {
2875 declErr = p.error
2876 }
2877 if p.mode&SkipObjectResolution == 0 {
2878 resolveFile(f, p.file, declErr)
2879 }
2880
2881 return f
2882 }
2883