Source file
src/go/parser/parser.go
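// Package parser implements a parser for Go source files, producing an
// abstract syntax tree (go/ast) as output.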
16 package parser
17
18 import (
19 "fmt"
20 "go/ast"
21 "go/build/constraint"
22 "go/scanner"
23 "go/token"
24 "strings"
25 )
26
27
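// The parser structure holds the parser's internal state.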
28 type parser struct {
29 file *token.File
30 errors scanner.ErrorList
31 scanner scanner.Scanner
32
33
34 mode Mode
35 trace bool
36 indent int
37
38
39 comments []*ast.CommentGroup
40 leadComment *ast.CommentGroup
41 lineComment *ast.CommentGroup
42 top bool
43 goVersion string
44
45
46 pos token.Pos
47 tok token.Token
48 lit string
49
50
51
52
53
54 syncPos token.Pos
55 syncCnt int
56
57
58 exprLev int
59 inRhs bool
60
61 imports []*ast.ImportSpec
62
63
64
65 nestLev int
66 }
67
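// init prepares the parser to parse src: it sets up the scanner with an
// error handler that collects errors into p.errors, records the mode, and
// advances to the first token.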
68 func (p *parser) init(file *token.File, src []byte, mode Mode) {
69 p.file = file
70 eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
71 p.scanner.Init(p.file, src, eh, scanner.ScanComments)
72
73 p.top = true
74 p.mode = mode
75 p.trace = mode&Trace != 0
76 p.next()
77 }
78
79
80
81
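// printTrace prints trace output for the current source position, indented
// by the current trace nesting level.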
82 func (p *parser) printTrace(a ...any) {
83 const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
84 const n = len(dots)
85 pos := p.file.Position(p.pos)
86 fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
87 i := 2 * p.indent
88 for i > n {
89 fmt.Print(dots)
90 i -= n
91 }
92
93 fmt.Print(dots[0:i])
94 fmt.Println(a...)
95 }
96
97 func trace(p *parser, msg string) *parser {
98 p.printTrace(msg, "(")
99 p.indent++
100 return p
101 }
102
103
104 func un(p *parser) {
105 p.indent--
106 p.printTrace(")")
107 }
108
109
110 const maxNestLev int = 1e5
111
112 func incNestLev(p *parser) *parser {
113 p.nestLev++
114 if p.nestLev > maxNestLev {
115 p.error(p.pos, "exceeded max nesting depth")
116 panic(bailout{})
117 }
118 return p
119 }
120
121
122
123 func decNestLev(p *parser) {
124 p.nestLev--
125 }
126
127
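// next0 advances to the next token. Before the package clause it records a
// //go:build constraint, if any, to determine p.goVersion; comments are
// skipped unless the ParseComments mode is set.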
128 func (p *parser) next0() {
129
130
131
132
133 if p.trace && p.pos.IsValid() {
134 s := p.tok.String()
135 switch {
136 case p.tok.IsLiteral():
137 p.printTrace(s, p.lit)
138 case p.tok.IsOperator(), p.tok.IsKeyword():
139 p.printTrace("\"" + s + "\"")
140 default:
141 p.printTrace(s)
142 }
143 }
144
145 for {
146 p.pos, p.tok, p.lit = p.scanner.Scan()
147 if p.tok == token.COMMENT {
148 if p.top && strings.HasPrefix(p.lit, "//go:build") {
149 if x, err := constraint.Parse(p.lit); err == nil {
150 p.goVersion = constraint.GoVersion(x)
151 }
152 }
153 if p.mode&ParseComments == 0 {
154 continue
155 }
156 } else {
157
158 p.top = false
159 }
160 break
161 }
162 }
163
164
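// lineFor returns the line number of pos, ignoring line directive adjustments.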
165 func (p *parser) lineFor(pos token.Pos) int {
166 return p.file.PositionFor(pos, false).Line
167 }
168
169
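// consumeComment consumes a comment and returns it together with the line
// on which it ends.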
170 func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
171
172
173 endline = p.lineFor(p.pos)
174 if p.lit[1] == '*' {
175
176 for i := 0; i < len(p.lit); i++ {
177 if p.lit[i] == '\n' {
178 endline++
179 }
180 }
181 }
182
183 comment = &ast.Comment{Slash: p.pos, Text: p.lit}
184 p.next0()
185
186 return
187 }
188
189
190
191
192
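// consumeCommentGroup consumes adjacent comments separated by no more than n
// empty lines, adds the group to p.comments, and returns it together with
// the line on which the last comment in the group ends.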
193 func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
194 var list []*ast.Comment
195 endline = p.lineFor(p.pos)
196 for p.tok == token.COMMENT && p.lineFor(p.pos) <= endline+n {
197 var comment *ast.Comment
198 comment, endline = p.consumeComment()
199 list = append(list, comment)
200 }
201
202
203 comments = &ast.CommentGroup{List: list}
204 p.comments = append(p.comments, comments)
205
206 return
207 }
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
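// next advances to the next non-comment token. In the process it collects
// comment groups and records the lead comment (ending on the line before the
// current token) in p.leadComment and the line comment (on the same line as
// the previous token) in p.lineComment.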
223 func (p *parser) next() {
224 p.leadComment = nil
225 p.lineComment = nil
226 prev := p.pos
227 p.next0()
228
229 if p.tok == token.COMMENT {
230 var comment *ast.CommentGroup
231 var endline int
232
233 if p.lineFor(p.pos) == p.lineFor(prev) {
234
235
236 comment, endline = p.consumeCommentGroup(0)
237 if p.lineFor(p.pos) != endline || p.tok == token.SEMICOLON || p.tok == token.EOF {
238
239
240 p.lineComment = comment
241 }
242 }
243
244
245 endline = -1
246 for p.tok == token.COMMENT {
247 comment, endline = p.consumeCommentGroup(1)
248 }
249
250 if endline+1 == p.lineFor(p.pos) {
251
252
253 p.leadComment = comment
254 }
255 }
256 }
257
258
259
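// A bailout panic is raised to abort parsing early, for example when too
// many errors have accumulated or the maximum nesting depth is exceeded.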
260 type bailout struct {
261 pos token.Pos
262 msg string
263 }
264
265 func (p *parser) error(pos token.Pos, msg string) {
266 if p.trace {
267 defer un(trace(p, "error: "+msg))
268 }
269
270 epos := p.file.Position(pos)
271
272
273
274
275 if p.mode&AllErrors == 0 {
276 n := len(p.errors)
277 if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
278 return
279 }
280 if n > 10 {
281 panic(bailout{})
282 }
283 }
284
285 p.errors.Add(epos, msg)
286 }
287
288 func (p *parser) errorExpected(pos token.Pos, msg string) {
289 msg = "expected " + msg
290 if pos == p.pos {
291
292
293 switch {
294 case p.tok == token.SEMICOLON && p.lit == "\n":
295 msg += ", found newline"
296 case p.tok.IsLiteral():
297
298 msg += ", found " + p.lit
299 default:
300 msg += ", found '" + p.tok.String() + "'"
301 }
302 }
303 p.error(pos, msg)
304 }
305
306 func (p *parser) expect(tok token.Token) token.Pos {
307 pos := p.pos
308 if p.tok != tok {
309 p.errorExpected(pos, "'"+tok.String()+"'")
310 }
311 p.next()
312 return pos
313 }
314
315
316
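// expect2 is like expect, but it returns an invalid position if the
// expected token is not found.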
317 func (p *parser) expect2(tok token.Token) (pos token.Pos) {
318 if p.tok == tok {
319 pos = p.pos
320 } else {
321 p.errorExpected(p.pos, "'"+tok.String()+"'")
322 }
323 p.next()
324 return
325 }
326
327
328
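// expectClosing is like expect, but it produces a better error message for
// the common case of a missing comma before a newline.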
329 func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
330 if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
331 p.error(p.pos, "missing ',' before newline in "+context)
332 p.next()
333 }
334 return p.expect(tok)
335 }
336
337
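// expectSemi consumes a semicolon (explicit or automatically inserted) and
// returns the applicable line comment, if any.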
338 func (p *parser) expectSemi() (comment *ast.CommentGroup) {
339
340 if p.tok != token.RPAREN && p.tok != token.RBRACE {
341 switch p.tok {
342 case token.COMMA:
343
344 p.errorExpected(p.pos, "';'")
345 fallthrough
346 case token.SEMICOLON:
347 if p.lit == ";" {
348
349 p.next()
350 comment = p.lineComment
351 } else {
352
353 comment = p.lineComment
354 p.next()
355 }
356 return comment
357 default:
358 p.errorExpected(p.pos, "';'")
359 p.advance(stmtStart)
360 }
361 }
362 return nil
363 }
364
365 func (p *parser) atComma(context string, follow token.Token) bool {
366 if p.tok == token.COMMA {
367 return true
368 }
369 if p.tok != follow {
370 msg := "missing ','"
371 if p.tok == token.SEMICOLON && p.lit == "\n" {
372 msg += " before newline"
373 }
374 p.error(p.pos, msg+" in "+context)
375 return true
376 }
377 return false
378 }
379
380 func assert(cond bool, msg string) {
381 if !cond {
382 panic("go/parser internal error: " + msg)
383 }
384 }
385
386
387
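// advance consumes tokens until the current token p.tok is in the 'to' set,
// or token.EOF; it is used for error recovery. To avoid endless loops, it
// only allows a limited number of resynchronizations at the same position.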
388 func (p *parser) advance(to map[token.Token]bool) {
389 for ; p.tok != token.EOF; p.next() {
390 if to[p.tok] {
391
392
393
394
395
396
397
398 if p.pos == p.syncPos && p.syncCnt < 10 {
399 p.syncCnt++
400 return
401 }
402 if p.pos > p.syncPos {
403 p.syncPos = p.pos
404 p.syncCnt = 0
405 return
406 }
407
408
409
410
411
412 }
413 }
414 }
415
416 var stmtStart = map[token.Token]bool{
417 token.BREAK: true,
418 token.CONST: true,
419 token.CONTINUE: true,
420 token.DEFER: true,
421 token.FALLTHROUGH: true,
422 token.FOR: true,
423 token.GO: true,
424 token.GOTO: true,
425 token.IF: true,
426 token.RETURN: true,
427 token.SELECT: true,
428 token.SWITCH: true,
429 token.TYPE: true,
430 token.VAR: true,
431 }
432
433 var declStart = map[token.Token]bool{
434 token.IMPORT: true,
435 token.CONST: true,
436 token.TYPE: true,
437 token.VAR: true,
438 }
439
440 var exprEnd = map[token.Token]bool{
441 token.COMMA: true,
442 token.COLON: true,
443 token.SEMICOLON: true,
444 token.RPAREN: true,
445 token.RBRACK: true,
446 token.RBRACE: true,
447 }
448
449
450
451
452
453
454
455
456
457
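// safePos returns pos if it is within the bounds of the current file, and
// the end-of-file position otherwise. It makes positions derived from
// possibly malformed AST nodes safe to use in error reports.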
458 func (p *parser) safePos(pos token.Pos) (res token.Pos) {
459 defer func() {
460 if recover() != nil {
461 res = token.Pos(p.file.Base() + p.file.Size())
462 }
463 }()
464 _ = p.file.Offset(pos)
465 return pos
466 }
467
468
469
470
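// parseIdent parses an identifier; if the current token is not an identifier
// it reports an error and returns a placeholder "_" identifier.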
471 func (p *parser) parseIdent() *ast.Ident {
472 pos := p.pos
473 name := "_"
474 if p.tok == token.IDENT {
475 name = p.lit
476 p.next()
477 } else {
478 p.expect(token.IDENT)
479 }
480 return &ast.Ident{NamePos: pos, Name: name}
481 }
482
483 func (p *parser) parseIdentList() (list []*ast.Ident) {
484 if p.trace {
485 defer un(trace(p, "IdentList"))
486 }
487
488 list = append(list, p.parseIdent())
489 for p.tok == token.COMMA {
490 p.next()
491 list = append(list, p.parseIdent())
492 }
493
494 return
495 }
496
497
498
499
500
501 func (p *parser) parseExprList() (list []ast.Expr) {
502 if p.trace {
503 defer un(trace(p, "ExpressionList"))
504 }
505
506 list = append(list, p.parseExpr())
507 for p.tok == token.COMMA {
508 p.next()
509 list = append(list, p.parseExpr())
510 }
511
512 return
513 }
514
515 func (p *parser) parseList(inRhs bool) []ast.Expr {
516 old := p.inRhs
517 p.inRhs = inRhs
518 list := p.parseExprList()
519 p.inRhs = old
520 return list
521 }
522
523
524
525
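// parseType parses a type; if no type can be parsed it reports an error,
// advances to a likely expression end, and returns a BadExpr.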
526 func (p *parser) parseType() ast.Expr {
527 if p.trace {
528 defer un(trace(p, "Type"))
529 }
530
531 typ := p.tryIdentOrType()
532
533 if typ == nil {
534 pos := p.pos
535 p.errorExpected(pos, "type")
536 p.advance(exprEnd)
537 return &ast.BadExpr{From: pos, To: p.pos}
538 }
539
540 return typ
541 }
542
543 func (p *parser) parseQualifiedIdent(ident *ast.Ident) ast.Expr {
544 if p.trace {
545 defer un(trace(p, "QualifiedIdent"))
546 }
547
548 typ := p.parseTypeName(ident)
549 if p.tok == token.LBRACK {
550 typ = p.parseTypeInstance(typ)
551 }
552
553 return typ
554 }
555
556
557 func (p *parser) parseTypeName(ident *ast.Ident) ast.Expr {
558 if p.trace {
559 defer un(trace(p, "TypeName"))
560 }
561
562 if ident == nil {
563 ident = p.parseIdent()
564 }
565
566 if p.tok == token.PERIOD {
567
568 p.next()
569 sel := p.parseIdent()
570 return &ast.SelectorExpr{X: ident, Sel: sel}
571 }
572
573 return ident
574 }
575
576
577
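// parseArrayType parses an array or slice type. The opening "[" has already
// been consumed and lbrack is its position; len, if non-nil, is the already
// parsed array length.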
578 func (p *parser) parseArrayType(lbrack token.Pos, len ast.Expr) *ast.ArrayType {
579 if p.trace {
580 defer un(trace(p, "ArrayType"))
581 }
582
583 if len == nil {
584 p.exprLev++
585
586 if p.tok == token.ELLIPSIS {
587 len = &ast.Ellipsis{Ellipsis: p.pos}
588 p.next()
589 } else if p.tok != token.RBRACK {
590 len = p.parseRhs()
591 }
592 p.exprLev--
593 }
594 if p.tok == token.COMMA {
595
596
597
598 p.error(p.pos, "unexpected comma; expecting ]")
599 p.next()
600 }
601 p.expect(token.RBRACK)
602 elt := p.parseType()
603 return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
604 }
605
606 func (p *parser) parseArrayFieldOrTypeInstance(x *ast.Ident) (*ast.Ident, ast.Expr) {
607 if p.trace {
608 defer un(trace(p, "ArrayFieldOrTypeInstance"))
609 }
610
611 lbrack := p.expect(token.LBRACK)
612 trailingComma := token.NoPos
613 var args []ast.Expr
614 if p.tok != token.RBRACK {
615 p.exprLev++
616 args = append(args, p.parseRhs())
617 for p.tok == token.COMMA {
618 comma := p.pos
619 p.next()
620 if p.tok == token.RBRACK {
621 trailingComma = comma
622 break
623 }
624 args = append(args, p.parseRhs())
625 }
626 p.exprLev--
627 }
628 rbrack := p.expect(token.RBRACK)
629
630 if len(args) == 0 {
631
632 elt := p.parseType()
633 return x, &ast.ArrayType{Lbrack: lbrack, Elt: elt}
634 }
635
636
637 if len(args) == 1 {
638 elt := p.tryIdentOrType()
639 if elt != nil {
640
641 if trailingComma.IsValid() {
642
643 p.error(trailingComma, "unexpected comma; expecting ]")
644 }
645 return x, &ast.ArrayType{Lbrack: lbrack, Len: args[0], Elt: elt}
646 }
647 }
648
649
650 return nil, packIndexExpr(x, lbrack, args, rbrack)
651 }
652
653 func (p *parser) parseFieldDecl() *ast.Field {
654 if p.trace {
655 defer un(trace(p, "FieldDecl"))
656 }
657
658 doc := p.leadComment
659
660 var names []*ast.Ident
661 var typ ast.Expr
662 switch p.tok {
663 case token.IDENT:
664 name := p.parseIdent()
665 if p.tok == token.PERIOD || p.tok == token.STRING || p.tok == token.SEMICOLON || p.tok == token.RBRACE {
666
667 typ = name
668 if p.tok == token.PERIOD {
669 typ = p.parseQualifiedIdent(name)
670 }
671 } else {
672
673 names = []*ast.Ident{name}
674 for p.tok == token.COMMA {
675 p.next()
676 names = append(names, p.parseIdent())
677 }
678
679
680 if len(names) == 1 && p.tok == token.LBRACK {
681 name, typ = p.parseArrayFieldOrTypeInstance(name)
682 if name == nil {
683 names = nil
684 }
685 } else {
686
687 typ = p.parseType()
688 }
689 }
690 case token.MUL:
691 star := p.pos
692 p.next()
693 if p.tok == token.LPAREN {
694
695 p.error(p.pos, "cannot parenthesize embedded type")
696 p.next()
697 typ = p.parseQualifiedIdent(nil)
698
699 if p.tok == token.RPAREN {
700 p.next()
701 }
702 } else {
703
704 typ = p.parseQualifiedIdent(nil)
705 }
706 typ = &ast.StarExpr{Star: star, X: typ}
707
708 case token.LPAREN:
709 p.error(p.pos, "cannot parenthesize embedded type")
710 p.next()
711 if p.tok == token.MUL {
712
713 star := p.pos
714 p.next()
715 typ = &ast.StarExpr{Star: star, X: p.parseQualifiedIdent(nil)}
716 } else {
717
718 typ = p.parseQualifiedIdent(nil)
719 }
720
721 if p.tok == token.RPAREN {
722 p.next()
723 }
724
725 default:
726 pos := p.pos
727 p.errorExpected(pos, "field name or embedded type")
728 p.advance(exprEnd)
729 typ = &ast.BadExpr{From: pos, To: p.pos}
730 }
731
732 var tag *ast.BasicLit
733 if p.tok == token.STRING {
734 tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
735 p.next()
736 }
737
738 comment := p.expectSemi()
739
740 field := &ast.Field{Doc: doc, Names: names, Type: typ, Tag: tag, Comment: comment}
741 return field
742 }
743
744 func (p *parser) parseStructType() *ast.StructType {
745 if p.trace {
746 defer un(trace(p, "StructType"))
747 }
748
749 pos := p.expect(token.STRUCT)
750 lbrace := p.expect(token.LBRACE)
751 var list []*ast.Field
752 for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
753
754
755
756 list = append(list, p.parseFieldDecl())
757 }
758 rbrace := p.expect(token.RBRACE)
759
760 return &ast.StructType{
761 Struct: pos,
762 Fields: &ast.FieldList{
763 Opening: lbrace,
764 List: list,
765 Closing: rbrace,
766 },
767 }
768 }
769
770 func (p *parser) parsePointerType() *ast.StarExpr {
771 if p.trace {
772 defer un(trace(p, "PointerType"))
773 }
774
775 star := p.expect(token.MUL)
776 base := p.parseType()
777
778 return &ast.StarExpr{Star: star, X: base}
779 }
780
781 func (p *parser) parseDotsType() *ast.Ellipsis {
782 if p.trace {
783 defer un(trace(p, "DotsType"))
784 }
785
786 pos := p.expect(token.ELLIPSIS)
787 elt := p.parseType()
788
789 return &ast.Ellipsis{Ellipsis: pos, Elt: elt}
790 }
791
792 type field struct {
793 name *ast.Ident
794 typ ast.Expr
795 }
796
797 func (p *parser) parseParamDecl(name *ast.Ident, typeSetsOK bool) (f field) {
798
799
800 if p.trace {
801 defer un(trace(p, "ParamDecl"))
802 }
803
804 ptok := p.tok
805 if name != nil {
806 p.tok = token.IDENT
807 } else if typeSetsOK && p.tok == token.TILDE {
808
809 return field{nil, p.embeddedElem(nil)}
810 }
811
812 switch p.tok {
813 case token.IDENT:
814
815 if name != nil {
816 f.name = name
817 p.tok = ptok
818 } else {
819 f.name = p.parseIdent()
820 }
821 switch p.tok {
822 case token.IDENT, token.MUL, token.ARROW, token.FUNC, token.CHAN, token.MAP, token.STRUCT, token.INTERFACE, token.LPAREN:
823
824 f.typ = p.parseType()
825
826 case token.LBRACK:
827
828 f.name, f.typ = p.parseArrayFieldOrTypeInstance(f.name)
829
830 case token.ELLIPSIS:
831
832 f.typ = p.parseDotsType()
833 return
834
835 case token.PERIOD:
836
837 f.typ = p.parseQualifiedIdent(f.name)
838 f.name = nil
839
840 case token.TILDE:
841 if typeSetsOK {
842 f.typ = p.embeddedElem(nil)
843 return
844 }
845
846 case token.OR:
847 if typeSetsOK {
848
849 f.typ = p.embeddedElem(f.name)
850 f.name = nil
851 return
852 }
853 }
854
855 case token.MUL, token.ARROW, token.FUNC, token.LBRACK, token.CHAN, token.MAP, token.STRUCT, token.INTERFACE, token.LPAREN:
856
857 f.typ = p.parseType()
858
859 case token.ELLIPSIS:
860
861
862 f.typ = p.parseDotsType()
863 return
864
865 default:
866
867
868 p.errorExpected(p.pos, "')'")
869 p.advance(exprEnd)
870 }
871
872
873 if typeSetsOK && p.tok == token.OR && f.typ != nil {
874 f.typ = p.embeddedElem(f.typ)
875 }
876
877 return
878 }
879
880 func (p *parser) parseParameterList(name0 *ast.Ident, typ0 ast.Expr, closing token.Token, dddok bool) (params []*ast.Field) {
881 if p.trace {
882 defer un(trace(p, "ParameterList"))
883 }
884
885
886 tparams := closing == token.RBRACK
887
888 pos0 := p.pos
889 if name0 != nil {
890 pos0 = name0.Pos()
891 } else if typ0 != nil {
892 pos0 = typ0.Pos()
893 }
894
895
896
897
898
899
900
901 var list []field
902 var named int
903 var typed int
904
905 for name0 != nil || p.tok != closing && p.tok != token.EOF {
906 var par field
907 if typ0 != nil {
908 if tparams {
909 typ0 = p.embeddedElem(typ0)
910 }
911 par = field{name0, typ0}
912 } else {
913 par = p.parseParamDecl(name0, tparams)
914 }
915 name0 = nil
916 typ0 = nil
917 if par.name != nil || par.typ != nil {
918 list = append(list, par)
919 if par.name != nil && par.typ != nil {
920 named++
921 }
922 if par.typ != nil {
923 typed++
924 }
925 }
926 if !p.atComma("parameter list", closing) {
927 break
928 }
929 p.next()
930 }
931
932 if len(list) == 0 {
933 return
934 }
935
936
937 if named == 0 {
938
939 for i := range list {
940 par := &list[i]
941 if typ := par.name; typ != nil {
942 par.typ = typ
943 par.name = nil
944 }
945 }
946 if tparams {
947
948
949 var errPos token.Pos
950 var msg string
951 if named == typed {
952 errPos = p.pos
953 msg = "missing type constraint"
954 } else {
955 errPos = pos0
956 msg = "missing type parameter name"
957 if len(list) == 1 {
958 msg += " or invalid array length"
959 }
960 }
961 p.error(errPos, msg)
962 }
963 } else if named != len(list) {
964
965 var errPos token.Pos
966 var typ ast.Expr
967 for i := range list {
968 if par := &list[len(list)-i-1]; par.typ != nil {
969 typ = par.typ
970 if par.name == nil {
971 errPos = typ.Pos()
972 n := ast.NewIdent("_")
973 n.NamePos = errPos
974 par.name = n
975 }
976 } else if typ != nil {
977 par.typ = typ
978 } else {
979
980 errPos = par.name.Pos()
981 par.typ = &ast.BadExpr{From: errPos, To: p.pos}
982 }
983 }
984 if errPos.IsValid() {
985
986
987
988
989
990
991 var msg string
992 if named == typed {
993 errPos = p.pos
994 if tparams {
995 msg = "missing type constraint"
996 } else {
997 msg = "missing parameter type"
998 }
999 } else {
1000 if tparams {
1001 msg = "missing type parameter name"
1002
1003 if len(list) == 1 {
1004 msg += " or invalid array length"
1005 }
1006 } else {
1007 msg = "missing parameter name"
1008 }
1009 }
1010 p.error(errPos, msg)
1011 }
1012 }
1013
1014
1015 first := true
1016 for i := range list {
1017 f := &list[i]
1018 if t, _ := f.typ.(*ast.Ellipsis); t != nil && (!dddok || i+1 < len(list)) {
1019 if first {
1020 first = false
1021 if dddok {
1022 p.error(t.Ellipsis, "can only use ... with final parameter")
1023 } else {
1024 p.error(t.Ellipsis, "invalid use of ...")
1025 }
1026 }
1027
1028
1029
1030 f.typ = &ast.BadExpr{From: t.Pos(), To: t.End()}
1031 }
1032 }
1033
1034
1035
1036 if named == 0 {
1037
1038 for _, par := range list {
1039 assert(par.typ != nil, "nil type in unnamed parameter list")
1040 params = append(params, &ast.Field{Type: par.typ})
1041 }
1042 return
1043 }
1044
1045
1046
1047 var names []*ast.Ident
1048 var typ ast.Expr
1049 addParams := func() {
1050 assert(typ != nil, "nil type in named parameter list")
1051 field := &ast.Field{Names: names, Type: typ}
1052 params = append(params, field)
1053 names = nil
1054 }
1055 for _, par := range list {
1056 if par.typ != typ {
1057 if len(names) > 0 {
1058 addParams()
1059 }
1060 typ = par.typ
1061 }
1062 names = append(names, par.name)
1063 }
1064 if len(names) > 0 {
1065 addParams()
1066 }
1067 return
1068 }
1069
1070 func (p *parser) parseTypeParameters() *ast.FieldList {
1071 if p.trace {
1072 defer un(trace(p, "TypeParameters"))
1073 }
1074
1075 lbrack := p.expect(token.LBRACK)
1076 var list []*ast.Field
1077 if p.tok != token.RBRACK {
1078 list = p.parseParameterList(nil, nil, token.RBRACK, false)
1079 }
1080 rbrack := p.expect(token.RBRACK)
1081
1082 if len(list) == 0 {
1083 p.error(rbrack, "empty type parameter list")
1084 return nil
1085 }
1086
1087 return &ast.FieldList{Opening: lbrack, List: list, Closing: rbrack}
1088 }
1089
1090 func (p *parser) parseParameters(result bool) *ast.FieldList {
1091 if p.trace {
1092 defer un(trace(p, "Parameters"))
1093 }
1094
1095 if !result || p.tok == token.LPAREN {
1096 lparen := p.expect(token.LPAREN)
1097 var list []*ast.Field
1098 if p.tok != token.RPAREN {
1099 list = p.parseParameterList(nil, nil, token.RPAREN, !result)
1100 }
1101 rparen := p.expect(token.RPAREN)
1102 return &ast.FieldList{Opening: lparen, List: list, Closing: rparen}
1103 }
1104
1105 if typ := p.tryIdentOrType(); typ != nil {
1106 list := make([]*ast.Field, 1)
1107 list[0] = &ast.Field{Type: typ}
1108 return &ast.FieldList{List: list}
1109 }
1110
1111 return nil
1112 }
1113
1114 func (p *parser) parseFuncType() *ast.FuncType {
1115 if p.trace {
1116 defer un(trace(p, "FuncType"))
1117 }
1118
1119 pos := p.expect(token.FUNC)
1120
1121 if p.tok == token.LBRACK {
1122 tparams := p.parseTypeParameters()
1123 if tparams != nil {
1124 p.error(tparams.Opening, "function type must have no type parameters")
1125 }
1126 }
1127 params := p.parseParameters(false)
1128 results := p.parseParameters(true)
1129
1130 return &ast.FuncType{Func: pos, Params: params, Results: results}
1131 }
1132
1133 func (p *parser) parseMethodSpec() *ast.Field {
1134 if p.trace {
1135 defer un(trace(p, "MethodSpec"))
1136 }
1137
1138 doc := p.leadComment
1139 var idents []*ast.Ident
1140 var typ ast.Expr
1141 x := p.parseTypeName(nil)
1142 if ident, _ := x.(*ast.Ident); ident != nil {
1143 switch {
1144 case p.tok == token.LBRACK:
1145
1146 lbrack := p.pos
1147 p.next()
1148 p.exprLev++
1149 x := p.parseExpr()
1150 p.exprLev--
1151 if name0, _ := x.(*ast.Ident); name0 != nil && p.tok != token.COMMA && p.tok != token.RBRACK {
1152
1153
1154
1155
1156 _ = p.parseParameterList(name0, nil, token.RBRACK, false)
1157 _ = p.expect(token.RBRACK)
1158 p.error(lbrack, "interface method must have no type parameters")
1159
1160
1161 params := p.parseParameters(false)
1162 results := p.parseParameters(true)
1163 idents = []*ast.Ident{ident}
1164 typ = &ast.FuncType{
1165 Func: token.NoPos,
1166 Params: params,
1167 Results: results,
1168 }
1169 } else {
1170
1171
1172 list := []ast.Expr{x}
1173 if p.atComma("type argument list", token.RBRACK) {
1174 p.exprLev++
1175 p.next()
1176 for p.tok != token.RBRACK && p.tok != token.EOF {
1177 list = append(list, p.parseType())
1178 if !p.atComma("type argument list", token.RBRACK) {
1179 break
1180 }
1181 p.next()
1182 }
1183 p.exprLev--
1184 }
1185 rbrack := p.expectClosing(token.RBRACK, "type argument list")
1186 typ = packIndexExpr(ident, lbrack, list, rbrack)
1187 }
1188 case p.tok == token.LPAREN:
1189
1190
1191 params := p.parseParameters(false)
1192 results := p.parseParameters(true)
1193 idents = []*ast.Ident{ident}
1194 typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
1195 default:
1196
1197 typ = x
1198 }
1199 } else {
1200
1201 typ = x
1202 if p.tok == token.LBRACK {
1203
1204 typ = p.parseTypeInstance(typ)
1205 }
1206 }
1207
1208
1209
1210
1211
1212 return &ast.Field{Doc: doc, Names: idents, Type: typ}
1213 }
1214
1215 func (p *parser) embeddedElem(x ast.Expr) ast.Expr {
1216 if p.trace {
1217 defer un(trace(p, "EmbeddedElem"))
1218 }
1219 if x == nil {
1220 x = p.embeddedTerm()
1221 }
1222 for p.tok == token.OR {
1223 t := new(ast.BinaryExpr)
1224 t.OpPos = p.pos
1225 t.Op = token.OR
1226 p.next()
1227 t.X = x
1228 t.Y = p.embeddedTerm()
1229 x = t
1230 }
1231 return x
1232 }
1233
1234 func (p *parser) embeddedTerm() ast.Expr {
1235 if p.trace {
1236 defer un(trace(p, "EmbeddedTerm"))
1237 }
1238 if p.tok == token.TILDE {
1239 t := new(ast.UnaryExpr)
1240 t.OpPos = p.pos
1241 t.Op = token.TILDE
1242 p.next()
1243 t.X = p.parseType()
1244 return t
1245 }
1246
1247 t := p.tryIdentOrType()
1248 if t == nil {
1249 pos := p.pos
1250 p.errorExpected(pos, "~ term or type")
1251 p.advance(exprEnd)
1252 return &ast.BadExpr{From: pos, To: p.pos}
1253 }
1254
1255 return t
1256 }
1257
1258 func (p *parser) parseInterfaceType() *ast.InterfaceType {
1259 if p.trace {
1260 defer un(trace(p, "InterfaceType"))
1261 }
1262
1263 pos := p.expect(token.INTERFACE)
1264 lbrace := p.expect(token.LBRACE)
1265
1266 var list []*ast.Field
1267
1268 parseElements:
1269 for {
1270 switch {
1271 case p.tok == token.IDENT:
1272 f := p.parseMethodSpec()
1273 if f.Names == nil {
1274 f.Type = p.embeddedElem(f.Type)
1275 }
1276 f.Comment = p.expectSemi()
1277 list = append(list, f)
1278 case p.tok == token.TILDE:
1279 typ := p.embeddedElem(nil)
1280 comment := p.expectSemi()
1281 list = append(list, &ast.Field{Type: typ, Comment: comment})
1282 default:
1283 if t := p.tryIdentOrType(); t != nil {
1284 typ := p.embeddedElem(t)
1285 comment := p.expectSemi()
1286 list = append(list, &ast.Field{Type: typ, Comment: comment})
1287 } else {
1288 break parseElements
1289 }
1290 }
1291 }
1292
1293
1294
1295 rbrace := p.expect(token.RBRACE)
1296
1297 return &ast.InterfaceType{
1298 Interface: pos,
1299 Methods: &ast.FieldList{
1300 Opening: lbrace,
1301 List: list,
1302 Closing: rbrace,
1303 },
1304 }
1305 }
1306
1307 func (p *parser) parseMapType() *ast.MapType {
1308 if p.trace {
1309 defer un(trace(p, "MapType"))
1310 }
1311
1312 pos := p.expect(token.MAP)
1313 p.expect(token.LBRACK)
1314 key := p.parseType()
1315 p.expect(token.RBRACK)
1316 value := p.parseType()
1317
1318 return &ast.MapType{Map: pos, Key: key, Value: value}
1319 }
1320
1321 func (p *parser) parseChanType() *ast.ChanType {
1322 if p.trace {
1323 defer un(trace(p, "ChanType"))
1324 }
1325
1326 pos := p.pos
1327 dir := ast.SEND | ast.RECV
1328 var arrow token.Pos
1329 if p.tok == token.CHAN {
1330 p.next()
1331 if p.tok == token.ARROW {
1332 arrow = p.pos
1333 p.next()
1334 dir = ast.SEND
1335 }
1336 } else {
1337 arrow = p.expect(token.ARROW)
1338 p.expect(token.CHAN)
1339 dir = ast.RECV
1340 }
1341 value := p.parseType()
1342
1343 return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
1344 }
1345
1346 func (p *parser) parseTypeInstance(typ ast.Expr) ast.Expr {
1347 if p.trace {
1348 defer un(trace(p, "TypeInstance"))
1349 }
1350
1351 opening := p.expect(token.LBRACK)
1352 p.exprLev++
1353 var list []ast.Expr
1354 for p.tok != token.RBRACK && p.tok != token.EOF {
1355 list = append(list, p.parseType())
1356 if !p.atComma("type argument list", token.RBRACK) {
1357 break
1358 }
1359 p.next()
1360 }
1361 p.exprLev--
1362
1363 closing := p.expectClosing(token.RBRACK, "type argument list")
1364
1365 if len(list) == 0 {
1366 p.errorExpected(closing, "type argument list")
1367 return &ast.IndexExpr{
1368 X: typ,
1369 Lbrack: opening,
1370 Index: &ast.BadExpr{From: opening + 1, To: closing},
1371 Rbrack: closing,
1372 }
1373 }
1374
1375 return packIndexExpr(typ, opening, list, closing)
1376 }
1377
1378 func (p *parser) tryIdentOrType() ast.Expr {
1379 defer decNestLev(incNestLev(p))
1380
1381 switch p.tok {
1382 case token.IDENT:
1383 typ := p.parseTypeName(nil)
1384 if p.tok == token.LBRACK {
1385 typ = p.parseTypeInstance(typ)
1386 }
1387 return typ
1388 case token.LBRACK:
1389 lbrack := p.expect(token.LBRACK)
1390 return p.parseArrayType(lbrack, nil)
1391 case token.STRUCT:
1392 return p.parseStructType()
1393 case token.MUL:
1394 return p.parsePointerType()
1395 case token.FUNC:
1396 return p.parseFuncType()
1397 case token.INTERFACE:
1398 return p.parseInterfaceType()
1399 case token.MAP:
1400 return p.parseMapType()
1401 case token.CHAN, token.ARROW:
1402 return p.parseChanType()
1403 case token.LPAREN:
1404 lparen := p.pos
1405 p.next()
1406 typ := p.parseType()
1407 rparen := p.expect(token.RPAREN)
1408 return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
1409 }
1410
1411
1412 return nil
1413 }
1414
1415
1416
1417
1418 func (p *parser) parseStmtList() (list []ast.Stmt) {
1419 if p.trace {
1420 defer un(trace(p, "StatementList"))
1421 }
1422
1423 for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
1424 list = append(list, p.parseStmt())
1425 }
1426
1427 return
1428 }
1429
1430 func (p *parser) parseBody() *ast.BlockStmt {
1431 if p.trace {
1432 defer un(trace(p, "Body"))
1433 }
1434
1435 lbrace := p.expect(token.LBRACE)
1436 list := p.parseStmtList()
1437 rbrace := p.expect2(token.RBRACE)
1438
1439 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1440 }
1441
1442 func (p *parser) parseBlockStmt() *ast.BlockStmt {
1443 if p.trace {
1444 defer un(trace(p, "BlockStmt"))
1445 }
1446
1447 lbrace := p.expect(token.LBRACE)
1448 list := p.parseStmtList()
1449 rbrace := p.expect2(token.RBRACE)
1450
1451 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1452 }
1453
1454
1455
1456
1457 func (p *parser) parseFuncTypeOrLit() ast.Expr {
1458 if p.trace {
1459 defer un(trace(p, "FuncTypeOrLit"))
1460 }
1461
1462 typ := p.parseFuncType()
1463 if p.tok != token.LBRACE {
1464
1465 return typ
1466 }
1467
1468 p.exprLev++
1469 body := p.parseBody()
1470 p.exprLev--
1471
1472 return &ast.FuncLit{Type: typ, Body: body}
1473 }
1474
1475
1476
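// parseOperand parses an operand: an identifier, a basic literal, a
// parenthesized expression, a function literal, or a type. Callers must
// verify the result.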
1477 func (p *parser) parseOperand() ast.Expr {
1478 if p.trace {
1479 defer un(trace(p, "Operand"))
1480 }
1481
1482 switch p.tok {
1483 case token.IDENT:
1484 x := p.parseIdent()
1485 return x
1486
1487 case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
1488 x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
1489 p.next()
1490 return x
1491
1492 case token.LPAREN:
1493 lparen := p.pos
1494 p.next()
1495 p.exprLev++
1496 x := p.parseRhs()
1497 p.exprLev--
1498 rparen := p.expect(token.RPAREN)
1499 return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}
1500
1501 case token.FUNC:
1502 return p.parseFuncTypeOrLit()
1503 }
1504
1505 if typ := p.tryIdentOrType(); typ != nil {
1506
1507 _, isIdent := typ.(*ast.Ident)
1508 assert(!isIdent, "type cannot be identifier")
1509 return typ
1510 }
1511
1512
1513 pos := p.pos
1514 p.errorExpected(pos, "operand")
1515 p.advance(stmtStart)
1516 return &ast.BadExpr{From: pos, To: p.pos}
1517 }
1518
1519 func (p *parser) parseSelector(x ast.Expr) ast.Expr {
1520 if p.trace {
1521 defer un(trace(p, "Selector"))
1522 }
1523
1524 sel := p.parseIdent()
1525
1526 return &ast.SelectorExpr{X: x, Sel: sel}
1527 }
1528
1529 func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
1530 if p.trace {
1531 defer un(trace(p, "TypeAssertion"))
1532 }
1533
1534 lparen := p.expect(token.LPAREN)
1535 var typ ast.Expr
1536 if p.tok == token.TYPE {
1537
1538 p.next()
1539 } else {
1540 typ = p.parseType()
1541 }
1542 rparen := p.expect(token.RPAREN)
1543
1544 return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen}
1545 }
1546
1547 func (p *parser) parseIndexOrSliceOrInstance(x ast.Expr) ast.Expr {
1548 if p.trace {
1549 defer un(trace(p, "parseIndexOrSliceOrInstance"))
1550 }
1551
1552 lbrack := p.expect(token.LBRACK)
1553 if p.tok == token.RBRACK {
1554
1555
1556 p.errorExpected(p.pos, "operand")
1557 rbrack := p.pos
1558 p.next()
1559 return &ast.IndexExpr{
1560 X: x,
1561 Lbrack: lbrack,
1562 Index: &ast.BadExpr{From: rbrack, To: rbrack},
1563 Rbrack: rbrack,
1564 }
1565 }
1566 p.exprLev++
1567
1568 const N = 3
1569 var args []ast.Expr
1570 var index [N]ast.Expr
1571 var colons [N - 1]token.Pos
1572 if p.tok != token.COLON {
1573
1574
1575 index[0] = p.parseRhs()
1576 }
1577 ncolons := 0
1578 switch p.tok {
1579 case token.COLON:
1580
1581 for p.tok == token.COLON && ncolons < len(colons) {
1582 colons[ncolons] = p.pos
1583 ncolons++
1584 p.next()
1585 if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF {
1586 index[ncolons] = p.parseRhs()
1587 }
1588 }
1589 case token.COMMA:
1590
1591 args = append(args, index[0])
1592 for p.tok == token.COMMA {
1593 p.next()
1594 if p.tok != token.RBRACK && p.tok != token.EOF {
1595 args = append(args, p.parseType())
1596 }
1597 }
1598 }
1599
1600 p.exprLev--
1601 rbrack := p.expect(token.RBRACK)
1602
1603 if ncolons > 0 {
1604
1605 slice3 := false
1606 if ncolons == 2 {
1607 slice3 = true
1608
1609
1610 if index[1] == nil {
1611 p.error(colons[0], "middle index required in 3-index slice")
1612 index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]}
1613 }
1614 if index[2] == nil {
1615 p.error(colons[1], "final index required in 3-index slice")
1616 index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack}
1617 }
1618 }
1619 return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack}
1620 }
1621
1622 if len(args) == 0 {
1623
1624 return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack}
1625 }
1626
1627
1628 return packIndexExpr(x, lbrack, args, rbrack)
1629 }
1630
1631 func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
1632 if p.trace {
1633 defer un(trace(p, "CallOrConversion"))
1634 }
1635
1636 lparen := p.expect(token.LPAREN)
1637 p.exprLev++
1638 var list []ast.Expr
1639 var ellipsis token.Pos
1640 for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
1641 list = append(list, p.parseRhs())
1642 if p.tok == token.ELLIPSIS {
1643 ellipsis = p.pos
1644 p.next()
1645 }
1646 if !p.atComma("argument list", token.RPAREN) {
1647 break
1648 }
1649 p.next()
1650 }
1651 p.exprLev--
1652 rparen := p.expectClosing(token.RPAREN, "argument list")
1653
1654 return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
1655 }
1656
1657 func (p *parser) parseValue() ast.Expr {
1658 if p.trace {
1659 defer un(trace(p, "Element"))
1660 }
1661
1662 if p.tok == token.LBRACE {
1663 return p.parseLiteralValue(nil)
1664 }
1665
1666 x := p.parseExpr()
1667
1668 return x
1669 }
1670
1671 func (p *parser) parseElement() ast.Expr {
1672 if p.trace {
1673 defer un(trace(p, "Element"))
1674 }
1675
1676 x := p.parseValue()
1677 if p.tok == token.COLON {
1678 colon := p.pos
1679 p.next()
1680 x = &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseValue()}
1681 }
1682
1683 return x
1684 }
1685
1686 func (p *parser) parseElementList() (list []ast.Expr) {
1687 if p.trace {
1688 defer un(trace(p, "ElementList"))
1689 }
1690
1691 for p.tok != token.RBRACE && p.tok != token.EOF {
1692 list = append(list, p.parseElement())
1693 if !p.atComma("composite literal", token.RBRACE) {
1694 break
1695 }
1696 p.next()
1697 }
1698
1699 return
1700 }
1701
1702 func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
1703 defer decNestLev(incNestLev(p))
1704
1705 if p.trace {
1706 defer un(trace(p, "LiteralValue"))
1707 }
1708
1709 lbrace := p.expect(token.LBRACE)
1710 var elts []ast.Expr
1711 p.exprLev++
1712 if p.tok != token.RBRACE {
1713 elts = p.parseElementList()
1714 }
1715 p.exprLev--
1716 rbrace := p.expectClosing(token.RBRACE, "composite literal")
1717 return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
1718 }
1719
1720 func (p *parser) parsePrimaryExpr(x ast.Expr) ast.Expr {
1721 if p.trace {
1722 defer un(trace(p, "PrimaryExpr"))
1723 }
1724
1725 if x == nil {
1726 x = p.parseOperand()
1727 }
1728
1729
1730
1731 var n int
1732 defer func() { p.nestLev -= n }()
1733 for n = 1; ; n++ {
1734 incNestLev(p)
1735 switch p.tok {
1736 case token.PERIOD:
1737 p.next()
1738 switch p.tok {
1739 case token.IDENT:
1740 x = p.parseSelector(x)
1741 case token.LPAREN:
1742 x = p.parseTypeAssertion(x)
1743 default:
1744 pos := p.pos
1745 p.errorExpected(pos, "selector or type assertion")
1746
1747
1748
1749
1750
1751 if p.tok != token.RBRACE {
1752 p.next()
1753 }
1754 sel := &ast.Ident{NamePos: pos, Name: "_"}
1755 x = &ast.SelectorExpr{X: x, Sel: sel}
1756 }
1757 case token.LBRACK:
1758 x = p.parseIndexOrSliceOrInstance(x)
1759 case token.LPAREN:
1760 x = p.parseCallOrConversion(x)
1761 case token.LBRACE:
1762
1763
1764 t := ast.Unparen(x)
1765
1766 switch t.(type) {
1767 case *ast.BadExpr, *ast.Ident, *ast.SelectorExpr:
1768 if p.exprLev < 0 {
1769 return x
1770 }
1771
1772 case *ast.IndexExpr, *ast.IndexListExpr:
1773 if p.exprLev < 0 {
1774 return x
1775 }
1776
1777 case *ast.ArrayType, *ast.StructType, *ast.MapType:
1778
1779 default:
1780 return x
1781 }
1782 if t != x {
1783 p.error(t.Pos(), "cannot parenthesize type in composite literal")
1784
1785 }
1786 x = p.parseLiteralValue(x)
1787 default:
1788 return x
1789 }
1790 }
1791 }
1792
1793 func (p *parser) parseUnaryExpr() ast.Expr {
1794 defer decNestLev(incNestLev(p))
1795
1796 if p.trace {
1797 defer un(trace(p, "UnaryExpr"))
1798 }
1799
1800 switch p.tok {
1801 case token.ADD, token.SUB, token.NOT, token.XOR, token.AND, token.TILDE:
1802 pos, op := p.pos, p.tok
1803 p.next()
1804 x := p.parseUnaryExpr()
1805 return &ast.UnaryExpr{OpPos: pos, Op: op, X: x}
1806
1807 case token.ARROW:
1808
1809 arrow := p.pos
1810 p.next()
1811
1812
1813
1814
1815
1816
1817
1818
1819
1820
1821
1822
1823
1824
1825
1826 x := p.parseUnaryExpr()
1827
1828
1829 if typ, ok := x.(*ast.ChanType); ok {
1830
1831
1832
1833 dir := ast.SEND
1834 for ok && dir == ast.SEND {
1835 if typ.Dir == ast.RECV {
1836
1837 p.errorExpected(typ.Arrow, "'chan'")
1838 }
1839 arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
1840 dir, typ.Dir = typ.Dir, ast.RECV
1841 typ, ok = typ.Value.(*ast.ChanType)
1842 }
1843 if dir == ast.SEND {
1844 p.errorExpected(arrow, "channel type")
1845 }
1846
1847 return x
1848 }
1849
1850
1851 return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: x}
1852
1853 case token.MUL:
1854
1855 pos := p.pos
1856 p.next()
1857 x := p.parseUnaryExpr()
1858 return &ast.StarExpr{Star: pos, X: x}
1859 }
1860
1861 return p.parsePrimaryExpr(nil)
1862 }
1863
1864 func (p *parser) tokPrec() (token.Token, int) {
1865 tok := p.tok
1866 if p.inRhs && tok == token.ASSIGN {
1867 tok = token.EQL
1868 }
1869 return tok, tok.Precedence()
1870 }
1871
1872
1873
1874
1875
1876 func (p *parser) parseBinaryExpr(x ast.Expr, prec1 int) ast.Expr {
1877 if p.trace {
1878 defer un(trace(p, "BinaryExpr"))
1879 }
1880
1881 if x == nil {
1882 x = p.parseUnaryExpr()
1883 }
1884
1885
1886
1887 var n int
1888 defer func() { p.nestLev -= n }()
1889 for n = 1; ; n++ {
1890 incNestLev(p)
1891 op, oprec := p.tokPrec()
1892 if oprec < prec1 {
1893 return x
1894 }
1895 pos := p.expect(op)
1896 y := p.parseBinaryExpr(nil, oprec+1)
1897 x = &ast.BinaryExpr{X: x, OpPos: pos, Op: op, Y: y}
1898 }
1899 }
1900
1901
1902 func (p *parser) parseExpr() ast.Expr {
1903 if p.trace {
1904 defer un(trace(p, "Expression"))
1905 }
1906
1907 return p.parseBinaryExpr(nil, token.LowestPrec+1)
1908 }
1909
1910 func (p *parser) parseRhs() ast.Expr {
1911 old := p.inRhs
1912 p.inRhs = true
1913 x := p.parseExpr()
1914 p.inRhs = old
1915 return x
1916 }
1917
1918
1919
1920
1921
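// Parsing modes for parseSimpleStmt.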
1922 const (
1923 basic = iota
1924 labelOk
1925 rangeOk
1926 )
1927
1928
1929
1930
1931
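// parseSimpleStmt parses a simple statement. The second result is true if
// the parsed statement is the assignment of a range clause (only possible
// with mode == rangeOk).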
1932 func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
1933 if p.trace {
1934 defer un(trace(p, "SimpleStmt"))
1935 }
1936
1937 x := p.parseList(false)
1938
1939 switch p.tok {
1940 case
1941 token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
1942 token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
1943 token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
1944 token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
1945
1946 pos, tok := p.pos, p.tok
1947 p.next()
1948 var y []ast.Expr
1949 isRange := false
1950 if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
1951 pos := p.pos
1952 p.next()
1953 y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
1954 isRange = true
1955 } else {
1956 y = p.parseList(true)
1957 }
1958 return &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}, isRange
1959 }
1960
1961 if len(x) > 1 {
1962 p.errorExpected(x[0].Pos(), "1 expression")
1963
1964 }
1965
1966 switch p.tok {
1967 case token.COLON:
1968
1969 colon := p.pos
1970 p.next()
1971 if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent {
1972
1973
1974
1975 stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
1976 return stmt, false
1977 }
1978
1979
1980
1981
1982
1983
1984 p.error(colon, "illegal label declaration")
1985 return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false
1986
1987 case token.ARROW:
1988
1989 arrow := p.pos
1990 p.next()
1991 y := p.parseRhs()
1992 return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false
1993
1994 case token.INC, token.DEC:
1995
1996 s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
1997 p.next()
1998 return s, false
1999 }
2000
2001
2002 return &ast.ExprStmt{X: x[0]}, false
2003 }
2004
2005 func (p *parser) parseCallExpr(callType string) *ast.CallExpr {
2006 x := p.parseRhs()
2007 if t := ast.Unparen(x); t != x {
2008 p.error(x.Pos(), fmt.Sprintf("expression in %s must not be parenthesized", callType))
2009 x = t
2010 }
2011 if call, isCall := x.(*ast.CallExpr); isCall {
2012 return call
2013 }
2014 if _, isBad := x.(*ast.BadExpr); !isBad {
2015
2016 p.error(p.safePos(x.End()), fmt.Sprintf("expression in %s must be function call", callType))
2017 }
2018 return nil
2019 }
2020
2021 func (p *parser) parseGoStmt() ast.Stmt {
2022 if p.trace {
2023 defer un(trace(p, "GoStmt"))
2024 }
2025
2026 pos := p.expect(token.GO)
2027 call := p.parseCallExpr("go")
2028 p.expectSemi()
2029 if call == nil {
2030 return &ast.BadStmt{From: pos, To: pos + 2}
2031 }
2032
2033 return &ast.GoStmt{Go: pos, Call: call}
2034 }
2035
2036 func (p *parser) parseDeferStmt() ast.Stmt {
2037 if p.trace {
2038 defer un(trace(p, "DeferStmt"))
2039 }
2040
2041 pos := p.expect(token.DEFER)
2042 call := p.parseCallExpr("defer")
2043 p.expectSemi()
2044 if call == nil {
2045 return &ast.BadStmt{From: pos, To: pos + 5}
2046 }
2047
2048 return &ast.DeferStmt{Defer: pos, Call: call}
2049 }
2050
2051 func (p *parser) parseReturnStmt() *ast.ReturnStmt {
2052 if p.trace {
2053 defer un(trace(p, "ReturnStmt"))
2054 }
2055
2056 pos := p.pos
2057 p.expect(token.RETURN)
2058 var x []ast.Expr
2059 if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
2060 x = p.parseList(true)
2061 }
2062 p.expectSemi()
2063
2064 return &ast.ReturnStmt{Return: pos, Results: x}
2065 }
2066
2067 func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
2068 if p.trace {
2069 defer un(trace(p, "BranchStmt"))
2070 }
2071
2072 pos := p.expect(tok)
2073 var label *ast.Ident
2074 if tok == token.GOTO || ((tok == token.CONTINUE || tok == token.BREAK) && p.tok == token.IDENT) {
2075 label = p.parseIdent()
2076 }
2077 p.expectSemi()
2078
2079 return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
2080 }
2081
2082 func (p *parser) makeExpr(s ast.Stmt, want string) ast.Expr {
2083 if s == nil {
2084 return nil
2085 }
2086 if es, isExpr := s.(*ast.ExprStmt); isExpr {
2087 return es.X
2088 }
2089 found := "simple statement"
2090 if _, isAss := s.(*ast.AssignStmt); isAss {
2091 found = "assignment"
2092 }
2093 p.error(s.Pos(), fmt.Sprintf("expected %s, found %s (missing parentheses around composite literal?)", want, found))
2094 return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())}
2095 }
2096
2097
2098
2099
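// parseIfHeader parses the optional init statement and the condition of an
// if statement, reporting appropriate errors when the condition is missing.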
2100 func (p *parser) parseIfHeader() (init ast.Stmt, cond ast.Expr) {
2101 if p.tok == token.LBRACE {
2102 p.error(p.pos, "missing condition in if statement")
2103 cond = &ast.BadExpr{From: p.pos, To: p.pos}
2104 return
2105 }
2106
2107
2108 prevLev := p.exprLev
2109 p.exprLev = -1
2110
2111 if p.tok != token.SEMICOLON {
2112
2113 if p.tok == token.VAR {
2114 p.next()
2115 p.error(p.pos, "var declaration not allowed in if initializer")
2116 }
2117 init, _ = p.parseSimpleStmt(basic)
2118 }
2119
2120 var condStmt ast.Stmt
2121 var semi struct {
2122 pos token.Pos
2123 lit string
2124 }
2125 if p.tok != token.LBRACE {
2126 if p.tok == token.SEMICOLON {
2127 semi.pos = p.pos
2128 semi.lit = p.lit
2129 p.next()
2130 } else {
2131 p.expect(token.SEMICOLON)
2132 }
2133 if p.tok != token.LBRACE {
2134 condStmt, _ = p.parseSimpleStmt(basic)
2135 }
2136 } else {
2137 condStmt = init
2138 init = nil
2139 }
2140
2141 if condStmt != nil {
2142 cond = p.makeExpr(condStmt, "boolean expression")
2143 } else if semi.pos.IsValid() {
2144 if semi.lit == "\n" {
2145 p.error(semi.pos, "unexpected newline, expecting { after if clause")
2146 } else {
2147 p.error(semi.pos, "missing condition in if statement")
2148 }
2149 }
2150
2151
2152 if cond == nil {
2153 cond = &ast.BadExpr{From: p.pos, To: p.pos}
2154 }
2155
2156 p.exprLev = prevLev
2157 return
2158 }
2159
2160 func (p *parser) parseIfStmt() *ast.IfStmt {
2161 defer decNestLev(incNestLev(p))
2162
2163 if p.trace {
2164 defer un(trace(p, "IfStmt"))
2165 }
2166
2167 pos := p.expect(token.IF)
2168
2169 init, cond := p.parseIfHeader()
2170 body := p.parseBlockStmt()
2171
2172 var else_ ast.Stmt
2173 if p.tok == token.ELSE {
2174 p.next()
2175 switch p.tok {
2176 case token.IF:
2177 else_ = p.parseIfStmt()
2178 case token.LBRACE:
2179 else_ = p.parseBlockStmt()
2180 p.expectSemi()
2181 default:
2182 p.errorExpected(p.pos, "if statement or block")
2183 else_ = &ast.BadStmt{From: p.pos, To: p.pos}
2184 }
2185 } else {
2186 p.expectSemi()
2187 }
2188
2189 return &ast.IfStmt{If: pos, Init: init, Cond: cond, Body: body, Else: else_}
2190 }
2191
2192 func (p *parser) parseCaseClause() *ast.CaseClause {
2193 if p.trace {
2194 defer un(trace(p, "CaseClause"))
2195 }
2196
2197 pos := p.pos
2198 var list []ast.Expr
2199 if p.tok == token.CASE {
2200 p.next()
2201 list = p.parseList(true)
2202 } else {
2203 p.expect(token.DEFAULT)
2204 }
2205
2206 colon := p.expect(token.COLON)
2207 body := p.parseStmtList()
2208
2209 return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
2210 }
2211
2212 func isTypeSwitchAssert(x ast.Expr) bool {
2213 a, ok := x.(*ast.TypeAssertExpr)
2214 return ok && a.Type == nil
2215 }
2216
2217 func (p *parser) isTypeSwitchGuard(s ast.Stmt) bool {
2218 switch t := s.(type) {
2219 case *ast.ExprStmt:
2220
2221 return isTypeSwitchAssert(t.X)
2222 case *ast.AssignStmt:
2223
2224 if len(t.Lhs) == 1 && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0]) {
2225 switch t.Tok {
2226 case token.ASSIGN:
2227
2228 p.error(t.TokPos, "expected ':=', found '='")
2229 fallthrough
2230 case token.DEFINE:
2231 return true
2232 }
2233 }
2234 }
2235 return false
2236 }
2237
2238 func (p *parser) parseSwitchStmt() ast.Stmt {
2239 if p.trace {
2240 defer un(trace(p, "SwitchStmt"))
2241 }
2242
2243 pos := p.expect(token.SWITCH)
2244
2245 var s1, s2 ast.Stmt
2246 if p.tok != token.LBRACE {
2247 prevLev := p.exprLev
2248 p.exprLev = -1
2249 if p.tok != token.SEMICOLON {
2250 s2, _ = p.parseSimpleStmt(basic)
2251 }
2252 if p.tok == token.SEMICOLON {
2253 p.next()
2254 s1 = s2
2255 s2 = nil
2256 if p.tok != token.LBRACE {
2257
2258
2259
2260
2261
2262
2263
2264
2265
2266
2267
2268
2269 s2, _ = p.parseSimpleStmt(basic)
2270 }
2271 }
2272 p.exprLev = prevLev
2273 }
2274
2275 typeSwitch := p.isTypeSwitchGuard(s2)
2276 lbrace := p.expect(token.LBRACE)
2277 var list []ast.Stmt
2278 for p.tok == token.CASE || p.tok == token.DEFAULT {
2279 list = append(list, p.parseCaseClause())
2280 }
2281 rbrace := p.expect(token.RBRACE)
2282 p.expectSemi()
2283 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
2284
2285 if typeSwitch {
2286 return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
2287 }
2288
2289 return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body}
2290 }
2291
2292 func (p *parser) parseCommClause() *ast.CommClause {
2293 if p.trace {
2294 defer un(trace(p, "CommClause"))
2295 }
2296
2297 pos := p.pos
2298 var comm ast.Stmt
2299 if p.tok == token.CASE {
2300 p.next()
2301 lhs := p.parseList(false)
2302 if p.tok == token.ARROW {
2303
2304 if len(lhs) > 1 {
2305 p.errorExpected(lhs[0].Pos(), "1 expression")
2306
2307 }
2308 arrow := p.pos
2309 p.next()
2310 rhs := p.parseRhs()
2311 comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
2312 } else {
2313
2314 if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
2315
2316 if len(lhs) > 2 {
2317 p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
2318
2319 lhs = lhs[0:2]
2320 }
2321 pos := p.pos
2322 p.next()
2323 rhs := p.parseRhs()
2324 comm = &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
2325 } else {
2326
2327 if len(lhs) > 1 {
2328 p.errorExpected(lhs[0].Pos(), "1 expression")
2329
2330 }
2331 comm = &ast.ExprStmt{X: lhs[0]}
2332 }
2333 }
2334 } else {
2335 p.expect(token.DEFAULT)
2336 }
2337
2338 colon := p.expect(token.COLON)
2339 body := p.parseStmtList()
2340
2341 return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
2342 }
2343
2344 func (p *parser) parseSelectStmt() *ast.SelectStmt {
2345 if p.trace {
2346 defer un(trace(p, "SelectStmt"))
2347 }
2348
2349 pos := p.expect(token.SELECT)
2350 lbrace := p.expect(token.LBRACE)
2351 var list []ast.Stmt
2352 for p.tok == token.CASE || p.tok == token.DEFAULT {
2353 list = append(list, p.parseCommClause())
2354 }
2355 rbrace := p.expect(token.RBRACE)
2356 p.expectSemi()
2357 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
2358
2359 return &ast.SelectStmt{Select: pos, Body: body}
2360 }
2361
2362 func (p *parser) parseForStmt() ast.Stmt {
2363 if p.trace {
2364 defer un(trace(p, "ForStmt"))
2365 }
2366
2367 pos := p.expect(token.FOR)
2368
2369 var s1, s2, s3 ast.Stmt
2370 var isRange bool
2371 if p.tok != token.LBRACE {
2372 prevLev := p.exprLev
2373 p.exprLev = -1
2374 if p.tok != token.SEMICOLON {
2375 if p.tok == token.RANGE {
2376
2377 pos := p.pos
2378 p.next()
2379 y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
2380 s2 = &ast.AssignStmt{Rhs: y}
2381 isRange = true
2382 } else {
2383 s2, isRange = p.parseSimpleStmt(rangeOk)
2384 }
2385 }
2386 if !isRange && p.tok == token.SEMICOLON {
2387 p.next()
2388 s1 = s2
2389 s2 = nil
2390 if p.tok != token.SEMICOLON {
2391 s2, _ = p.parseSimpleStmt(basic)
2392 }
2393 p.expectSemi()
2394 if p.tok != token.LBRACE {
2395 s3, _ = p.parseSimpleStmt(basic)
2396 }
2397 }
2398 p.exprLev = prevLev
2399 }
2400
2401 body := p.parseBlockStmt()
2402 p.expectSemi()
2403
2404 if isRange {
2405 as := s2.(*ast.AssignStmt)
2406
2407 var key, value ast.Expr
2408 switch len(as.Lhs) {
2409 case 0:
2410
2411 case 1:
2412 key = as.Lhs[0]
2413 case 2:
2414 key, value = as.Lhs[0], as.Lhs[1]
2415 default:
2416 p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions")
2417 return &ast.BadStmt{From: pos, To: p.safePos(body.End())}
2418 }
2419
2420
2421 x := as.Rhs[0].(*ast.UnaryExpr).X
2422 return &ast.RangeStmt{
2423 For: pos,
2424 Key: key,
2425 Value: value,
2426 TokPos: as.TokPos,
2427 Tok: as.Tok,
2428 Range: as.Rhs[0].Pos(),
2429 X: x,
2430 Body: body,
2431 }
2432 }
2433
2434
2435 return &ast.ForStmt{
2436 For: pos,
2437 Init: s1,
2438 Cond: p.makeExpr(s2, "boolean or range expression"),
2439 Post: s3,
2440 Body: body,
2441 }
2442 }
2443
2444 func (p *parser) parseStmt() (s ast.Stmt) {
2445 defer decNestLev(incNestLev(p))
2446
2447 if p.trace {
2448 defer un(trace(p, "Statement"))
2449 }
2450
2451 switch p.tok {
2452 case token.CONST, token.TYPE, token.VAR:
2453 s = &ast.DeclStmt{Decl: p.parseDecl(stmtStart)}
2454 case
2455
2456 token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN,
2457 token.LBRACK, token.STRUCT, token.MAP, token.CHAN, token.INTERFACE,
2458 token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT:
2459 s, _ = p.parseSimpleStmt(labelOk)
2460
2461
2462
2463 if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
2464 p.expectSemi()
2465 }
2466 case token.GO:
2467 s = p.parseGoStmt()
2468 case token.DEFER:
2469 s = p.parseDeferStmt()
2470 case token.RETURN:
2471 s = p.parseReturnStmt()
2472 case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
2473 s = p.parseBranchStmt(p.tok)
2474 case token.LBRACE:
2475 s = p.parseBlockStmt()
2476 p.expectSemi()
2477 case token.IF:
2478 s = p.parseIfStmt()
2479 case token.SWITCH:
2480 s = p.parseSwitchStmt()
2481 case token.SELECT:
2482 s = p.parseSelectStmt()
2483 case token.FOR:
2484 s = p.parseForStmt()
2485 case token.SEMICOLON:
2486
2487
2488
2489 s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: p.lit == "\n"}
2490 p.next()
2491 case token.RBRACE:
2492
2493 s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: true}
2494 default:
2495
2496 pos := p.pos
2497 p.errorExpected(pos, "statement")
2498 p.advance(stmtStart)
2499 s = &ast.BadStmt{From: pos, To: p.pos}
2500 }
2501
2502 return
2503 }
2504
2505
2506
2507
2508 type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
2509
2510 func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
2511 if p.trace {
2512 defer un(trace(p, "ImportSpec"))
2513 }
2514
2515 var ident *ast.Ident
2516 switch p.tok {
2517 case token.IDENT:
2518 ident = p.parseIdent()
2519 case token.PERIOD:
2520 ident = &ast.Ident{NamePos: p.pos, Name: "."}
2521 p.next()
2522 }
2523
2524 pos := p.pos
2525 var path string
2526 if p.tok == token.STRING {
2527 path = p.lit
2528 p.next()
2529 } else if p.tok.IsLiteral() {
2530 p.error(pos, "import path must be a string")
2531 p.next()
2532 } else {
2533 p.error(pos, "missing import path")
2534 p.advance(exprEnd)
2535 }
2536 comment := p.expectSemi()
2537
2538
2539 spec := &ast.ImportSpec{
2540 Doc: doc,
2541 Name: ident,
2542 Path: &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path},
2543 Comment: comment,
2544 }
2545 p.imports = append(p.imports, spec)
2546
2547 return spec
2548 }
2549
2550 func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
2551 if p.trace {
2552 defer un(trace(p, keyword.String()+"Spec"))
2553 }
2554
2555 idents := p.parseIdentList()
2556 var typ ast.Expr
2557 var values []ast.Expr
2558 switch keyword {
2559 case token.CONST:
2560
2561 if p.tok != token.EOF && p.tok != token.SEMICOLON && p.tok != token.RPAREN {
2562 typ = p.tryIdentOrType()
2563 if p.tok == token.ASSIGN {
2564 p.next()
2565 values = p.parseList(true)
2566 }
2567 }
2568 case token.VAR:
2569 if p.tok != token.ASSIGN {
2570 typ = p.parseType()
2571 }
2572 if p.tok == token.ASSIGN {
2573 p.next()
2574 values = p.parseList(true)
2575 }
2576 default:
2577 panic("unreachable")
2578 }
2579 comment := p.expectSemi()
2580
2581 spec := &ast.ValueSpec{
2582 Doc: doc,
2583 Names: idents,
2584 Type: typ,
2585 Values: values,
2586 Comment: comment,
2587 }
2588 return spec
2589 }
2590
2591 func (p *parser) parseGenericType(spec *ast.TypeSpec, openPos token.Pos, name0 *ast.Ident, typ0 ast.Expr) {
2592 if p.trace {
2593 defer un(trace(p, "parseGenericType"))
2594 }
2595
2596 list := p.parseParameterList(name0, typ0, token.RBRACK, false)
2597 closePos := p.expect(token.RBRACK)
2598 spec.TypeParams = &ast.FieldList{Opening: openPos, List: list, Closing: closePos}
2599 if p.tok == token.ASSIGN {
2600
2601 spec.Assign = p.pos
2602 p.next()
2603 }
2604 spec.Type = p.parseType()
2605 }
2606
2607 func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
2608 if p.trace {
2609 defer un(trace(p, "TypeSpec"))
2610 }
2611
2612 name := p.parseIdent()
2613 spec := &ast.TypeSpec{Doc: doc, Name: name}
2614
2615 if p.tok == token.LBRACK {
2616
2617
2618 lbrack := p.pos
2619 p.next()
2620 if p.tok == token.IDENT {
2621
2622
2623
2624
2625
2626
2627
2628
2629
2630
2631
2632
2633
2634
2635
2636 var x ast.Expr = p.parseIdent()
2637 if p.tok != token.LBRACK {
2638
2639
2640
2641 p.exprLev++
2642 lhs := p.parsePrimaryExpr(x)
2643 x = p.parseBinaryExpr(lhs, token.LowestPrec+1)
2644 p.exprLev--
2645 }
2646
2647
2648
2649
2650
2651
2652
2653 if pname, ptype := extractName(x, p.tok == token.COMMA); pname != nil && (ptype != nil || p.tok != token.RBRACK) {
2654
2655
2656
2657 p.parseGenericType(spec, lbrack, pname, ptype)
2658 } else {
2659
2660
2661 spec.Type = p.parseArrayType(lbrack, x)
2662 }
2663 } else {
2664
2665 spec.Type = p.parseArrayType(lbrack, nil)
2666 }
2667 } else {
2668
2669 if p.tok == token.ASSIGN {
2670
2671 spec.Assign = p.pos
2672 p.next()
2673 }
2674 spec.Type = p.parseType()
2675 }
2676
2677 spec.Comment = p.expectSemi()
2678
2679 return spec
2680 }
2681
2682
2683
2684
2685
2686
2687
2688
2689
2690
2691
2692
2693
2694
2695
2696
2697
2698
2699
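// extractName splits the expression x into (name, expr) if syntactically x
// can be written as a name followed by an expression, as in a type parameter
// declaration such as "P *int". The split only happens if force is set or if
// expr is recognizably a type element; otherwise extractName returns (nil, x).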
2700 func extractName(x ast.Expr, force bool) (*ast.Ident, ast.Expr) {
2701 switch x := x.(type) {
2702 case *ast.Ident:
2703 return x, nil
2704 case *ast.BinaryExpr:
2705 switch x.Op {
2706 case token.MUL:
2707 if name, _ := x.X.(*ast.Ident); name != nil && (force || isTypeElem(x.Y)) {
2708
2709 return name, &ast.StarExpr{Star: x.OpPos, X: x.Y}
2710 }
2711 case token.OR:
2712 if name, lhs := extractName(x.X, force || isTypeElem(x.Y)); name != nil && lhs != nil {
2713
2714 op := *x
2715 op.X = lhs
2716 return name, &op
2717 }
2718 }
2719 case *ast.CallExpr:
2720 if name, _ := x.Fun.(*ast.Ident); name != nil {
2721 if len(x.Args) == 1 && x.Ellipsis == token.NoPos && (force || isTypeElem(x.Args[0])) {
2722
2723
2724
2725 return name, &ast.ParenExpr{
2726 Lparen: x.Lparen,
2727 X: x.Args[0],
2728 Rparen: x.Rparen,
2729 }
2730 }
2731 }
2732 }
2733 return nil, x
2734 }
2735
2736
2737
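// isTypeElem reports whether x is a (possibly parenthesized) expression that
// can only be a type element: a composite type, a ~T term, or a binary
// expression involving such an operand.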
2738 func isTypeElem(x ast.Expr) bool {
2739 switch x := x.(type) {
2740 case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType:
2741 return true
2742 case *ast.BinaryExpr:
2743 return isTypeElem(x.X) || isTypeElem(x.Y)
2744 case *ast.UnaryExpr:
2745 return x.Op == token.TILDE
2746 case *ast.ParenExpr:
2747 return isTypeElem(x.X)
2748 }
2749 return false
2750 }
2751
2752 func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
2753 if p.trace {
2754 defer un(trace(p, "GenDecl("+keyword.String()+")"))
2755 }
2756
2757 doc := p.leadComment
2758 pos := p.expect(keyword)
2759 var lparen, rparen token.Pos
2760 var list []ast.Spec
2761 if p.tok == token.LPAREN {
2762 lparen = p.pos
2763 p.next()
2764 for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
2765 list = append(list, f(p.leadComment, keyword, iota))
2766 }
2767 rparen = p.expect(token.RPAREN)
2768 p.expectSemi()
2769 } else {
2770 list = append(list, f(nil, keyword, 0))
2771 }
2772
2773 return &ast.GenDecl{
2774 Doc: doc,
2775 TokPos: pos,
2776 Tok: keyword,
2777 Lparen: lparen,
2778 Specs: list,
2779 Rparen: rparen,
2780 }
2781 }
2782
2783 func (p *parser) parseFuncDecl() *ast.FuncDecl {
2784 if p.trace {
2785 defer un(trace(p, "FunctionDecl"))
2786 }
2787
2788 doc := p.leadComment
2789 pos := p.expect(token.FUNC)
2790
2791 var recv *ast.FieldList
2792 if p.tok == token.LPAREN {
2793 recv = p.parseParameters(false)
2794 }
2795
2796 ident := p.parseIdent()
2797
2798 var tparams *ast.FieldList
2799 if p.tok == token.LBRACK {
2800 tparams = p.parseTypeParameters()
2801 if recv != nil && tparams != nil {
2802
2803
2804 p.error(tparams.Opening, "method must have no type parameters")
2805 tparams = nil
2806 }
2807 }
2808 params := p.parseParameters(false)
2809 results := p.parseParameters(true)
2810
2811 var body *ast.BlockStmt
2812 switch p.tok {
2813 case token.LBRACE:
2814 body = p.parseBody()
2815 p.expectSemi()
2816 case token.SEMICOLON:
2817 p.next()
2818 if p.tok == token.LBRACE {
2819
2820 p.error(p.pos, "unexpected semicolon or newline before {")
2821 body = p.parseBody()
2822 p.expectSemi()
2823 }
2824 default:
2825 p.expectSemi()
2826 }
2827
2828 decl := &ast.FuncDecl{
2829 Doc: doc,
2830 Recv: recv,
2831 Name: ident,
2832 Type: &ast.FuncType{
2833 Func: pos,
2834 TypeParams: tparams,
2835 Params: params,
2836 Results: results,
2837 },
2838 Body: body,
2839 }
2840 return decl
2841 }
2842
2843 func (p *parser) parseDecl(sync map[token.Token]bool) ast.Decl {
2844 if p.trace {
2845 defer un(trace(p, "Declaration"))
2846 }
2847
2848 var f parseSpecFunction
2849 switch p.tok {
2850 case token.IMPORT:
2851 f = p.parseImportSpec
2852
2853 case token.CONST, token.VAR:
2854 f = p.parseValueSpec
2855
2856 case token.TYPE:
2857 f = p.parseTypeSpec
2858
2859 case token.FUNC:
2860 return p.parseFuncDecl()
2861
2862 default:
2863 pos := p.pos
2864 p.errorExpected(pos, "declaration")
2865 p.advance(sync)
2866 return &ast.BadDecl{From: pos, To: p.pos}
2867 }
2868
2869 return p.parseGenDecl(p.tok, f)
2870 }
2871
2872
2873
2874
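// parseFile parses a complete source file: the package clause, any import
// declarations, and the remaining top-level declarations, honoring the
// parser's mode flags (PackageClauseOnly, ImportsOnly, and so on).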
2875 func (p *parser) parseFile() *ast.File {
2876 if p.trace {
2877 defer un(trace(p, "File"))
2878 }
2879
2880
2881
2882 if p.errors.Len() != 0 {
2883 return nil
2884 }
2885
2886
2887 doc := p.leadComment
2888 pos := p.expect(token.PACKAGE)
2889
2890
2891 ident := p.parseIdent()
2892 if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
2893 p.error(p.pos, "invalid package name _")
2894 }
2895 p.expectSemi()
2896
2897
2898
2899 if p.errors.Len() != 0 {
2900 return nil
2901 }
2902
2903 var decls []ast.Decl
2904 if p.mode&PackageClauseOnly == 0 {
2905
2906 for p.tok == token.IMPORT {
2907 decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
2908 }
2909
2910 if p.mode&ImportsOnly == 0 {
2911
2912 prev := token.IMPORT
2913 for p.tok != token.EOF {
2914
2915 if p.tok == token.IMPORT && prev != token.IMPORT {
2916 p.error(p.pos, "imports must appear before other declarations")
2917 }
2918 prev = p.tok
2919
2920 decls = append(decls, p.parseDecl(declStart))
2921 }
2922 }
2923 }
2924
2925 f := &ast.File{
2926 Doc: doc,
2927 Package: pos,
2928 Name: ident,
2929 Decls: decls,
2930
2931 Imports: p.imports,
2932 Comments: p.comments,
2933 GoVersion: p.goVersion,
2934 }
2935 var declErr func(token.Pos, string)
2936 if p.mode&DeclarationErrors != 0 {
2937 declErr = p.error
2938 }
2939 if p.mode&SkipObjectResolution == 0 {
2940 resolveFile(f, p.file, declErr)
2941 }
2942
2943 return f
2944 }
2945
2946
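// packIndexExpr returns an IndexExpr for a single index expression and an
// IndexListExpr for more than one.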
2947 func packIndexExpr(x ast.Expr, lbrack token.Pos, exprs []ast.Expr, rbrack token.Pos) ast.Expr {
2948 switch len(exprs) {
2949 case 0:
2950 panic("internal error: packIndexExpr with empty expr slice")
2951 case 1:
2952 return &ast.IndexExpr{
2953 X: x,
2954 Lbrack: lbrack,
2955 Index: exprs[0],
2956 Rbrack: rbrack,
2957 }
2958 default:
2959 return &ast.IndexListExpr{
2960 X: x,
2961 Lbrack: lbrack,
2962 Indices: exprs,
2963 Rbrack: rbrack,
2964 }
2965 }
2966 }
2967