1// Copyright 2009 The Go Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style
3// license that can be found in the LICENSE file.
4
// Package parser implements a parser for Go source files. Input may be
// provided in a variety of forms (see the various Parse* functions); the
// output is an abstract syntax tree (AST) representing the Go source. The
// parser is invoked through one of the Parse* functions.
//
// The parser accepts a larger language than is syntactically permitted by
// the Go spec, for simplicity, and for improved robustness in the presence
// of syntax errors. For instance, in method declarations, the receiver is
// treated like an ordinary parameter list and thus may contain multiple
// entries where the spec permits exactly one. Consequently, the corresponding
// field in the AST (ast.FuncDecl.Recv) is not restricted to one entry.
//
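// A typical invocation looks roughly like this (a minimal sketch; the
// filename is illustrative and error handling is elided):
//
//	fset := token.NewFileSet()
//	f, err := parser.ParseFile(fset, "example.go", nil, parser.ParseComments)
//	if err != nil {
//		// handle syntax errors
//	}
//	ast.Print(fset, f)
//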
17package parser
18
19import (
20	"fmt"
21	"go/ast"
22	"go/scanner"
23	"go/token"
24	"strconv"
25	"strings"
26	"unicode"
27)
28
29// The parser structure holds the parser's internal state.
30type parser struct {
31	file    *token.File
32	errors  scanner.ErrorList
33	scanner scanner.Scanner
34
35	// Tracing/debugging
36	mode   Mode // parsing mode
37	trace  bool // == (mode & Trace != 0)
38	indent int  // indentation used for tracing output
39
40	// Comments
41	comments    []*ast.CommentGroup
42	leadComment *ast.CommentGroup // last lead comment
43	lineComment *ast.CommentGroup // last line comment
44
45	// Next token
46	pos token.Pos   // token position
47	tok token.Token // one token look-ahead
48	lit string      // token literal
49
50	// Error recovery
51	// (used to limit the number of calls to syncXXX functions
52	// w/o making scanning progress - avoids potential endless
53	// loops across multiple parser functions during error recovery)
54	syncPos token.Pos // last synchronization position
55	syncCnt int       // number of calls to syncXXX without progress
56
57	// Non-syntactic parser control
58	exprLev int  // < 0: in control clause, >= 0: in expression
59	inRhs   bool // if set, the parser is parsing a rhs expression
60
61	// Ordinary identifier scopes
62	pkgScope   *ast.Scope        // pkgScope.Outer == nil
63	topScope   *ast.Scope        // top-most scope; may be pkgScope
64	unresolved []*ast.Ident      // unresolved identifiers
65	imports    []*ast.ImportSpec // list of imports
66
67	// Label scopes
68	// (maintained by open/close LabelScope)
69	labelScope  *ast.Scope     // label scope for current function
70	targetStack [][]*ast.Ident // stack of unresolved labels
71}
72
73func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
74	p.file = fset.AddFile(filename, -1, len(src))
75	var m scanner.Mode
76	if mode&ParseComments != 0 {
77		m = scanner.ScanComments
78	}
79	eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
80	p.scanner.Init(p.file, src, eh, m)
81
82	p.mode = mode
83	p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)
84
85	p.next()
86}
87
88// ----------------------------------------------------------------------------
89// Scoping support
90
91func (p *parser) openScope() {
92	p.topScope = ast.NewScope(p.topScope)
93}
94
95func (p *parser) closeScope() {
96	p.topScope = p.topScope.Outer
97}
98
99func (p *parser) openLabelScope() {
100	p.labelScope = ast.NewScope(p.labelScope)
101	p.targetStack = append(p.targetStack, nil)
102}
103
104func (p *parser) closeLabelScope() {
105	// resolve labels
106	n := len(p.targetStack) - 1
107	scope := p.labelScope
108	for _, ident := range p.targetStack[n] {
109		ident.Obj = scope.Lookup(ident.Name)
110		if ident.Obj == nil && p.mode&DeclarationErrors != 0 {
111			p.error(ident.Pos(), fmt.Sprintf("label %s undefined", ident.Name))
112		}
113	}
114	// pop label scope
115	p.targetStack = p.targetStack[0:n]
116	p.labelScope = p.labelScope.Outer
117}
118
119func (p *parser) declare(decl, data interface{}, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) {
120	for _, ident := range idents {
121		assert(ident.Obj == nil, "identifier already declared or resolved")
122		obj := ast.NewObj(kind, ident.Name)
123		// remember the corresponding declaration for redeclaration
124		// errors and global variable resolution/typechecking phase
125		obj.Decl = decl
126		obj.Data = data
127		ident.Obj = obj
128		if ident.Name != "_" {
129			if alt := scope.Insert(obj); alt != nil && p.mode&DeclarationErrors != 0 {
130				prevDecl := ""
131				if pos := alt.Pos(); pos.IsValid() {
132					prevDecl = fmt.Sprintf("\n\tprevious declaration at %s", p.file.Position(pos))
133				}
134				p.error(ident.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident.Name, prevDecl))
135			}
136		}
137	}
138}
139
140func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
	// Go spec: A short variable declaration may redeclare variables
	// provided they were originally declared in the same block with
	// the same type, and at least one of the non-blank variables is new.
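	// A tiny illustration (hypothetical snippet, shown only as a comment):
	//
	//	a, b := 1, 2 // declares a and b
	//	a, c := 3, 4 // redeclares a, declares c (legal: c is new)
	//	a, b := 5, 6 // error: no new variables on left side of :=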
144	n := 0 // number of new variables
145	for _, x := range list {
146		if ident, isIdent := x.(*ast.Ident); isIdent {
147			assert(ident.Obj == nil, "identifier already declared or resolved")
148			obj := ast.NewObj(ast.Var, ident.Name)
149			// remember corresponding assignment for other tools
150			obj.Decl = decl
151			ident.Obj = obj
152			if ident.Name != "_" {
153				if alt := p.topScope.Insert(obj); alt != nil {
154					ident.Obj = alt // redeclaration
155				} else {
156					n++ // new declaration
157				}
158			}
159		} else {
160			p.errorExpected(x.Pos(), "identifier on left side of :=")
161		}
162	}
163	if n == 0 && p.mode&DeclarationErrors != 0 {
164		p.error(list[0].Pos(), "no new variables on left side of :=")
165	}
166}
167
168// The unresolved object is a sentinel to mark identifiers that have been added
169// to the list of unresolved identifiers. The sentinel is only used for verifying
170// internal consistency.
171var unresolved = new(ast.Object)
172
173// If x is an identifier, tryResolve attempts to resolve x by looking up
174// the object it denotes. If no object is found and collectUnresolved is
175// set, x is marked as unresolved and collected in the list of unresolved
176// identifiers.
177//
178func (p *parser) tryResolve(x ast.Expr, collectUnresolved bool) {
179	// nothing to do if x is not an identifier or the blank identifier
180	ident, _ := x.(*ast.Ident)
181	if ident == nil {
182		return
183	}
184	assert(ident.Obj == nil, "identifier already declared or resolved")
185	if ident.Name == "_" {
186		return
187	}
188	// try to resolve the identifier
189	for s := p.topScope; s != nil; s = s.Outer {
190		if obj := s.Lookup(ident.Name); obj != nil {
191			ident.Obj = obj
192			return
193		}
194	}
195	// all local scopes are known, so any unresolved identifier
196	// must be found either in the file scope, package scope
197	// (perhaps in another file), or universe scope --- collect
198	// them so that they can be resolved later
199	if collectUnresolved {
200		ident.Obj = unresolved
201		p.unresolved = append(p.unresolved, ident)
202	}
203}
204
205func (p *parser) resolve(x ast.Expr) {
206	p.tryResolve(x, true)
207}
208
209// ----------------------------------------------------------------------------
210// Parsing support
211
212func (p *parser) printTrace(a ...interface{}) {
213	const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
214	const n = len(dots)
215	pos := p.file.Position(p.pos)
216	fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
217	i := 2 * p.indent
218	for i > n {
219		fmt.Print(dots)
220		i -= n
221	}
222	// i <= n
223	fmt.Print(dots[0:i])
224	fmt.Println(a...)
225}
226
227func trace(p *parser, msg string) *parser {
228	p.printTrace(msg, "(")
229	p.indent++
230	return p
231}
232
233// Usage pattern: defer un(trace(p, "..."))
234func un(p *parser) {
235	p.indent--
236	p.printTrace(")")
237}
238
239// Advance to the next token.
240func (p *parser) next0() {
	// Because of one-token look-ahead, print the previous token
	// when tracing as it provides a more readable output. The
	// very first token (!p.pos.IsValid()) is not initialized
	// (it is token.ILLEGAL), so don't print it.
245	if p.trace && p.pos.IsValid() {
246		s := p.tok.String()
247		switch {
248		case p.tok.IsLiteral():
249			p.printTrace(s, p.lit)
250		case p.tok.IsOperator(), p.tok.IsKeyword():
251			p.printTrace("\"" + s + "\"")
252		default:
253			p.printTrace(s)
254		}
255	}
256
257	p.pos, p.tok, p.lit = p.scanner.Scan()
258}
259
260// Consume a comment and return it and the line on which it ends.
261func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
262	// /*-style comments may end on a different line than where they start.
263	// Scan the comment for '\n' chars and adjust endline accordingly.
264	endline = p.file.Line(p.pos)
265	if p.lit[1] == '*' {
266		// don't use range here - no need to decode Unicode code points
267		for i := 0; i < len(p.lit); i++ {
268			if p.lit[i] == '\n' {
269				endline++
270			}
271		}
272	}
273
274	comment = &ast.Comment{Slash: p.pos, Text: p.lit}
275	p.next0()
276
277	return
278}
279
// Consume a group of adjacent comments, add it to the parser's
// comments list, and return it together with the line at which
// the last comment in the group ends. A non-comment token or n
// empty lines terminate a comment group.
//
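// For example (illustrative), with n == 1 the blank line below separates
// the comments into two groups:
//
//	// group 1, line 1
//	// group 1, line 2
//
//	// group 2
//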
285func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
286	var list []*ast.Comment
287	endline = p.file.Line(p.pos)
288	for p.tok == token.COMMENT && p.file.Line(p.pos) <= endline+n {
289		var comment *ast.Comment
290		comment, endline = p.consumeComment()
291		list = append(list, comment)
292	}
293
294	// add comment group to the comments list
295	comments = &ast.CommentGroup{List: list}
296	p.comments = append(p.comments, comments)
297
298	return
299}
300
// Advance to the next non-comment token. In the process, collect
// any comment groups encountered, and remember the last lead and
// line comments.
//
// A lead comment is a comment group that starts and ends in a
// line without any other tokens and that is followed by a non-comment
// token on the line immediately after the comment group.
//
// A line comment is a comment group that follows a non-comment
// token on the same line, and that has no tokens after it on the line
// where it ends.
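//
// For example (illustrative only):
//
//	// lead comment for f
//	func f() {} // line comment for f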
//
// Lead and line comments may be considered documentation that is
// stored in the AST.
//
316func (p *parser) next() {
317	p.leadComment = nil
318	p.lineComment = nil
319	prev := p.pos
320	p.next0()
321
322	if p.tok == token.COMMENT {
323		var comment *ast.CommentGroup
324		var endline int
325
326		if p.file.Line(p.pos) == p.file.Line(prev) {
327			// The comment is on same line as the previous token; it
328			// cannot be a lead comment but may be a line comment.
329			comment, endline = p.consumeCommentGroup(0)
330			if p.file.Line(p.pos) != endline || p.tok == token.EOF {
331				// The next token is on a different line, thus
332				// the last comment group is a line comment.
333				p.lineComment = comment
334			}
335		}
336
337		// consume successor comments, if any
338		endline = -1
339		for p.tok == token.COMMENT {
340			comment, endline = p.consumeCommentGroup(1)
341		}
342
343		if endline+1 == p.file.Line(p.pos) {
344			// The next token is following on the line immediately after the
345			// comment group, thus the last comment group is a lead comment.
346			p.leadComment = comment
347		}
348	}
349}
350
351// A bailout panic is raised to indicate early termination.
352type bailout struct{}
353
354func (p *parser) error(pos token.Pos, msg string) {
355	epos := p.file.Position(pos)
356
357	// If AllErrors is not set, discard errors reported on the same line
358	// as the last recorded error and stop parsing if there are more than
359	// 10 errors.
360	if p.mode&AllErrors == 0 {
361		n := len(p.errors)
362		if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
363			return // discard - likely a spurious error
364		}
365		if n > 10 {
366			panic(bailout{})
367		}
368	}
369
370	p.errors.Add(epos, msg)
371}
372
373func (p *parser) errorExpected(pos token.Pos, msg string) {
374	msg = "expected " + msg
375	if pos == p.pos {
376		// the error happened at the current position;
377		// make the error message more specific
378		if p.tok == token.SEMICOLON && p.lit == "\n" {
379			msg += ", found newline"
380		} else {
381			msg += ", found '" + p.tok.String() + "'"
382			if p.tok.IsLiteral() {
383				msg += " " + p.lit
384			}
385		}
386	}
387	p.error(pos, msg)
388}
389
390func (p *parser) expect(tok token.Token) token.Pos {
391	pos := p.pos
392	if p.tok != tok {
393		p.errorExpected(pos, "'"+tok.String()+"'")
394	}
395	p.next() // make progress
396	return pos
397}
398
// expectClosing is like expect but provides a better error message
// for the common case of a missing comma before a newline.
//
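// For example (illustrative), an argument list written as
//
//	f(a,
//		b
//	)
//
// draws a diagnostic of the form: missing ',' before newline in argument list.
//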
402func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
403	if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
404		p.error(p.pos, "missing ',' before newline in "+context)
405		p.next()
406	}
407	return p.expect(tok)
408}
409
410func (p *parser) expectSemi() {
411	// semicolon is optional before a closing ')' or '}'
412	if p.tok != token.RPAREN && p.tok != token.RBRACE {
413		switch p.tok {
414		case token.COMMA:
415			// permit a ',' instead of a ';' but complain
416			p.errorExpected(p.pos, "';'")
417			fallthrough
418		case token.SEMICOLON:
419			p.next()
420		default:
421			p.errorExpected(p.pos, "';'")
422			syncStmt(p)
423		}
424	}
425}
426
427func (p *parser) atComma(context string, follow token.Token) bool {
428	if p.tok == token.COMMA {
429		return true
430	}
431	if p.tok != follow {
432		msg := "missing ','"
433		if p.tok == token.SEMICOLON && p.lit == "\n" {
434			msg += " before newline"
435		}
436		p.error(p.pos, msg+" in "+context)
437		return true // "insert" comma and continue
438	}
439	return false
440}
441
442func assert(cond bool, msg string) {
443	if !cond {
444		panic("go/parser internal error: " + msg)
445	}
446}
447
448// syncStmt advances to the next statement.
449// Used for synchronization after an error.
450//
451func syncStmt(p *parser) {
452	for {
453		switch p.tok {
454		case token.BREAK, token.CONST, token.CONTINUE, token.DEFER,
455			token.FALLTHROUGH, token.FOR, token.GO, token.GOTO,
456			token.IF, token.RETURN, token.SELECT, token.SWITCH,
457			token.TYPE, token.VAR:
458			// Return only if parser made some progress since last
459			// sync or if it has not reached 10 sync calls without
460			// progress. Otherwise consume at least one token to
461			// avoid an endless parser loop (it is possible that
462			// both parseOperand and parseStmt call syncStmt and
463			// correctly do not advance, thus the need for the
464			// invocation limit p.syncCnt).
465			if p.pos == p.syncPos && p.syncCnt < 10 {
466				p.syncCnt++
467				return
468			}
469			if p.pos > p.syncPos {
470				p.syncPos = p.pos
471				p.syncCnt = 0
472				return
473			}
474			// Reaching here indicates a parser bug, likely an
475			// incorrect token list in this function, but it only
476			// leads to skipping of possibly correct code if a
477			// previous error is present, and thus is preferred
478			// over a non-terminating parse.
479		case token.EOF:
480			return
481		}
482		p.next()
483	}
484}
485
486// syncDecl advances to the next declaration.
487// Used for synchronization after an error.
488//
489func syncDecl(p *parser) {
490	for {
491		switch p.tok {
492		case token.CONST, token.TYPE, token.VAR:
493			// see comments in syncStmt
494			if p.pos == p.syncPos && p.syncCnt < 10 {
495				p.syncCnt++
496				return
497			}
498			if p.pos > p.syncPos {
499				p.syncPos = p.pos
500				p.syncCnt = 0
501				return
502			}
503		case token.EOF:
504			return
505		}
506		p.next()
507	}
508}
509
// safePos returns a valid file position for a given position: If pos
// is valid to begin with, safePos returns pos. If pos is out-of-range,
// safePos returns the EOF position.
//
// This is a hack to work around "artificial" end positions in the AST which
// are computed by adding 1 to (presumably valid) token positions. If the
// token positions are invalid due to parse errors, the resulting end position
// may be past the file's EOF position, which would lead to panics if used
// later on.
//
520func (p *parser) safePos(pos token.Pos) (res token.Pos) {
521	defer func() {
522		if recover() != nil {
523			res = token.Pos(p.file.Base() + p.file.Size()) // EOF position
524		}
525	}()
526	_ = p.file.Offset(pos) // trigger a panic if position is out-of-range
527	return pos
528}
529
530// ----------------------------------------------------------------------------
531// Identifiers
532
533func (p *parser) parseIdent() *ast.Ident {
534	pos := p.pos
535	name := "_"
536	if p.tok == token.IDENT {
537		name = p.lit
538		p.next()
539	} else {
540		p.expect(token.IDENT) // use expect() error handling
541	}
542	return &ast.Ident{NamePos: pos, Name: name}
543}
544
545func (p *parser) parseIdentList() (list []*ast.Ident) {
546	if p.trace {
547		defer un(trace(p, "IdentList"))
548	}
549
550	list = append(list, p.parseIdent())
551	for p.tok == token.COMMA {
552		p.next()
553		list = append(list, p.parseIdent())
554	}
555
556	return
557}
558
559// ----------------------------------------------------------------------------
560// Common productions
561
562// If lhs is set, result list elements which are identifiers are not resolved.
563func (p *parser) parseExprList(lhs bool) (list []ast.Expr) {
564	if p.trace {
565		defer un(trace(p, "ExpressionList"))
566	}
567
568	list = append(list, p.checkExpr(p.parseExpr(lhs)))
569	for p.tok == token.COMMA {
570		p.next()
571		list = append(list, p.checkExpr(p.parseExpr(lhs)))
572	}
573
574	return
575}
576
577func (p *parser) parseLhsList() []ast.Expr {
578	old := p.inRhs
579	p.inRhs = false
580	list := p.parseExprList(true)
581	switch p.tok {
582	case token.DEFINE:
		// lhs of a short variable declaration
		// but doesn't enter scope until later:
		// caller must call p.shortVarDecl with the
		// resulting assignment at the appropriate time.
587	case token.COLON:
588		// lhs of a label declaration or a communication clause of a select
589		// statement (parseLhsList is not called when parsing the case clause
590		// of a switch statement):
591		// - labels are declared by the caller of parseLhsList
592		// - for communication clauses, if there is a stand-alone identifier
593		//   followed by a colon, we have a syntax error; there is no need
594		//   to resolve the identifier in that case
595	default:
596		// identifiers must be declared elsewhere
597		for _, x := range list {
598			p.resolve(x)
599		}
600	}
601	p.inRhs = old
602	return list
603}
604
605func (p *parser) parseRhsList() []ast.Expr {
606	old := p.inRhs
607	p.inRhs = true
608	list := p.parseExprList(false)
609	p.inRhs = old
610	return list
611}
612
613// ----------------------------------------------------------------------------
614// Types
615
616func (p *parser) parseType() ast.Expr {
617	if p.trace {
618		defer un(trace(p, "Type"))
619	}
620
621	typ := p.tryType()
622
623	if typ == nil {
624		pos := p.pos
625		p.errorExpected(pos, "type")
626		p.next() // make progress
627		return &ast.BadExpr{From: pos, To: p.pos}
628	}
629
630	return typ
631}
632
633// If the result is an identifier, it is not resolved.
634func (p *parser) parseTypeName() ast.Expr {
635	if p.trace {
636		defer un(trace(p, "TypeName"))
637	}
638
639	ident := p.parseIdent()
640	// don't resolve ident yet - it may be a parameter or field name
641
642	if p.tok == token.PERIOD {
643		// ident is a package name
644		p.next()
645		p.resolve(ident)
646		sel := p.parseIdent()
647		return &ast.SelectorExpr{X: ident, Sel: sel}
648	}
649
650	return ident
651}
652
653func (p *parser) parseArrayType() ast.Expr {
654	if p.trace {
655		defer un(trace(p, "ArrayType"))
656	}
657
658	lbrack := p.expect(token.LBRACK)
659	p.exprLev++
660	var len ast.Expr
661	// always permit ellipsis for more fault-tolerant parsing
662	if p.tok == token.ELLIPSIS {
663		len = &ast.Ellipsis{Ellipsis: p.pos}
664		p.next()
665	} else if p.tok != token.RBRACK {
666		len = p.parseRhs()
667	}
668	p.exprLev--
669	p.expect(token.RBRACK)
670	elt := p.parseType()
671
672	return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
673}
674
675func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
676	idents := make([]*ast.Ident, len(list))
677	for i, x := range list {
678		ident, isIdent := x.(*ast.Ident)
679		if !isIdent {
680			if _, isBad := x.(*ast.BadExpr); !isBad {
681				// only report error if it's a new one
682				p.errorExpected(x.Pos(), "identifier")
683			}
684			ident = &ast.Ident{NamePos: x.Pos(), Name: "_"}
685		}
686		idents[i] = ident
687	}
688	return idents
689}
690
691func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field {
692	if p.trace {
693		defer un(trace(p, "FieldDecl"))
694	}
695
696	doc := p.leadComment
697
698	// 1st FieldDecl
699	// A type name used as an anonymous field looks like a field identifier.
700	var list []ast.Expr
701	for {
702		list = append(list, p.parseVarType(false))
703		if p.tok != token.COMMA {
704			break
705		}
706		p.next()
707	}
708
709	typ := p.tryVarType(false)
710
711	// analyze case
712	var idents []*ast.Ident
713	if typ != nil {
714		// IdentifierList Type
715		idents = p.makeIdentList(list)
716	} else {
717		// ["*"] TypeName (AnonymousField)
718		typ = list[0] // we always have at least one element
719		if n := len(list); n > 1 {
720			p.errorExpected(p.pos, "type")
721			typ = &ast.BadExpr{From: p.pos, To: p.pos}
722		} else if !isTypeName(deref(typ)) {
723			p.errorExpected(typ.Pos(), "anonymous field")
724			typ = &ast.BadExpr{From: typ.Pos(), To: p.safePos(typ.End())}
725		}
726	}
727
728	// Tag
729	var tag *ast.BasicLit
730	if p.tok == token.STRING {
731		tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
732		p.next()
733	}
734
	p.expectSemi() // call before accessing p.lineComment
736
737	field := &ast.Field{Doc: doc, Names: idents, Type: typ, Tag: tag, Comment: p.lineComment}
738	p.declare(field, nil, scope, ast.Var, idents...)
739	p.resolve(typ)
740
741	return field
742}
743
744func (p *parser) parseStructType() *ast.StructType {
745	if p.trace {
746		defer un(trace(p, "StructType"))
747	}
748
749	pos := p.expect(token.STRUCT)
750	lbrace := p.expect(token.LBRACE)
751	scope := ast.NewScope(nil) // struct scope
752	var list []*ast.Field
753	for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
754		// a field declaration cannot start with a '(' but we accept
755		// it here for more robust parsing and better error messages
756		// (parseFieldDecl will check and complain if necessary)
757		list = append(list, p.parseFieldDecl(scope))
758	}
759	rbrace := p.expect(token.RBRACE)
760
761	return &ast.StructType{
762		Struct: pos,
763		Fields: &ast.FieldList{
764			Opening: lbrace,
765			List:    list,
766			Closing: rbrace,
767		},
768	}
769}
770
771func (p *parser) parsePointerType() *ast.StarExpr {
772	if p.trace {
773		defer un(trace(p, "PointerType"))
774	}
775
776	star := p.expect(token.MUL)
777	base := p.parseType()
778
779	return &ast.StarExpr{Star: star, X: base}
780}
781
782// If the result is an identifier, it is not resolved.
783func (p *parser) tryVarType(isParam bool) ast.Expr {
784	if isParam && p.tok == token.ELLIPSIS {
785		pos := p.pos
786		p.next()
787		typ := p.tryIdentOrType() // don't use parseType so we can provide better error message
788		if typ != nil {
789			p.resolve(typ)
790		} else {
791			p.error(pos, "'...' parameter is missing type")
792			typ = &ast.BadExpr{From: pos, To: p.pos}
793		}
794		return &ast.Ellipsis{Ellipsis: pos, Elt: typ}
795	}
796	return p.tryIdentOrType()
797}
798
799// If the result is an identifier, it is not resolved.
800func (p *parser) parseVarType(isParam bool) ast.Expr {
801	typ := p.tryVarType(isParam)
802	if typ == nil {
803		pos := p.pos
804		p.errorExpected(pos, "type")
805		p.next() // make progress
806		typ = &ast.BadExpr{From: pos, To: p.pos}
807	}
808	return typ
809}
810
811func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params []*ast.Field) {
812	if p.trace {
813		defer un(trace(p, "ParameterList"))
814	}
815
816	// 1st ParameterDecl
817	// A list of identifiers looks like a list of type names.
818	var list []ast.Expr
819	for {
820		list = append(list, p.parseVarType(ellipsisOk))
821		if p.tok != token.COMMA {
822			break
823		}
824		p.next()
825		if p.tok == token.RPAREN {
826			break
827		}
828	}
829
830	// analyze case
831	if typ := p.tryVarType(ellipsisOk); typ != nil {
832		// IdentifierList Type
833		idents := p.makeIdentList(list)
834		field := &ast.Field{Names: idents, Type: typ}
835		params = append(params, field)
836		// Go spec: The scope of an identifier denoting a function
837		// parameter or result variable is the function body.
838		p.declare(field, nil, scope, ast.Var, idents...)
839		p.resolve(typ)
840		if !p.atComma("parameter list", token.RPAREN) {
841			return
842		}
843		p.next()
844		for p.tok != token.RPAREN && p.tok != token.EOF {
845			idents := p.parseIdentList()
846			typ := p.parseVarType(ellipsisOk)
847			field := &ast.Field{Names: idents, Type: typ}
848			params = append(params, field)
849			// Go spec: The scope of an identifier denoting a function
850			// parameter or result variable is the function body.
851			p.declare(field, nil, scope, ast.Var, idents...)
852			p.resolve(typ)
853			if !p.atComma("parameter list", token.RPAREN) {
854				break
855			}
856			p.next()
857		}
858		return
859	}
860
861	// Type { "," Type } (anonymous parameters)
862	params = make([]*ast.Field, len(list))
863	for i, typ := range list {
864		p.resolve(typ)
865		params[i] = &ast.Field{Type: typ}
866	}
867	return
868}
869
870func (p *parser) parseParameters(scope *ast.Scope, ellipsisOk bool) *ast.FieldList {
871	if p.trace {
872		defer un(trace(p, "Parameters"))
873	}
874
875	var params []*ast.Field
876	lparen := p.expect(token.LPAREN)
877	if p.tok != token.RPAREN {
878		params = p.parseParameterList(scope, ellipsisOk)
879	}
880	rparen := p.expect(token.RPAREN)
881
882	return &ast.FieldList{Opening: lparen, List: params, Closing: rparen}
883}
884
885func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList {
886	if p.trace {
887		defer un(trace(p, "Result"))
888	}
889
890	if p.tok == token.LPAREN {
891		return p.parseParameters(scope, false)
892	}
893
894	typ := p.tryType()
895	if typ != nil {
896		list := make([]*ast.Field, 1)
897		list[0] = &ast.Field{Type: typ}
898		return &ast.FieldList{List: list}
899	}
900
901	return nil
902}
903
904func (p *parser) parseSignature(scope *ast.Scope) (params, results *ast.FieldList) {
905	if p.trace {
906		defer un(trace(p, "Signature"))
907	}
908
909	params = p.parseParameters(scope, true)
910	results = p.parseResult(scope)
911
912	return
913}
914
915func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) {
916	if p.trace {
917		defer un(trace(p, "FuncType"))
918	}
919
920	pos := p.expect(token.FUNC)
921	scope := ast.NewScope(p.topScope) // function scope
922	params, results := p.parseSignature(scope)
923
924	return &ast.FuncType{Func: pos, Params: params, Results: results}, scope
925}
926
927func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
928	if p.trace {
929		defer un(trace(p, "MethodSpec"))
930	}
931
932	doc := p.leadComment
933	var idents []*ast.Ident
934	var typ ast.Expr
935	x := p.parseTypeName()
936	if ident, isIdent := x.(*ast.Ident); isIdent && p.tok == token.LPAREN {
937		// method
938		idents = []*ast.Ident{ident}
939		scope := ast.NewScope(nil) // method scope
940		params, results := p.parseSignature(scope)
941		typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
942	} else {
943		// embedded interface
944		typ = x
945		p.resolve(typ)
946	}
	p.expectSemi() // call before accessing p.lineComment
948
949	spec := &ast.Field{Doc: doc, Names: idents, Type: typ, Comment: p.lineComment}
950	p.declare(spec, nil, scope, ast.Fun, idents...)
951
952	return spec
953}
954
955func (p *parser) parseInterfaceType() *ast.InterfaceType {
956	if p.trace {
957		defer un(trace(p, "InterfaceType"))
958	}
959
960	pos := p.expect(token.INTERFACE)
961	lbrace := p.expect(token.LBRACE)
962	scope := ast.NewScope(nil) // interface scope
963	var list []*ast.Field
964	for p.tok == token.IDENT {
965		list = append(list, p.parseMethodSpec(scope))
966	}
967	rbrace := p.expect(token.RBRACE)
968
969	return &ast.InterfaceType{
970		Interface: pos,
971		Methods: &ast.FieldList{
972			Opening: lbrace,
973			List:    list,
974			Closing: rbrace,
975		},
976	}
977}
978
979func (p *parser) parseMapType() *ast.MapType {
980	if p.trace {
981		defer un(trace(p, "MapType"))
982	}
983
984	pos := p.expect(token.MAP)
985	p.expect(token.LBRACK)
986	key := p.parseType()
987	p.expect(token.RBRACK)
988	value := p.parseType()
989
990	return &ast.MapType{Map: pos, Key: key, Value: value}
991}
992
993func (p *parser) parseChanType() *ast.ChanType {
994	if p.trace {
995		defer un(trace(p, "ChanType"))
996	}
997
998	pos := p.pos
999	dir := ast.SEND | ast.RECV
1000	var arrow token.Pos
1001	if p.tok == token.CHAN {
1002		p.next()
1003		if p.tok == token.ARROW {
1004			arrow = p.pos
1005			p.next()
1006			dir = ast.SEND
1007		}
1008	} else {
1009		arrow = p.expect(token.ARROW)
1010		p.expect(token.CHAN)
1011		dir = ast.RECV
1012	}
1013	value := p.parseType()
1014
1015	return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
1016}
1017
1018// If the result is an identifier, it is not resolved.
1019func (p *parser) tryIdentOrType() ast.Expr {
1020	switch p.tok {
1021	case token.IDENT:
1022		return p.parseTypeName()
1023	case token.LBRACK:
1024		return p.parseArrayType()
1025	case token.STRUCT:
1026		return p.parseStructType()
1027	case token.MUL:
1028		return p.parsePointerType()
1029	case token.FUNC:
1030		typ, _ := p.parseFuncType()
1031		return typ
1032	case token.INTERFACE:
1033		return p.parseInterfaceType()
1034	case token.MAP:
1035		return p.parseMapType()
1036	case token.CHAN, token.ARROW:
1037		return p.parseChanType()
1038	case token.LPAREN:
1039		lparen := p.pos
1040		p.next()
1041		typ := p.parseType()
1042		rparen := p.expect(token.RPAREN)
1043		return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
1044	}
1045
1046	// no type found
1047	return nil
1048}
1049
1050func (p *parser) tryType() ast.Expr {
1051	typ := p.tryIdentOrType()
1052	if typ != nil {
1053		p.resolve(typ)
1054	}
1055	return typ
1056}
1057
1058// ----------------------------------------------------------------------------
1059// Blocks
1060
1061func (p *parser) parseStmtList() (list []ast.Stmt) {
1062	if p.trace {
1063		defer un(trace(p, "StatementList"))
1064	}
1065
1066	for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
1067		list = append(list, p.parseStmt())
1068	}
1069
1070	return
1071}
1072
1073func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt {
1074	if p.trace {
1075		defer un(trace(p, "Body"))
1076	}
1077
1078	lbrace := p.expect(token.LBRACE)
1079	p.topScope = scope // open function scope
1080	p.openLabelScope()
1081	list := p.parseStmtList()
1082	p.closeLabelScope()
1083	p.closeScope()
1084	rbrace := p.expect(token.RBRACE)
1085
1086	return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1087}
1088
1089func (p *parser) parseBlockStmt() *ast.BlockStmt {
1090	if p.trace {
1091		defer un(trace(p, "BlockStmt"))
1092	}
1093
1094	lbrace := p.expect(token.LBRACE)
1095	p.openScope()
1096	list := p.parseStmtList()
1097	p.closeScope()
1098	rbrace := p.expect(token.RBRACE)
1099
1100	return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1101}
1102
1103// ----------------------------------------------------------------------------
1104// Expressions
1105
1106func (p *parser) parseFuncTypeOrLit() ast.Expr {
1107	if p.trace {
1108		defer un(trace(p, "FuncTypeOrLit"))
1109	}
1110
1111	typ, scope := p.parseFuncType()
1112	if p.tok != token.LBRACE {
1113		// function type only
1114		return typ
1115	}
1116
1117	p.exprLev++
1118	body := p.parseBody(scope)
1119	p.exprLev--
1120
1121	return &ast.FuncLit{Type: typ, Body: body}
1122}
1123
// parseOperand may return an expression or a raw type (incl. array
// types of the form [...]T). Callers must verify the result.
// If lhs is set and the result is an identifier, it is not resolved.
//
1128func (p *parser) parseOperand(lhs bool) ast.Expr {
1129	if p.trace {
1130		defer un(trace(p, "Operand"))
1131	}
1132
1133	switch p.tok {
1134	case token.IDENT:
1135		x := p.parseIdent()
1136		if !lhs {
1137			p.resolve(x)
1138		}
1139		return x
1140
1141	case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
1142		x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
1143		p.next()
1144		return x
1145
1146	case token.LPAREN:
1147		lparen := p.pos
1148		p.next()
1149		p.exprLev++
1150		x := p.parseRhsOrType() // types may be parenthesized: (some type)
1151		p.exprLev--
1152		rparen := p.expect(token.RPAREN)
1153		return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}
1154
1155	case token.FUNC:
1156		return p.parseFuncTypeOrLit()
1157	}
1158
1159	if typ := p.tryIdentOrType(); typ != nil {
1160		// could be type for composite literal or conversion
1161		_, isIdent := typ.(*ast.Ident)
1162		assert(!isIdent, "type cannot be identifier")
1163		return typ
1164	}
1165
1166	// we have an error
1167	pos := p.pos
1168	p.errorExpected(pos, "operand")
1169	syncStmt(p)
1170	return &ast.BadExpr{From: pos, To: p.pos}
1171}
1172
1173func (p *parser) parseSelector(x ast.Expr) ast.Expr {
1174	if p.trace {
1175		defer un(trace(p, "Selector"))
1176	}
1177
1178	sel := p.parseIdent()
1179
1180	return &ast.SelectorExpr{X: x, Sel: sel}
1181}
1182
1183func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
1184	if p.trace {
1185		defer un(trace(p, "TypeAssertion"))
1186	}
1187
1188	lparen := p.expect(token.LPAREN)
1189	var typ ast.Expr
1190	if p.tok == token.TYPE {
1191		// type switch: typ == nil
1192		p.next()
1193	} else {
1194		typ = p.parseType()
1195	}
1196	rparen := p.expect(token.RPAREN)
1197
1198	return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen}
1199}
1200
1201func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr {
1202	if p.trace {
1203		defer un(trace(p, "IndexOrSlice"))
1204	}
1205
1206	const N = 3 // change the 3 to 2 to disable 3-index slices
1207	lbrack := p.expect(token.LBRACK)
1208	p.exprLev++
1209	var index [N]ast.Expr
1210	var colons [N - 1]token.Pos
1211	if p.tok != token.COLON {
1212		index[0] = p.parseRhs()
1213	}
1214	ncolons := 0
1215	for p.tok == token.COLON && ncolons < len(colons) {
1216		colons[ncolons] = p.pos
1217		ncolons++
1218		p.next()
1219		if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF {
1220			index[ncolons] = p.parseRhs()
1221		}
1222	}
1223	p.exprLev--
1224	rbrack := p.expect(token.RBRACK)
1225
1226	if ncolons > 0 {
1227		// slice expression
1228		slice3 := false
1229		if ncolons == 2 {
1230			slice3 = true
1231			// Check presence of 2nd and 3rd index here rather than during type-checking
1232			// to prevent erroneous programs from passing through gofmt (was issue 7305).
1233			if index[1] == nil {
1234				p.error(colons[0], "2nd index required in 3-index slice")
1235				index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]}
1236			}
1237			if index[2] == nil {
1238				p.error(colons[1], "3rd index required in 3-index slice")
1239				index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack}
1240			}
1241		}
1242		return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack}
1243	}
1244
1245	return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack}
1246}
1247
1248func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
1249	if p.trace {
1250		defer un(trace(p, "CallOrConversion"))
1251	}
1252
1253	lparen := p.expect(token.LPAREN)
1254	p.exprLev++
1255	var list []ast.Expr
1256	var ellipsis token.Pos
1257	for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
1258		list = append(list, p.parseRhsOrType()) // builtins may expect a type: make(some type, ...)
1259		if p.tok == token.ELLIPSIS {
1260			ellipsis = p.pos
1261			p.next()
1262		}
1263		if !p.atComma("argument list", token.RPAREN) {
1264			break
1265		}
1266		p.next()
1267	}
1268	p.exprLev--
1269	rparen := p.expectClosing(token.RPAREN, "argument list")
1270
1271	return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
1272}
1273
1274func (p *parser) parseValue(keyOk bool) ast.Expr {
1275	if p.trace {
1276		defer un(trace(p, "Element"))
1277	}
1278
1279	if p.tok == token.LBRACE {
1280		return p.parseLiteralValue(nil)
1281	}
1282
	// Because the parser doesn't know the composite literal type, it cannot
	// know if a key that's an identifier is a struct field name or a name
	// denoting a value. The former is not resolved by the parser or the
	// resolver.
	//
	// Instead, _try_ to resolve such a key if possible. If it resolves,
	// it either a) has been resolved correctly, or b) has been resolved
	// incorrectly because the key is a struct field with a name matching
	// another identifier. In the former case we are done, and in the
	// latter case we don't care because the type checker will do a
	// separate field lookup.
	//
	// If the key does not resolve, it either a) must be defined at the top
	// level in another file of the same package, in the universe scope, or
	// be undeclared; or b) it is a struct field. In the former case, the
	// type checker can do a top-level lookup, and in the latter case it
	// will do a separate field lookup.
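	//
	// For instance (illustrative), in a literal such as
	//
	//	T{name: value}
	//
	// "name" may be a field name of a struct type T or an ordinary
	// identifier denoting a map key; only the type checker can tell.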
1299	x := p.checkExpr(p.parseExpr(keyOk))
1300	if keyOk {
1301		if p.tok == token.COLON {
1302			// Try to resolve the key but don't collect it
1303			// as unresolved identifier if it fails so that
1304			// we don't get (possibly false) errors about
1305			// undeclared names.
1306			p.tryResolve(x, false)
1307		} else {
1308			// not a key
1309			p.resolve(x)
1310		}
1311	}
1312
1313	return x
1314}
1315
1316func (p *parser) parseElement() ast.Expr {
1317	if p.trace {
1318		defer un(trace(p, "Element"))
1319	}
1320
1321	x := p.parseValue(true)
1322	if p.tok == token.COLON {
1323		colon := p.pos
1324		p.next()
1325		x = &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseValue(false)}
1326	}
1327
1328	return x
1329}
1330
1331func (p *parser) parseElementList() (list []ast.Expr) {
1332	if p.trace {
1333		defer un(trace(p, "ElementList"))
1334	}
1335
1336	for p.tok != token.RBRACE && p.tok != token.EOF {
1337		list = append(list, p.parseElement())
1338		if !p.atComma("composite literal", token.RBRACE) {
1339			break
1340		}
1341		p.next()
1342	}
1343
1344	return
1345}
1346
1347func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
1348	if p.trace {
1349		defer un(trace(p, "LiteralValue"))
1350	}
1351
1352	lbrace := p.expect(token.LBRACE)
1353	var elts []ast.Expr
1354	p.exprLev++
1355	if p.tok != token.RBRACE {
1356		elts = p.parseElementList()
1357	}
1358	p.exprLev--
1359	rbrace := p.expectClosing(token.RBRACE, "composite literal")
1360	return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
1361}
1362
1363// checkExpr checks that x is an expression (and not a type).
1364func (p *parser) checkExpr(x ast.Expr) ast.Expr {
1365	switch unparen(x).(type) {
1366	case *ast.BadExpr:
1367	case *ast.Ident:
1368	case *ast.BasicLit:
1369	case *ast.FuncLit:
1370	case *ast.CompositeLit:
1371	case *ast.ParenExpr:
1372		panic("unreachable")
1373	case *ast.SelectorExpr:
1374	case *ast.IndexExpr:
1375	case *ast.SliceExpr:
1376	case *ast.TypeAssertExpr:
1377		// If t.Type == nil we have a type assertion of the form
1378		// y.(type), which is only allowed in type switch expressions.
1379		// It's hard to exclude those but for the case where we are in
1380		// a type switch. Instead be lenient and test this in the type
1381		// checker.
1382	case *ast.CallExpr:
1383	case *ast.StarExpr:
1384	case *ast.UnaryExpr:
1385	case *ast.BinaryExpr:
1386	default:
1387		// all other nodes are not proper expressions
1388		p.errorExpected(x.Pos(), "expression")
1389		x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
1390	}
1391	return x
1392}
1393
1394// isTypeName reports whether x is a (qualified) TypeName.
1395func isTypeName(x ast.Expr) bool {
1396	switch t := x.(type) {
1397	case *ast.BadExpr:
1398	case *ast.Ident:
1399	case *ast.SelectorExpr:
1400		_, isIdent := t.X.(*ast.Ident)
1401		return isIdent
1402	default:
1403		return false // all other nodes are not type names
1404	}
1405	return true
1406}
1407
1408// isLiteralType reports whether x is a legal composite literal type.
1409func isLiteralType(x ast.Expr) bool {
1410	switch t := x.(type) {
1411	case *ast.BadExpr:
1412	case *ast.Ident:
1413	case *ast.SelectorExpr:
1414		_, isIdent := t.X.(*ast.Ident)
1415		return isIdent
1416	case *ast.ArrayType:
1417	case *ast.StructType:
1418	case *ast.MapType:
1419	default:
1420		return false // all other nodes are not legal composite literal types
1421	}
1422	return true
1423}
1424
1425// If x is of the form *T, deref returns T, otherwise it returns x.
1426func deref(x ast.Expr) ast.Expr {
1427	if p, isPtr := x.(*ast.StarExpr); isPtr {
1428		x = p.X
1429	}
1430	return x
1431}
1432
1433// If x is of the form (T), unparen returns unparen(T), otherwise it returns x.
1434func unparen(x ast.Expr) ast.Expr {
1435	if p, isParen := x.(*ast.ParenExpr); isParen {
1436		x = unparen(p.X)
1437	}
1438	return x
1439}
1440
1441// checkExprOrType checks that x is an expression or a type
1442// (and not a raw type such as [...]T).
1443//
1444func (p *parser) checkExprOrType(x ast.Expr) ast.Expr {
1445	switch t := unparen(x).(type) {
1446	case *ast.ParenExpr:
1447		panic("unreachable")
1448	case *ast.UnaryExpr:
1449	case *ast.ArrayType:
1450		if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis {
1451			p.error(len.Pos(), "expected array length, found '...'")
1452			x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
1453		}
1454	}
1455
1456	// all other nodes are expressions or types
1457	return x
1458}
1459
1460// If lhs is set and the result is an identifier, it is not resolved.
1461func (p *parser) parsePrimaryExpr(lhs bool) ast.Expr {
1462	if p.trace {
1463		defer un(trace(p, "PrimaryExpr"))
1464	}
1465
1466	x := p.parseOperand(lhs)
1467L:
1468	for {
1469		switch p.tok {
1470		case token.PERIOD:
1471			p.next()
1472			if lhs {
1473				p.resolve(x)
1474			}
1475			switch p.tok {
1476			case token.IDENT:
1477				x = p.parseSelector(p.checkExprOrType(x))
1478			case token.LPAREN:
1479				x = p.parseTypeAssertion(p.checkExpr(x))
1480			default:
1481				pos := p.pos
1482				p.errorExpected(pos, "selector or type assertion")
1483				p.next() // make progress
1484				sel := &ast.Ident{NamePos: pos, Name: "_"}
1485				x = &ast.SelectorExpr{X: x, Sel: sel}
1486			}
1487		case token.LBRACK:
1488			if lhs {
1489				p.resolve(x)
1490			}
1491			x = p.parseIndexOrSlice(p.checkExpr(x))
1492		case token.LPAREN:
1493			if lhs {
1494				p.resolve(x)
1495			}
1496			x = p.parseCallOrConversion(p.checkExprOrType(x))
1497		case token.LBRACE:
1498			if isLiteralType(x) && (p.exprLev >= 0 || !isTypeName(x)) {
1499				if lhs {
1500					p.resolve(x)
1501				}
1502				x = p.parseLiteralValue(x)
1503			} else {
1504				break L
1505			}
1506		default:
1507			break L
1508		}
1509		lhs = false // no need to try to resolve again
1510	}
1511
1512	return x
1513}
1514
1515// If lhs is set and the result is an identifier, it is not resolved.
1516func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
1517	if p.trace {
1518		defer un(trace(p, "UnaryExpr"))
1519	}
1520
1521	switch p.tok {
1522	case token.ADD, token.SUB, token.NOT, token.XOR, token.AND:
1523		pos, op := p.pos, p.tok
1524		p.next()
1525		x := p.parseUnaryExpr(false)
1526		return &ast.UnaryExpr{OpPos: pos, Op: op, X: p.checkExpr(x)}
1527
1528	case token.ARROW:
1529		// channel type or receive expression
1530		arrow := p.pos
1531		p.next()
1532
1533		// If the next token is token.CHAN we still don't know if it
1534		// is a channel type or a receive operation - we only know
1535		// once we have found the end of the unary expression. There
1536		// are two cases:
1537		//
1538		//   <- type  => (<-type) must be channel type
1539		//   <- expr  => <-(expr) is a receive from an expression
1540		//
1541		// In the first case, the arrow must be re-associated with
1542		// the channel type parsed already:
1543		//
1544		//   <- (chan type)    =>  (<-chan type)
1545		//   <- (chan<- type)  =>  (<-chan (<-type))
1546
1547		x := p.parseUnaryExpr(false)
1548
1549		// determine which case we have
1550		if typ, ok := x.(*ast.ChanType); ok {
1551			// (<-type)
1552
1553			// re-associate position info and <-
1554			dir := ast.SEND
1555			for ok && dir == ast.SEND {
1556				if typ.Dir == ast.RECV {
1557					// error: (<-type) is (<-(<-chan T))
1558					p.errorExpected(typ.Arrow, "'chan'")
1559				}
1560				arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
1561				dir, typ.Dir = typ.Dir, ast.RECV
1562				typ, ok = typ.Value.(*ast.ChanType)
1563			}
1564			if dir == ast.SEND {
1565				p.errorExpected(arrow, "channel type")
1566			}
1567
1568			return x
1569		}
1570
1571		// <-(expr)
1572		return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: p.checkExpr(x)}
1573
1574	case token.MUL:
1575		// pointer type or unary "*" expression
1576		pos := p.pos
1577		p.next()
1578		x := p.parseUnaryExpr(false)
1579		return &ast.StarExpr{Star: pos, X: p.checkExprOrType(x)}
1580	}
1581
1582	return p.parsePrimaryExpr(lhs)
1583}
1584
1585func (p *parser) tokPrec() (token.Token, int) {
1586	tok := p.tok
1587	if p.inRhs && tok == token.ASSIGN {
1588		tok = token.EQL
1589	}
1590	return tok, tok.Precedence()
1591}
1592
1593// If lhs is set and the result is an identifier, it is not resolved.
1594func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr {
1595	if p.trace {
1596		defer un(trace(p, "BinaryExpr"))
1597	}
1598
1599	x := p.parseUnaryExpr(lhs)
1600	for {
1601		op, oprec := p.tokPrec()
1602		if oprec < prec1 {
1603			return x
1604		}
1605		pos := p.expect(op)
1606		if lhs {
1607			p.resolve(x)
1608			lhs = false
1609		}
1610		y := p.parseBinaryExpr(false, oprec+1)
1611		x = &ast.BinaryExpr{X: p.checkExpr(x), OpPos: pos, Op: op, Y: p.checkExpr(y)}
1612	}
1613}
1614
1615// If lhs is set and the result is an identifier, it is not resolved.
1616// The result may be a type or even a raw type ([...]int). Callers must
1617// check the result (using checkExpr or checkExprOrType), depending on
1618// context.
1619func (p *parser) parseExpr(lhs bool) ast.Expr {
1620	if p.trace {
1621		defer un(trace(p, "Expression"))
1622	}
1623
1624	return p.parseBinaryExpr(lhs, token.LowestPrec+1)
1625}
1626
1627func (p *parser) parseRhs() ast.Expr {
1628	old := p.inRhs
1629	p.inRhs = true
1630	x := p.checkExpr(p.parseExpr(false))
1631	p.inRhs = old
1632	return x
1633}
1634
1635func (p *parser) parseRhsOrType() ast.Expr {
1636	old := p.inRhs
1637	p.inRhs = true
1638	x := p.checkExprOrType(p.parseExpr(false))
1639	p.inRhs = old
1640	return x
1641}
1642
1643// ----------------------------------------------------------------------------
1644// Statements
1645
1646// Parsing modes for parseSimpleStmt.
1647const (
1648	basic = iota
1649	labelOk
1650	rangeOk
1651)
1652
// parseSimpleStmt returns true as 2nd result if it parsed the assignment
// of a range clause (with mode == rangeOk). The returned statement is an
// assignment with a right-hand side that is a single unary expression of
// the form "range x". No guarantees are given for the left-hand side.
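//
// For example (illustrative), for the clause "i, v := range a" the result
// is an *ast.AssignStmt whose Rhs holds a single
// &ast.UnaryExpr{Op: token.RANGE, X: <expression for a>}.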
1657func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
1658	if p.trace {
1659		defer un(trace(p, "SimpleStmt"))
1660	}
1661
1662	x := p.parseLhsList()
1663
1664	switch p.tok {
1665	case
1666		token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
1667		token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
1668		token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
1669		token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
1670		// assignment statement, possibly part of a range clause
1671		pos, tok := p.pos, p.tok
1672		p.next()
1673		var y []ast.Expr
1674		isRange := false
1675		if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
1676			pos := p.pos
1677			p.next()
1678			y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
1679			isRange = true
1680		} else {
1681			y = p.parseRhsList()
1682		}
1683		as := &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}
1684		if tok == token.DEFINE {
1685			p.shortVarDecl(as, x)
1686		}
1687		return as, isRange
1688	}
1689
1690	if len(x) > 1 {
1691		p.errorExpected(x[0].Pos(), "1 expression")
1692		// continue with first expression
1693	}
1694
1695	switch p.tok {
1696	case token.COLON:
1697		// labeled statement
1698		colon := p.pos
1699		p.next()
1700		if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent {
1701			// Go spec: The scope of a label is the body of the function
1702			// in which it is declared and excludes the body of any nested
1703			// function.
1704			stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
1705			p.declare(stmt, nil, p.labelScope, ast.Lbl, label)
1706			return stmt, false
1707		}
		// The label declaration typically starts at x[0].Pos(), but the label
		// declaration may be erroneous due to a token after that position (and
		// before the ':'). If AllErrors is not set, the (only) error
		// reported for the line is the illegal label error instead of the token
		// before the ':' that caused the problem. Thus, use the (latest) colon
		// position for error reporting.
1714		p.error(colon, "illegal label declaration")
1715		return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false
1716
1717	case token.ARROW:
1718		// send statement
1719		arrow := p.pos
1720		p.next()
1721		y := p.parseRhs()
1722		return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false
1723
1724	case token.INC, token.DEC:
1725		// increment or decrement
1726		s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
1727		p.next()
1728		return s, false
1729	}
1730
1731	// expression
1732	return &ast.ExprStmt{X: x[0]}, false
1733}
1734
1735func (p *parser) parseCallExpr(callType string) *ast.CallExpr {
1736	x := p.parseRhsOrType() // could be a conversion: (some type)(x)
1737	if call, isCall := x.(*ast.CallExpr); isCall {
1738		return call
1739	}
1740	if _, isBad := x.(*ast.BadExpr); !isBad {
1741		// only report error if it's a new one
1742		p.error(p.safePos(x.End()), fmt.Sprintf("function must be invoked in %s statement", callType))
1743	}
1744	return nil
1745}
1746
1747func (p *parser) parseGoStmt() ast.Stmt {
1748	if p.trace {
1749		defer un(trace(p, "GoStmt"))
1750	}
1751
1752	pos := p.expect(token.GO)
1753	call := p.parseCallExpr("go")
1754	p.expectSemi()
1755	if call == nil {
1756		return &ast.BadStmt{From: pos, To: pos + 2} // len("go")
1757	}
1758
1759	return &ast.GoStmt{Go: pos, Call: call}
1760}
1761
1762func (p *parser) parseDeferStmt() ast.Stmt {
1763	if p.trace {
1764		defer un(trace(p, "DeferStmt"))
1765	}
1766
1767	pos := p.expect(token.DEFER)
1768	call := p.parseCallExpr("defer")
1769	p.expectSemi()
1770	if call == nil {
1771		return &ast.BadStmt{From: pos, To: pos + 5} // len("defer")
1772	}
1773
1774	return &ast.DeferStmt{Defer: pos, Call: call}
1775}
1776
1777func (p *parser) parseReturnStmt() *ast.ReturnStmt {
1778	if p.trace {
1779		defer un(trace(p, "ReturnStmt"))
1780	}
1781
1782	pos := p.pos
1783	p.expect(token.RETURN)
1784	var x []ast.Expr
1785	if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
1786		x = p.parseRhsList()
1787	}
1788	p.expectSemi()
1789
1790	return &ast.ReturnStmt{Return: pos, Results: x}
1791}
1792
1793func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
1794	if p.trace {
1795		defer un(trace(p, "BranchStmt"))
1796	}
1797
1798	pos := p.expect(tok)
1799	var label *ast.Ident
1800	if tok != token.FALLTHROUGH && p.tok == token.IDENT {
1801		label = p.parseIdent()
1802		// add to list of unresolved targets
1803		n := len(p.targetStack) - 1
1804		p.targetStack[n] = append(p.targetStack[n], label)
1805	}
1806	p.expectSemi()
1807
1808	return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
1809}
1810
1811func (p *parser) makeExpr(s ast.Stmt, kind string) ast.Expr {
1812	if s == nil {
1813		return nil
1814	}
1815	if es, isExpr := s.(*ast.ExprStmt); isExpr {
1816		return p.checkExpr(es.X)
1817	}
1818	p.error(s.Pos(), fmt.Sprintf("expected %s, found simple statement (missing parentheses around composite literal?)", kind))
1819	return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())}
1820}
1821
1822func (p *parser) parseIfStmt() *ast.IfStmt {
1823	if p.trace {
1824		defer un(trace(p, "IfStmt"))
1825	}
1826
1827	pos := p.expect(token.IF)
1828	p.openScope()
1829	defer p.closeScope()
1830
1831	var s ast.Stmt
1832	var x ast.Expr
1833	{
1834		prevLev := p.exprLev
1835		p.exprLev = -1
1836		if p.tok == token.SEMICOLON {
1837			p.next()
1838			x = p.parseRhs()
1839		} else {
1840			s, _ = p.parseSimpleStmt(basic)
1841			if p.tok == token.SEMICOLON {
1842				p.next()
1843				x = p.parseRhs()
1844			} else {
1845				x = p.makeExpr(s, "boolean expression")
1846				s = nil
1847			}
1848		}
1849		p.exprLev = prevLev
1850	}
1851
1852	body := p.parseBlockStmt()
1853	var else_ ast.Stmt
1854	if p.tok == token.ELSE {
1855		p.next()
1856		switch p.tok {
1857		case token.IF:
1858			else_ = p.parseIfStmt()
1859		case token.LBRACE:
1860			else_ = p.parseBlockStmt()
1861			p.expectSemi()
1862		default:
1863			p.errorExpected(p.pos, "if statement or block")
1864			else_ = &ast.BadStmt{From: p.pos, To: p.pos}
1865		}
1866	} else {
1867		p.expectSemi()
1868	}
1869
1870	return &ast.IfStmt{If: pos, Init: s, Cond: x, Body: body, Else: else_}
1871}
1872
1873func (p *parser) parseTypeList() (list []ast.Expr) {
1874	if p.trace {
1875		defer un(trace(p, "TypeList"))
1876	}
1877
1878	list = append(list, p.parseType())
1879	for p.tok == token.COMMA {
1880		p.next()
1881		list = append(list, p.parseType())
1882	}
1883
1884	return
1885}
1886
1887func (p *parser) parseCaseClause(typeSwitch bool) *ast.CaseClause {
1888	if p.trace {
1889		defer un(trace(p, "CaseClause"))
1890	}
1891
1892	pos := p.pos
1893	var list []ast.Expr
1894	if p.tok == token.CASE {
1895		p.next()
1896		if typeSwitch {
1897			list = p.parseTypeList()
1898		} else {
1899			list = p.parseRhsList()
1900		}
1901	} else {
1902		p.expect(token.DEFAULT)
1903	}
1904
1905	colon := p.expect(token.COLON)
1906	p.openScope()
1907	body := p.parseStmtList()
1908	p.closeScope()
1909
1910	return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
1911}
1912
1913func isTypeSwitchAssert(x ast.Expr) bool {
1914	a, ok := x.(*ast.TypeAssertExpr)
1915	return ok && a.Type == nil
1916}
1917
1918func (p *parser) isTypeSwitchGuard(s ast.Stmt) bool {
1919	switch t := s.(type) {
1920	case *ast.ExprStmt:
1921		// x.(type)
1922		return isTypeSwitchAssert(t.X)
1923	case *ast.AssignStmt:
1924		// v := x.(type)
1925		if len(t.Lhs) == 1 && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0]) {
1926			switch t.Tok {
1927			case token.ASSIGN:
1928				// permit v = x.(type) but complain
1929				p.error(t.TokPos, "expected ':=', found '='")
1930				fallthrough
1931			case token.DEFINE:
1932				return true
1933			}
1934		}
1935	}
1936	return false
1937}
1938
1939func (p *parser) parseSwitchStmt() ast.Stmt {
1940	if p.trace {
1941		defer un(trace(p, "SwitchStmt"))
1942	}
1943
1944	pos := p.expect(token.SWITCH)
1945	p.openScope()
1946	defer p.closeScope()
1947
1948	var s1, s2 ast.Stmt
1949	if p.tok != token.LBRACE {
1950		prevLev := p.exprLev
1951		p.exprLev = -1
1952		if p.tok != token.SEMICOLON {
1953			s2, _ = p.parseSimpleStmt(basic)
1954		}
1955		if p.tok == token.SEMICOLON {
1956			p.next()
1957			s1 = s2
1958			s2 = nil
1959			if p.tok != token.LBRACE {
1960				// A TypeSwitchGuard may declare a variable in addition
1961				// to the variable declared in the initial SimpleStmt.
1962				// Introduce extra scope to avoid redeclaration errors:
1963				//
1964				//	switch t := 0; t := x.(T) { ... }
1965				//
1966				// (this code is not valid Go because the first t
1967				// cannot be accessed and thus is never used; the extra
1968				// scope is needed for the correct error message).
1969				//
1970				// If we don't have a type switch, s2 must be an expression.
1971				// Having the extra nested but empty scope won't affect it.
1972				p.openScope()
1973				defer p.closeScope()
1974				s2, _ = p.parseSimpleStmt(basic)
1975			}
1976		}
1977		p.exprLev = prevLev
1978	}
1979
1980	typeSwitch := p.isTypeSwitchGuard(s2)
1981	lbrace := p.expect(token.LBRACE)
1982	var list []ast.Stmt
1983	for p.tok == token.CASE || p.tok == token.DEFAULT {
1984		list = append(list, p.parseCaseClause(typeSwitch))
1985	}
1986	rbrace := p.expect(token.RBRACE)
1987	p.expectSemi()
1988	body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1989
1990	if typeSwitch {
1991		return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
1992	}
1993
1994	return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body}
1995}
1996
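// parseCommClause parses one communication clause of a select statement:
// a send ("case ch <- x:"), a receive with optional assignment
// ("case <-ch:", "case v := <-ch:", "case v, ok = <-ch:"), or "default:".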
1997func (p *parser) parseCommClause() *ast.CommClause {
1998	if p.trace {
1999		defer un(trace(p, "CommClause"))
2000	}
2001
2002	p.openScope()
2003	pos := p.pos
2004	var comm ast.Stmt
2005	if p.tok == token.CASE {
2006		p.next()
2007		lhs := p.parseLhsList()
2008		if p.tok == token.ARROW {
2009			// SendStmt
2010			if len(lhs) > 1 {
2011				p.errorExpected(lhs[0].Pos(), "1 expression")
2012				// continue with first expression
2013			}
2014			arrow := p.pos
2015			p.next()
2016			rhs := p.parseRhs()
2017			comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
2018		} else {
2019			// RecvStmt
2020			if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
2021				// RecvStmt with assignment
2022				if len(lhs) > 2 {
2023					p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
2024					// continue with first two expressions
2025					lhs = lhs[0:2]
2026				}
2027				pos := p.pos
2028				p.next()
2029				rhs := p.parseRhs()
2030				as := &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
2031				if tok == token.DEFINE {
2032					p.shortVarDecl(as, lhs)
2033				}
2034				comm = as
2035			} else {
2036				// lhs must be single receive operation
2037				if len(lhs) > 1 {
2038					p.errorExpected(lhs[0].Pos(), "1 expression")
2039					// continue with first expression
2040				}
2041				comm = &ast.ExprStmt{X: lhs[0]}
2042			}
2043		}
2044	} else {
2045		p.expect(token.DEFAULT)
2046	}
2047
2048	colon := p.expect(token.COLON)
2049	body := p.parseStmtList()
2050	p.closeScope()
2051
2052	return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
2053}
2054
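// parseSelectStmt parses a select statement and its communication clauses.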
2055func (p *parser) parseSelectStmt() *ast.SelectStmt {
2056	if p.trace {
2057		defer un(trace(p, "SelectStmt"))
2058	}
2059
2060	pos := p.expect(token.SELECT)
2061	lbrace := p.expect(token.LBRACE)
2062	var list []ast.Stmt
2063	for p.tok == token.CASE || p.tok == token.DEFAULT {
2064		list = append(list, p.parseCommClause())
2065	}
2066	rbrace := p.expect(token.RBRACE)
2067	p.expectSemi()
2068	body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
2069
2070	return &ast.SelectStmt{Select: pos, Body: body}
2071}
2072
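// parseForStmt parses all forms of the for statement: a bare or
// condition-only loop, a full "init; cond; post" loop, and a range clause.
// Range clauses are returned as *ast.RangeStmt, all others as *ast.ForStmt.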
2073func (p *parser) parseForStmt() ast.Stmt {
2074	if p.trace {
2075		defer un(trace(p, "ForStmt"))
2076	}
2077
2078	pos := p.expect(token.FOR)
2079	p.openScope()
2080	defer p.closeScope()
2081
2082	var s1, s2, s3 ast.Stmt
2083	var isRange bool
2084	if p.tok != token.LBRACE {
2085		prevLev := p.exprLev
2086		p.exprLev = -1
2087		if p.tok != token.SEMICOLON {
2088			if p.tok == token.RANGE {
2089				// "for range x" (nil lhs in assignment)
2090				pos := p.pos
2091				p.next()
2092				y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
2093				s2 = &ast.AssignStmt{Rhs: y}
2094				isRange = true
2095			} else {
2096				s2, isRange = p.parseSimpleStmt(rangeOk)
2097			}
2098		}
2099		if !isRange && p.tok == token.SEMICOLON {
2100			p.next()
2101			s1 = s2
2102			s2 = nil
2103			if p.tok != token.SEMICOLON {
2104				s2, _ = p.parseSimpleStmt(basic)
2105			}
2106			p.expectSemi()
2107			if p.tok != token.LBRACE {
2108				s3, _ = p.parseSimpleStmt(basic)
2109			}
2110		}
2111		p.exprLev = prevLev
2112	}
2113
2114	body := p.parseBlockStmt()
2115	p.expectSemi()
2116
2117	if isRange {
2118		as := s2.(*ast.AssignStmt)
2119		// check lhs
2120		var key, value ast.Expr
2121		switch len(as.Lhs) {
2122		case 0:
2123			// nothing to do
2124		case 1:
2125			key = as.Lhs[0]
2126		case 2:
2127			key, value = as.Lhs[0], as.Lhs[1]
2128		default:
2129			p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions")
2130			return &ast.BadStmt{From: pos, To: p.safePos(body.End())}
2131		}
2132		// parseSimpleStmt returned a right-hand side that
2133		// is a single unary expression of the form "range x"
2134		x := as.Rhs[0].(*ast.UnaryExpr).X
2135		return &ast.RangeStmt{
2136			For:    pos,
2137			Key:    key,
2138			Value:  value,
2139			TokPos: as.TokPos,
2140			Tok:    as.Tok,
2141			X:      x,
2142			Body:   body,
2143		}
2144	}
2145
2146	// regular for statement
2147	return &ast.ForStmt{
2148		For:  pos,
2149		Init: s1,
2150		Cond: p.makeExpr(s2, "boolean or range expression"),
2151		Post: s3,
2152		Body: body,
2153	}
2154}
2155
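// parseStmt parses a single statement. If no statement is found, the parser
// reports an error, advances to the next synchronization point (syncStmt),
// and returns an *ast.BadStmt.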
2156func (p *parser) parseStmt() (s ast.Stmt) {
2157	if p.trace {
2158		defer un(trace(p, "Statement"))
2159	}
2160
2161	switch p.tok {
2162	case token.CONST, token.TYPE, token.VAR:
2163		s = &ast.DeclStmt{Decl: p.parseDecl(syncStmt)}
2164	case
2165		// tokens that may start an expression
2166		token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands
2167		token.LBRACK, token.STRUCT, token.MAP, token.CHAN, token.INTERFACE, // composite types
2168		token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators
2169		s, _ = p.parseSimpleStmt(labelOk)
2170		// because of the required look-ahead, labeled statements are
2171		// parsed by parseSimpleStmt - don't expect a semicolon after
2172		// them
2173		if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
2174			p.expectSemi()
2175		}
2176	case token.GO:
2177		s = p.parseGoStmt()
2178	case token.DEFER:
2179		s = p.parseDeferStmt()
2180	case token.RETURN:
2181		s = p.parseReturnStmt()
2182	case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
2183		s = p.parseBranchStmt(p.tok)
2184	case token.LBRACE:
2185		s = p.parseBlockStmt()
2186		p.expectSemi()
2187	case token.IF:
2188		s = p.parseIfStmt()
2189	case token.SWITCH:
2190		s = p.parseSwitchStmt()
2191	case token.SELECT:
2192		s = p.parseSelectStmt()
2193	case token.FOR:
2194		s = p.parseForStmt()
2195	case token.SEMICOLON:
2196		// Is it ever possible to have an implicit semicolon
2197		// producing an empty statement in a valid program?
2198		// (handle correctly anyway)
2199		s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: p.lit == "\n"}
2200		p.next()
2201	case token.RBRACE:
2202		// a semicolon may be omitted before a closing "}"
2203		s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: true}
2204	default:
2205		// no statement found
2206		pos := p.pos
2207		p.errorExpected(pos, "statement")
2208		syncStmt(p)
2209		s = &ast.BadStmt{From: pos, To: p.pos}
2210	}
2211
2212	return
2213}
2214
2215// ----------------------------------------------------------------------------
2216// Declarations
2217
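// A parseSpecFunction parses a single declaration spec; doc is the spec's
// lead comment, keyword the declaration keyword (import, const, type, or
// var), and iota the spec's index within a parenthesized declaration group.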
2218type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
2219
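// isValidImport reports whether lit, a string literal as returned by the
// scanner, unquotes to a non-empty import path containing only graphic,
// non-space characters outside the set of illegalChars.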
2220func isValidImport(lit string) bool {
2221	const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
2222	s, _ := strconv.Unquote(lit) // go/scanner returns a legal string literal
2223	for _, r := range s {
2224		if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) {
2225			return false
2226		}
2227	}
2228	return s != ""
2229}
2230
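// parseImportSpec parses a single import spec of the form
// [ "." | PackageName ] ImportPath, e.g. "fmt", f "fmt", or . "fmt".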
2231func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
2232	if p.trace {
2233		defer un(trace(p, "ImportSpec"))
2234	}
2235
2236	var ident *ast.Ident
2237	switch p.tok {
2238	case token.PERIOD:
2239		ident = &ast.Ident{NamePos: p.pos, Name: "."}
2240		p.next()
2241	case token.IDENT:
2242		ident = p.parseIdent()
2243	}
2244
2245	pos := p.pos
2246	var path string
2247	if p.tok == token.STRING {
2248		path = p.lit
2249		if !isValidImport(path) {
2250			p.error(pos, "invalid import path: "+path)
2251		}
2252		p.next()
2253	} else {
2254		p.expect(token.STRING) // use expect() error handling
2255	}
2256	p.expectSemi() // call before accessing p.lineComment
2257
2258	// collect imports
2259	spec := &ast.ImportSpec{
2260		Doc:     doc,
2261		Name:    ident,
2262		Path:    &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path},
2263		Comment: p.lineComment,
2264	}
2265	p.imports = append(p.imports, spec)
2266
2267	return spec
2268}
2269
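// parseValueSpec parses a single ConstSpec or VarSpec, selected by keyword.
// iota is the spec's index within the enclosing declaration group; a
// ConstSpec after the first may omit its values, provided it has no type.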
2270func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
2271	if p.trace {
2272		defer un(trace(p, keyword.String()+"Spec"))
2273	}
2274
2275	pos := p.pos
2276	idents := p.parseIdentList()
2277	typ := p.tryType()
2278	var values []ast.Expr
2279	// always permit optional initialization for more tolerant parsing
2280	if p.tok == token.ASSIGN {
2281		p.next()
2282		values = p.parseRhsList()
2283	}
2284	p.expectSemi() // call before accessing p.lineComment
2285
2286	switch keyword {
2287	case token.VAR:
2288		if typ == nil && values == nil {
2289			p.error(pos, "missing variable type or initialization")
2290		}
2291	case token.CONST:
2292		if values == nil && (iota == 0 || typ != nil) {
2293			p.error(pos, "missing constant value")
2294		}
2295	}
2296
2297	// Go spec: The scope of a constant or variable identifier declared inside
2298	// a function begins at the end of the ConstSpec or VarSpec and ends at
2299	// the end of the innermost containing block.
2300	// (Global identifiers are resolved in a separate phase after parsing.)
2301	spec := &ast.ValueSpec{
2302		Doc:     doc,
2303		Names:   idents,
2304		Type:    typ,
2305		Values:  values,
2306		Comment: p.lineComment,
2307	}
2308	kind := ast.Con
2309	if keyword == token.VAR {
2310		kind = ast.Var
2311	}
2312	p.declare(spec, iota, p.topScope, kind, idents...)
2313
2314	return spec
2315}
2316
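// parseTypeSpec parses a single type spec, either a type definition
// ("type T U") or, if an "=" is present, an alias declaration ("type T = U").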
2317func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
2318	if p.trace {
2319		defer un(trace(p, "TypeSpec"))
2320	}
2321
2322	ident := p.parseIdent()
2323
2324	// Go spec: The scope of a type identifier declared inside a function begins
2325	// at the identifier in the TypeSpec and ends at the end of the innermost
2326	// containing block.
2327	// (Global identifiers are resolved in a separate phase after parsing.)
2328	spec := &ast.TypeSpec{Doc: doc, Name: ident}
2329	p.declare(spec, nil, p.topScope, ast.Typ, ident)
2330	if p.tok == token.ASSIGN {
2331		spec.Assign = p.pos
2332		p.next()
2333	}
2334	spec.Type = p.parseType()
2335	p.expectSemi() // call before accessing p.lineComment
2336	spec.Comment = p.lineComment
2337
2338	return spec
2339}
2340
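// parseGenDecl parses a general declaration introduced by keyword (import,
// const, type, or var): either a single spec or a parenthesized list of
// specs, each parsed by f.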
2341func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
2342	if p.trace {
2343		defer un(trace(p, "GenDecl("+keyword.String()+")"))
2344	}
2345
2346	doc := p.leadComment
2347	pos := p.expect(keyword)
2348	var lparen, rparen token.Pos
2349	var list []ast.Spec
2350	if p.tok == token.LPAREN {
2351		lparen = p.pos
2352		p.next()
2353		for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
2354			list = append(list, f(p.leadComment, keyword, iota))
2355		}
2356		rparen = p.expect(token.RPAREN)
2357		p.expectSemi()
2358	} else {
2359		list = append(list, f(nil, keyword, 0))
2360	}
2361
2362	return &ast.GenDecl{
2363		Doc:    doc,
2364		TokPos: pos,
2365		Tok:    keyword,
2366		Lparen: lparen,
2367		Specs:  list,
2368		Rparen: rparen,
2369	}
2370}
2371
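// parseFuncDecl parses a function or method declaration. The receiver,
// signature, and body all share a single function scope; functions (but not
// methods or init functions) are also declared in the package scope.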
2372func (p *parser) parseFuncDecl() *ast.FuncDecl {
2373	if p.trace {
2374		defer un(trace(p, "FunctionDecl"))
2375	}
2376
2377	doc := p.leadComment
2378	pos := p.expect(token.FUNC)
2379	scope := ast.NewScope(p.topScope) // function scope
2380
2381	var recv *ast.FieldList
2382	if p.tok == token.LPAREN {
2383		recv = p.parseParameters(scope, false)
2384	}
2385
2386	ident := p.parseIdent()
2387
2388	params, results := p.parseSignature(scope)
2389
2390	var body *ast.BlockStmt
2391	if p.tok == token.LBRACE {
2392		body = p.parseBody(scope)
2393	}
2394	p.expectSemi()
2395
2396	decl := &ast.FuncDecl{
2397		Doc:  doc,
2398		Recv: recv,
2399		Name: ident,
2400		Type: &ast.FuncType{
2401			Func:    pos,
2402			Params:  params,
2403			Results: results,
2404		},
2405		Body: body,
2406	}
2407	if recv == nil {
2408		// Go spec: The scope of an identifier denoting a constant, type,
2409		// variable, or function (but not method) declared at top level
2410		// (outside any function) is the package block.
2411		//
2412		// init() functions cannot be referred to and there may
2413		// be more than one - don't put them in the pkgScope
2414		if ident.Name != "init" {
2415			p.declare(decl, nil, p.pkgScope, ast.Fun, ident)
2416		}
2417	}
2418
2419	return decl
2420}
2421
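// parseDecl parses a single top-level declaration. If none is found, the
// parser reports an error, calls sync to skip to a likely declaration
// boundary, and returns an *ast.BadDecl.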
2422func (p *parser) parseDecl(sync func(*parser)) ast.Decl {
2423	if p.trace {
2424		defer un(trace(p, "Declaration"))
2425	}
2426
2427	var f parseSpecFunction
2428	switch p.tok {
2429	case token.CONST, token.VAR:
2430		f = p.parseValueSpec
2431
2432	case token.TYPE:
2433		f = p.parseTypeSpec
2434
2435	case token.FUNC:
2436		return p.parseFuncDecl()
2437
2438	default:
2439		pos := p.pos
2440		p.errorExpected(pos, "declaration")
2441		sync(p)
2442		return &ast.BadDecl{From: pos, To: p.pos}
2443	}
2444
2445	return p.parseGenDecl(p.tok, f)
2446}
2447
2448// ----------------------------------------------------------------------------
2449// Source files
2450
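// parseFile parses an entire source file: the package clause, any import
// declarations, and the remaining top-level declarations (subject to the
// PackageClauseOnly and ImportsOnly modes), and then resolves the file's
// unresolved identifiers against the package scope.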
2451func (p *parser) parseFile() *ast.File {
2452	if p.trace {
2453		defer un(trace(p, "File"))
2454	}
2455
2456	// Don't bother parsing the rest if we had errors scanning the first token.
2457	// Likely not a Go source file at all.
2458	if p.errors.Len() != 0 {
2459		return nil
2460	}
2461
2462	// package clause
2463	doc := p.leadComment
2464	pos := p.expect(token.PACKAGE)
2465	// Go spec: The package clause is not a declaration;
2466	// the package name does not appear in any scope.
2467	ident := p.parseIdent()
2468	if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
2469		p.error(p.pos, "invalid package name _")
2470	}
2471	p.expectSemi()
2472
2473	// Don't bother parsing the rest if we had errors parsing the package clause.
2474	// Likely not a Go source file at all.
2475	if p.errors.Len() != 0 {
2476		return nil
2477	}
2478
2479	p.openScope()
2480	p.pkgScope = p.topScope
2481	var decls []ast.Decl
2482	if p.mode&PackageClauseOnly == 0 {
2483		// import decls
2484		for p.tok == token.IMPORT {
2485			decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
2486		}
2487
2488		if p.mode&ImportsOnly == 0 {
2489			// rest of package body
2490			for p.tok != token.EOF {
2491				decls = append(decls, p.parseDecl(syncDecl))
2492			}
2493		}
2494	}
2495	p.closeScope()
2496	assert(p.topScope == nil, "unbalanced scopes")
2497	assert(p.labelScope == nil, "unbalanced label scopes")
2498
2499	// resolve global identifiers within the same file
2500	i := 0
2501	for _, ident := range p.unresolved {
2502		// i <= index for current ident
2503		assert(ident.Obj == unresolved, "object already resolved")
2504		ident.Obj = p.pkgScope.Lookup(ident.Name) // also removes unresolved sentinel
2505		if ident.Obj == nil {
2506			p.unresolved[i] = ident
2507			i++
2508		}
2509	}
2510
2511	return &ast.File{
2512		Doc:        doc,
2513		Package:    pos,
2514		Name:       ident,
2515		Decls:      decls,
2516		Scope:      p.pkgScope,
2517		Imports:    p.imports,
2518		Unresolved: p.unresolved[0:i],
2519		Comments:   p.comments,
2520	}
2521}
2522