// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package cache

import (
	"bytes"
	"context"
	"go/ast"
	"go/parser"
	"go/scanner"
	"go/token"
	"reflect"

	"golang.org/x/tools/internal/lsp/protocol"
	"golang.org/x/tools/internal/lsp/source"
	"golang.org/x/tools/internal/lsp/telemetry"
	"golang.org/x/tools/internal/memoize"
	"golang.org/x/tools/internal/span"
	"golang.org/x/tools/internal/telemetry/trace"
	errors "golang.org/x/xerrors"
)

// Limits the number of parallel parser calls per process.
var parseLimit = make(chan struct{}, 20)

// parseKey uniquely identifies a parsed Go file.
type parseKey struct {
	file source.FileIdentity
	mode source.ParseMode
}

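// parseGoHandle is a handle to the memoized result of parsing a single
// Go file in a particular parse mode.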
type parseGoHandle struct {
	handle *memoize.Handle
	file   source.FileHandle
	mode   source.ParseMode
}

type parseGoData struct {
	memoize.NoCopy

	src        []byte
	ast        *ast.File
	parseError error // errors associated with parsing the file
	mapper     *protocol.ColumnMapper
	err        error // any other error, e.g. a failure to read the file
}

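// ParseGoHandle returns a handle for the parsed AST of the given file in
// the given mode. The result is memoized in the cache's store, keyed by
// the file's identity and the parse mode.
//
// A minimal sketch of typical use (assuming a cache c, a context ctx, and
// a file handle fh):
//
//   pgh := c.ParseGoHandle(fh, source.ParseFull)
//   file, src, m, parseErr, err := pgh.Parse(ctx)
//   if err != nil {
//       // no usable AST or source
//   }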
func (c *Cache) ParseGoHandle(fh source.FileHandle, mode source.ParseMode) source.ParseGoHandle {
	key := parseKey{
		file: fh.Identity(),
		mode: mode,
	}
	fset := c.fset
	h := c.store.Bind(key, func(ctx context.Context) interface{} {
		return parseGo(ctx, fset, fh, mode)
	})
	return &parseGoHandle{
		handle: h,
		file:   fh,
		mode:   mode,
	}
}

func (pgh *parseGoHandle) String() string {
	return pgh.File().Identity().URI.Filename()
}

func (pgh *parseGoHandle) File() source.FileHandle {
	return pgh.file
}

func (pgh *parseGoHandle) Mode() source.ParseMode {
	return pgh.mode
}

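// Parse returns the parsed AST, the source it was parsed from, a column
// mapper for the file, any parse errors, and any other error encountered
// while producing the result, parsing the file if it has not been parsed
// already.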
func (pgh *parseGoHandle) Parse(ctx context.Context) (*ast.File, []byte, *protocol.ColumnMapper, error, error) {
	v := pgh.handle.Get(ctx)
	if v == nil {
		return nil, nil, nil, nil, errors.Errorf("no parsed file for %s", pgh.File().Identity().URI)
	}
	data := v.(*parseGoData)
	return data.ast, data.src, data.mapper, data.parseError, data.err
}

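// Cached is like Parse, but returns results only if they are already
// present in the cache; it never triggers parsing.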
func (pgh *parseGoHandle) Cached() (*ast.File, []byte, *protocol.ColumnMapper, error, error) {
	v := pgh.handle.Cached()
	if v == nil {
		return nil, nil, nil, nil, errors.Errorf("no cached AST for %s", pgh.file.Identity().URI)
	}
	data := v.(*parseGoData)
	return data.ast, data.src, data.mapper, data.parseError, data.err
}

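// hashParseKey returns a hash of ph's file identity and parse mode.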
func hashParseKey(ph source.ParseGoHandle) string {
	b := bytes.NewBuffer(nil)
	b.WriteString(ph.File().Identity().String())
	b.WriteString(string(ph.Mode()))
	return hashContents(b.Bytes())
}

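// hashParseKeys returns a hash of the parse keys of phs, in order.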
func hashParseKeys(phs []source.ParseGoHandle) string {
	b := bytes.NewBuffer(nil)
	for _, ph := range phs {
		b.WriteString(hashParseKey(ph))
	}
	return hashContents(b.Bytes())
}

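// parseGo parses the file whose contents are provided by fh, in the
// requested mode. It applies the fixAST/fixSrc repairs below and, in
// ParseExported mode, trims the AST. It is intended to be invoked through
// the memoize store (see ParseGoHandle) rather than called directly.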
func parseGo(ctx context.Context, fset *token.FileSet, fh source.FileHandle, mode source.ParseMode) *parseGoData {
	ctx, done := trace.StartSpan(ctx, "cache.parseGo", telemetry.File.Of(fh.Identity().URI.Filename()))
	defer done()

	if fh.Identity().Kind != source.Go {
		return &parseGoData{err: errors.Errorf("cannot parse non-Go file %s", fh.Identity().URI)}
	}
	buf, _, err := fh.Read(ctx)
	if err != nil {
		return &parseGoData{err: err}
	}
	parseLimit <- struct{}{}
	defer func() { <-parseLimit }()
	parserMode := parser.AllErrors | parser.ParseComments
	if mode == source.ParseHeader {
		parserMode = parser.ImportsOnly | parser.ParseComments
	}
	file, parseError := parser.ParseFile(fset, fh.Identity().URI.Filename(), buf, parserMode)
	var tok *token.File
	if file != nil {
		tok = fset.File(file.Pos())
		if tok == nil {
			return &parseGoData{err: errors.Errorf("successfully parsed but no token.File for %s (%v)", fh.Identity().URI, parseError)}
		}

		// Fix any badly parsed parts of the AST.
		_ = fixAST(ctx, file, tok, buf)

		// Fix certain syntax errors that render the file unparseable.
		newSrc := fixSrc(file, tok, buf)
		if newSrc != nil {
			newFile, _ := parser.ParseFile(fset, fh.Identity().URI.Filename(), newSrc, parserMode)
			if newFile != nil {
				// Maintain the original parseError so we don't try formatting the doctored file.
				file = newFile
				buf = newSrc
				tok = fset.File(file.Pos())

				_ = fixAST(ctx, file, tok, buf)
			}
		}

		if mode == source.ParseExported {
			trimAST(file)
		}
	}

	if file == nil {
		// If parsing failed, the parse errors (if any) are the real
		// errors; otherwise report that no AST was produced.
		err := parseError
		if err == nil {
			err = errors.Errorf("no AST for %s", fh.Identity().URI)
		}
		return &parseGoData{parseError: parseError, err: err}
	}
	m := &protocol.ColumnMapper{
		URI:       fh.Identity().URI,
		Converter: span.NewTokenConverter(fset, tok),
		Content:   buf,
	}

	return &parseGoData{
		src:        buf,
		ast:        file,
		mapper:     m,
		parseError: parseError,
	}
}

// trimAST clears any part of the AST not relevant to type checking the
// file's declarations, such as function bodies. It is applied to files
// parsed in ParseExported mode.
func trimAST(file *ast.File) {
	ast.Inspect(file, func(n ast.Node) bool {
		if n == nil {
			return false
		}
		switch n := n.(type) {
		case *ast.FuncDecl:
			n.Body = nil
		case *ast.BlockStmt:
			n.List = nil
		case *ast.CaseClause:
			n.Body = nil
		case *ast.CommClause:
			n.Body = nil
		case *ast.CompositeLit:
			// Leave elts in place for [...]T
			// array literals, because they can
			// affect the expression's type.
			if !isEllipsisArray(n.Type) {
				n.Elts = nil
			}
		}
		return true
	})
}

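// isEllipsisArray reports whether n is an array type whose length is
// specified as "..." (e.g. [...]int).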
func isEllipsisArray(n ast.Expr) bool {
	at, ok := n.(*ast.ArrayType)
	if !ok {
		return false
	}
	_, ok = at.Len.(*ast.Ellipsis)
	return ok
}

// fixAST inspects the AST and potentially modifies any *ast.BadStmts or
// *ast.BadExprs so that the file can be type-checked more effectively.
func fixAST(ctx context.Context, n ast.Node, tok *token.File, src []byte) error {
	var err error
	walkASTWithParent(n, func(n, parent ast.Node) bool {
		switch n := n.(type) {
		case *ast.BadStmt:
			err = fixDeferOrGoStmt(n, parent, tok, src) // don't shadow err
			if err == nil {
				// Recursively fix in our fixed node.
				err = fixAST(ctx, parent, tok, src)
			} else {
				err = errors.Errorf("unable to parse defer or go from *ast.BadStmt: %v", err)
			}
			return false
		case *ast.BadExpr:
			// Don't propagate this error since *ast.BadExpr is very common
			// and it is only sometimes due to array types. Errors from here
			// are expected and not actionable in general.
			if fixArrayType(n, parent, tok, src) == nil {
				// Recursively fix in our fixed node.
				err = fixAST(ctx, parent, tok, src)
				return false
			}

			// Fix cases where parser interprets if/for/switch "init"
			// statement as "cond" expression, e.g.:
			//
			//   // "i := foo" is init statement, not condition.
			//   for i := foo
			//
			fixInitStmt(n, parent, tok, src)

			return false
		case *ast.SelectorExpr:
			// Fix cases where a keyword prefix results in a phantom "_" selector, e.g.:
			//
			//   foo.var<> // want to complete to "foo.variance"
			//
			fixPhantomSelector(n, tok, src)
			return true
		default:
			return true
		}
	})

	return err
}

// walkASTWithParent walks the AST rooted at n. The semantics are
// similar to ast.Inspect except it does not call f(nil).
func walkASTWithParent(n ast.Node, f func(n ast.Node, parent ast.Node) bool) {
	var ancestors []ast.Node
	ast.Inspect(n, func(n ast.Node) (recurse bool) {
		defer func() {
			if recurse {
				ancestors = append(ancestors, n)
			}
		}()

		if n == nil {
			ancestors = ancestors[:len(ancestors)-1]
			return false
		}

		var parent ast.Node
		if len(ancestors) > 0 {
			parent = ancestors[len(ancestors)-1]
		}

		return f(n, parent)
	})
}

// fixSrc attempts to modify the file's source code to fix certain
// syntax errors that leave the rest of the file unparsed.
func fixSrc(f *ast.File, tok *token.File, src []byte) (newSrc []byte) {
	walkASTWithParent(f, func(n, parent ast.Node) bool {
		if newSrc != nil {
			return false
		}

		switch n := n.(type) {
		case *ast.BlockStmt:
			newSrc = fixMissingCurlies(f, n, parent, tok, src)
		case *ast.SelectorExpr:
			newSrc = fixDanglingSelector(f, n, parent, tok, src)
		}

		return newSrc == nil
	})

	return newSrc
}

// fixMissingCurlies adds in curly braces for block statements that
// are missing curly braces. For example:
//
//   if foo
//
// becomes
//
//   if foo {}
func fixMissingCurlies(f *ast.File, b *ast.BlockStmt, parent ast.Node, tok *token.File, src []byte) []byte {
	// If the "{" is already in the source code, there isn't anything to
	// fix since we aren't missing curlies.
	if b.Lbrace.IsValid() {
		braceOffset := tok.Offset(b.Lbrace)
		if braceOffset < len(src) && src[braceOffset] == '{' {
			return nil
		}
	}

	parentLine := tok.Line(parent.Pos())

	if parentLine >= tok.LineCount() {
		// If we are the last line in the file, no need to fix anything.
		return nil
	}

	// Insert curlies at the end of parent's starting line. The parent
	// is the statement that contains the block, e.g. *ast.IfStmt. The
	// block's Pos()/End() can't be relied upon because they are based
	// on the (missing) curly braces. We assume the statement is a
	// single line for now and try sticking the curly braces at the end.
	insertPos := tok.LineStart(parentLine+1) - 1

	// Scootch position backwards until it's not in a comment. For example:
	//
	// if foo<> // some amazing comment |
	// someOtherCode()
	//
	// insertPos will be located at "|", so we back it out of the comment.
	didSomething := true
	for didSomething {
		didSomething = false
		for _, c := range f.Comments {
			if c.Pos() < insertPos && insertPos <= c.End() {
				insertPos = c.Pos()
				didSomething = true
			}
		}
	}

	// Bail out if line doesn't end in an ident or ".". This is to avoid
	// cases like below where we end up making things worse by adding
	// curlies:
	//
	//   if foo &&
	//     bar<>
	switch precedingToken(insertPos, tok, src) {
	case token.IDENT, token.PERIOD:
		// ok
	default:
		return nil
	}

	var buf bytes.Buffer
	buf.Grow(len(src) + 3)
	buf.Write(src[:tok.Offset(insertPos)])

	// Detect if we need to insert a semicolon to fix "for" loop situations like:
	//
	//   for i := foo(); foo<>
	//
	// Just adding curlies is not sufficient to make things parse well.
	if fs, ok := parent.(*ast.ForStmt); ok {
		if _, ok := fs.Cond.(*ast.BadExpr); !ok {
			if xs, ok := fs.Post.(*ast.ExprStmt); ok {
				if _, ok := xs.X.(*ast.BadExpr); ok {
					buf.WriteByte(';')
				}
			}
		}
	}

	// Insert "{}" at insertPos.
	buf.WriteByte('{')
	buf.WriteByte('}')
	buf.Write(src[tok.Offset(insertPos):])
	return buf.Bytes()
}

// fixDanglingSelector inserts real "_" selector expressions in place
// of phantom "_" selectors. For example:
//
// func _() {
//   x.<>
// }
// var x struct { i int }
//
// To fix completion at "<>", we insert a real "_" after the "." so the
// following declaration of "x" can be parsed and type checked
// normally.
func fixDanglingSelector(f *ast.File, s *ast.SelectorExpr, parent ast.Node, tok *token.File, src []byte) []byte {
	if !isPhantomUnderscore(s.Sel, tok, src) {
		return nil
	}

	if !s.X.End().IsValid() {
		return nil
	}

	// Insert directly after the selector's ".".
	insertOffset := tok.Offset(s.X.End()) + 1
	if src[insertOffset-1] != '.' {
		return nil
	}

	var buf bytes.Buffer
	buf.Grow(len(src) + 1)
	buf.Write(src[:insertOffset])
	buf.WriteByte('_')
	buf.Write(src[insertOffset:])
	return buf.Bytes()
}

// fixPhantomSelector tries to fix selector expressions with phantom
// "_" selectors. In particular, we check if the selector is a
// keyword, and if so we swap in an *ast.Ident with the keyword text. For example:
//
// foo.var
//
// yields a "_" selector instead of "var" since "var" is a keyword.
func fixPhantomSelector(sel *ast.SelectorExpr, tok *token.File, src []byte) {
	if !isPhantomUnderscore(sel.Sel, tok, src) {
		return
	}

	// Only consider selectors directly abutting the selector ".". This
	// avoids false positives in cases like:
	//
	//   foo. // don't think "var" is our selector
	//   var bar = 123
	//
	if sel.Sel.Pos() != sel.X.End()+1 {
		return
	}

	maybeKeyword := readKeyword(sel.Sel.Pos(), tok, src)
	if maybeKeyword == "" {
		return
	}

	replaceNode(sel, sel.Sel, &ast.Ident{
		Name:    maybeKeyword,
		NamePos: sel.Sel.Pos(),
	})
}

// isPhantomUnderscore reports whether the given ident is a phantom
// underscore. The parser sometimes inserts phantom underscores when
// it encounters otherwise unparseable situations.
func isPhantomUnderscore(id *ast.Ident, tok *token.File, src []byte) bool {
	if id == nil || id.Name != "_" {
		return false
	}

	// Phantom underscore means the underscore is not actually in the
	// program text.
	offset := tok.Offset(id.Pos())
	return len(src) <= offset || src[offset] != '_'
}

// fixInitStmt fixes cases where the parser misinterprets an
// if/for/switch "init" statement as the "cond" conditional. In cases
// like "if i := 0" the user hasn't typed the semicolon yet so the
// parser is looking for the conditional expression. However, "i := 0"
// is not a valid expression, so we get a BadExpr.
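//
// For example (conceptually), given
//
//   if i := 0
//
// we move "i := 0" into the statement's Init field and synthesize a
// placeholder "_" identifier for its Cond, as if the user had written
// "if i := 0; _".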
func fixInitStmt(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) {
	if !bad.Pos().IsValid() || !bad.End().IsValid() {
		return
	}

	// Try to extract a statement from the BadExpr.
	stmtBytes := src[tok.Offset(bad.Pos()) : tok.Offset(bad.End()-1)+1]
	stmt, err := parseStmt(bad.Pos(), stmtBytes)
	if err != nil {
		return
	}

	// If the parent statement doesn't already have an "init" statement,
	// move the extracted statement into the "init" field and insert a
	// dummy expression into the required "cond" field.
	switch p := parent.(type) {
	case *ast.IfStmt:
		if p.Init != nil {
			return
		}
		p.Init = stmt
		p.Cond = &ast.Ident{Name: "_"}
	case *ast.ForStmt:
		if p.Init != nil {
			return
		}
		p.Init = stmt
		p.Cond = &ast.Ident{Name: "_"}
	case *ast.SwitchStmt:
		if p.Init != nil {
			return
		}
		p.Init = stmt
		p.Tag = nil
	}
}

// readKeyword reads the keyword starting at pos, if any.
func readKeyword(pos token.Pos, tok *token.File, src []byte) string {
	var kwBytes []byte
	for i := tok.Offset(pos); i < len(src); i++ {
		// Use a simplified identifier check since keywords are always lowercase ASCII.
		if src[i] < 'a' || src[i] > 'z' {
			break
		}
		kwBytes = append(kwBytes, src[i])

		// Stop search at arbitrarily chosen too-long-for-a-keyword length.
		if len(kwBytes) > 15 {
			return ""
		}
	}

	if kw := string(kwBytes); token.Lookup(kw).IsKeyword() {
		return kw
	}

	return ""
}

// fixArrayType tries to parse an *ast.BadExpr into an *ast.ArrayType.
// go/parser often turns lone array types like "[]int" into BadExprs
// if it isn't expecting a type.
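//
// As a rough sketch of the repair: for a BadExpr covering "[]foo", the text
// is re-parsed as the composite literal "[]foo{}", and the resulting
// *ast.ArrayType is swapped in for the BadExpr in its parent node.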
func fixArrayType(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) error {
	// Our expected input is a bad expression that looks like "[]someExpr".

	from := bad.Pos()
	to := bad.End()

	if !from.IsValid() || !to.IsValid() {
		return errors.Errorf("invalid BadExpr from/to: %d/%d", from, to)
	}

	exprBytes := make([]byte, 0, int(to-from)+3)
	// Avoid doing tok.Offset(to) since that panics if badExpr ends at EOF.
	exprBytes = append(exprBytes, src[tok.Offset(from):tok.Offset(to-1)+1]...)
	exprBytes = bytes.TrimSpace(exprBytes)

	// If our expression ends in "]" (e.g. "[]"), add a phantom selector
	// so we can complete directly after the "[]".
	if len(exprBytes) > 0 && exprBytes[len(exprBytes)-1] == ']' {
		exprBytes = append(exprBytes, '_')
	}

	// Add "{}" to turn our ArrayType into a CompositeLit. This is to
	// handle the case of "[...]int" where we must make it a composite
	// literal to be parseable.
	exprBytes = append(exprBytes, '{', '}')

	expr, err := parseExpr(from, exprBytes)
	if err != nil {
		return err
	}

	cl, _ := expr.(*ast.CompositeLit)
	if cl == nil {
		return errors.Errorf("expr not compLit (%T)", expr)
	}

	at, _ := cl.Type.(*ast.ArrayType)
	if at == nil {
		return errors.Errorf("compLit type not array (%T)", cl.Type)
	}

	if !replaceNode(parent, bad, at) {
		return errors.Errorf("couldn't replace array type")
	}

	return nil
}

// precedingToken scans src to find the token preceding pos.
func precedingToken(pos token.Pos, tok *token.File, src []byte) token.Token {
	s := &scanner.Scanner{}
	s.Init(tok, src, nil, 0)

	var lastTok token.Token
	for {
		p, t, _ := s.Scan()
		if t == token.EOF || p >= pos {
			break
		}

		lastTok = t
	}
	return lastTok
}

// fixDeferOrGoStmt tries to parse an *ast.BadStmt into a defer or a go statement.
//
// go/parser packages a statement of the form "defer x." as an *ast.BadStmt because
// it does not include a call expression. This means that go/types skips type-checking
// this statement entirely, and we can't use the type information when completing.
// Here, we try to generate a fake *ast.DeferStmt or *ast.GoStmt to put into the AST,
// instead of the *ast.BadStmt.
func fixDeferOrGoStmt(bad *ast.BadStmt, parent ast.Node, tok *token.File, src []byte) error {
	// Check if we have a bad statement containing either a "go" or "defer".
	s := &scanner.Scanner{}
	s.Init(tok, src, nil, 0)

	var (
		pos token.Pos
		tkn token.Token
	)
	for {
		if tkn == token.EOF {
			return errors.Errorf("reached the end of the file")
		}
		if pos >= bad.From {
			break
		}
		pos, tkn, _ = s.Scan()
	}

	var stmt ast.Stmt
	switch tkn {
	case token.DEFER:
		stmt = &ast.DeferStmt{
			Defer: pos,
		}
	case token.GO:
		stmt = &ast.GoStmt{
			Go: pos,
		}
	default:
		return errors.Errorf("no defer or go statement found")
	}

	var (
		from, to, last   token.Pos
		lastToken        token.Token
		braceDepth       int
		phantomSelectors []token.Pos
	)
FindTo:
	for {
		to, tkn, _ = s.Scan()

		if from == token.NoPos {
			from = to
		}

		switch tkn {
		case token.EOF:
			break FindTo
		case token.SEMICOLON:
			// If we aren't in nested braces, end of statement means
			// end of expression.
			if braceDepth == 0 {
				break FindTo
			}
		case token.LBRACE:
			braceDepth++
		}

		// This handles the common dangling selector case. For example in
		//
		// defer fmt.
		// y := 1
		//
		// we notice the dangling period and end our expression.
		//
		// If the previous token was a "." and we are looking at a "}",
		// the period is likely a dangling selector and needs a phantom
		// "_". Likewise if the current token is on a different line than
		// the period, the period is likely a dangling selector.
		if lastToken == token.PERIOD && (tkn == token.RBRACE || tok.Line(to) > tok.Line(last)) {
			// Insert phantom "_" selector after the dangling ".".
			phantomSelectors = append(phantomSelectors, last+1)
			// If we aren't in a block then end the expression after the ".".
			if braceDepth == 0 {
				to = last + 1
				break
			}
		}

		lastToken = tkn
		last = to

		switch tkn {
		case token.RBRACE:
			braceDepth--
			if braceDepth <= 0 {
				if braceDepth == 0 {
					// +1 to include the "}" itself.
					to += 1
				}
				break FindTo
			}
		}
	}

	if !from.IsValid() || tok.Offset(from) >= len(src) {
		return errors.Errorf("invalid from position")
	}

	if !to.IsValid() || tok.Offset(to) >= len(src) {
		return errors.Errorf("invalid to position %d", to)
	}

	// Insert any phantom selectors needed to prevent dangling "." from messing
	// up the AST.
	exprBytes := make([]byte, 0, int(to-from)+len(phantomSelectors))
	for i, b := range src[tok.Offset(from):tok.Offset(to)] {
		if len(phantomSelectors) > 0 && from+token.Pos(i) == phantomSelectors[0] {
			exprBytes = append(exprBytes, '_')
			phantomSelectors = phantomSelectors[1:]
		}
		exprBytes = append(exprBytes, b)
	}

	if len(phantomSelectors) > 0 {
		exprBytes = append(exprBytes, '_')
	}

	expr, err := parseExpr(from, exprBytes)
	if err != nil {
		return err
	}

	// Package the expression into a fake *ast.CallExpr and re-insert
	// into the function.
	call := &ast.CallExpr{
		Fun:    expr,
		Lparen: to,
		Rparen: to,
	}

	switch stmt := stmt.(type) {
	case *ast.DeferStmt:
		stmt.Call = call
	case *ast.GoStmt:
		stmt.Call = call
	}

	if !replaceNode(parent, bad, stmt) {
		return errors.Errorf("couldn't replace CallExpr")
	}

	return nil
}

// parseStmt parses the statement in src and updates its position to
// start at pos.
func parseStmt(pos token.Pos, src []byte) (ast.Stmt, error) {
	// Wrap our expression to make it a valid Go file we can pass to ParseFile.
	fileSrc := bytes.Join([][]byte{
		[]byte("package fake;func _(){"),
		src,
		[]byte("}"),
	}, nil)

	// Use ParseFile instead of ParseExpr because ParseFile has
	// best-effort behavior, whereas ParseExpr fails hard on any error.
	fakeFile, err := parser.ParseFile(token.NewFileSet(), "", fileSrc, 0)
	if fakeFile == nil {
		return nil, errors.Errorf("error reading fake file source: %v", err)
	}

	// Extract our expression node from inside the fake file.
	if len(fakeFile.Decls) == 0 {
		return nil, errors.Errorf("error parsing fake file: %v", err)
	}

	fakeDecl, _ := fakeFile.Decls[0].(*ast.FuncDecl)
	if fakeDecl == nil || len(fakeDecl.Body.List) == 0 {
		return nil, errors.Errorf("no statement in %s: %v", src, err)
	}

	stmt := fakeDecl.Body.List[0]

	// The parsed statement's positions are relative to the fake file;
	// shift them so the statement starts at pos.
	offsetPositions(stmt, pos-1-(stmt.Pos()-1))

	return stmt, nil
}

// parseExpr parses the expression in src and updates its position to
// start at pos.
func parseExpr(pos token.Pos, src []byte) (ast.Expr, error) {
	stmt, err := parseStmt(pos, src)
	if err != nil {
		return nil, err
	}

	exprStmt, ok := stmt.(*ast.ExprStmt)
	if !ok {
		return nil, errors.Errorf("no expr in %s: %v", src, err)
	}

	return exprStmt.X, nil
}

var tokenPosType = reflect.TypeOf(token.NoPos)

// offsetPositions applies an offset to the positions in an ast.Node.
func offsetPositions(n ast.Node, offset token.Pos) {
	ast.Inspect(n, func(n ast.Node) bool {
		if n == nil {
			return false
		}

		v := reflect.ValueOf(n).Elem()

		switch v.Kind() {
		case reflect.Struct:
			for i := 0; i < v.NumField(); i++ {
				f := v.Field(i)
				if f.Type() != tokenPosType {
					continue
				}

				if !f.CanSet() {
					continue
				}

				f.SetInt(f.Int() + int64(offset))
			}
		}

		return true
	})
}

// replaceNode updates parent's child oldChild to be newChild. It
// reports whether the replacement succeeded.
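//
// For example (hypothetically), replaceNode(ifStmt, ifStmt.Cond, newCond)
// sets ifStmt.Cond to newCond. fixArrayType and fixDeferOrGoStmt use it
// above to splice repaired nodes back into the AST.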
func replaceNode(parent, oldChild, newChild ast.Node) bool {
	if parent == nil || oldChild == nil || newChild == nil {
		return false
	}

	parentVal := reflect.ValueOf(parent).Elem()
	if parentVal.Kind() != reflect.Struct {
		return false
	}

	newChildVal := reflect.ValueOf(newChild)

	tryReplace := func(v reflect.Value) bool {
		if !v.CanSet() || !v.CanInterface() {
			return false
		}

		// If the existing value is oldChild, we found our child. Make
		// sure our newChild is assignable and then make the swap.
		if v.Interface() == oldChild && newChildVal.Type().AssignableTo(v.Type()) {
			v.Set(newChildVal)
			return true
		}

		return false
	}

	// Loop over parent's struct fields.
	for i := 0; i < parentVal.NumField(); i++ {
		f := parentVal.Field(i)

		switch f.Kind() {
		// Check interface and pointer fields.
		case reflect.Interface, reflect.Ptr:
			if tryReplace(f) {
				return true
			}

		// Search through any slice fields.
		case reflect.Slice:
			for i := 0; i < f.Len(); i++ {
				if tryReplace(f.Index(i)) {
					return true
				}
			}
		}
	}

	return false
}