// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package pointer

// This file defines the constraint generation phase.

// TODO(adonovan): move the constraint definitions and the store() etc
// functions which add them (and are also used by the solver) into a
// new file, constraints.go.

import (
	"fmt"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/callgraph"
	"golang.org/x/tools/go/ssa"
)

var (
	tEface     = types.NewInterface(nil, nil).Complete()
	tInvalid   = types.Typ[types.Invalid]
	tUnsafePtr = types.Typ[types.UnsafePointer]
)

// ---------- Node creation ----------

// nextNode returns the index of the next unused node.
func (a *analysis) nextNode() nodeid {
	return nodeid(len(a.nodes))
}

// addNodes creates nodes for all scalar elements in type typ, and
// returns the id of the first one, or zero if the type was
// analytically uninteresting.
//
// comment explains the origin of the nodes, as a debugging aid.
//
func (a *analysis) addNodes(typ types.Type, comment string) nodeid {
	id := a.nextNode()
	for _, fi := range a.flatten(typ) {
		a.addOneNode(fi.typ, comment, fi)
	}
	if id == a.nextNode() {
		return 0 // type contained no pointers
	}
	return id
}

// addOneNode creates a single node with type typ, and returns its id.
//
// typ should generally be scalar (except for tagged.T nodes
// and struct/array identity nodes).  Use addNodes for non-scalar types.
//
// comment explains the origin of the nodes, as a debugging aid.
// subelement indicates the subelement, e.g. ".a.b[*].c".
//
func (a *analysis) addOneNode(typ types.Type, comment string, subelement *fieldInfo) nodeid {
	id := a.nextNode()
	a.nodes = append(a.nodes, &node{typ: typ, subelement: subelement, solve: new(solverState)})
	if a.log != nil {
		fmt.Fprintf(a.log, "\tcreate n%d %s for %s%s\n",
			id, typ, comment, subelement.path())
	}
	return id
}
69
70// setValueNode associates node id with the value v.
71// cgn identifies the context iff v is a local variable.
72//
73func (a *analysis) setValueNode(v ssa.Value, id nodeid, cgn *cgnode) {
74	if cgn != nil {
75		a.localval[v] = id
76	} else {
77		a.globalval[v] = id
78	}
79	if a.log != nil {
80		fmt.Fprintf(a.log, "\tval[%s] = n%d  (%T)\n", v.Name(), id, v)
81	}
82
83	// Due to context-sensitivity, we may encounter the same Value
84	// in many contexts. We merge them to a canonical node, since
85	// that's what all clients want.
86
87	// Record the (v, id) relation if the client has queried pts(v).
88	if _, ok := a.config.Queries[v]; ok {
89		t := v.Type()
90		ptr, ok := a.result.Queries[v]
91		if !ok {
92			// First time?  Create the canonical query node.
93			ptr = Pointer{a, a.addNodes(t, "query")}
94			a.result.Queries[v] = ptr
95		}
96		a.result.Queries[v] = ptr
97		a.copy(ptr.n, id, a.sizeof(t))
98	}
99
100	// Record the (*v, id) relation if the client has queried pts(*v).
101	if _, ok := a.config.IndirectQueries[v]; ok {
102		t := v.Type()
103		ptr, ok := a.result.IndirectQueries[v]
104		if !ok {
105			// First time? Create the canonical indirect query node.
106			ptr = Pointer{a, a.addNodes(v.Type(), "query.indirect")}
107			a.result.IndirectQueries[v] = ptr
108		}
109		a.genLoad(cgn, ptr.n, v, 0, a.sizeof(t))
110	}
111
112	for _, query := range a.config.extendedQueries[v] {
113		t, nid := a.evalExtendedQuery(v.Type().Underlying(), id, query.ops)
114
115		if query.ptr.a == nil {
116			query.ptr.a = a
117			query.ptr.n = a.addNodes(t, "query.extended")
118		}
119		a.copy(query.ptr.n, nid, a.sizeof(t))
120	}
121}

// endObject marks the end of a sequence of calls to addNodes denoting
// a single object allocation.
//
// obj is the start node of the object, from a prior call to nextNode.
// Its size, flags and optional data will be updated.
//
func (a *analysis) endObject(obj nodeid, cgn *cgnode, data interface{}) *object {
	// Ensure object is non-empty by padding;
	// the pad will be the object node.
	size := uint32(a.nextNode() - obj)
	if size == 0 {
		a.addOneNode(tInvalid, "padding", nil)
	}
	objNode := a.nodes[obj]
	o := &object{
		size: size, // excludes padding
		cgn:  cgn,
		data: data,
	}
	objNode.obj = o

	return o
}

// makeFunctionObject creates a new function object (contour) for fn
// and returns the id of its first node.  It also enqueues fn for
// subsequent constraint generation.
//
// For a context-sensitive contour, callersite identifies the sole
// callsite; for shared contours, callersite is nil.
//
func (a *analysis) makeFunctionObject(fn *ssa.Function, callersite *callsite) nodeid {
	if a.log != nil {
		fmt.Fprintf(a.log, "\t---- makeFunctionObject %s\n", fn)
	}

	// obj is the function object (identity, params, results).
	obj := a.nextNode()
	cgn := a.makeCGNode(fn, obj, callersite)
	sig := fn.Signature
	a.addOneNode(sig, "func.cgnode", nil) // (scalar with Signature type)
	if recv := sig.Recv(); recv != nil {
		a.addNodes(recv.Type(), "func.recv")
	}
	a.addNodes(sig.Params(), "func.params")
	a.addNodes(sig.Results(), "func.results")
	a.endObject(obj, cgn, fn).flags |= otFunction

	if a.log != nil {
		fmt.Fprintf(a.log, "\t----\n")
	}

	// Queue it up for constraint processing.
	a.genq = append(a.genq, cgn)

	return obj
}
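
// To illustrate the layout established above (a sketch: one node per
// flattened scalar field), a hypothetical method
//
//	func (r *T) get(i int) (x, y *int)
//
// yields a function object block laid out as:
//
//	obj+0  identity node (scalar of Signature type)
//	obj+1  r (receiver)
//	obj+2  i (params)
//	obj+3  x (results)
//	obj+4  y
//
// funcParams and funcResults (below) assume exactly this layout.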

// makeTagged creates a tagged object of type typ.
func (a *analysis) makeTagged(typ types.Type, cgn *cgnode, data interface{}) nodeid {
	obj := a.addOneNode(typ, "tagged.T", nil) // NB: type may be non-scalar!
	a.addNodes(typ, "tagged.v")
	a.endObject(obj, cgn, data).flags |= otTagged
	return obj
}
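
// A tagged object for a concrete type T thus occupies the nodes
//
//	obj+0    the tag node, whose typ field records T
//	obj+1..  the payload, one node per flattened field of T
//
// which is the layout decoded by taggedValue (below).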

// makeRtype returns the canonical tagged object of type *rtype whose
// payload points to the sole rtype object for T.
//
// TODO(adonovan): move to reflect.go; it's part of the solver really.
//
func (a *analysis) makeRtype(T types.Type) nodeid {
	if v := a.rtypes.At(T); v != nil {
		return v.(nodeid)
	}

	// Create the object for the reflect.rtype itself, which is
	// ordinarily a large struct but here a single node will do.
	obj := a.nextNode()
	a.addOneNode(T, "reflect.rtype", nil)
	a.endObject(obj, nil, T)

	id := a.makeTagged(a.reflectRtypePtr, nil, T)
	a.nodes[id+1].typ = T // trick (each *rtype tagged object is a singleton)
	a.addressOf(a.reflectRtypePtr, id+1, obj)

	a.rtypes.Set(T, id)
	return id
}

// rtypeTaggedValue returns the type of the *reflect.rtype-tagged object obj.
func (a *analysis) rtypeTaggedValue(obj nodeid) types.Type {
	tDyn, t, _ := a.taggedValue(obj)
	if tDyn != a.reflectRtypePtr {
		panic(fmt.Sprintf("not a *reflect.rtype-tagged object: obj=n%d tag=%v payload=n%d", obj, tDyn, t))
	}
	return a.nodes[t].typ
}

// valueNode returns the id of the value node for v, creating it (and
// the association) as needed.  It may return zero for uninteresting
// values containing no pointers.
//
func (a *analysis) valueNode(v ssa.Value) nodeid {
	// Value nodes for locals are created en masse by genFunc.
	if id, ok := a.localval[v]; ok {
		return id
	}

	// Value nodes for globals are created on demand.
	id, ok := a.globalval[v]
	if !ok {
		var comment string
		if a.log != nil {
			comment = v.String()
		}
		id = a.addNodes(v.Type(), comment)
		if obj := a.objectNode(nil, v); obj != 0 {
			a.addressOf(v.Type(), id, obj)
		}
		a.setValueNode(v, id, nil)
	}
	return id
}

// valueOffsetNode ascertains the node for tuple/struct value v,
// then returns the node for its subfield #index.
//
func (a *analysis) valueOffsetNode(v ssa.Value, index int) nodeid {
	id := a.valueNode(v)
	if id == 0 {
		panic(fmt.Sprintf("cannot offset within n0: %s = %s", v.Name(), v))
	}
	return id + nodeid(a.offsetOf(v.Type(), index))
}

// isTaggedObject reports whether object obj is a tagged object.
func (a *analysis) isTaggedObject(obj nodeid) bool {
	return a.nodes[obj].obj.flags&otTagged != 0
}

// taggedValue returns the dynamic type tag, the (first node of the)
// payload, and the indirect flag of the tagged object starting at id.
// Panic ensues if !isTaggedObject(id).
//
func (a *analysis) taggedValue(obj nodeid) (tDyn types.Type, v nodeid, indirect bool) {
	n := a.nodes[obj]
	flags := n.obj.flags
	if flags&otTagged == 0 {
		panic(fmt.Sprintf("not a tagged object: n%d", obj))
	}
	return n.typ, obj + 1, flags&otIndirect != 0
}

// funcParams returns the first node of the params (P) block of the
// function whose object node (obj.flags&otFunction) is id.
//
func (a *analysis) funcParams(id nodeid) nodeid {
	n := a.nodes[id]
	if n.obj == nil || n.obj.flags&otFunction == 0 {
		panic(fmt.Sprintf("funcParams(n%d): not a function object block", id))
	}
	return id + 1
}

// funcResults returns the first node of the results (R) block of the
// function whose object node (obj.flags&otFunction) is id.
//
func (a *analysis) funcResults(id nodeid) nodeid {
	n := a.nodes[id]
	if n.obj == nil || n.obj.flags&otFunction == 0 {
		panic(fmt.Sprintf("funcResults(n%d): not a function object block", id))
	}
	sig := n.typ.(*types.Signature)
	id += 1 + nodeid(a.sizeof(sig.Params()))
	if sig.Recv() != nil {
		id += nodeid(a.sizeof(sig.Recv().Type()))
	}
	return id
}

// ---------- Constraint creation ----------

// copy creates a constraint of the form dst = src.
// sizeof is the width (in logical fields) of the copied type.
//
func (a *analysis) copy(dst, src nodeid, sizeof uint32) {
	if src == dst || sizeof == 0 {
		return // trivial
	}
	if src == 0 || dst == 0 {
		panic(fmt.Sprintf("ill-typed copy dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&copyConstraint{dst, src})
		src++
		dst++
	}
}
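
// For example (dst and src are hypothetical node ids): copying a value
// whose type flattens to two logical fields expands to two unary
// constraints:
//
//	a.copy(dst, src, 2)
//	// adds copyConstraint{dst, src} and copyConstraint{dst+1, src+1}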

// addressOf creates a constraint of the form id = &obj.
// T is the type of the address.
func (a *analysis) addressOf(T types.Type, id, obj nodeid) {
	if id == 0 {
		panic("addressOf: zero id")
	}
	if obj == 0 {
		panic("addressOf: zero obj")
	}
	if a.shouldTrack(T) {
		a.addConstraint(&addrConstraint{id, obj})
	}
}

// load creates a load constraint of the form dst = src[offset].
// offset is the pointer offset in logical fields.
// sizeof is the width (in logical fields) of the loaded type.
//
func (a *analysis) load(dst, src nodeid, offset, sizeof uint32) {
	if dst == 0 {
		return // load of non-pointerlike value
	}
	if src == 0 {
		panic(fmt.Sprintf("ill-typed load dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&loadConstraint{offset, dst, src})
		offset++
		dst++
	}
}
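
// For example (hypothetical node ids): loading one logical field at
// offset 1 through ptr,
//
//	a.load(dst, ptr, 1, 1)
//
// adds loadConstraint{offset, dst, src} = {1, dst, ptr}, i.e. dst = ptr[1].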

// store creates a store constraint of the form dst[offset] = src.
// offset is the pointer offset in logical fields.
// sizeof is the width (in logical fields) of the stored type.
//
func (a *analysis) store(dst, src nodeid, offset, sizeof uint32) {
	if src == 0 {
		return // store of non-pointerlike value
	}
	if dst == 0 {
		panic(fmt.Sprintf("ill-typed store dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&storeConstraint{offset, dst, src})
		offset++
		src++
	}
}

// offsetAddr creates an offsetAddr constraint of the form dst = &src.#offset.
// offset is the field offset in logical fields.
// T is the type of the address.
//
func (a *analysis) offsetAddr(T types.Type, dst, src nodeid, offset uint32) {
	if !a.shouldTrack(T) {
		return
	}
	if offset == 0 {
		// Simplify  dst = &src->f0
		//       to  dst = src
		// (NB: this optimisation is defeated by the identity
		// field prepended to struct and array objects.)
		a.copy(dst, src, 1)
	} else {
		a.addConstraint(&offsetAddrConstraint{offset, dst, src})
	}
}

// typeAssert creates a typeFilter or untag constraint of the form dst = src.(T):
// typeFilter for an interface, untag for a concrete type.
// The exact flag is specified as for untagConstraint.
//
func (a *analysis) typeAssert(T types.Type, dst, src nodeid, exact bool) {
	if isInterface(T) {
		a.addConstraint(&typeFilterConstraint{T, dst, src})
	} else {
		a.addConstraint(&untagConstraint{T, dst, src, exact})
	}
}
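
// For example, v.(io.Writer) (an interface type) yields a
// typeFilterConstraint, which merely filters the labels of pts(src),
// whereas v.(*bytes.Buffer) (a concrete type) yields an untagConstraint,
// which also unpacks the payload of each matching tagged object.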

// addConstraint adds c to the constraint set.
func (a *analysis) addConstraint(c constraint) {
	a.constraints = append(a.constraints, c)
	if a.log != nil {
		fmt.Fprintf(a.log, "\t%s\n", c)
	}
}

// copyElems generates load/store constraints for *dst = *src,
// where src and dst are slices or *arrays.
//
func (a *analysis) copyElems(cgn *cgnode, typ types.Type, dst, src ssa.Value) {
	tmp := a.addNodes(typ, "copy")
	sz := a.sizeof(typ)
	a.genLoad(cgn, tmp, src, 1, sz)
	a.genStore(cgn, dst, tmp, 1, sz)
}
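
// Schematically, for dst and src of type []T (whose objects are arrays
// with an identity node at offset 0), this emits, via a temporary of
// the element type:
//
//	tmp = src[1]  // load the element block
//	dst[1] = tmp  // store it back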

// ---------- Constraint generation ----------

// genConv generates constraints for the conversion operation conv.
func (a *analysis) genConv(conv *ssa.Convert, cgn *cgnode) {
	res := a.valueNode(conv)
	if res == 0 {
		return // result is non-pointerlike
	}

	tSrc := conv.X.Type()
	tDst := conv.Type()

	switch utSrc := tSrc.Underlying().(type) {
	case *types.Slice:
		// []byte/[]rune -> string?
		return

	case *types.Pointer:
		// *T -> unsafe.Pointer?
		if tDst.Underlying() == tUnsafePtr {
			return // we don't model unsafe aliasing (unsound)
		}

	case *types.Basic:
		switch tDst.Underlying().(type) {
		case *types.Pointer:
			// Treat unsafe.Pointer->*T conversions like
			// new(T) and create an unaliased object.
			if utSrc == tUnsafePtr {
				obj := a.addNodes(mustDeref(tDst), "unsafe.Pointer conversion")
				a.endObject(obj, cgn, conv)
				a.addressOf(tDst, res, obj)
				return
			}

		case *types.Slice:
			// string -> []byte/[]rune (or named aliases)?
			if utSrc.Info()&types.IsString != 0 {
				obj := a.addNodes(sliceToArray(tDst), "convert")
				a.endObject(obj, cgn, conv)
				a.addressOf(tDst, res, obj)
				return
			}

		case *types.Basic:
			// All basic-to-basic type conversions are no-ops.
			// This includes uintptr<->unsafe.Pointer conversions,
			// which we (unsoundly) ignore.
			return
		}
	}

	panic(fmt.Sprintf("illegal *ssa.Convert %s -> %s: %s", tSrc, tDst, conv.Parent()))
}
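
// In summary, a sketch of the conversions modeled above:
//
//	*T -> unsafe.Pointer     ignored (unsound)
//	unsafe.Pointer -> *T     fresh unaliased object, like new(T)
//	string -> []byte/[]rune  fresh unaliased array object
//	[]byte/[]rune -> string  no constraints needed
//	basic -> basic           no-op (including uintptr <-> unsafe.Pointer)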

// genAppend generates constraints for a call to append.
func (a *analysis) genAppend(instr *ssa.Call, cgn *cgnode) {
	// Consider z = append(x, y).  y is optional.
	// This may allocate a new [1]T array; call its object w.
	// We get the following constraints:
	// 	 z = x
	// 	 z = &w
	// 	*z = *y

	x := instr.Call.Args[0]

	z := instr
	a.copy(a.valueNode(z), a.valueNode(x), 1) // z = x

	if len(instr.Call.Args) == 1 {
		return // no allocation for z = append(x) or _ = append(x).
	}

	// TODO(adonovan): test append([]byte, ...string) []byte.

	y := instr.Call.Args[1]
	tArray := sliceToArray(instr.Call.Args[0].Type())

	w := a.nextNode()
	a.addNodes(tArray, "append")
	a.endObject(w, cgn, instr)

	a.copyElems(cgn, tArray.Elem(), z, y)        // *z = *y
	a.addressOf(instr.Type(), a.valueNode(z), w) //  z = &w
}

// genBuiltinCall generates constraints for a call to a built-in.
func (a *analysis) genBuiltinCall(instr ssa.CallInstruction, cgn *cgnode) {
	call := instr.Common()
	switch call.Value.(*ssa.Builtin).Name() {
	case "append":
		// Safe type assertion: append cannot appear in a go or defer statement.
		a.genAppend(instr.(*ssa.Call), cgn)

	case "copy":
		tElem := call.Args[0].Type().Underlying().(*types.Slice).Elem()
		a.copyElems(cgn, tElem, call.Args[0], call.Args[1])

	case "panic":
		a.copy(a.panicNode, a.valueNode(call.Args[0]), 1)

	case "recover":
		if v := instr.Value(); v != nil {
			a.copy(a.valueNode(v), a.panicNode, 1)
		}

	case "print":
		// In the tests, the probe might be the sole reference
		// to its arg, so make sure we create nodes for it.
		if len(call.Args) > 0 {
			a.valueNode(call.Args[0])
		}

	case "ssa:wrapnilchk":
		a.copy(a.valueNode(instr.Value()), a.valueNode(call.Args[0]), 1)

	default:
		// No-ops: close len cap real imag complex println delete.
	}
}

// shouldUseContext defines the context-sensitivity policy.  It
// returns true if we should analyse all static calls to fn anew.
//
// Obviously this interface rather limits how much freedom we have to
// choose a policy.  The current policy, rather arbitrarily, is true
// for intrinsics and accessor methods (actually: short, single-block,
// call-free functions).  This is just a starting point.
//
func (a *analysis) shouldUseContext(fn *ssa.Function) bool {
	if a.findIntrinsic(fn) != nil {
		return true // treat intrinsics context-sensitively
	}
	if len(fn.Blocks) != 1 {
		return false // too expensive
	}
	blk := fn.Blocks[0]
	if len(blk.Instrs) > 10 {
		return false // too expensive
	}
	if fn.Synthetic != "" && (fn.Pkg == nil || fn != fn.Pkg.Func("init")) {
		return true // treat synthetic wrappers context-sensitively
	}
	for _, instr := range blk.Instrs {
		switch instr := instr.(type) {
		case ssa.CallInstruction:
			// Disallow function calls (except to built-ins)
			// because of the danger of unbounded recursion.
			if _, ok := instr.Common().Value.(*ssa.Builtin); !ok {
				return false
			}
		}
	}
	return true
}
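
// For example (hypothetical), an accessor such as
//
//	func (t *T) Name() string { return t.name }
//
// is short, single-block and call-free, so each static call to it gets
// a fresh contour; a function containing any non-built-in call is
// analysed with the shared contour instead.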

// genStaticCall generates constraints for a statically dispatched function call.
func (a *analysis) genStaticCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	fn := call.StaticCallee()

	// Special cases for inlined intrinsics.
	switch fn {
	case a.runtimeSetFinalizer:
		// Inline SetFinalizer so the call appears direct.
		site.targets = a.addOneNode(tInvalid, "SetFinalizer.targets", nil)
		a.addConstraint(&runtimeSetFinalizerConstraint{
			targets: site.targets,
			x:       a.valueNode(call.Args[0]),
			f:       a.valueNode(call.Args[1]),
		})
		return

	case a.reflectValueCall:
		// Inline (reflect.Value).Call so the call appears direct.
		dotdotdot := false
		ret := reflectCallImpl(a, caller, site, a.valueNode(call.Args[0]), a.valueNode(call.Args[1]), dotdotdot)
		if result != 0 {
			a.addressOf(fn.Signature.Results().At(0).Type(), result, ret)
		}
		return
	}

	// Ascertain the context (contour/cgnode) for a particular call.
	var obj nodeid
	if a.shouldUseContext(fn) {
		obj = a.makeFunctionObject(fn, site) // new contour
	} else {
		obj = a.objectNode(nil, fn) // shared contour
	}
	a.callEdge(caller, site, obj)

	sig := call.Signature()

	// Copy receiver, if any.
	params := a.funcParams(obj)
	args := call.Args
	if sig.Recv() != nil {
		sz := a.sizeof(sig.Recv().Type())
		a.copy(params, a.valueNode(args[0]), sz)
		params += nodeid(sz)
		args = args[1:]
	}

	// Copy actual parameters into formal params block.
	// Must loop, since the actuals aren't contiguous.
	for i, arg := range args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(params, a.valueNode(arg), sz)
		params += nodeid(sz)
	}

	// Copy formal results block to actual result.
	if result != 0 {
		a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
	}
}

// genDynamicCall generates constraints for a dynamic function call.
func (a *analysis) genDynamicCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	// pts(targets) will be the set of possible call targets.
	site.targets = a.valueNode(call.Value)

	// We add dynamic closure rules that store the arguments into
	// the P-block and load the results from the R-block of each
	// function discovered in pts(targets).

	sig := call.Signature()
	var offset uint32 = 1 // P/R block starts at offset 1
	for i, arg := range call.Args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.genStore(caller, call.Value, a.valueNode(arg), offset, sz)
		offset += sz
	}
	if result != 0 {
		a.genLoad(caller, result, call.Value, offset, a.sizeof(sig.Results()))
	}
}
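
// Schematically, for a dynamic call f(a, b) with one result r, the
// rules above amount to the following, for each function object fobj
// that the solver discovers in pts(f):
//
//	fobj[1] = a                       // P-block starts at offset 1
//	fobj[1+sizeof(a)] = b             // (offset 0 is the identity node)
//	r = fobj[1+sizeof(a)+sizeof(b)]   // R-block follows the P-block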

// genInvoke generates constraints for a dynamic method invocation.
func (a *analysis) genInvoke(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	if call.Value.Type() == a.reflectType {
		a.genInvokeReflectType(caller, site, call, result)
		return
	}

	sig := call.Signature()

	// Allocate a contiguous targets/params/results block for this call.
	block := a.nextNode()
	// pts(targets) will be the set of possible call targets.
	site.targets = a.addOneNode(sig, "invoke.targets", nil)
	p := a.addNodes(sig.Params(), "invoke.params")
	r := a.addNodes(sig.Results(), "invoke.results")

	// Copy the actual parameters into the call's params block.
	for i, n := 0, sig.Params().Len(); i < n; i++ {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(p, a.valueNode(call.Args[i]), sz)
		p += nodeid(sz)
	}
	// Copy the call's results block to the actual results.
	if result != 0 {
		a.copy(result, r, a.sizeof(sig.Results()))
	}

	// We add a dynamic invoke constraint that will connect the
	// caller's and the callee's P/R blocks for each discovered
	// call target.
	a.addConstraint(&invokeConstraint{call.Method, a.valueNode(call.Value), block})
}

// genInvokeReflectType is a specialization of genInvoke where the
// receiver type is a reflect.Type, under the assumption that there
// can be at most one implementation of this interface, *reflect.rtype.
//
// (Though this may appear to be an instance of a pattern---method
// calls on interfaces known to have exactly one implementation---in
// practice it occurs rarely, so we special-case for reflect.Type.)
//
// In effect we treat this:
//    var rt reflect.Type = ...
//    rt.F()
// as this:
//    rt.(*reflect.rtype).F()
//
func (a *analysis) genInvokeReflectType(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	// Unpack receiver into rtype.
	rtype := a.addOneNode(a.reflectRtypePtr, "rtype.recv", nil)
	recv := a.valueNode(call.Value)
	a.typeAssert(a.reflectRtypePtr, rtype, recv, true)

	// Look up the concrete method.
	fn := a.prog.LookupMethod(a.reflectRtypePtr, call.Method.Pkg(), call.Method.Name())

	obj := a.makeFunctionObject(fn, site) // new contour for this call
	a.callEdge(caller, site, obj)

	// From now on, it's essentially a static call, but little is
	// gained by factoring together the code for both cases.

	sig := fn.Signature // concrete method
	targets := a.addOneNode(sig, "call.targets", nil)
	a.addressOf(sig, targets, obj) // (a singleton)

	// Copy receiver.
	params := a.funcParams(obj)
	a.copy(params, rtype, 1)
	params++

	// Copy actual parameters into formal P-block.
	// Must loop, since the actuals aren't contiguous.
	for i, arg := range call.Args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(params, a.valueNode(arg), sz)
		params += nodeid(sz)
	}

	// Copy formal R-block to actual R-block.
	if result != 0 {
		a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
	}
}

// genCall generates constraints for call instruction instr.
func (a *analysis) genCall(caller *cgnode, instr ssa.CallInstruction) {
	call := instr.Common()

	// Intrinsic implementations of built-in functions.
	if _, ok := call.Value.(*ssa.Builtin); ok {
		a.genBuiltinCall(instr, caller)
		return
	}

	var result nodeid
	if v := instr.Value(); v != nil {
		result = a.valueNode(v)
	}

	site := &callsite{instr: instr}
	if call.StaticCallee() != nil {
		a.genStaticCall(caller, site, call, result)
	} else if call.IsInvoke() {
		a.genInvoke(caller, site, call, result)
	} else {
		a.genDynamicCall(caller, site, call, result)
	}

	caller.sites = append(caller.sites, site)

	if a.log != nil {
		// TODO(adonovan): debug: improve log message.
		fmt.Fprintf(a.log, "\t%s to targets %s from %s\n", site, site.targets, caller)
	}
}

// objectNode returns the object to which v points, if known.
// In other words, if the points-to set of v is a singleton, it
// returns the sole label, zero otherwise.
//
// We exploit this information to make the generated constraints less
// dynamic.  For example, a complex load constraint can be replaced by
// a simple copy constraint when the sole destination is known a priori.
//
// Some SSA instructions always have singleton points-to sets:
// 	Alloc, Function, Global, MakeChan, MakeClosure, MakeInterface, MakeMap, MakeSlice.
// Others may be singletons depending on their operands:
// 	FreeVar, Const, Convert, FieldAddr, IndexAddr, Slice.
//
// Idempotent.  Objects are created as needed, possibly via recursion
// down the SSA value graph, e.g. IndexAddr(FieldAddr(Alloc)).
//
func (a *analysis) objectNode(cgn *cgnode, v ssa.Value) nodeid {
	switch v.(type) {
	case *ssa.Global, *ssa.Function, *ssa.Const, *ssa.FreeVar:
		// Global object.
		obj, ok := a.globalobj[v]
		if !ok {
			switch v := v.(type) {
			case *ssa.Global:
				obj = a.nextNode()
				a.addNodes(mustDeref(v.Type()), "global")
				a.endObject(obj, nil, v)

			case *ssa.Function:
				obj = a.makeFunctionObject(v, nil)

			case *ssa.Const:
				// not addressable

			case *ssa.FreeVar:
				// not addressable
			}

			if a.log != nil {
				fmt.Fprintf(a.log, "\tglobalobj[%s] = n%d\n", v, obj)
			}
			a.globalobj[v] = obj
		}
		return obj
	}

	// Local object.
	obj, ok := a.localobj[v]
	if !ok {
		switch v := v.(type) {
		case *ssa.Alloc:
			obj = a.nextNode()
			a.addNodes(mustDeref(v.Type()), "alloc")
			a.endObject(obj, cgn, v)

		case *ssa.MakeSlice:
			obj = a.nextNode()
			a.addNodes(sliceToArray(v.Type()), "makeslice")
			a.endObject(obj, cgn, v)

		case *ssa.MakeChan:
			obj = a.nextNode()
			a.addNodes(v.Type().Underlying().(*types.Chan).Elem(), "makechan")
			a.endObject(obj, cgn, v)

		case *ssa.MakeMap:
			obj = a.nextNode()
			tmap := v.Type().Underlying().(*types.Map)
			a.addNodes(tmap.Key(), "makemap.key")
			elem := a.addNodes(tmap.Elem(), "makemap.value")

			// To update the value field, MapUpdate
			// generates store-with-offset constraints which
			// the presolver can't model, so we must mark
			// those nodes indirect.
			for id, end := elem, elem+nodeid(a.sizeof(tmap.Elem())); id < end; id++ {
				a.mapValues = append(a.mapValues, id)
			}
			a.endObject(obj, cgn, v)

		case *ssa.MakeInterface:
			tConc := v.X.Type()
			obj = a.makeTagged(tConc, cgn, v)

			// Copy the value into it, if nontrivial.
			if x := a.valueNode(v.X); x != 0 {
				a.copy(obj+1, x, a.sizeof(tConc))
			}

		case *ssa.FieldAddr:
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + nodeid(a.offsetOf(mustDeref(v.X.Type()), v.Field))
			}

		case *ssa.IndexAddr:
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + 1
			}

		case *ssa.Slice:
			obj = a.objectNode(cgn, v.X)

		case *ssa.Convert:
			// TODO(adonovan): opt: handle these cases too:
			// - unsafe.Pointer->*T conversion acts like Alloc
			// - string->[]byte/[]rune conversion acts like MakeSlice
		}

		if a.log != nil {
			fmt.Fprintf(a.log, "\tlocalobj[%s] = n%d\n", v.Name(), obj)
		}
		a.localobj[v] = obj
	}
	return obj
}
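
// For example (hypothetical SSA values):
//
//	t := new([4]*int) // *ssa.Alloc:     a fresh object, say obj
//	p := &t[i]        // *ssa.IndexAddr: obj + 1 (skipping the array's identity node)
//
// objectNode resolves p to a known singleton label, so the gen* helpers
// below can emit simple copy/addressOf constraints in place of the more
// dynamic load/store/offsetAddr ones.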

// genLoad generates constraints for result = *(ptr + offset).
func (a *analysis) genLoad(cgn *cgnode, result nodeid, ptr ssa.Value, offset, sizeof uint32) {
	if obj := a.objectNode(cgn, ptr); obj != 0 {
		// Pre-apply loadConstraint.solve().
		a.copy(result, obj+nodeid(offset), sizeof)
	} else {
		a.load(result, a.valueNode(ptr), offset, sizeof)
	}
}

// genOffsetAddr generates constraints for a 'v=ptr.field' (FieldAddr)
// or 'v=ptr[*]' (IndexAddr) instruction v.
func (a *analysis) genOffsetAddr(cgn *cgnode, v ssa.Value, ptr nodeid, offset uint32) {
	dst := a.valueNode(v)
	if obj := a.objectNode(cgn, v); obj != 0 {
		// Pre-apply offsetAddrConstraint.solve().
		a.addressOf(v.Type(), dst, obj)
	} else {
		a.offsetAddr(v.Type(), dst, ptr, offset)
	}
}

// genStore generates constraints for *(ptr + offset) = val.
func (a *analysis) genStore(cgn *cgnode, ptr ssa.Value, val nodeid, offset, sizeof uint32) {
	if obj := a.objectNode(cgn, ptr); obj != 0 {
		// Pre-apply storeConstraint.solve().
		a.copy(obj+nodeid(offset), val, sizeof)
	} else {
		a.store(a.valueNode(ptr), val, offset, sizeof)
	}
}
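
// For example, a store *g = v through a global g: objectNode resolves g
// to its singleton object obj, so genStore emits copy(obj, v, sizeof)
// directly instead of a storeConstraint for the solver to resolve.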

// genInstr generates constraints for instruction instr in context cgn.
func (a *analysis) genInstr(cgn *cgnode, instr ssa.Instruction) {
	if a.log != nil {
		var prefix string
		if val, ok := instr.(ssa.Value); ok {
			prefix = val.Name() + " = "
		}
		fmt.Fprintf(a.log, "; %s%s\n", prefix, instr)
	}

	switch instr := instr.(type) {
	case *ssa.DebugRef:
		// no-op.

	case *ssa.UnOp:
		switch instr.Op {
		case token.ARROW: // <-x
			// We can ignore instr.CommaOk because the node we're
			// altering is always at zero offset relative to instr.
			tElem := instr.X.Type().Underlying().(*types.Chan).Elem()
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(tElem))

		case token.MUL: // *x
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(instr.Type()))

		default:
			// NOT, SUB, XOR: no-op.
		}

	case *ssa.BinOp:
		// All no-ops.

	case ssa.CallInstruction: // *ssa.Call, *ssa.Go, *ssa.Defer
		a.genCall(cgn, instr)

	case *ssa.ChangeType:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.Convert:
		a.genConv(instr, cgn)

	case *ssa.Extract:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.Tuple, instr.Index),
			a.sizeof(instr.Type()))

	case *ssa.FieldAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X),
			a.offsetOf(mustDeref(instr.X.Type()), instr.Field))

	case *ssa.IndexAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X), 1)

	case *ssa.Field:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.X, instr.Field),
			a.sizeof(instr.Type()))

	case *ssa.Index:
		a.copy(a.valueNode(instr), 1+a.valueNode(instr.X), a.sizeof(instr.Type()))

	case *ssa.Select:
		recv := a.valueOffsetNode(instr, 2) // instr: (index, recvOk, recv0, ..., recv_n-1)
		for _, st := range instr.States {
			elemSize := a.sizeof(st.Chan.Type().Underlying().(*types.Chan).Elem())
			switch st.Dir {
			case types.RecvOnly:
				a.genLoad(cgn, recv, st.Chan, 0, elemSize)
				recv += nodeid(elemSize)

			case types.SendOnly:
				a.genStore(cgn, st.Chan, a.valueNode(st.Send), 0, elemSize)
			}
		}

	case *ssa.Return:
		results := a.funcResults(cgn.obj)
		for _, r := range instr.Results {
			sz := a.sizeof(r.Type())
			a.copy(results, a.valueNode(r), sz)
			results += nodeid(sz)
		}

	case *ssa.Send:
		a.genStore(cgn, instr.Chan, a.valueNode(instr.X), 0, a.sizeof(instr.X.Type()))

	case *ssa.Store:
		a.genStore(cgn, instr.Addr, a.valueNode(instr.Val), 0, a.sizeof(instr.Val.Type()))

	case *ssa.Alloc, *ssa.MakeSlice, *ssa.MakeChan, *ssa.MakeMap, *ssa.MakeInterface:
		v := instr.(ssa.Value)
		a.addressOf(v.Type(), a.valueNode(v), a.objectNode(cgn, v))

	case *ssa.ChangeInterface:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.TypeAssert:
		a.typeAssert(instr.AssertedType, a.valueNode(instr), a.valueNode(instr.X), true)

	case *ssa.Slice:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.If, *ssa.Jump:
		// no-op.

	case *ssa.Phi:
		sz := a.sizeof(instr.Type())
		for _, e := range instr.Edges {
			a.copy(a.valueNode(instr), a.valueNode(e), sz)
		}

	case *ssa.MakeClosure:
		fn := instr.Fn.(*ssa.Function)
		a.copy(a.valueNode(instr), a.valueNode(fn), 1)
		// Free variables are treated like global variables.
		for i, b := range instr.Bindings {
			a.copy(a.valueNode(fn.FreeVars[i]), a.valueNode(b), a.sizeof(b.Type()))
		}

	case *ssa.RunDefers:
		// The analysis is flow insensitive, so we just "call"
		// defers as we encounter them.

	case *ssa.Range:
		// Do nothing.  Next{Iter: *ssa.Range} handles this case.

	case *ssa.Next:
		if !instr.IsString { // map
			// Assumes that Next is always directly applied to a Range result.
			theMap := instr.Iter.(*ssa.Range).X
			tMap := theMap.Type().Underlying().(*types.Map)

			ksize := a.sizeof(tMap.Key())
			vsize := a.sizeof(tMap.Elem())

			// The k/v components of the Next tuple may each be invalid.
			tTuple := instr.Type().(*types.Tuple)

			// Load from the map's (k,v) into the tuple's (ok, k, v).
			osrc := uint32(0) // offset within map object
			odst := uint32(1) // offset within tuple (initially just after 'ok bool')
			sz := uint32(0)   // amount to copy

			// Is key valid?
			if tTuple.At(1).Type() != tInvalid {
				sz += ksize
			} else {
				odst += ksize
				osrc += ksize
			}

			// Is value valid?
			if tTuple.At(2).Type() != tInvalid {
				sz += vsize
			}

			a.genLoad(cgn, a.valueNode(instr)+nodeid(odst), theMap, osrc, sz)
		}

	case *ssa.Lookup:
		if tMap, ok := instr.X.Type().Underlying().(*types.Map); ok {
			// CommaOk can be ignored: field 0 is a no-op.
			ksize := a.sizeof(tMap.Key())
			vsize := a.sizeof(tMap.Elem())
			a.genLoad(cgn, a.valueNode(instr), instr.X, ksize, vsize)
		}

	case *ssa.MapUpdate:
		tmap := instr.Map.Type().Underlying().(*types.Map)
		ksize := a.sizeof(tmap.Key())
		vsize := a.sizeof(tmap.Elem())
		a.genStore(cgn, instr.Map, a.valueNode(instr.Key), 0, ksize)
		a.genStore(cgn, instr.Map, a.valueNode(instr.Value), ksize, vsize)

	case *ssa.Panic:
		a.copy(a.panicNode, a.valueNode(instr.X), 1)

	default:
		panic(fmt.Sprintf("unimplemented: %T", instr))
	}
}

func (a *analysis) makeCGNode(fn *ssa.Function, obj nodeid, callersite *callsite) *cgnode {
	cgn := &cgnode{fn: fn, obj: obj, callersite: callersite}
	a.cgnodes = append(a.cgnodes, cgn)
	return cgn
}

// genRootCalls generates the synthetic root of the callgraph and the
// initial calls from it to the analysis scope, such as main, a test
// or a library.
//
func (a *analysis) genRootCalls() *cgnode {
	r := a.prog.NewFunction("<root>", new(types.Signature), "root of callgraph")
	root := a.makeCGNode(r, 0, nil)

	// TODO(adonovan): make an ssa utility to construct an actual
	// root function so we don't need to special-case site-less
	// call edges.

	// For each main package, call main.init(), main.main().
	for _, mainPkg := range a.config.Mains {
		main := mainPkg.Func("main")
		if main == nil {
			panic(fmt.Sprintf("%s has no main function", mainPkg))
		}

		targets := a.addOneNode(main.Signature, "root.targets", nil)
		site := &callsite{targets: targets}
		root.sites = append(root.sites, site)
		for _, fn := range [2]*ssa.Function{mainPkg.Func("init"), main} {
			if a.log != nil {
				fmt.Fprintf(a.log, "\troot call to %s:\n", fn)
			}
			a.copy(targets, a.valueNode(fn), 1)
		}
	}

	return root
}

// genFunc generates constraints for function fn.
func (a *analysis) genFunc(cgn *cgnode) {
	fn := cgn.fn

	impl := a.findIntrinsic(fn)

	if a.log != nil {
		fmt.Fprintf(a.log, "\n\n==== Generating constraints for %s, %s\n", cgn, cgn.contour())

		// Hack: don't display body if intrinsic.
		if impl != nil {
			fn2 := *cgn.fn // copy
			fn2.Locals = nil
			fn2.Blocks = nil
			fn2.WriteTo(a.log)
		} else {
			cgn.fn.WriteTo(a.log)
		}
	}

	if impl != nil {
		impl(a, cgn)
		return
	}

	if fn.Blocks == nil {
		// External function with no intrinsic treatment.
		// We'll warn about calls to such functions at the end.
		return
	}

	if a.log != nil {
		fmt.Fprintln(a.log, "; Creating nodes for local values")
	}

	a.localval = make(map[ssa.Value]nodeid)
	a.localobj = make(map[ssa.Value]nodeid)

	// The value nodes for the params are in the func object block.
	params := a.funcParams(cgn.obj)
	for _, p := range fn.Params {
		a.setValueNode(p, params, cgn)
		params += nodeid(a.sizeof(p.Type()))
	}

	// Free variables have global cardinality:
	// the outer function sets them with MakeClosure;
	// the inner function accesses them with FreeVar.
	//
	// TODO(adonovan): treat free vars context-sensitively.

	// Create value nodes for all value instructions
	// since SSA may contain forward references.
	var space [10]*ssa.Value
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			switch instr := instr.(type) {
			case *ssa.Range:
				// do nothing: it has a funky type,
				// and *ssa.Next does all the work.

			case ssa.Value:
				var comment string
				if a.log != nil {
					comment = instr.Name()
				}
				id := a.addNodes(instr.Type(), comment)
				a.setValueNode(instr, id, cgn)
			}

			// Record all address-taken functions (for presolver).
			rands := instr.Operands(space[:0])
			if call, ok := instr.(ssa.CallInstruction); ok && !call.Common().IsInvoke() {
				// Skip CallCommon.Value in "call" mode.
				// TODO(adonovan): fix: relies on unspecified ordering.  Specify it.
				rands = rands[1:]
			}
			for _, rand := range rands {
				if atf, ok := (*rand).(*ssa.Function); ok {
					a.atFuncs[atf] = true
				}
			}
		}
	}

	// Generate constraints for instructions.
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			a.genInstr(cgn, instr)
		}
	}

	a.localval = nil
	a.localobj = nil
}

// genMethodsOf generates nodes and constraints for all methods of type T.
func (a *analysis) genMethodsOf(T types.Type) {
	itf := isInterface(T)

	// TODO(adonovan): can we skip this entirely if itf is true?
	// I think so, but the answer may depend on reflection.
	mset := a.prog.MethodSets.MethodSet(T)
	for i, n := 0, mset.Len(); i < n; i++ {
		m := a.prog.MethodValue(mset.At(i))
		a.valueNode(m)

		if !itf {
			// Methods of concrete types are address-taken functions.
			a.atFuncs[m] = true
		}
	}
}

// generate generates offline constraints for the entire program.
func (a *analysis) generate() {
	start("Constraint generation")
	if a.log != nil {
		fmt.Fprintln(a.log, "==== Generating constraints")
	}

	// Create a dummy node since we use the nodeid 0 for
	// non-pointerlike variables.
	a.addNodes(tInvalid, "(zero)")

	// Create the global node for panic values.
	a.panicNode = a.addNodes(tEface, "panic")

	// Create nodes and constraints for all methods of reflect.rtype.
	// (Shared contours are used by dynamic calls to reflect.Type
	// methods---typically just String().)
	if rtype := a.reflectRtypePtr; rtype != nil {
		a.genMethodsOf(rtype)
	}

	root := a.genRootCalls()

	if a.config.BuildCallGraph {
		a.result.CallGraph = callgraph.New(root.fn)
	}

	// Create nodes and constraints for all methods of all types
	// that are dynamically accessible via reflection or interfaces.
	for _, T := range a.prog.RuntimeTypes() {
		a.genMethodsOf(T)
	}

	// Generate constraints for functions as they become reachable
	// from the roots.  (No constraints are generated for functions
	// that are dead in this analysis scope.)
	for len(a.genq) > 0 {
		cgn := a.genq[0]
		a.genq = a.genq[1:]
		a.genFunc(cgn)
	}

	// The runtime magically allocates os.Args; so should we.
	if os := a.prog.ImportedPackage("os"); os != nil {
		// In effect:  os.Args = new([1]string)[:]
		T := types.NewSlice(types.Typ[types.String])
		obj := a.addNodes(sliceToArray(T), "<command-line args>")
		a.endObject(obj, nil, "<command-line args>")
		a.addressOf(T, a.objectNode(nil, os.Var("Args")), obj)
	}

	// Discard generation state, to avoid confusion after node renumbering.
	a.panicNode = 0
	a.globalval = nil
	a.localval = nil
	a.localobj = nil

	stop("Constraint generation")
}