// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
	"errors"
	"fmt"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/src"
)

// The constant is known to runtime.
const tmpstringbufsize = 32
const zeroValSize = 1024 // must match value of runtime/map.go:maxZero

// Walk rewrites the statements in fn's body into a lower-level form
// suitable for SSA generation: it runs the order pass, walks every
// statement, instruments the function if requested, and finally
// computes the sizes of all declared variables.
func Walk(fn *ir.Func) {
	ir.CurFunc = fn
	errorsBefore := base.Errors()
	order(fn)
	if base.Errors() > errorsBefore {
		return
	}

	if base.Flag.W != 0 {
		s := fmt.Sprintf("\nbefore walk %v", ir.CurFunc.Sym())
		ir.DumpList(s, ir.CurFunc.Body)
	}

	lno := base.Pos

	base.Pos = lno
	if base.Errors() > errorsBefore {
		return
	}
	walkStmtList(ir.CurFunc.Body)
	if base.Flag.W != 0 {
		s := fmt.Sprintf("after walk %v", ir.CurFunc.Sym())
		ir.DumpList(s, ir.CurFunc.Body)
	}

	if base.Flag.Cfg.Instrumenting {
		instrument(fn)
	}

	// Eagerly compute sizes of all variables for SSA.
	for _, n := range fn.Dcl {
		types.CalcSize(n.Type())
	}
}

// walkRecv walks an ORECV node.
func walkRecv(n *ir.UnaryExpr) ir.Node {
	if n.Typecheck() == 0 {
		base.Fatalf("missing typecheck: %+v", n)
	}
	init := ir.TakeInit(n)

	n.X = walkExpr(n.X, &init)
	call := walkExpr(mkcall1(chanfn("chanrecv1", 2, n.X.Type()), nil, &init, n.X, typecheck.NodNil()), &init)
	return ir.InitExpr(init, call)
}
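
// For reference, the lowering above turns a receive statement such as
//
//	<-c
//
// into (roughly) a call to the runtime:
//
//	chanrecv1(c, nil)
//
// where the nil element pointer tells chanrecv1 to discard the
// received value.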

// convas lowers the single assignment n: when the types of n.X and n.Y
// differ, it inserts the assignment conversion and walks the converted
// RHS, appending any side effects to init.
func convas(n *ir.AssignStmt, init *ir.Nodes) *ir.AssignStmt {
	if n.Op() != ir.OAS {
		base.Fatalf("convas: not OAS %v", n.Op())
	}
	n.SetTypecheck(1)

	if n.X == nil || n.Y == nil {
		return n
	}

	lt := n.X.Type()
	rt := n.Y.Type()
	if lt == nil || rt == nil {
		return n
	}

	if ir.IsBlank(n.X) {
		n.Y = typecheck.DefaultLit(n.Y, nil)
		return n
	}

	if !types.Identical(lt, rt) {
		n.Y = typecheck.AssignConv(n.Y, lt, "assignment")
		n.Y = walkExpr(n.Y, init)
	}
	types.CalcSize(n.Y.Type())

	return n
}

var stop = errors.New("stop")

// vmkcall verifies that fn is a function of the right arity, builds
// the call fn(va...), gives it result type t, and returns the
// typechecked, walked call, appending any side effects to init.
func vmkcall(fn ir.Node, t *types.Type, init *ir.Nodes, va []ir.Node) *ir.CallExpr {
	if init == nil {
		base.Fatalf("mkcall with nil init: %v", fn)
	}
	if fn.Type() == nil || fn.Type().Kind() != types.TFUNC {
		base.Fatalf("mkcall %v %v", fn, fn.Type())
	}

	n := fn.Type().NumParams()
	if n != len(va) {
		base.Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
	}

	call := typecheck.Call(base.Pos, fn, va, false).(*ir.CallExpr)
	call.SetType(t)
	return walkExpr(call, init).(*ir.CallExpr)
}

// mkcall builds a call to the runtime function with the given name.
func mkcall(name string, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
	return vmkcall(typecheck.LookupRuntime(name), t, init, args)
}

// mkcallstmt builds a call statement to the named runtime function.
func mkcallstmt(name string, args ...ir.Node) ir.Node {
	return mkcallstmt1(typecheck.LookupRuntime(name), args...)
}

// mkcall1 is like mkcall but takes an already-resolved function node.
func mkcall1(fn ir.Node, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
	return vmkcall(fn, t, init, args)
}

// mkcallstmt1 builds a call statement for fn, wrapping the call
// together with any init statements it produced in a block if needed.
func mkcallstmt1(fn ir.Node, args ...ir.Node) ir.Node {
	var init ir.Nodes
	n := vmkcall(fn, nil, &init, args)
	if len(init) == 0 {
		return n
	}
	init.Append(n)
	return ir.NewBlockStmt(n.Pos(), init)
}
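
// Illustrative use of these helpers (a sketch; the real call sites live
// in other walk files): lowering println(x) for an integer x builds a
// sequence roughly like
//
//	mkcall("printlock", nil, init)
//	mkcall("printint", nil, init, x)
//	mkcall("printnl", nil, init)
//	mkcall("printunlock", nil, init)
//
// where printlock, printint, printnl, and printunlock are runtime
// functions looked up by name.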

// chanfn returns the runtime function with the given name, specialized
// for channel type t; n is the number of placeholder argument types to
// substitute with t's element type.
func chanfn(name string, n int, t *types.Type) ir.Node {
	if !t.IsChan() {
		base.Fatalf("chanfn %v", t)
	}
	fn := typecheck.LookupRuntime(name)
	switch n {
	default:
		base.Fatalf("chanfn %d", n)
	case 1:
		fn = typecheck.SubstArgTypes(fn, t.Elem())
	case 2:
		fn = typecheck.SubstArgTypes(fn, t.Elem(), t.Elem())
	}
	return fn
}

// mapfn returns the runtime map function with the given name,
// specialized for map type t. The fast variants need fewer type
// substitutions; isfat forces the fully substituted (slow-signature)
// form.
func mapfn(name string, t *types.Type, isfat bool) ir.Node {
	if !t.IsMap() {
		base.Fatalf("mapfn %v", t)
	}
	fn := typecheck.LookupRuntime(name)
	if mapfast(t) == mapslow || isfat {
		fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem(), t.Key(), t.Elem())
	} else {
		fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem(), t.Elem())
	}
	return fn
}

// mapfndel is like mapfn but for the mapdelete helpers, which need one
// fewer type substitution.
func mapfndel(name string, t *types.Type) ir.Node {
	if !t.IsMap() {
		base.Fatalf("mapfndel %v", t)
	}
	fn := typecheck.LookupRuntime(name)
	if mapfast(t) == mapslow {
		fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem(), t.Key())
	} else {
		fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem())
	}
	return fn
}

const (
	mapslow = iota
	mapfast32
	mapfast32ptr
	mapfast64
	mapfast64ptr
	mapfaststr
	nmapfast
)

type mapnames [nmapfast]string

func mkmapnames(base string, ptr string) mapnames {
	return mapnames{base, base + "_fast32", base + "_fast32" + ptr, base + "_fast64", base + "_fast64" + ptr, base + "_faststr"}
}

var mapaccess1 = mkmapnames("mapaccess1", "")
var mapaccess2 = mkmapnames("mapaccess2", "")
var mapassign = mkmapnames("mapassign", "ptr")
var mapdelete = mkmapnames("mapdelete", "")
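
// For example, mapassign above expands to the runtime entry points
// mapassign, mapassign_fast32, mapassign_fast32ptr, mapassign_fast64,
// mapassign_fast64ptr, and mapassign_faststr. The access and delete
// tables pass an empty ptr suffix, so their pointer-key slots simply
// repeat the non-pointer names.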

// mapfast returns which fast-path variant (one of the constants above)
// to use for operations on map type t.
func mapfast(t *types.Type) int {
	// Check runtime/map.go:maxElemSize before changing.
	if t.Elem().Size() > 128 {
		return mapslow
	}
	switch reflectdata.AlgType(t.Key()) {
	case types.AMEM32:
		if !t.Key().HasPointers() {
			return mapfast32
		}
		if types.PtrSize == 4 {
			return mapfast32ptr
		}
		base.Fatalf("small pointer %v", t.Key())
	case types.AMEM64:
		if !t.Key().HasPointers() {
			return mapfast64
		}
		if types.PtrSize == 8 {
			return mapfast64ptr
		}
		// Two-word object, at least one of which is a pointer.
		// Use the slow path.
	case types.ASTRING:
		return mapfaststr
	}
	return mapslow
}
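
// A few concrete cases (on a 64-bit target, with elements no larger
// than 128 bytes): map[int64]T selects mapfast64, map[string]T selects
// mapfaststr, and map[*int]T selects mapfast64ptr; any map whose
// element exceeds 128 bytes falls back to mapslow.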

func walkAppendArgs(n *ir.CallExpr, init *ir.Nodes) {
	walkExprListSafe(n.Args, init)

	// walkExprListSafe will leave OINDEX (s[n]) alone if both s
	// and n are names or literals, but those may index the slice we're
	// modifying here. Fix explicitly.
	ls := n.Args
	for i1, n1 := range ls {
		ls[i1] = cheapExpr(n1, init)
	}
}

// appendWalkStmt typechecks and walks stmt and then appends it to init.
func appendWalkStmt(init *ir.Nodes, stmt ir.Node) {
	op := stmt.Op()
	n := typecheck.Stmt(stmt)
	if op == ir.OAS || op == ir.OAS2 {
		// If the assignment has side effects, walkExpr will append them
		// directly to init for us, while walkStmt will wrap it in an OBLOCK.
		// We need to append them directly.
		// TODO(rsc): Clean this up.
		n = walkExpr(n, init)
	} else {
		n = walkStmt(n)
	}
	init.Append(n)
}

// maxOpenDefers is the maximum number of defers in a function using
// open-coded defers. We enforce this limit because the deferBits
// bitmask is currently a single byte (to minimize code size).
const maxOpenDefers = 8

// backingArrayPtrLen extracts the pointer and length from a slice or string.
// This constructs two nodes referring to n, so n must be a cheapExpr.
func backingArrayPtrLen(n ir.Node) (ptr, length ir.Node) {
	var init ir.Nodes
	c := cheapExpr(n, &init)
	if c != n || len(init) != 0 {
		base.Fatalf("backingArrayPtrLen not cheap: %v", n)
	}
	ptr = ir.NewUnaryExpr(base.Pos, ir.OSPTR, n)
	if n.Type().IsString() {
		ptr.SetType(types.Types[types.TUINT8].PtrTo())
	} else {
		ptr.SetType(n.Type().Elem().PtrTo())
	}
	length = ir.NewUnaryExpr(base.Pos, ir.OLEN, n)
	length.SetType(types.Types[types.TINT])
	return ptr, length
}
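
// The two nodes above are direct views of n's header: for a slice s,
// ptr and length correspond to the data-pointer and len fields of the
// slice header, read without a bounds check and without copying the
// backing array.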

// mayCall reports whether evaluating expression n may require
// function calls, which could clobber function call arguments/results
// currently on the stack.
func mayCall(n ir.Node) bool {
	// When instrumenting, any expression might require function calls.
	if base.Flag.Cfg.Instrumenting {
		return true
	}

	isSoftFloat := func(typ *types.Type) bool {
		return types.IsFloat[typ.Kind()] || types.IsComplex[typ.Kind()]
	}

	return ir.Any(n, func(n ir.Node) bool {
		// walk should have already moved any Init blocks off of
		// expressions.
		if len(n.Init()) != 0 {
			base.FatalfAt(n.Pos(), "mayCall %+v", n)
		}

		switch n.Op() {
		default:
			base.FatalfAt(n.Pos(), "mayCall %+v", n)

		case ir.OCALLFUNC, ir.OCALLINTER,
			ir.OUNSAFEADD, ir.OUNSAFESLICE:
			return true

		case ir.OINDEX, ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR,
			ir.ODEREF, ir.ODOTPTR, ir.ODOTTYPE, ir.ODYNAMICDOTTYPE, ir.ODIV, ir.OMOD, ir.OSLICE2ARRPTR:
			// These ops might panic; make sure they are done
			// before we start marshaling args for a call. See issue 16760.
			return true

		case ir.OANDAND, ir.OOROR:
			n := n.(*ir.LogicalExpr)
			// The RHS expression may have init statements that
			// should only execute conditionally, and so cannot be
			// pulled out to the top-level init list. We could try
			// to be more precise here.
			return len(n.Y.Init()) != 0

		// When using soft-float, these ops might be rewritten to function calls,
		// so we ensure they are evaluated first.
		case ir.OADD, ir.OSUB, ir.OMUL, ir.ONEG:
			return ssagen.Arch.SoftFloat && isSoftFloat(n.Type())
		case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
			n := n.(*ir.BinaryExpr)
			return ssagen.Arch.SoftFloat && isSoftFloat(n.X.Type())
		case ir.OCONV:
			n := n.(*ir.ConvExpr)
			return ssagen.Arch.SoftFloat && (isSoftFloat(n.Type()) || isSoftFloat(n.X.Type()))

		case ir.OLITERAL, ir.ONIL, ir.ONAME, ir.OLINKSYMOFFSET, ir.OMETHEXPR,
			ir.OAND, ir.OANDNOT, ir.OLSH, ir.OOR, ir.ORSH, ir.OXOR, ir.OCOMPLEX, ir.OEFACE,
			ir.OADDR, ir.OBITNOT, ir.ONOT, ir.OPLUS,
			ir.OCAP, ir.OIMAG, ir.OLEN, ir.OREAL,
			ir.OCONVNOP, ir.ODOT,
			ir.OCFUNC, ir.OIDATA, ir.OITAB, ir.OSPTR,
			ir.OBYTES2STRTMP, ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP, ir.OSLICEHEADER:
			// ok: operations that don't require function calls.
			// Expand as needed.
		}

		return false
	})
}
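
// mayCall's answer decides whether a call's arguments must first be
// saved to temporaries: if evaluating an argument can itself call (or
// panic), arguments already written to the outgoing stack slots could
// be clobbered before the call is made.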

// itabType loads the _type field from a runtime.itab struct.
func itabType(itab ir.Node) ir.Node {
	if itabTypeField == nil {
		// runtime.itab's _type field
		itabTypeField = runtimeField("_type", int64(types.PtrSize), types.NewPtr(types.Types[types.TUINT8]))
	}
	return boundedDotPtr(base.Pos, itab, itabTypeField)
}

var itabTypeField *types.Field
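
// The offset used above encodes runtime.itab's layout: its first field
// is the *interfacetype, so _type sits one pointer size into the
// struct. If the runtime's itab definition changes, this field must be
// updated to match.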

// boundedDotPtr returns a selector expression representing ptr.field
// and omits nil-pointer checks for ptr.
func boundedDotPtr(pos src.XPos, ptr ir.Node, field *types.Field) *ir.SelectorExpr {
	sel := ir.NewSelectorExpr(pos, ir.ODOTPTR, ptr, field.Sym)
	sel.Selection = field
	sel.SetType(field.Type)
	sel.SetTypecheck(1)
	sel.SetBounded(true) // guaranteed not to fault
	return sel
}

// runtimeField synthesizes a field of a runtime struct with the given
// name, offset, and type, for use with boundedDotPtr.
func runtimeField(name string, offset int64, typ *types.Type) *types.Field {
	f := types.NewField(src.NoXPos, ir.Pkgs.Runtime.Lookup(name), typ)
	f.Offset = offset
	return f
}

// ifaceData loads the data field from an interface.
// The concrete type must be known to have type t.
// It follows the pointer if !IsDirectIface(t).
func ifaceData(pos src.XPos, n ir.Node, t *types.Type) ir.Node {
	if t.IsInterface() {
		base.Fatalf("ifaceData interface: %v", t)
	}
	ptr := ir.NewUnaryExpr(pos, ir.OIDATA, n)
	if types.IsDirectIface(t) {
		ptr.SetType(t)
		ptr.SetTypecheck(1)
		return ptr
	}
	ptr.SetType(types.NewPtr(t))
	ptr.SetTypecheck(1)
	ind := ir.NewStarExpr(pos, ptr)
	ind.SetType(t)
	ind.SetTypecheck(1)
	ind.SetBounded(true)
	return ind
}
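
// For example, extracting a *T from an interface value yields the data
// word itself (pointer-shaped values are stored directly in the
// interface), while extracting an int or a small struct yields
// *(*T)(data), following the pointer held in the data word.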