// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

import (
	"cmd/compile/internal/ir"
	"cmd/compile/internal/types"
	"cmd/internal/src"
	"fmt"
	"math"
	"sort"
	"strings"
)

// A Value represents a value in the SSA representation of the program.
// The ID and Type fields must not be modified. The remainder may be modified
// if they preserve the value of the Value (e.g. changing a (mul 2 x) to an (add x x)).
type Value struct {
	// A unique identifier for the value. For performance we allocate these IDs
	// densely starting at 1.  There is no guarantee that there won't be occasional holes, though.
	ID ID

	// The operation that computes this value. See op.go.
	Op Op

	// The type of this value. Normally this will be a Go type, but there
	// are a few other pseudo-types, see ../types/type.go.
	Type *types.Type

	// Auxiliary info for this value. The type of this information depends on the opcode and type.
	// AuxInt is used for integer values, Aux is used for other values.
	// Floats are stored in AuxInt using math.Float64bits(f).
	// Unused portions of AuxInt are filled by sign-extending the used portion,
	// even if the represented value is unsigned.
	// Users of AuxInt which interpret AuxInt as unsigned (e.g. shifts) must be careful.
	// Use Value.AuxUnsigned to get the zero-extended value of AuxInt.
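	// For example, the uint8 constant 0xFF for an OpConst8 is stored as AuxInt == -1;
	// AuxUnsigned recovers the zero-extended value 0xFF.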
	AuxInt int64
	Aux    Aux

	// Arguments of this value
	Args []*Value

	// Containing basic block
	Block *Block

	// Source position
	Pos src.XPos

	// Use count. Each appearance in Value.Args and Block.Controls counts once.
	Uses int32

	// wasm: Value stays on the WebAssembly stack. This value will not get a "register" (WebAssembly variable)
	// nor a slot on the Go stack, and the generation of this value is delayed to its use time.
	OnWasmStack bool

	// Is this value in the per-function constant cache? If so, remove from cache before changing it or recycling it.
	InCache bool

	// Storage for the first three args
	argstorage [3]*Value
}

// Examples:
// Opcode          aux   args
//  OpAdd          nil      2
//  OpConst     string      0    string constant
//  OpConst      int64      0    int64 constant
//  OpAddcq      int64      1    amd64 op: v = arg[0] + constant

// short form print. Just v#.
func (v *Value) String() string {
	if v == nil {
		return "nil" // should never happen, but not panicking helps with debugging
	}
	return fmt.Sprintf("v%d", v.ID)
}

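// AuxInt8 returns v.AuxInt as an int8.
// It calls Fatalf if v's op does not carry an int8 aux field.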
func (v *Value) AuxInt8() int8 {
	if opcodeTable[v.Op].auxType != auxInt8 && opcodeTable[v.Op].auxType != auxNameOffsetInt8 {
		v.Fatalf("op %s doesn't have an int8 aux field", v.Op)
	}
	return int8(v.AuxInt)
}

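// AuxInt16 returns v.AuxInt as an int16.
// It calls Fatalf if v's op does not carry an int16 aux field.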
func (v *Value) AuxInt16() int16 {
	if opcodeTable[v.Op].auxType != auxInt16 {
		v.Fatalf("op %s doesn't have an int16 aux field", v.Op)
	}
	return int16(v.AuxInt)
}

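// AuxInt32 returns v.AuxInt as an int32.
// It calls Fatalf if v's op does not carry an int32 aux field.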
func (v *Value) AuxInt32() int32 {
	if opcodeTable[v.Op].auxType != auxInt32 {
		v.Fatalf("op %s doesn't have an int32 aux field", v.Op)
	}
	return int32(v.AuxInt)
}

// AuxUnsigned returns v.AuxInt as an unsigned value for OpConst*.
// v.AuxInt is always sign-extended to 64 bits, even if the
// represented value is unsigned. This undoes that sign extension.
func (v *Value) AuxUnsigned() uint64 {
	c := v.AuxInt
	switch v.Op {
	case OpConst64:
		return uint64(c)
	case OpConst32:
		return uint64(uint32(c))
	case OpConst16:
		return uint64(uint16(c))
	case OpConst8:
		return uint64(uint8(c))
	}
	v.Fatalf("op %s isn't OpConst*", v.Op)
	return 0
}

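// AuxFloat returns v.AuxInt reinterpreted as a float64
// (floats are stored in AuxInt via math.Float64bits).
// It calls Fatalf if v's op does not carry a float aux field.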
func (v *Value) AuxFloat() float64 {
	if opcodeTable[v.Op].auxType != auxFloat32 && opcodeTable[v.Op].auxType != auxFloat64 {
		v.Fatalf("op %s doesn't have a float aux field", v.Op)
	}
	return math.Float64frombits(uint64(v.AuxInt))
}
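
// AuxValAndOff returns v.AuxInt as a ValAndOff.
// It calls Fatalf if v's op does not carry a ValAndOff aux field.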
func (v *Value) AuxValAndOff() ValAndOff {
	if opcodeTable[v.Op].auxType != auxSymValAndOff {
		v.Fatalf("op %s doesn't have a ValAndOff aux field", v.Op)
	}
	return ValAndOff(v.AuxInt)
}

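// AuxArm64BitField returns v.AuxInt as an arm64BitField.
// It calls Fatalf if v's op does not carry an ARM64 bitfield aux field.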
func (v *Value) AuxArm64BitField() arm64BitField {
	if opcodeTable[v.Op].auxType != auxARM64BitField {
		v.Fatalf("op %s doesn't have an ARM64 bitfield aux field", v.Op)
	}
	return arm64BitField(v.AuxInt)
}

// long form print.  v# = opcode <type> [aux] args [: reg] (names)
func (v *Value) LongString() string {
	if v == nil {
		return "<NIL VALUE>"
	}
	s := fmt.Sprintf("v%d = %s", v.ID, v.Op)
	s += " <" + v.Type.String() + ">"
	s += v.auxString()
	for _, a := range v.Args {
		s += fmt.Sprintf(" %v", a)
	}
	var r []Location
	if v.Block != nil {
		r = v.Block.Func.RegAlloc
	}
	if int(v.ID) < len(r) && r[v.ID] != nil {
		s += " : " + r[v.ID].String()
	}
	var names []string
	if v.Block != nil {
		for name, values := range v.Block.Func.NamedValues {
			for _, value := range values {
				if value == v {
					names = append(names, name.String())
					break // drop duplicates.
				}
			}
		}
	}
	if len(names) != 0 {
		sort.Strings(names) // Otherwise a source of variation in debugging output.
		s += " (" + strings.Join(names, ", ") + ")"
	}
	return s
}

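// auxString returns a printable representation of v's aux and AuxInt fields,
// formatted according to the op's aux type, for use in LongString.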
func (v *Value) auxString() string {
	switch opcodeTable[v.Op].auxType {
	case auxBool:
		if v.AuxInt == 0 {
			return " [false]"
		} else {
			return " [true]"
		}
	case auxInt8:
		return fmt.Sprintf(" [%d]", v.AuxInt8())
	case auxInt16:
		return fmt.Sprintf(" [%d]", v.AuxInt16())
	case auxInt32:
		return fmt.Sprintf(" [%d]", v.AuxInt32())
	case auxInt64, auxInt128:
		return fmt.Sprintf(" [%d]", v.AuxInt)
	case auxARM64BitField:
		lsb := v.AuxArm64BitField().getARM64BFlsb()
		width := v.AuxArm64BitField().getARM64BFwidth()
		return fmt.Sprintf(" [lsb=%d,width=%d]", lsb, width)
	case auxFloat32, auxFloat64:
		return fmt.Sprintf(" [%g]", v.AuxFloat())
	case auxString:
		return fmt.Sprintf(" {%q}", v.Aux)
	case auxSym, auxCall, auxTyp:
		if v.Aux != nil {
			return fmt.Sprintf(" {%v}", v.Aux)
		}
	case auxSymOff, auxCallOff, auxTypSize, auxNameOffsetInt8:
		s := ""
		if v.Aux != nil {
			s = fmt.Sprintf(" {%v}", v.Aux)
		}
		if v.AuxInt != 0 || opcodeTable[v.Op].auxType == auxNameOffsetInt8 {
			s += fmt.Sprintf(" [%v]", v.AuxInt)
		}
		return s
	case auxSymValAndOff:
		s := ""
		if v.Aux != nil {
			s = fmt.Sprintf(" {%v}", v.Aux)
		}
		return s + fmt.Sprintf(" [%s]", v.AuxValAndOff())
	case auxCCop:
		return fmt.Sprintf(" {%s}", Op(v.AuxInt))
	case auxS390XCCMask, auxS390XRotateParams:
		return fmt.Sprintf(" {%v}", v.Aux)
	case auxFlagConstant:
		return fmt.Sprintf(" [%s]", flagConstant(v.AuxInt))
	}
	return ""
}

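// AddArg appends w to v.Args and increments w's use count.
//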
// If/when midstack inlining is enabled (-l=4), the compiler gets both larger and slower.
// Not-inlining this method is a help (*Value.reset and *Block.NewValue0 are similar).
//go:noinline
func (v *Value) AddArg(w *Value) {
	if v.Args == nil {
		v.resetArgs() // use argstorage
	}
	v.Args = append(v.Args, w)
	w.Uses++
}

//go:noinline
func (v *Value) AddArg2(w1, w2 *Value) {
	if v.Args == nil {
		v.resetArgs() // use argstorage
	}
	v.Args = append(v.Args, w1, w2)
	w1.Uses++
	w2.Uses++
}

//go:noinline
func (v *Value) AddArg3(w1, w2, w3 *Value) {
	if v.Args == nil {
		v.resetArgs() // use argstorage
	}
	v.Args = append(v.Args, w1, w2, w3)
	w1.Uses++
	w2.Uses++
	w3.Uses++
}

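// Unlike AddArg through AddArg3, the variants below do not seed v.Args from
// argstorage: with four or more arguments the three-element argstorage cannot
// avoid an allocation anyway.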
//go:noinline
func (v *Value) AddArg4(w1, w2, w3, w4 *Value) {
	v.Args = append(v.Args, w1, w2, w3, w4)
	w1.Uses++
	w2.Uses++
	w3.Uses++
	w4.Uses++
}

//go:noinline
func (v *Value) AddArg5(w1, w2, w3, w4, w5 *Value) {
	v.Args = append(v.Args, w1, w2, w3, w4, w5)
	w1.Uses++
	w2.Uses++
	w3.Uses++
	w4.Uses++
	w5.Uses++
}

//go:noinline
func (v *Value) AddArg6(w1, w2, w3, w4, w5, w6 *Value) {
	v.Args = append(v.Args, w1, w2, w3, w4, w5, w6)
	w1.Uses++
	w2.Uses++
	w3.Uses++
	w4.Uses++
	w5.Uses++
	w6.Uses++
}

func (v *Value) AddArgs(a ...*Value) {
	if v.Args == nil {
		v.resetArgs() // use argstorage
	}
	v.Args = append(v.Args, a...)
	for _, x := range a {
		x.Uses++
	}
}
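
// SetArg replaces v's i'th argument with w, adjusting use counts on both the
// old and the new argument.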
func (v *Value) SetArg(i int, w *Value) {
	v.Args[i].Uses--
	v.Args[i] = w
	w.Uses++
}

func (v *Value) SetArgs1(a *Value) {
	v.resetArgs()
	v.AddArg(a)
}

func (v *Value) SetArgs2(a, b *Value) {
	v.resetArgs()
	v.AddArg(a)
	v.AddArg(b)
}

func (v *Value) SetArgs3(a, b, c *Value) {
	v.resetArgs()
	v.AddArg(a)
	v.AddArg(b)
	v.AddArg(c)
}

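// resetArgs drops all of v's arguments, decrementing their use counts,
// and points v.Args back at v's inline argstorage.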
func (v *Value) resetArgs() {
	for _, a := range v.Args {
		a.Uses--
	}
	v.argstorage[0] = nil
	v.argstorage[1] = nil
	v.argstorage[2] = nil
	v.Args = v.argstorage[:0]
}

// reset is called from most rewrite rules.
// Allowing it to be inlined increases the size
// of cmd/compile by almost 10%, and slows it down.
//go:noinline
func (v *Value) reset(op Op) {
	if v.InCache {
		v.Block.Func.unCache(v)
	}
	v.Op = op
	v.resetArgs()
	v.AuxInt = 0
	v.Aux = nil
}

// invalidateRecursively marks a value as invalid (unused)
// and after decrementing reference counts on its Args,
// also recursively invalidates any of those whose use
// count goes to zero.  It returns whether any of the
// invalidated values was marked with IsStmt.
//
// BEWARE of doing this *before* you've applied intended
// updates to SSA.
func (v *Value) invalidateRecursively() bool {
	lostStmt := v.Pos.IsStmt() == src.PosIsStmt
	if v.InCache {
		v.Block.Func.unCache(v)
	}
	v.Op = OpInvalid

	for _, a := range v.Args {
		a.Uses--
		if a.Uses == 0 {
			lost := a.invalidateRecursively()
			lostStmt = lost || lostStmt
		}
	}

	v.argstorage[0] = nil
	v.argstorage[1] = nil
	v.argstorage[2] = nil
	v.Args = v.argstorage[:0]

	v.AuxInt = 0
	v.Aux = nil
	return lostStmt
}

// copyOf is called from rewrite rules.
// It modifies v to be (Copy a).
//go:noinline
func (v *Value) copyOf(a *Value) {
	if v == a {
		return
	}
	if v.InCache {
		v.Block.Func.unCache(v)
	}
	v.Op = OpCopy
	v.resetArgs()
	v.AddArg(a)
	v.AuxInt = 0
	v.Aux = nil
	v.Type = a.Type
}

// copyInto makes a new value identical to v and adds it to the end of b.
// Unlike copyIntoWithXPos, this does not check for v.Pos being a statement.
func (v *Value) copyInto(b *Block) *Value {
	c := b.NewValue0(v.Pos.WithNotStmt(), v.Op, v.Type) // Lose the position; otherwise it causes line number churn.
	c.Aux = v.Aux
	c.AuxInt = v.AuxInt
	c.AddArgs(v.Args...)
	for _, a := range v.Args {
		if a.Type.IsMemory() {
			v.Fatalf("can't move a value with a memory arg %s", v.LongString())
		}
	}
	return c
}

// copyIntoWithXPos makes a new value identical to v and adds it to the end of b.
// The supplied position is used as the position of the new value.
// Because this is used for rematerialization, check whether the rematerialized
// value v carries a statement mark that the supplied position (of the instruction
// using the rematerialized value) lacks; if so, and the two positions refer to the
// same file and line, preserve the mark on the new value.
func (v *Value) copyIntoWithXPos(b *Block, pos src.XPos) *Value {
	if v.Pos.IsStmt() == src.PosIsStmt && pos.IsStmt() != src.PosIsStmt && v.Pos.SameFileAndLine(pos) {
		pos = pos.WithIsStmt()
	}
	c := b.NewValue0(pos, v.Op, v.Type)
	c.Aux = v.Aux
	c.AuxInt = v.AuxInt
	c.AddArgs(v.Args...)
	for _, a := range v.Args {
		if a.Type.IsMemory() {
			v.Fatalf("can't move a value with a memory arg %s", v.LongString())
		}
	}
	return c
}

func (v *Value) Logf(msg string, args ...interface{}) { v.Block.Logf(msg, args...) }
func (v *Value) Log() bool                            { return v.Block.Log() }
func (v *Value) Fatalf(msg string, args ...interface{}) {
	v.Block.Func.fe.Fatalf(v.Pos, msg, args...)
}

// isGenericIntConst reports whether v is a generic integer constant.
func (v *Value) isGenericIntConst() bool {
	return v != nil && (v.Op == OpConst64 || v.Op == OpConst32 || v.Op == OpConst16 || v.Op == OpConst8)
}

// ResultReg returns the result register assigned to v, in cmd/internal/obj/$ARCH numbering.
// It is similar to Reg and Reg0, except that it is usable interchangeably for all Value Ops.
// If you know v.Op, using Reg or Reg0 (as appropriate) will be more efficient.
func (v *Value) ResultReg() int16 {
	reg := v.Block.Func.RegAlloc[v.ID]
	if reg == nil {
		v.Fatalf("nil reg for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	if pair, ok := reg.(LocPair); ok {
		reg = pair[0]
	}
	if reg == nil {
		v.Fatalf("nil reg0 for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.(*Register).objNum
}

// Reg returns the register assigned to v, in cmd/internal/obj/$ARCH numbering.
func (v *Value) Reg() int16 {
	reg := v.Block.Func.RegAlloc[v.ID]
	if reg == nil {
		v.Fatalf("nil register for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.(*Register).objNum
}

// Reg0 returns the register assigned to the first output of v, in cmd/internal/obj/$ARCH numbering.
func (v *Value) Reg0() int16 {
	reg := v.Block.Func.RegAlloc[v.ID].(LocPair)[0]
	if reg == nil {
		v.Fatalf("nil first register for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.(*Register).objNum
}

// Reg1 returns the register assigned to the second output of v, in cmd/internal/obj/$ARCH numbering.
func (v *Value) Reg1() int16 {
	reg := v.Block.Func.RegAlloc[v.ID].(LocPair)[1]
	if reg == nil {
		v.Fatalf("nil second register for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.(*Register).objNum
}

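// RegName returns the name of the register assigned to v.
// It calls Fatalf if no register has been assigned.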
func (v *Value) RegName() string {
	reg := v.Block.Func.RegAlloc[v.ID]
	if reg == nil {
		v.Fatalf("nil register for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.(*Register).name
}

// MemoryArg returns the memory argument for the Value.
// The returned value, if non-nil, will be memory-typed (or a tuple with a memory-typed second part).
// Otherwise, nil is returned.
func (v *Value) MemoryArg() *Value {
	if v.Op == OpPhi {
		v.Fatalf("MemoryArg on Phi")
	}
	na := len(v.Args)
	if na == 0 {
		return nil
	}
	if m := v.Args[na-1]; m.Type.IsMemory() {
		return m
	}
	return nil
}

// LackingPos indicates whether v is a value that is unlikely to have a correct
// position assigned to it.  Ignoring such values leads to more user-friendly positions
// assigned to nearby values and the blocks containing them.
func (v *Value) LackingPos() bool {
	// The exact definition of LackingPos is heuristic and may change in the
	// future, for example if some of these operations are generated more
	// carefully with respect to their source position.
	return v.Op == OpVarDef || v.Op == OpVarKill || v.Op == OpVarLive || v.Op == OpPhi ||
		(v.Op == OpFwdRef || v.Op == OpCopy) && v.Type == types.TypeMem
}

// removeable reports whether the value v can be removed from the SSA graph entirely
// if its use count drops to 0.
func (v *Value) removeable() bool {
	if v.Type.IsVoid() {
		// Void ops, like nil pointer checks, must stay.
		return false
	}
	if v.Type.IsMemory() {
		// We don't need to preserve all memory ops, but we do need
		// to keep calls at least (because they might have
		// synchronization operations we can't see).
		return false
	}
	if v.Op.HasSideEffects() {
		// These are mostly synchronization operations.
		return false
	}
	return true
}

// TODO(mdempsky): Shouldn't be necessary; see discussion at golang.org/cl/275756
func (*Value) CanBeAnSSAAux() {}

// AutoVar returns a *Name and int64 representing the auto variable and offset within it
// where v should be spilled.
func AutoVar(v *Value) (*ir.Name, int64) {
	if loc, ok := v.Block.Func.RegAlloc[v.ID].(LocalSlot); ok {
		if v.Type.Size() > loc.Type.Size() {
			v.Fatalf("spill/restore type %s doesn't fit in slot type %s", v.Type, loc.Type)
		}
		return loc.N, loc.Off
	}
	// Assume it is a register; return its spill slot, which needs to be live.
	nameOff := v.Aux.(*AuxNameOffset)
	return nameOff.Name, nameOff.Offset
}