// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

import (
	"cmd/compile/internal/ir"
	"cmd/compile/internal/types"
	"cmd/internal/src"
	"fmt"
	"math"
	"sort"
	"strings"
)

// A Value represents a value in the SSA representation of the program.
// The ID and Type fields must not be modified. The remainder may be modified
// if they preserve the value of the Value (e.g. changing a (mul 2 x) to an (add x x)).
type Value struct {
	// A unique identifier for the value. For performance we allocate these IDs
	// densely starting at 1.  There is no guarantee that there won't be occasional holes, though.
	ID ID

	// The operation that computes this value. See op.go.
	Op Op

	// The type of this value. Normally this will be a Go type, but there
	// are a few other pseudo-types, see ../types/type.go.
	Type *types.Type

	// Auxiliary info for this value. The type of this information depends on the opcode and type.
	// AuxInt is used for integer values, Aux is used for other values.
	// Floats are stored in AuxInt using math.Float64bits(f).
	// Unused portions of AuxInt are filled by sign-extending the used portion,
	// even if the represented value is unsigned.
	// Users of AuxInt which interpret AuxInt as unsigned (e.g. shifts) must be careful.
	// Use Value.AuxUnsigned to get the zero-extended value of AuxInt.
	AuxInt int64
	Aux    Aux

	// Arguments of this value
	Args []*Value

	// Containing basic block
	Block *Block

	// Source position
	Pos src.XPos

	// Use count. Each appearance in Value.Args and Block.Controls counts once.
	Uses int32

	// wasm: Value stays on the WebAssembly stack. This value will not get a "register" (WebAssembly variable)
	// nor a slot on the Go stack, and the generation of this value is delayed to its use time.
	OnWasmStack bool

	// Is this value in the per-function constant cache? If so, remove from cache before changing it or recycling it.
	InCache bool

	// Storage for the first three args
	argstorage [3]*Value
}

// Examples:
// Opcode          aux   args
//  OpAdd          nil      2
//  OpConst     string      0    string constant
//  OpConst      int64      0    int64 constant
//  OpAddcq      int64      1    amd64 op: v = arg[0] + constant

// short form print. Just v#.
func (v *Value) String() string {
	if v == nil {
		return "nil" // should never happen, but not panicking helps with debugging
	}
	return fmt.Sprintf("v%d", v.ID)
}

func (v *Value) AuxInt8() int8 {
	if opcodeTable[v.Op].auxType != auxInt8 && opcodeTable[v.Op].auxType != auxNameOffsetInt8 {
		v.Fatalf("op %s doesn't have an int8 aux field", v.Op)
	}
	return int8(v.AuxInt)
}

func (v *Value) AuxUInt8() uint8 {
	if opcodeTable[v.Op].auxType != auxUInt8 {
		v.Fatalf("op %s doesn't have a uint8 aux field", v.Op)
	}
	return uint8(v.AuxInt)
}

func (v *Value) AuxInt16() int16 {
	if opcodeTable[v.Op].auxType != auxInt16 {
		v.Fatalf("op %s doesn't have an int16 aux field", v.Op)
	}
	return int16(v.AuxInt)
}

func (v *Value) AuxInt32() int32 {
	if opcodeTable[v.Op].auxType != auxInt32 {
		v.Fatalf("op %s doesn't have an int32 aux field", v.Op)
	}
	return int32(v.AuxInt)
}

// AuxUnsigned returns v.AuxInt as an unsigned value for OpConst*.
// v.AuxInt is always sign-extended to 64 bits, even if the
// represented value is unsigned. This undoes that sign extension.
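// For example, an OpConst8 holding the bit pattern 0xFF is stored with
// AuxInt == -1 (sign-extended); AuxUnsigned returns 0xFF.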
func (v *Value) AuxUnsigned() uint64 {
	c := v.AuxInt
	switch v.Op {
	case OpConst64:
		return uint64(c)
	case OpConst32:
		return uint64(uint32(c))
	case OpConst16:
		return uint64(uint16(c))
	case OpConst8:
		return uint64(uint8(c))
	}
	v.Fatalf("op %s isn't OpConst*", v.Op)
	return 0
}

func (v *Value) AuxFloat() float64 {
	if opcodeTable[v.Op].auxType != auxFloat32 && opcodeTable[v.Op].auxType != auxFloat64 {
		v.Fatalf("op %s doesn't have a float aux field", v.Op)
	}
	return math.Float64frombits(uint64(v.AuxInt))
}

func (v *Value) AuxValAndOff() ValAndOff {
	if opcodeTable[v.Op].auxType != auxSymValAndOff {
		v.Fatalf("op %s doesn't have a ValAndOff aux field", v.Op)
	}
	return ValAndOff(v.AuxInt)
}

func (v *Value) AuxArm64BitField() arm64BitField {
	if opcodeTable[v.Op].auxType != auxARM64BitField {
		v.Fatalf("op %s doesn't have an ARM64BitField aux field", v.Op)
	}
	return arm64BitField(v.AuxInt)
}

// long form print.  v# = opcode <type> [aux] args [: reg] (names)
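// For example (illustrative output, not from an actual compilation):
//
//	v5 = ADDQconst <int> [1] v4 : AX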
func (v *Value) LongString() string {
	if v == nil {
		return "<NIL VALUE>"
	}
	s := fmt.Sprintf("v%d = %s", v.ID, v.Op)
	s += " <" + v.Type.String() + ">"
	s += v.auxString()
	for _, a := range v.Args {
		s += fmt.Sprintf(" %v", a)
	}
	if v.Block == nil {
		return s
	}
	r := v.Block.Func.RegAlloc
	if int(v.ID) < len(r) && r[v.ID] != nil {
		s += " : " + r[v.ID].String()
	}
	if reg := v.Block.Func.tempRegs[v.ID]; reg != nil {
		s += " tmp=" + reg.String()
	}
	var names []string
	for name, values := range v.Block.Func.NamedValues {
		for _, value := range values {
			if value == v {
				names = append(names, name.String())
				break // drop duplicates.
			}
		}
	}
	if len(names) != 0 {
		sort.Strings(names) // Otherwise a source of variation in debugging output.
		s += " (" + strings.Join(names, ", ") + ")"
	}
	return s
}

func (v *Value) auxString() string {
	switch opcodeTable[v.Op].auxType {
	case auxBool:
		if v.AuxInt == 0 {
			return " [false]"
		} else {
			return " [true]"
		}
	case auxInt8:
		return fmt.Sprintf(" [%d]", v.AuxInt8())
	case auxInt16:
		return fmt.Sprintf(" [%d]", v.AuxInt16())
	case auxInt32:
		return fmt.Sprintf(" [%d]", v.AuxInt32())
	case auxInt64, auxInt128:
		return fmt.Sprintf(" [%d]", v.AuxInt)
	case auxUInt8:
		return fmt.Sprintf(" [%d]", v.AuxUInt8())
	case auxARM64BitField:
		lsb := v.AuxArm64BitField().getARM64BFlsb()
		width := v.AuxArm64BitField().getARM64BFwidth()
		return fmt.Sprintf(" [lsb=%d,width=%d]", lsb, width)
	case auxFloat32, auxFloat64:
		return fmt.Sprintf(" [%g]", v.AuxFloat())
	case auxString:
		return fmt.Sprintf(" {%q}", v.Aux)
	case auxSym, auxCall, auxTyp:
		if v.Aux != nil {
			return fmt.Sprintf(" {%v}", v.Aux)
		}
		return ""
	case auxSymOff, auxCallOff, auxTypSize, auxNameOffsetInt8:
		s := ""
		if v.Aux != nil {
			s = fmt.Sprintf(" {%v}", v.Aux)
		}
		if v.AuxInt != 0 || opcodeTable[v.Op].auxType == auxNameOffsetInt8 {
			s += fmt.Sprintf(" [%v]", v.AuxInt)
		}
		return s
	case auxSymValAndOff:
		s := ""
		if v.Aux != nil {
			s = fmt.Sprintf(" {%v}", v.Aux)
		}
		return s + fmt.Sprintf(" [%s]", v.AuxValAndOff())
	case auxCCop:
		return fmt.Sprintf(" [%s]", Op(v.AuxInt))
	case auxS390XCCMask, auxS390XRotateParams:
		return fmt.Sprintf(" {%v}", v.Aux)
	case auxFlagConstant:
		return fmt.Sprintf(" [%s]", flagConstant(v.AuxInt))
	case auxNone:
		return ""
	default:
		// If you see this, add a case above instead.
		return fmt.Sprintf("[auxtype=%d AuxInt=%d Aux=%v]", opcodeTable[v.Op].auxType, v.AuxInt, v.Aux)
	}
}

// If/when midstack inlining is enabled (-l=4), the compiler gets both larger and slower.
// Not-inlining this method is a help (*Value.reset and *Block.NewValue0 are similar).
//
//go:noinline
func (v *Value) AddArg(w *Value) {
	if v.Args == nil {
		v.resetArgs() // use argstorage
	}
	v.Args = append(v.Args, w)
	w.Uses++
}

//go:noinline
func (v *Value) AddArg2(w1, w2 *Value) {
	if v.Args == nil {
		v.resetArgs() // use argstorage
	}
	v.Args = append(v.Args, w1, w2)
	w1.Uses++
	w2.Uses++
}

//go:noinline
func (v *Value) AddArg3(w1, w2, w3 *Value) {
	if v.Args == nil {
		v.resetArgs() // use argstorage
	}
	v.Args = append(v.Args, w1, w2, w3)
	w1.Uses++
	w2.Uses++
	w3.Uses++
}

//go:noinline
func (v *Value) AddArg4(w1, w2, w3, w4 *Value) {
	v.Args = append(v.Args, w1, w2, w3, w4)
	w1.Uses++
	w2.Uses++
	w3.Uses++
	w4.Uses++
}

//go:noinline
func (v *Value) AddArg5(w1, w2, w3, w4, w5 *Value) {
	v.Args = append(v.Args, w1, w2, w3, w4, w5)
	w1.Uses++
	w2.Uses++
	w3.Uses++
	w4.Uses++
	w5.Uses++
}

//go:noinline
func (v *Value) AddArg6(w1, w2, w3, w4, w5, w6 *Value) {
	v.Args = append(v.Args, w1, w2, w3, w4, w5, w6)
	w1.Uses++
	w2.Uses++
	w3.Uses++
	w4.Uses++
	w5.Uses++
	w6.Uses++
}

func (v *Value) AddArgs(a ...*Value) {
	if v.Args == nil {
		v.resetArgs() // use argstorage
	}
	v.Args = append(v.Args, a...)
	for _, x := range a {
		x.Uses++
	}
}

func (v *Value) SetArg(i int, w *Value) {
	v.Args[i].Uses--
	v.Args[i] = w
	w.Uses++
}

func (v *Value) SetArgs1(a *Value) {
	v.resetArgs()
	v.AddArg(a)
}

func (v *Value) SetArgs2(a, b *Value) {
	v.resetArgs()
	v.AddArg(a)
	v.AddArg(b)
}

func (v *Value) SetArgs3(a, b, c *Value) {
	v.resetArgs()
	v.AddArg(a)
	v.AddArg(b)
	v.AddArg(c)
}

func (v *Value) resetArgs() {
	for _, a := range v.Args {
		a.Uses--
	}
	v.argstorage[0] = nil
	v.argstorage[1] = nil
	v.argstorage[2] = nil
	v.Args = v.argstorage[:0]
}

// reset is called from most rewrite rules.
// Allowing it to be inlined increases the size
// of cmd/compile by almost 10%, and slows it down.
//
//go:noinline
func (v *Value) reset(op Op) {
	if v.InCache {
		v.Block.Func.unCache(v)
	}
	v.Op = op
	v.resetArgs()
	v.AuxInt = 0
	v.Aux = nil
}

// invalidateRecursively marks a value as invalid (unused)
// and after decrementing reference counts on its Args,
// also recursively invalidates any of those whose use
// count goes to zero.  It returns whether any of the
// invalidated values was marked with IsStmt.
//
// BEWARE of doing this *before* you've applied intended
// updates to SSA.
func (v *Value) invalidateRecursively() bool {
	lostStmt := v.Pos.IsStmt() == src.PosIsStmt
	if v.InCache {
		v.Block.Func.unCache(v)
	}
	v.Op = OpInvalid

	for _, a := range v.Args {
		a.Uses--
		if a.Uses == 0 {
			lost := a.invalidateRecursively()
			lostStmt = lost || lostStmt
		}
	}

	v.argstorage[0] = nil
	v.argstorage[1] = nil
	v.argstorage[2] = nil
	v.Args = v.argstorage[:0]

	v.AuxInt = 0
	v.Aux = nil
	return lostStmt
}

// copyOf is called from rewrite rules.
// It modifies v to be (Copy a).
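// For example, a rewrite rule that reduces v to an existing value x (such as
// one that removes an addition of zero) is implemented by calling v.copyOf(x).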
//
//go:noinline
func (v *Value) copyOf(a *Value) {
	if v == a {
		return
	}
	if v.InCache {
		v.Block.Func.unCache(v)
	}
	v.Op = OpCopy
	v.resetArgs()
	v.AddArg(a)
	v.AuxInt = 0
	v.Aux = nil
	v.Type = a.Type
}

// copyInto makes a new value identical to v and adds it to the end of b.
// Unlike copyIntoWithXPos, this does not check for v.Pos being a statement.
func (v *Value) copyInto(b *Block) *Value {
	c := b.NewValue0(v.Pos.WithNotStmt(), v.Op, v.Type) // Lose the position; this causes line number churn otherwise.
	c.Aux = v.Aux
	c.AuxInt = v.AuxInt
	c.AddArgs(v.Args...)
	for _, a := range v.Args {
		if a.Type.IsMemory() {
			v.Fatalf("can't move a value with a memory arg %s", v.LongString())
		}
	}
	return c
}

// copyIntoWithXPos makes a new value identical to v and adds it to the end of b.
// The supplied position is used as the position of the new value.
// Because this is used for rematerialization, it checks whether the (rematerialized)
// value v carried a statement mark, the supplied position (of the instruction using
// the rematerialized value) does not, and the two positions have the same file and
// line; if so, it preserves the statement mark on the new value's position.
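// For example, if v is at file.go:10 and marked as a statement, and pos is also
// file.go:10 but unmarked, the copy is created at file.go:10 with the statement
// mark preserved.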
func (v *Value) copyIntoWithXPos(b *Block, pos src.XPos) *Value {
	if v.Pos.IsStmt() == src.PosIsStmt && pos.IsStmt() != src.PosIsStmt && v.Pos.SameFileAndLine(pos) {
		pos = pos.WithIsStmt()
	}
	c := b.NewValue0(pos, v.Op, v.Type)
	c.Aux = v.Aux
	c.AuxInt = v.AuxInt
	c.AddArgs(v.Args...)
	for _, a := range v.Args {
		if a.Type.IsMemory() {
			v.Fatalf("can't move a value with a memory arg %s", v.LongString())
		}
	}
	return c
}

func (v *Value) Logf(msg string, args ...interface{}) { v.Block.Logf(msg, args...) }
func (v *Value) Log() bool                            { return v.Block.Log() }
func (v *Value) Fatalf(msg string, args ...interface{}) {
	v.Block.Func.fe.Fatalf(v.Pos, msg, args...)
}

// isGenericIntConst reports whether v is a generic integer constant.
func (v *Value) isGenericIntConst() bool {
	return v != nil && (v.Op == OpConst64 || v.Op == OpConst32 || v.Op == OpConst16 || v.Op == OpConst8)
}

// ResultReg returns the result register assigned to v, in cmd/internal/obj/$ARCH numbering.
// It is similar to Reg and Reg0, except that it is usable interchangeably for all Value Ops.
// If you know v.Op, using Reg or Reg0 (as appropriate) will be more efficient.
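// For example, if v's register allocation is a LocPair (an op with two result
// locations), ResultReg returns the register of the first element, like Reg0;
// otherwise it behaves like Reg.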
func (v *Value) ResultReg() int16 {
	reg := v.Block.Func.RegAlloc[v.ID]
	if reg == nil {
		v.Fatalf("nil reg for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	if pair, ok := reg.(LocPair); ok {
		reg = pair[0]
	}
	if reg == nil {
		v.Fatalf("nil reg0 for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.(*Register).objNum
}

// Reg returns the register assigned to v, in cmd/internal/obj/$ARCH numbering.
func (v *Value) Reg() int16 {
	reg := v.Block.Func.RegAlloc[v.ID]
	if reg == nil {
		v.Fatalf("nil register for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.(*Register).objNum
}

// Reg0 returns the register assigned to the first output of v, in cmd/internal/obj/$ARCH numbering.
func (v *Value) Reg0() int16 {
	reg := v.Block.Func.RegAlloc[v.ID].(LocPair)[0]
	if reg == nil {
		v.Fatalf("nil first register for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.(*Register).objNum
}

// Reg1 returns the register assigned to the second output of v, in cmd/internal/obj/$ARCH numbering.
func (v *Value) Reg1() int16 {
	reg := v.Block.Func.RegAlloc[v.ID].(LocPair)[1]
	if reg == nil {
		v.Fatalf("nil second register for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.(*Register).objNum
}

// RegTmp returns the temporary register assigned to v, in cmd/internal/obj/$ARCH numbering.
func (v *Value) RegTmp() int16 {
	reg := v.Block.Func.tempRegs[v.ID]
	if reg == nil {
		v.Fatalf("nil tmp register for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.objNum
}

func (v *Value) RegName() string {
	reg := v.Block.Func.RegAlloc[v.ID]
	if reg == nil {
		v.Fatalf("nil register for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.(*Register).name
}

// MemoryArg returns the memory argument for the Value.
// The returned value, if non-nil, will be memory-typed (or a tuple with a memory-typed second part).
// Otherwise, nil is returned.
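// For example, for an OpStore with arguments (ptr, val, mem), MemoryArg returns mem.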
func (v *Value) MemoryArg() *Value {
	if v.Op == OpPhi {
		v.Fatalf("MemoryArg on Phi")
	}
	na := len(v.Args)
	if na == 0 {
		return nil
	}
	if m := v.Args[na-1]; m.Type.IsMemory() {
		return m
	}
	return nil
}

// LackingPos indicates whether v is a value that is unlikely to have a correct
// position assigned to it.  Ignoring such values leads to more user-friendly positions
// assigned to nearby values and the blocks containing them.
func (v *Value) LackingPos() bool {
	// The exact definition of LackingPos is heuristic and may change in the
	// future, for example if some of these operations are generated more carefully
	// with respect to their source position.
	return v.Op == OpVarDef || v.Op == OpVarLive || v.Op == OpPhi ||
		(v.Op == OpFwdRef || v.Op == OpCopy) && v.Type == types.TypeMem
}

// removeable reports whether the value v can be removed from the SSA graph entirely
// if its use count drops to 0.
func (v *Value) removeable() bool {
	if v.Type.IsVoid() {
		// Void ops (inline marks) must stay.
		return false
	}
	if opcodeTable[v.Op].nilCheck {
		// Nil pointer checks must stay.
		return false
	}
	if v.Type.IsMemory() {
		// We don't need to preserve all memory ops, but we do need
		// to keep calls at least (because they might have
		// synchronization operations we can't see).
		return false
	}
	if v.Op.HasSideEffects() {
		// These are mostly synchronization operations.
		return false
	}
	return true
}

// AutoVar returns a *Name and int64 representing the auto variable and offset within it
// where v should be spilled.
func AutoVar(v *Value) (*ir.Name, int64) {
	if loc, ok := v.Block.Func.RegAlloc[v.ID].(LocalSlot); ok {
		if v.Type.Size() > loc.Type.Size() {
			v.Fatalf("spill/restore type %s doesn't fit in slot type %s", v.Type, loc.Type)
		}
		return loc.N, loc.Off
	}
	// Assume it is a register; return its spill slot, which needs to be live.
	nameOff := v.Aux.(*AuxNameOffset)
	return nameOff.Name, nameOff.Offset
}

// CanSSA reports whether values of type t can be represented as a Value.
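// For example, a slice header (three words) can be SSA'd, but a [2]int array
// cannot, because multi-element arrays are not SSA'd (dynamic indexing of SSA
// variables is not supported).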
func CanSSA(t *types.Type) bool {
	types.CalcSize(t)
	if t.Size() > int64(4*types.PtrSize) {
		// 4*PtrSize is an arbitrary constant. We want it
		// to be at least 3*PtrSize so slices can be registerized.
		// Too big and we'll introduce too much register pressure.
		return false
	}
	switch t.Kind() {
	case types.TARRAY:
		// We can't do larger arrays because dynamic indexing is
		// not supported on SSA variables.
		// TODO: allow if all indexes are constant.
		if t.NumElem() <= 1 {
			return CanSSA(t.Elem())
		}
		return false
	case types.TSTRUCT:
		if t.NumFields() > MaxStruct {
			return false
		}
		for _, t1 := range t.Fields() {
			if !CanSSA(t1.Type) {
				return false
			}
		}
		return true
	default:
		return true
	}
}