1// Copyright 2009 The Go Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style
3// license that can be found in the LICENSE file.
4
5package walk
6
7import (
8	"fmt"
9	"go/constant"
10	"internal/abi"
11	"internal/buildcfg"
12	"strings"
13
14	"cmd/compile/internal/base"
15	"cmd/compile/internal/ir"
16	"cmd/compile/internal/objw"
17	"cmd/compile/internal/reflectdata"
18	"cmd/compile/internal/rttype"
19	"cmd/compile/internal/staticdata"
20	"cmd/compile/internal/typecheck"
21	"cmd/compile/internal/types"
22	"cmd/internal/obj"
23	"cmd/internal/objabi"
24)
25
// The result of walkExpr MUST be assigned back to n, e.g.
//
//	n.Left = walkExpr(n.Left, init)
//
// walkExpr lowers the expression n for the back end: it hoists n's init
// list into init, dispatches to walkExpr1 for the op-specific rewrite,
// and then eagerly computes type sizes and emits string symbols so the
// back end does not have to do so concurrently.
func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
	if n == nil {
		return n
	}

	if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {
		// not okay to use n->ninit when walking n,
		// because we might replace n with some other node
		// and would lose the init list.
		base.Fatalf("walkExpr init == &n->ninit")
	}

	// Hoist n's init statements into the caller's init list before
	// walking n itself, since n may be replaced below.
	if len(n.Init()) != 0 {
		walkStmtList(n.Init())
		init.Append(ir.TakeInit(n)...)
	}

	lno := ir.SetPos(n)

	if base.Flag.LowerW > 1 {
		ir.Dump("before walk expr", n)
	}

	// Walking assumes a fully typechecked, typed expression.
	if n.Typecheck() != 1 {
		base.Fatalf("missed typecheck: %+v", n)
	}

	if n.Type().IsUntyped() {
		base.Fatalf("expression has untyped type: %+v", n)
	}

	n = walkExpr1(n, init)

	// Eagerly compute sizes of all expressions for the back end.
	if typ := n.Type(); typ != nil && typ.Kind() != types.TBLANK && !typ.IsFuncArgStruct() {
		types.CheckSize(typ)
	}
	if n, ok := n.(*ir.Name); ok && n.Heapaddr != nil {
		types.CheckSize(n.Heapaddr.Type())
	}
	if ir.IsConst(n, constant.String) {
		// Emit string symbol now to avoid emitting
		// any concurrently during the backend.
		_ = staticdata.StringSym(n.Pos(), constant.StringVal(n.Val()))
	}

	if base.Flag.LowerW != 0 && n != nil {
		ir.Dump("after walk expr", n)
	}

	// Restore the position saved by ir.SetPos above.
	base.Pos = lno
	return n
}
82
// walkExpr1 performs the op-specific rewrite of n, dispatching to the
// per-operation walk helpers. Every case must return (or panic); see
// the comment at the end of the switch.
func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
	switch n.Op() {
	default:
		ir.Dump("walk", n)
		base.Fatalf("walkExpr: switch 1 unknown op %+v", n.Op())
		panic("unreachable")

	case ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP:
		return n

	case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
		// TODO(mdempsky): Just return n; see discussion on CL 38655.
		// Perhaps refactor to use Node.mayBeShared for these instead.
		// If these return early, make sure to still call
		// StringSym for constant strings.
		return n

	case ir.OMETHEXPR:
		// TODO(mdempsky): Do this right after type checking.
		n := n.(*ir.SelectorExpr)
		return n.FuncName()

	case ir.OMIN, ir.OMAX:
		n := n.(*ir.CallExpr)
		return walkMinMax(n, init)

	case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
		n := n.(*ir.UnaryExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODOTMETH, ir.ODOTINTER:
		n := n.(*ir.SelectorExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.OADDR:
		n := n.(*ir.AddrExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.OMAKEFACE, ir.OAND, ir.OANDNOT, ir.OSUB, ir.OMUL, ir.OADD, ir.OOR, ir.OXOR, ir.OLSH, ir.ORSH,
		ir.OUNSAFEADD:
		n := n.(*ir.BinaryExpr)
		n.X = walkExpr(n.X, init)
		n.Y = walkExpr(n.Y, init)
		return n

	case ir.OUNSAFESLICE:
		n := n.(*ir.BinaryExpr)
		return walkUnsafeSlice(n, init)

	case ir.OUNSAFESTRING:
		n := n.(*ir.BinaryExpr)
		return walkUnsafeString(n, init)

	case ir.OUNSAFESTRINGDATA, ir.OUNSAFESLICEDATA:
		n := n.(*ir.UnaryExpr)
		return walkUnsafeData(n, init)

	case ir.ODOT, ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		return walkDot(n, init)

	case ir.ODOTTYPE, ir.ODOTTYPE2:
		n := n.(*ir.TypeAssertExpr)
		return walkDotType(n, init)

	case ir.ODYNAMICDOTTYPE, ir.ODYNAMICDOTTYPE2:
		n := n.(*ir.DynamicTypeAssertExpr)
		return walkDynamicDotType(n, init)

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		return walkLenCap(n, init)

	case ir.OCOMPLEX:
		n := n.(*ir.BinaryExpr)
		n.X = walkExpr(n.X, init)
		n.Y = walkExpr(n.Y, init)
		return n

	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
		n := n.(*ir.BinaryExpr)
		return walkCompare(n, init)

	case ir.OANDAND, ir.OOROR:
		n := n.(*ir.LogicalExpr)
		return walkLogical(n, init)

	case ir.OPRINT, ir.OPRINTLN:
		return walkPrint(n.(*ir.CallExpr), init)

	case ir.OPANIC:
		n := n.(*ir.UnaryExpr)
		return mkcall("gopanic", nil, init, n.X)

	case ir.ORECOVERFP:
		return walkRecoverFP(n.(*ir.CallExpr), init)

	case ir.OCFUNC:
		return n

	case ir.OCALLINTER, ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		return walkCall(n, init)

	case ir.OAS, ir.OASOP:
		return walkAssign(init, n)

	case ir.OAS2:
		n := n.(*ir.AssignListStmt)
		return walkAssignList(init, n)

	// a,b,... = fn()
	case ir.OAS2FUNC:
		n := n.(*ir.AssignListStmt)
		return walkAssignFunc(init, n)

	// x, y = <-c
	// order.stmt made sure x is addressable or blank.
	case ir.OAS2RECV:
		n := n.(*ir.AssignListStmt)
		return walkAssignRecv(init, n)

	// a,b = m[i]
	case ir.OAS2MAPR:
		n := n.(*ir.AssignListStmt)
		return walkAssignMapRead(init, n)

	case ir.ODELETE:
		n := n.(*ir.CallExpr)
		return walkDelete(init, n)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		return walkAssignDotType(n, init)

	case ir.OCONVIFACE:
		n := n.(*ir.ConvExpr)
		return walkConvInterface(n, init)

	case ir.OCONV, ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		return walkConv(n, init)

	case ir.OSLICE2ARR:
		n := n.(*ir.ConvExpr)
		return walkSliceToArray(n, init)

	case ir.OSLICE2ARRPTR:
		n := n.(*ir.ConvExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODIV, ir.OMOD:
		n := n.(*ir.BinaryExpr)
		return walkDivMod(n, init)

	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		return walkIndex(n, init)

	case ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		return walkIndexMap(n, init)

	case ir.ORECV:
		base.Fatalf("walkExpr ORECV") // should see inside OAS only
		panic("unreachable")

	case ir.OSLICEHEADER:
		n := n.(*ir.SliceHeaderExpr)
		return walkSliceHeader(n, init)

	case ir.OSTRINGHEADER:
		n := n.(*ir.StringHeaderExpr)
		return walkStringHeader(n, init)

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		return walkSlice(n, init)

	case ir.ONEW:
		n := n.(*ir.UnaryExpr)
		return walkNew(n, init)

	case ir.OADDSTR:
		return walkAddString(n.(*ir.AddStringExpr), init)

	case ir.OAPPEND:
		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
		base.Fatalf("append outside assignment")
		panic("unreachable")

	case ir.OCOPY:
		return walkCopy(n.(*ir.BinaryExpr), init, base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime)

	case ir.OCLEAR:
		n := n.(*ir.UnaryExpr)
		return walkClear(n)

	case ir.OCLOSE:
		n := n.(*ir.UnaryExpr)
		return walkClose(n, init)

	case ir.OMAKECHAN:
		n := n.(*ir.MakeExpr)
		return walkMakeChan(n, init)

	case ir.OMAKEMAP:
		n := n.(*ir.MakeExpr)
		return walkMakeMap(n, init)

	case ir.OMAKESLICE:
		n := n.(*ir.MakeExpr)
		return walkMakeSlice(n, init)

	case ir.OMAKESLICECOPY:
		n := n.(*ir.MakeExpr)
		return walkMakeSliceCopy(n, init)

	case ir.ORUNESTR:
		n := n.(*ir.ConvExpr)
		return walkRuneToString(n, init)

	case ir.OBYTES2STR, ir.ORUNES2STR:
		n := n.(*ir.ConvExpr)
		return walkBytesRunesToString(n, init)

	case ir.OBYTES2STRTMP:
		n := n.(*ir.ConvExpr)
		return walkBytesToStringTemp(n, init)

	case ir.OSTR2BYTES:
		n := n.(*ir.ConvExpr)
		return walkStringToBytes(n, init)

	case ir.OSTR2BYTESTMP:
		n := n.(*ir.ConvExpr)
		return walkStringToBytesTemp(n, init)

	case ir.OSTR2RUNES:
		n := n.(*ir.ConvExpr)
		return walkStringToRunes(n, init)

	case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
		return walkCompLit(n, init)

	case ir.OSEND:
		n := n.(*ir.SendStmt)
		return walkSend(n, init)

	case ir.OCLOSURE:
		return walkClosure(n.(*ir.ClosureExpr), init)

	case ir.OMETHVALUE:
		return walkMethodValue(n.(*ir.SelectorExpr), init)
	}

	// No return! Each case must return (or panic),
	// to avoid confusion about what gets returned
	// in the presence of type assertions.
}
352
353// walk the whole tree of the body of an
354// expression or simple statement.
355// the types expressions are calculated.
356// compile-time constants are evaluated.
357// complex side effects like statements are appended to init.
358func walkExprList(s []ir.Node, init *ir.Nodes) {
359	for i := range s {
360		s[i] = walkExpr(s[i], init)
361	}
362}
363
364func walkExprListCheap(s []ir.Node, init *ir.Nodes) {
365	for i, n := range s {
366		s[i] = cheapExpr(n, init)
367		s[i] = walkExpr(s[i], init)
368	}
369}
370
371func walkExprListSafe(s []ir.Node, init *ir.Nodes) {
372	for i, n := range s {
373		s[i] = safeExpr(n, init)
374		s[i] = walkExpr(s[i], init)
375	}
376}
377
378// return side-effect free and cheap n, appending side effects to init.
379// result may not be assignable.
380func cheapExpr(n ir.Node, init *ir.Nodes) ir.Node {
381	switch n.Op() {
382	case ir.ONAME, ir.OLITERAL, ir.ONIL:
383		return n
384	}
385
386	return copyExpr(n, n.Type(), init)
387}
388
// safeExpr returns a side effect-free version of n, appending any side
// effects to init. Unlike cheapExpr, the result is assignable if n is:
// addressable expressions are rebuilt around safe sub-expressions rather
// than copied into a temporary.
func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
	if n == nil {
		return nil
	}

	// Hoist n's init list before examining n itself.
	if len(n.Init()) != 0 {
		walkStmtList(n.Init())
		init.Append(ir.TakeInit(n)...)
	}

	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
		// Leaves have no side effects.
		return n

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			// Operand was already safe; keep the original node.
			return n
		}
		// Rebuild the expression around the safe operand.
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.ODOT, ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.StarExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.OINDEX, ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		l := safeExpr(n.X, init)
		r := safeExpr(n.Index, init)
		if l == n.X && r == n.Index {
			return n
		}
		a := ir.Copy(n).(*ir.IndexExpr)
		a.X = l
		a.Index = r
		return walkExpr(typecheck.Expr(a), init)

	case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		if isStaticCompositeLiteral(n) {
			// Fully static literals are effect-free as-is.
			return n
		}
	}

	// make a copy; must not be used as an lvalue
	if ir.IsAddressable(n) {
		base.Fatalf("missing lvalue case in safeExpr: %v", n)
	}
	return cheapExpr(n, init)
}
460
461func copyExpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
462	l := typecheck.TempAt(base.Pos, ir.CurFunc, t)
463	appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, n))
464	return l
465}
466
// walkAddString lowers an OADDSTR (string concatenation) node into a
// call to one of the runtime concatstring helpers, optionally passing a
// stack buffer for the result when it does not escape.
func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
	c := len(n.List)

	if c < 2 {
		base.Fatalf("walkAddString count %d too small", c)
	}

	// buf is nil unless we can prove a stack buffer is safe and useful.
	buf := typecheck.NodNil()
	if n.Esc() == ir.EscNone {
		// Sum the sizes of the constant operands to estimate the result.
		sz := int64(0)
		for _, n1 := range n.List {
			if n1.Op() == ir.OLITERAL {
				sz += int64(len(ir.StringVal(n1)))
			}
		}

		// Don't allocate the buffer if the result won't fit.
		if sz < tmpstringbufsize {
			// Create temporary buffer for result string on stack.
			buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
		}
	}

	// build list of string arguments
	args := []ir.Node{buf}
	for _, n2 := range n.List {
		args = append(args, typecheck.Conv(n2, types.Types[types.TSTRING]))
	}

	var fn string
	if c <= 5 {
		// small numbers of strings use direct runtime helpers.
		// note: order.expr knows this cutoff too.
		fn = fmt.Sprintf("concatstring%d", c)
	} else {
		// large numbers of strings are passed to the runtime as a slice.
		fn = "concatstrings"

		t := types.NewSlice(types.Types[types.TSTRING])
		// args[1:] to skip buf arg
		slice := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, t, args[1:])
		slice.Prealloc = n.Prealloc
		args = []ir.Node{buf, slice}
		slice.SetEsc(ir.EscNone)
	}

	// Build, typecheck, and walk the runtime call, then restore the
	// original (possibly named) string type on the result.
	cat := typecheck.LookupRuntime(fn)
	r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
	r.Args = args
	r1 := typecheck.Expr(r)
	r1 = walkExpr(r1, init)
	r1.SetType(n.Type())

	return r1
}
522
// hookInfo describes a libfuzzer instrumentation hook for a known
// library function.
type hookInfo struct {
	paramType   types.Kind // kind the hooked arguments are converted to
	argsNum     int        // expected number of call arguments
	runtimeFunc string     // runtime helper invoked with the arguments
}

// hooks maps fully qualified function names to the libfuzzer hook that
// should observe calls to them (see walkCall1).
var hooks = map[string]hookInfo{
	"strings.EqualFold": {paramType: types.TSTRING, argsNum: 2, runtimeFunc: "libfuzzerHookEqualFold"},
}
532
// walkCall walks an OCALLFUNC or OCALLINTER node. Besides walking the
// call itself (via walkCall1), it records reflect method uses, marks
// used interface methods, rewrites direct closure calls, and expands the
// internal/abi FuncPC intrinsics.
func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	if n.Op() == ir.OCALLMETH {
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
	}
	if n.Op() == ir.OCALLINTER || n.Fun.Op() == ir.OMETHEXPR {
		// We expect both interface call reflect.Type.Method and concrete
		// call reflect.(*rtype).Method.
		usemethod(n)
	}
	if n.Op() == ir.OCALLINTER {
		reflectdata.MarkUsedIfaceMethod(n)
	}

	if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.OCLOSURE {
		// Call of an immediately-invoked closure: call the underlying
		// function directly.
		directClosureCall(n)
	}

	if ir.IsFuncPCIntrinsic(n) {
		// For internal/abi.FuncPCABIxxx(fn), if fn is a defined function, rewrite
		// it to the address of the function of the ABI fn is defined.
		name := n.Fun.(*ir.Name).Sym().Name
		arg := n.Args[0]
		var wantABI obj.ABI
		switch name {
		case "FuncPCABI0":
			wantABI = obj.ABI0
		case "FuncPCABIInternal":
			wantABI = obj.ABIInternal
		}
		if n.Type() != types.Types[types.TUINTPTR] {
			base.FatalfAt(n.Pos(), "FuncPC intrinsic should return uintptr, got %v", n.Type()) // as expected by typecheck.FuncPC.
		}
		n := ir.FuncPC(n.Pos(), arg, wantABI)
		return walkExpr(n, init)
	}

	if name, ok := n.Fun.(*ir.Name); ok {
		sym := name.Sym()
		if sym.Pkg.Path == "go.runtime" && sym.Name == "deferrangefunc" {
			// Call to runtime.deferrangefunc is being shared with a range-over-func
			// body that might add defers to this frame, so we cannot use open-coded defers
			// and we need to call deferreturn even if we don't see any other explicit defers.
			ir.CurFunc.SetHasDefer(true)
			ir.CurFunc.SetOpenCodedDeferDisallowed(true)
		}
	}

	walkCall1(n, init)
	return n
}
584
// walkCall1 walks the function and argument expressions of call n,
// spilling into temporaries any argument whose evaluation might itself
// call a function, and appends libfuzzer hook calls when enabled.
// It is idempotent: a call that has already been walked is left alone.
func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
	if n.Walked() {
		return // already walked
	}
	n.SetWalked(true)

	if n.Op() == ir.OCALLMETH {
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
	}

	args := n.Args
	params := n.Fun.Type().Params()

	n.Fun = walkExpr(n.Fun, init)
	walkExprList(args, init)

	for i, arg := range args {
		// Validate argument and parameter types match.
		param := params[i]
		if !types.Identical(arg.Type(), param.Type) {
			base.FatalfAt(n.Pos(), "assigning %L to parameter %v (type %v)", arg, param.Sym, param.Type)
		}

		// For any argument whose evaluation might require a function call,
		// store that argument into a temporary variable,
		// to prevent that calls from clobbering arguments already on the stack.
		if mayCall(arg) {
			// assignment of arg to Temp
			tmp := typecheck.TempAt(base.Pos, ir.CurFunc, param.Type)
			init.Append(convas(typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, arg)).(*ir.AssignStmt), init))
			// replace arg with temp
			args[i] = tmp
		}
	}

	// With -d=libfuzzer, insert a hook call observing the arguments of
	// recognized library functions (see the hooks map).
	funSym := n.Fun.Sym()
	if base.Debug.Libfuzzer != 0 && funSym != nil {
		if hook, found := hooks[funSym.Pkg.Path+"."+funSym.Name]; found {
			if len(args) != hook.argsNum {
				panic(fmt.Sprintf("%s.%s expects %d arguments, but received %d", funSym.Pkg.Path, funSym.Name, hook.argsNum, len(args)))
			}
			var hookArgs []ir.Node
			for _, arg := range args {
				hookArgs = append(hookArgs, tracecmpArg(arg, types.Types[hook.paramType], init))
			}
			// The hook also receives a fake PC identifying this call site.
			hookArgs = append(hookArgs, fakePC(n))
			init.Append(mkcall(hook.runtimeFunc, nil, init, hookArgs...))
		}
	}
}
635
// walkDivMod walks an ODIV or OMOD node, rewriting complex division and
// (on 32-bit architectures) 64-bit integer div/mod into runtime calls.
func walkDivMod(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Y = walkExpr(n.Y, init)

	// rewrite complex div into function call.
	et := n.X.Type().Kind()

	if types.IsComplex[et] && n.Op() == ir.ODIV {
		t := n.Type()
		call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, typecheck.Conv(n.X, types.Types[types.TCOMPLEX128]), typecheck.Conv(n.Y, types.Types[types.TCOMPLEX128]))
		// Convert the complex128 result back to the original type.
		return typecheck.Conv(call, t)
	}

	// Nothing to do for float divisions.
	if types.IsFloat[et] {
		return n
	}

	// rewrite 64-bit div and mod on 32-bit architectures.
	// TODO: Remove this code once we can introduce
	// runtime calls late in SSA processing.
	if types.RegSize < 8 && (et == types.TINT64 || et == types.TUINT64) {
		if n.Y.Op() == ir.OLITERAL {
			// Leave div/mod by constant powers of 2 or small 16-bit constants.
			// The SSA backend will handle those.
			switch et {
			case types.TINT64:
				c := ir.Int64Val(n.Y)
				if c < 0 {
					c = -c
				}
				if c != 0 && c&(c-1) == 0 {
					return n
				}
			case types.TUINT64:
				c := ir.Uint64Val(n.Y)
				if c < 1<<16 {
					return n
				}
				if c != 0 && c&(c-1) == 0 {
					return n
				}
			}
		}
		// Select the runtime helper by signedness and operation:
		// int64div, uint64div, int64mod, or uint64mod.
		var fn string
		if et == types.TINT64 {
			fn = "int64"
		} else {
			fn = "uint64"
		}
		if n.Op() == ir.ODIV {
			fn += "div"
		} else {
			fn += "mod"
		}
		return mkcall(fn, n.Type(), init, typecheck.Conv(n.X, types.Types[et]), typecheck.Conv(n.Y, types.Types[et]))
	}
	return n
}
696
697// walkDot walks an ODOT or ODOTPTR node.
698func walkDot(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
699	usefield(n)
700	n.X = walkExpr(n.X, init)
701	return n
702}
703
// walkDotType walks an ODOTTYPE or ODOTTYPE2 node, precomputing the
// itab address and, for interface-to-nonempty-interface assertions, the
// runtime type-assert descriptor needed by the back end.
func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	// Set up interface type addresses for back end.
	if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
		n.ITab = reflectdata.ITabAddrAt(base.Pos, n.Type(), n.X.Type())
	}
	if n.X.Type().IsInterface() && n.Type().IsInterface() && !n.Type().IsEmptyInterface() {
		// This kind of conversion needs a runtime call. Allocate
		// a descriptor for that call.
		n.Descriptor = makeTypeAssertDescriptor(n.Type(), n.Op() == ir.ODOTTYPE2)
	}
	return n
}
718
// makeTypeAssertDescriptor allocates and initializes an
// internal/abi.TypeAssert descriptor for a runtime-assisted assertion
// to the interface type target. canFail records whether the assertion
// is the comma-ok form (ODOTTYPE2).
func makeTypeAssertDescriptor(target *types.Type, canFail bool) *obj.LSym {
	// When converting from an interface to a non-empty interface. Needs a runtime call.
	// Allocate an internal/abi.TypeAssert descriptor for that call.
	lsym := types.LocalPkg.Lookup(fmt.Sprintf(".typeAssert.%d", typeAssertGen)).LinksymABI(obj.ABI0)
	typeAssertGen++
	c := rttype.NewCursor(lsym, 0, rttype.TypeAssert)
	c.Field("Cache").WritePtr(typecheck.LookupRuntimeVar("emptyTypeAssertCache"))
	c.Field("Inter").WritePtr(reflectdata.TypeLinksym(target))
	c.Field("CanFail").WriteBool(canFail)
	objw.Global(lsym, int32(rttype.TypeAssert.Size()), obj.LOCAL)
	lsym.Gotype = reflectdata.TypeLinksym(rttype.TypeAssert)
	return lsym
}

// typeAssertGen counts emitted descriptors so each gets a unique
// ".typeAssert.N" symbol name.
var typeAssertGen int
734
// walkDynamicDotType walks an ODYNAMICDOTTYPE or ODYNAMICDOTTYPE2 node.
// If the runtime type operand turns out to be the address of a static
// type descriptor, the dynamic assertion is rewritten into an ordinary
// static ODOTTYPE/ODOTTYPE2 and re-walked.
func walkDynamicDotType(n *ir.DynamicTypeAssertExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.RType = walkExpr(n.RType, init)
	n.ITab = walkExpr(n.ITab, init)
	// Convert to non-dynamic if we can.
	if n.RType != nil && n.RType.Op() == ir.OADDR {
		addr := n.RType.(*ir.AddrExpr)
		if addr.X.Op() == ir.OLINKSYMOFFSET {
			// The target type is a compile-time constant symbol.
			r := ir.NewTypeAssertExpr(n.Pos(), n.X, n.Type())
			if n.Op() == ir.ODYNAMICDOTTYPE2 {
				r.SetOp(ir.ODOTTYPE2)
			}
			r.SetType(n.Type())
			r.SetTypecheck(1)
			return walkExpr(r, init)
		}
	}
	return n
}
755
// walkIndex walks an OINDEX node and attempts to prove the index in
// bounds so the back end can elide the bounds check.
func walkIndex(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)

	// save the original node for bounds checking elision.
	// If it was a ODIV/OMOD walk might rewrite it.
	r := n.Index

	n.Index = walkExpr(n.Index, init)

	// if range of type cannot exceed static array bound,
	// disable bounds check.
	if n.Bounded() {
		return n
	}
	t := n.X.Type()
	if t != nil && t.IsPtr() {
		// Indexing through a pointer to array; bound is the array's.
		t = t.Elem()
	}
	if t.IsArray() {
		n.SetBounded(bounded(r, t.NumElem()))
		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
			base.Warn("index bounds check elided")
		}
	} else if ir.IsConst(n.X, constant.String) {
		// Constant string: bound is its length.
		n.SetBounded(bounded(r, int64(len(ir.StringVal(n.X)))))
		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
			base.Warn("index bounds check elided")
		}
	}
	return n
}
788
// mapKeyArg returns an expression for key that is suitable to be passed
// as the key argument for runtime map* functions.
// n is the map indexing or delete Node (to provide Pos).
// fast is the mapfast variant in use; assigned reports whether the key
// is for a mapassign call (which keeps pointer keys as pointers).
func mapKeyArg(fast int, n, key ir.Node, assigned bool) ir.Node {
	if fast == mapslow {
		// standard version takes key by reference.
		// orderState.expr made sure key is addressable.
		return typecheck.NodAddr(key)
	}
	if assigned {
		// mapassign does distinguish pointer vs. integer key.
		return key
	}
	// mapaccess and mapdelete don't distinguish pointer vs. integer key.
	switch fast {
	case mapfast32ptr:
		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT32], key)
	case mapfast64ptr:
		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT64], key)
	default:
		// fast version takes key by value.
		return key
	}
}
813
// walkIndexMap walks an OINDEXMAP node.
// It replaces m[k] with *map{access1,assign}(maptype, m, &k)
func walkIndexMap(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Index = walkExpr(n.Index, init)
	map_ := n.X
	t := map_.Type()
	fast := mapfast(t)
	key := mapKeyArg(fast, n, n.Index, n.Assigned)
	args := []ir.Node{reflectdata.IndexMapRType(base.Pos, n), map_, key}

	// Pick the runtime routine: assignment, fat access (large element
	// needing an explicit zero value), or plain access.
	var mapFn ir.Node
	switch {
	case n.Assigned:
		mapFn = mapfn(mapassign[fast], t, false)
	case t.Elem().Size() > abi.ZeroValSize:
		args = append(args, reflectdata.ZeroAddr(t.Elem().Size()))
		mapFn = mapfn("mapaccess1_fat", t, true)
	default:
		mapFn = mapfn(mapaccess1[fast], t, false)
	}
	call := mkcall1(mapFn, nil, init, args...)
	call.SetType(types.NewPtr(t.Elem()))
	call.MarkNonNil() // mapaccess1* and mapassign always return non-nil pointers.
	// Dereference the returned element pointer to yield the value.
	star := ir.NewStarExpr(base.Pos, call)
	star.SetType(t.Elem())
	star.SetTypecheck(1)
	return star
}
843
844// walkLogical walks an OANDAND or OOROR node.
845func walkLogical(n *ir.LogicalExpr, init *ir.Nodes) ir.Node {
846	n.X = walkExpr(n.X, init)
847
848	// cannot put side effects from n.Right on init,
849	// because they cannot run before n.Left is checked.
850	// save elsewhere and store on the eventual n.Right.
851	var ll ir.Nodes
852
853	n.Y = walkExpr(n.Y, &ll)
854	n.Y = ir.InitExpr(ll, n.Y)
855	return n
856}
857
858// walkSend walks an OSEND node.
859func walkSend(n *ir.SendStmt, init *ir.Nodes) ir.Node {
860	n1 := n.Value
861	n1 = typecheck.AssignConv(n1, n.Chan.Type().Elem(), "chan send")
862	n1 = walkExpr(n1, init)
863	n1 = typecheck.NodAddr(n1)
864	return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
865}
866
867// walkSlice walks an OSLICE, OSLICEARR, OSLICESTR, OSLICE3, or OSLICE3ARR node.
868func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
869	n.X = walkExpr(n.X, init)
870	n.Low = walkExpr(n.Low, init)
871	if n.Low != nil && ir.IsZero(n.Low) {
872		// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
873		n.Low = nil
874	}
875	n.High = walkExpr(n.High, init)
876	n.Max = walkExpr(n.Max, init)
877
878	if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && n.Low == nil && n.High == nil {
879		// Reduce x[:] to x.
880		if base.Debug.Slice > 0 {
881			base.Warn("slice: omit slice operation")
882		}
883		return n.X
884	}
885	return n
886}
887
888// walkSliceHeader walks an OSLICEHEADER node.
889func walkSliceHeader(n *ir.SliceHeaderExpr, init *ir.Nodes) ir.Node {
890	n.Ptr = walkExpr(n.Ptr, init)
891	n.Len = walkExpr(n.Len, init)
892	n.Cap = walkExpr(n.Cap, init)
893	return n
894}
895
896// walkStringHeader walks an OSTRINGHEADER node.
897func walkStringHeader(n *ir.StringHeaderExpr, init *ir.Nodes) ir.Node {
898	n.Ptr = walkExpr(n.Ptr, init)
899	n.Len = walkExpr(n.Len, init)
900	return n
901}
902
// bounded reports whether the integer expression n is provably in the
// half-open range [0, max). It returns false whenever the bound cannot
// be established. (The old "return 1 ... 0 otherwise" comment predated
// the bool return type.)
func bounded(n ir.Node, max int64) bool {
	if n.Type() == nil || !n.Type().IsInteger() {
		return false
	}

	sign := n.Type().IsSigned()
	bits := int32(8 * n.Type().Size())

	// Constants are bounded exactly.
	if ir.IsSmallIntConst(n) {
		v := ir.Int64Val(n)
		return 0 <= v && v < max
	}

	switch n.Op() {
	case ir.OAND, ir.OANDNOT:
		// Masking with a constant mask in [0, max) bounds the result.
		n := n.(*ir.BinaryExpr)
		v := int64(-1)
		switch {
		case ir.IsSmallIntConst(n.X):
			v = ir.Int64Val(n.X)
		case ir.IsSmallIntConst(n.Y):
			v = ir.Int64Val(n.Y)
			if n.Op() == ir.OANDNOT {
				v = ^v
				if !sign {
					v &= 1<<uint(bits) - 1
				}
			}
		}
		if 0 <= v && v < max {
			return true
		}

	case ir.OMOD:
		// Unsigned x % c is in [0, c-1], so c <= max suffices.
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			if 0 <= v && v <= max {
				return true
			}
		}

	case ir.ODIV:
		// Unsigned division by a constant shrinks the effective bit
		// width; fall through to the final width-based check.
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			for bits > 0 && v >= 2 {
				bits--
				v >>= 1
			}
		}

	case ir.ORSH:
		// Unsigned right shift by a constant shrinks the effective bit
		// width; shifting by more than the width yields zero.
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			if v > int64(bits) {
				return true
			}
			bits -= int32(v)
		}
	}

	// An unsigned value of width bits is < 1<<bits.
	if !sign && bits <= 62 && 1<<uint(bits) <= max {
		return true
	}

	return false
}
973
// usemethod checks calls for uses of Method and MethodByName of reflect.Value,
// reflect.Type, reflect.(*rtype), and reflect.(*interfaceType), marking the
// current function so the linker keeps the required method metadata alive.
func usemethod(n *ir.CallExpr) {
	// Don't mark reflect.(*rtype).Method, etc. themselves in the reflect package.
	// Those functions may be alive via the itab, which should not cause all methods
	// alive. We only want to mark their callers.
	if base.Ctxt.Pkgpath == "reflect" {
		// TODO: is there a better way than hardcoding the names?
		switch fn := ir.CurFunc.Nname.Sym().Name; {
		case fn == "(*rtype).Method", fn == "(*rtype).MethodByName":
			return
		case fn == "(*interfaceType).Method", fn == "(*interfaceType).MethodByName":
			return
		case fn == "Value.Method", fn == "Value.MethodByName":
			return
		}
	}

	dot, ok := n.Fun.(*ir.SelectorExpr)
	if !ok {
		return
	}

	// looking for either direct method calls and interface method calls of:
	//	reflect.Type.Method        - func(int) reflect.Method
	//	reflect.Type.MethodByName  - func(string) (reflect.Method, bool)
	//
	//	reflect.Value.Method       - func(int) reflect.Value
	//	reflect.Value.MethodByName - func(string) reflect.Value
	methodName := dot.Sel.Name
	t := dot.Selection.Type

	// Check the number of arguments and return values.
	if t.NumParams() != 1 || (t.NumResults() != 1 && t.NumResults() != 2) {
		return
	}

	// Check the type of the argument.
	switch pKind := t.Param(0).Type.Kind(); {
	case methodName == "Method" && pKind == types.TINT,
		methodName == "MethodByName" && pKind == types.TSTRING:

	default:
		// not a call to Method or MethodByName of reflect.{Type,Value}.
		return
	}

	// Check that first result type is "reflect.Method" or "reflect.Value".
	// Note that we have to check sym name and sym package separately, as
	// we can't check for exact string "reflect.Method" reliably
	// (e.g., see #19028 and #38515).
	switch s := t.Result(0).Type.Sym(); {
	case s != nil && types.ReflectSymName(s) == "Method",
		s != nil && types.ReflectSymName(s) == "Value":

	default:
		// not a call to Method or MethodByName of reflect.{Type,Value}.
		return
	}

	// For MethodByName, try to find the constant name argument so only
	// that one named method needs to be kept alive.
	var targetName ir.Node
	switch dot.Op() {
	case ir.ODOTINTER:
		if methodName == "MethodByName" {
			targetName = n.Args[0]
		}
	case ir.OMETHEXPR:
		// Method expression form: Args[0] is the receiver, Args[1] the name.
		if methodName == "MethodByName" {
			targetName = n.Args[1]
		}
	default:
		base.FatalfAt(dot.Pos(), "usemethod: unexpected dot.Op() %s", dot.Op())
	}

	if ir.IsConst(targetName, constant.String) {
		// Constant name: record a relocation keeping just that method.
		name := constant.StringVal(targetName.Val())

		r := obj.Addrel(ir.CurFunc.LSym)
		r.Type = objabi.R_USENAMEDMETHOD
		r.Sym = staticdata.StringSymNoCommon(name)
	} else {
		// Unknown name: conservatively keep all methods reachable.
		ir.CurFunc.LSym.Set(obj.AttrReflectMethod, true)
	}
}
1058
1059func usefield(n *ir.SelectorExpr) {
1060	if !buildcfg.Experiment.FieldTrack {
1061		return
1062	}
1063
1064	switch n.Op() {
1065	default:
1066		base.Fatalf("usefield %v", n.Op())
1067
1068	case ir.ODOT, ir.ODOTPTR:
1069		break
1070	}
1071
1072	field := n.Selection
1073	if field == nil {
1074		base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
1075	}
1076	if field.Sym != n.Sel {
1077		base.Fatalf("field inconsistency: %v != %v", field.Sym, n.Sel)
1078	}
1079	if !strings.Contains(field.Note, "go:\"track\"") {
1080		return
1081	}
1082
1083	outer := n.X.Type()
1084	if outer.IsPtr() {
1085		outer = outer.Elem()
1086	}
1087	if outer.Sym() == nil {
1088		base.Errorf("tracked field must be in named struct type")
1089	}
1090
1091	sym := reflectdata.TrackSym(outer, field)
1092	if ir.CurFunc.FieldTrack == nil {
1093		ir.CurFunc.FieldTrack = make(map[*obj.LSym]struct{})
1094	}
1095	ir.CurFunc.FieldTrack[sym] = struct{}{}
1096}
1097