// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
	"fmt"
	"go/constant"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/staticinit"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/objabi"
	"cmd/internal/src"
)

// Rewrite tree to use separate statements to enforce
// order of evaluation. Makes walk easier, because it
// can (after this runs) reorder at will within an expression.
//
// Rewrite m[k] op= r into m[k] = m[k] op r if op is / or %.
//
// Introduce temporaries as needed by runtime routines.
// For example, the map runtime routines take the map key
// by reference, so make sure all map keys are addressable
// by copying them to temporaries as needed.
// The same is true for channel operations.
//
// Arrange that map index expressions only appear in direct
// assignments x = m[k] or m[k] = x, never in larger expressions.
//
// Arrange that receive expressions only appear in direct assignments
// x = <-c or as standalone statements <-c, never in larger expressions.
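//
// For example (an illustrative sketch, not literal compiler output),
// a statement such as
//
//	x := f(m[k]) + <-c
//
// is rewritten into something like
//
//	tmp1 := m[k]
//	tmp2 := <-c
//	x := f(tmp1) + tmp2
//
// so that each map index and receive appears only in a direct assignment.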

// orderState holds state during the ordering process.
type orderState struct {
	out  []ir.Node             // list of generated statements
	temp []*ir.Name            // stack of temporary variables
	free map[string][]*ir.Name // free list of unused temporaries, by type.LinkString().
	edit func(ir.Node) ir.Node // cached closure of o.exprNoLHS
}

// order rewrites fn.Nbody to apply the ordering constraints
// described in the comment at the top of the file.
func order(fn *ir.Func) {
	if base.Flag.W > 1 {
		s := fmt.Sprintf("\nbefore order %v", fn.Sym())
		ir.DumpList(s, fn.Body)
	}
	ir.SetPos(fn) // Set reasonable position for instrumenting code. See issue 53688.
	orderBlock(&fn.Body, map[string][]*ir.Name{})
}

// append typechecks stmt and appends it to out.
func (o *orderState) append(stmt ir.Node) {
	o.out = append(o.out, typecheck.Stmt(stmt))
}

// newTemp allocates a new temporary with the given type,
// pushes it onto the temp stack, and returns it.
// If clear is true, newTemp emits code to zero the temporary.
func (o *orderState) newTemp(t *types.Type, clear bool) *ir.Name {
	var v *ir.Name
	key := t.LinkString()
	if a := o.free[key]; len(a) > 0 {
		v = a[len(a)-1]
		if !types.Identical(t, v.Type()) {
			base.Fatalf("expected %L to have type %v", v, t)
		}
		o.free[key] = a[:len(a)-1]
	} else {
		v = typecheck.TempAt(base.Pos, ir.CurFunc, t)
	}
	if clear {
		o.append(ir.NewAssignStmt(base.Pos, v, nil))
	}

	o.temp = append(o.temp, v)
	return v
}

// copyExpr behaves like newTemp but also emits
// code to initialize the temporary to the value n.
func (o *orderState) copyExpr(n ir.Node) *ir.Name {
	return o.copyExpr1(n, false)
}

// copyExprClear is like copyExpr but clears the temp before assignment.
// It is provided for use when the evaluation of tmp = n turns into
// a function call that is passed a pointer to the temporary as the output space.
// If the call blocks before tmp has been written,
// the garbage collector will still treat the temporary as live,
// so we must zero it before entering that call.
// Today, this only happens for channel receive operations.
// (The other candidate would be map access, but map access
// returns a pointer to the result data instead of taking a pointer
// to be filled in.)
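//
// Illustrative sketch (hypothetical temporary name): for v = <-c,
// ordering effectively produces
//
//	var tmp T
//	tmp = <-c // lowered to a runtime call that writes through &tmp
//	v = tmp
//
// with an extra "tmp = zero value" emitted first by copyExprClear so
// that tmp is safe for the garbage collector to scan while the
// receive blocks.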
func (o *orderState) copyExprClear(n ir.Node) *ir.Name {
	return o.copyExpr1(n, true)
}

func (o *orderState) copyExpr1(n ir.Node, clear bool) *ir.Name {
	t := n.Type()
	v := o.newTemp(t, clear)
	o.append(ir.NewAssignStmt(base.Pos, v, n))
	return v
}

// cheapExpr returns a cheap version of n.
// The definition of cheap is that n is a variable or constant.
// If not, cheapExpr allocates a new tmp, emits tmp = n,
// and then returns tmp.
func (o *orderState) cheapExpr(n ir.Node) ir.Node {
	if n == nil {
		return nil
	}

	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL:
		return n
	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return typecheck.Expr(a)
	}

	return o.copyExpr(n)
}

// safeExpr returns a safe version of n.
// The definition of safe is that n can appear multiple times
// without violating the semantics of the original program,
// and that assigning to the safe version has the same effect
// as assigning to the original n.
//
// The intended use is to apply to x when rewriting x += y into x = x + y.
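//
// Illustrative sketch: when rewriting xs[f()] += y into
// xs[f()] = xs[f()] + y, f() must not be evaluated twice, so the
// index is hoisted into a temporary first:
//
//	tmp := f()
//	xs[tmp] = xs[tmp] + y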
func (o *orderState) safeExpr(n ir.Node) ir.Node {
	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL:
		return n

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		l := o.safeExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		l := o.safeExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.StarExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.OINDEX, ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		var l ir.Node
		if n.X.Type().IsArray() {
			l = o.safeExpr(n.X)
		} else {
			l = o.cheapExpr(n.X)
		}
		r := o.cheapExpr(n.Index)
		if l == n.X && r == n.Index {
			return n
		}
		a := ir.Copy(n).(*ir.IndexExpr)
		a.X = l
		a.Index = r
		return typecheck.Expr(a)

	default:
		base.Fatalf("order.safeExpr %v", n.Op())
		return nil // not reached
	}
}

// addrTemp ensures that n is okay to pass by address to runtime routines.
// If the original argument n is not okay, addrTemp creates a tmp, emits
// tmp = n, and then returns tmp.
// The result of addrTemp MUST be assigned back to n, e.g.
//
//	n.Left = o.addrTemp(n.Left)
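//
// Illustrative sketch: a map key expression like k+1 is not
// addressable, so before a runtime call that takes the key by
// reference, addrTemp arranges
//
//	tmp := k + 1
//
// and the runtime call then receives &tmp. An operand that is already
// addressable (and not an SSA-able local, per the check below) is
// returned unchanged.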
func (o *orderState) addrTemp(n ir.Node) ir.Node {
	if n.Op() == ir.OLITERAL || n.Op() == ir.ONIL {
		// TODO: expand this to all static composite literal nodes?
		n = typecheck.DefaultLit(n, nil)
		types.CalcSize(n.Type())
		vstat := readonlystaticname(n.Type())
		var s staticinit.Schedule
		s.StaticAssign(vstat, 0, n, n.Type())
		if s.Out != nil {
			base.Fatalf("staticassign of const generated code: %+v", n)
		}
		vstat = typecheck.Expr(vstat).(*ir.Name)
		return vstat
	}

	// Prevent taking the address of an SSA-able local variable (#63332).
	//
	// TODO(mdempsky): Note that OuterValue unwraps OCONVNOPs, but
	// IsAddressable does not. It should be possible to skip copying for
	// at least some of these OCONVNOPs (e.g., reinsert them after the
	// OADDR operation), but at least walkCompare needs to be fixed to
	// support that (see trybot failures on go.dev/cl/541715, PS1).
	if ir.IsAddressable(n) {
		if name, ok := ir.OuterValue(n).(*ir.Name); ok && name.Op() == ir.ONAME {
			if name.Class == ir.PAUTO && !name.Addrtaken() && ssa.CanSSA(name.Type()) {
				goto Copy
			}
		}

		return n
	}

Copy:
	return o.copyExpr(n)
}

// mapKeyTemp prepares n to be a key in a map runtime call and returns n.
// The first parameter is the position of n's containing node, for use
// when n's position is not unique (e.g., if n is an ONAME).
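//
// Illustrative sketch: for
//
//	type K string
//	var m map[K]int
//
// the fast string variant takes the key by value, so m[k] becomes,
// conceptually, a call like mapaccess1_faststr(t, m, string(k)), with
// the OCONVNOP conversion to the runtime's key type inserted here.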
func (o *orderState) mapKeyTemp(outerPos src.XPos, t *types.Type, n ir.Node) ir.Node {
	pos := outerPos
	if ir.HasUniquePos(n) {
		pos = n.Pos()
	}
	// Most map calls need to take the address of the key.
	// Exception: map*_fast* calls. See golang.org/issue/19015.
	alg := mapfast(t)
	if alg == mapslow {
		return o.addrTemp(n)
	}
	var kt *types.Type
	switch alg {
	case mapfast32:
		kt = types.Types[types.TUINT32]
	case mapfast64:
		kt = types.Types[types.TUINT64]
	case mapfast32ptr, mapfast64ptr:
		kt = types.Types[types.TUNSAFEPTR]
	case mapfaststr:
		kt = types.Types[types.TSTRING]
	}
	nt := n.Type()
	switch {
	case nt == kt:
		return n
	case nt.Kind() == kt.Kind(), nt.IsPtrShaped() && kt.IsPtrShaped():
		// can directly convert (e.g. named type to underlying type, or one pointer to another)
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONVNOP, kt, n))
	case nt.IsInteger() && kt.IsInteger():
		// can directly convert (e.g. int32 to uint32)
		if n.Op() == ir.OLITERAL && nt.IsSigned() {
			// avoid constant overflow error
			n = ir.NewConstExpr(constant.MakeUint64(uint64(ir.Int64Val(n))), n)
			n.SetType(kt)
			return n
		}
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONV, kt, n))
	default:
		// Unsafe cast through memory.
		// We'll need to do a load with type kt. Create a temporary of type kt to
		// ensure sufficient alignment. nt may be under-aligned.
		if uint8(kt.Alignment()) < uint8(nt.Alignment()) {
			base.Fatalf("mapKeyTemp: key type is not sufficiently aligned, kt=%v nt=%v", kt, nt)
		}
		tmp := o.newTemp(kt, true)
		// *(*nt)(&tmp) = n
		var e ir.Node = typecheck.NodAddr(tmp)
		e = ir.NewConvExpr(pos, ir.OCONVNOP, nt.PtrTo(), e)
		e = ir.NewStarExpr(pos, e)
		o.append(ir.NewAssignStmt(pos, e, n))
		return tmp
	}
}

// mapKeyReplaceStrConv replaces OBYTES2STR by OBYTES2STRTMP
// in n to avoid string allocations for keys in map lookups.
// Returns a bool that signals if a modification was made.
//
// For:
//
//	x = m[string(k)]
//	x = m[T1{... Tn{..., string(k), ...}}]
//
// where k is []byte, T1 to Tn is a nesting of struct and array literals,
// the allocation of backing bytes for the string can be avoided
// by reusing the []byte backing array. These are special cases
// for avoiding allocations when converting byte slices to strings.
// It would be nice to handle these generally, but because
// []byte keys are not allowed in maps, the use of string(k)
// comes up in important cases in practice. See issue 3512.
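//
// Illustrative sketch of the effect (OBYTES2STRTMP is an IR op, not
// real syntax):
//
//	x = m[string(k)]    // OBYTES2STR: would allocate a copy of k's bytes
//	x = m[<tmp string>] // OBYTES2STRTMP: aliases k's backing array, no alloc
//
// This is valid only because the map lookup does not retain the key.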
func mapKeyReplaceStrConv(n ir.Node) bool {
	var replaced bool
	switch n.Op() {
	case ir.OBYTES2STR:
		n := n.(*ir.ConvExpr)
		n.SetOp(ir.OBYTES2STRTMP)
		replaced = true
	case ir.OSTRUCTLIT:
		n := n.(*ir.CompLitExpr)
		for _, elem := range n.List {
			elem := elem.(*ir.StructKeyExpr)
			if mapKeyReplaceStrConv(elem.Value) {
				replaced = true
			}
		}
	case ir.OARRAYLIT:
		n := n.(*ir.CompLitExpr)
		for _, elem := range n.List {
			if elem.Op() == ir.OKEY {
				elem = elem.(*ir.KeyExpr).Value
			}
			if mapKeyReplaceStrConv(elem) {
				replaced = true
			}
		}
	}
	return replaced
}

type ordermarker int

// markTemp returns the top of the temporary variable stack.
func (o *orderState) markTemp() ordermarker {
	return ordermarker(len(o.temp))
}

// popTemp pops temporaries off the stack until reaching the mark,
// which must have been returned by markTemp.
func (o *orderState) popTemp(mark ordermarker) {
	for _, n := range o.temp[mark:] {
		key := n.Type().LinkString()
		o.free[key] = append(o.free[key], n)
	}
	o.temp = o.temp[:mark]
}
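
// The mark/pop pair brackets the ordering of a single construct so its
// temporaries can be recycled via the free list, following the pattern
// used throughout stmt below:
//
//	t := o.markTemp()
//	n.X = o.expr(n.X, nil)
//	o.out = append(o.out, n)
//	o.popTemp(t)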

// stmtList orders each of the statements in the list.
func (o *orderState) stmtList(l ir.Nodes) {
	s := l
	for i := range s {
		orderMakeSliceCopy(s[i:])
		o.stmt(s[i])
	}
}

// orderMakeSliceCopy matches the pattern:
//
//	m = OMAKESLICE([]T, x); OCOPY(m, s)
//
// and rewrites it to:
//
//	m = OMAKESLICECOPY([]T, x, s); nil
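//
// At the Go source level this corresponds to (illustrative):
//
//	m := make([]T, x)
//	copy(m, s)
//
// which walk can then lower to a single runtime call
// (runtime.makeslicecopy) instead of a separate make and copy.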
func orderMakeSliceCopy(s []ir.Node) {
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return
	}
	if len(s) < 2 || s[0] == nil || s[0].Op() != ir.OAS || s[1] == nil || s[1].Op() != ir.OCOPY {
		return
	}

	as := s[0].(*ir.AssignStmt)
	cp := s[1].(*ir.BinaryExpr)
	if as.Y == nil || as.Y.Op() != ir.OMAKESLICE || ir.IsBlank(as.X) ||
		as.X.Op() != ir.ONAME || cp.X.Op() != ir.ONAME || cp.Y.Op() != ir.ONAME ||
		as.X.Name() != cp.X.Name() || cp.X.Name() == cp.Y.Name() {
		// The line above this one is correct with the differing equality operators:
		// we want as.X and cp.X to be the same name,
		// but we want the initial data to come from a different name.
		return
	}

	mk := as.Y.(*ir.MakeExpr)
	if mk.Esc() == ir.EscNone || mk.Len == nil || mk.Cap != nil {
		return
	}
	mk.SetOp(ir.OMAKESLICECOPY)
	mk.Cap = cp.Y
	// Set bounded when m = OMAKESLICE([]T, len(s)); OCOPY(m, s)
	mk.SetBounded(mk.Len.Op() == ir.OLEN && ir.SameSafeExpr(mk.Len.(*ir.UnaryExpr).X, cp.Y))
	as.Y = typecheck.Expr(mk)
	s[1] = nil // remove separate copy call
}

// edge inserts coverage instrumentation for libfuzzer.
func (o *orderState) edge() {
	if base.Debug.Libfuzzer == 0 {
		return
	}

	// Create a new uint8 counter to be allocated in section __sancov_cntrs
	counter := staticinit.StaticName(types.Types[types.TUINT8])
	counter.SetLibfuzzer8BitCounter(true)
	// As well as setting SetLibfuzzer8BitCounter, we preemptively set the
	// symbol type to SLIBFUZZER_8BIT_COUNTER so that the race detector
	// instrumentation pass (which does not have access to the flags set by
	// SetLibfuzzer8BitCounter) knows to ignore them. This information is
	// lost by the time it reaches the compile step, so SetLibfuzzer8BitCounter
	// is still necessary.
	counter.Linksym().Type = objabi.SLIBFUZZER_8BIT_COUNTER

	// We guarantee that the counter never becomes zero again once it has been
	// incremented once. This implementation follows the NeverZero optimization
	// presented by the paper:
	// "AFL++: Combining Incremental Steps of Fuzzing Research"
	// The NeverZero policy avoids the overflow to 0 by setting the counter to one
	// after it reaches 255 and so, if an edge is executed at least one time, the entry is
	// never 0.
	// Another policy presented in the paper is the Saturated Counters policy,
	// which freezes the counter when it reaches the value of 255. However, a
	// range of experiments showed that it decreases overall performance.
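	//
	// The statement appended below is equivalent to (illustrative):
	//
	//	if counter == 0xff {
	//		counter = 1
	//	} else {
	//		counter += 1
	//	}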
	o.append(ir.NewIfStmt(base.Pos,
		ir.NewBinaryExpr(base.Pos, ir.OEQ, counter, ir.NewInt(base.Pos, 0xff)),
		[]ir.Node{ir.NewAssignStmt(base.Pos, counter, ir.NewInt(base.Pos, 1))},
		[]ir.Node{ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, ir.NewInt(base.Pos, 1))}))
}

// orderBlock orders the block of statements in n into a new slice,
// and then replaces the old slice in n with the new slice.
// free is a map that can be used to obtain temporary variables by type.
func orderBlock(n *ir.Nodes, free map[string][]*ir.Name) {
	if len(*n) != 0 {
		// Set reasonable position for instrumenting code. See issue 53688.
		// It would be nice if ir.Nodes had a position (the opening {, probably),
		// but it doesn't. So we use the first statement's position instead.
		ir.SetPos((*n)[0])
	}
	var order orderState
	order.free = free
	mark := order.markTemp()
	order.edge()
	order.stmtList(*n)
	order.popTemp(mark)
	*n = order.out
}

// exprInPlace orders the side effects in *np and
// leaves them as the init list of the final *np.
// The result of exprInPlace MUST be assigned back to n, e.g.
//
//	n.Left = o.exprInPlace(n.Left)
func (o *orderState) exprInPlace(n ir.Node) ir.Node {
	var order orderState
	order.free = o.free
	n = order.expr(n, nil)
	n = ir.InitExpr(order.out, n)

	// insert new temporaries from order
	// at head of outer list.
	o.temp = append(o.temp, order.temp...)
	return n
}

// orderStmtInPlace orders the side effects of the single statement *np
// and replaces it with the resulting statement list.
// The result of orderStmtInPlace MUST be assigned back to n, e.g.
//
//	n.Left = orderStmtInPlace(n.Left)
//
// free is a map that can be used to obtain temporary variables by type.
func orderStmtInPlace(n ir.Node, free map[string][]*ir.Name) ir.Node {
	var order orderState
	order.free = free
	mark := order.markTemp()
	order.stmt(n)
	order.popTemp(mark)
	return ir.NewBlockStmt(src.NoXPos, order.out)
}

// init moves n's init list to o.out.
func (o *orderState) init(n ir.Node) {
	if ir.MayBeShared(n) {
		// For concurrency safety, don't mutate potentially shared nodes.
		// First, ensure that no work is required here.
		if len(n.Init()) > 0 {
			base.Fatalf("order.init shared node with ninit")
		}
		return
	}
	o.stmtList(ir.TakeInit(n))
}

// call orders the call expression n.
// n.Op is OCALLFUNC/OCALLINTER or a builtin like OCOPY.
func (o *orderState) call(nn ir.Node) {
	if len(nn.Init()) > 0 {
		// Caller should have already called o.init(nn).
		base.Fatalf("%v with unexpected ninit", nn.Op())
	}
	if nn.Op() == ir.OCALLMETH {
		base.FatalfAt(nn.Pos(), "OCALLMETH missed by typecheck")
	}

	// Builtin functions.
	if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLINTER {
		switch n := nn.(type) {
		default:
			base.Fatalf("unexpected call: %+v", n)
		case *ir.UnaryExpr:
			n.X = o.expr(n.X, nil)
		case *ir.ConvExpr:
			n.X = o.expr(n.X, nil)
		case *ir.BinaryExpr:
			n.X = o.expr(n.X, nil)
			n.Y = o.expr(n.Y, nil)
		case *ir.MakeExpr:
			n.Len = o.expr(n.Len, nil)
			n.Cap = o.expr(n.Cap, nil)
		case *ir.CallExpr:
			o.exprList(n.Args)
		}
		return
	}

	n := nn.(*ir.CallExpr)
	typecheck.AssertFixedCall(n)

	if ir.IsFuncPCIntrinsic(n) && ir.IsIfaceOfFunc(n.Args[0]) != nil {
		// For internal/abi.FuncPCABIxxx(fn), if fn is a defined function,
		// do not introduce temporaries here, so it is easier to rewrite it
		// to symbol address reference later in walk.
		return
	}

	n.Fun = o.expr(n.Fun, nil)
	o.exprList(n.Args)
}

// mapAssign appends n to o.out.
func (o *orderState) mapAssign(n ir.Node) {
	switch n.Op() {
	default:
		base.Fatalf("order.mapAssign %v", n.Op())

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X.Op() == ir.OINDEXMAP {
			n.Y = o.safeMapRHS(n.Y)
		}
		o.out = append(o.out, n)
	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		if n.X.Op() == ir.OINDEXMAP {
			n.Y = o.safeMapRHS(n.Y)
		}
		o.out = append(o.out, n)
	}
}

func (o *orderState) safeMapRHS(r ir.Node) ir.Node {
	// Make sure we evaluate the RHS before starting the map insert.
	// We need to make sure the RHS won't panic. See issue 22881.
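	//
	// Illustrative sketch: in
	//
	//	m[0] = append(m[0], a[i:]...)
	//
	// evaluating a[i:] may panic (e.g. if a is a nil slice and i > 0),
	// and that panic must be observed before the map assignment begins,
	// so the appended arguments are hoisted into temporaries here.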
	if r.Op() == ir.OAPPEND {
		r := r.(*ir.CallExpr)
		s := r.Args[1:]
		for i, n := range s {
			s[i] = o.cheapExpr(n)
		}
		return r
	}
	return o.cheapExpr(r)
}

// stmt orders the statement n, appending to o.out.
func (o *orderState) stmt(n ir.Node) {
	if n == nil {
		return
	}

	lno := ir.SetPos(n)
	o.init(n)

	switch n.Op() {
	default:
		base.Fatalf("order.stmt %v", n.Op())

	case ir.OINLMARK:
		o.out = append(o.out, n)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		t := o.markTemp()

		// There's a delicate interaction here between two OINDEXMAP
		// optimizations.
		//
		// First, we want to handle m[k] = append(m[k], ...) with a single
		// runtime call to mapassign. This requires the m[k] expressions to
		// satisfy ir.SameSafeExpr in walkAssign.
		//
		// But if k is a slow map key type that's passed by reference (e.g.,
		// byte), then we want to avoid marking user variables as addrtaken,
		// if that might prevent the compiler from keeping k in a register.
		//
		// TODO(mdempsky): It would be better if walk was responsible for
		// inserting temporaries as needed.
		mapAppend := n.X.Op() == ir.OINDEXMAP && n.Y.Op() == ir.OAPPEND &&
			ir.SameSafeExpr(n.X, n.Y.(*ir.CallExpr).Args[0])

		n.X = o.expr(n.X, nil)
		if mapAppend {
			indexLHS := n.X.(*ir.IndexExpr)
			indexLHS.X = o.cheapExpr(indexLHS.X)
			indexLHS.Index = o.cheapExpr(indexLHS.Index)

			call := n.Y.(*ir.CallExpr)
			arg0 := call.Args[0]
			// ir.SameSafeExpr skips OCONVNOPs, so we must do the same here (#66096).
			for arg0.Op() == ir.OCONVNOP {
				arg0 = arg0.(*ir.ConvExpr).X
			}
			indexRHS := arg0.(*ir.IndexExpr)
			indexRHS.X = indexLHS.X
			indexRHS.Index = indexLHS.Index

			o.exprList(call.Args[1:])
		} else {
			n.Y = o.expr(n.Y, n.X)
		}
		o.mapAssign(n)
		o.popTemp(t)

	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		t := o.markTemp()
		n.X = o.expr(n.X, nil)
		n.Y = o.expr(n.Y, nil)

		if base.Flag.Cfg.Instrumenting || n.X.Op() == ir.OINDEXMAP && (n.AsOp == ir.ODIV || n.AsOp == ir.OMOD) {
			// Rewrite m[k] op= r into m[k] = m[k] op r so
			// that we can ensure that if op panics
			// because r is zero, the panic happens before
			// the map assignment.
			// DeepCopy is a big hammer here, but safeExpr
			// makes sure there is nothing too deep being copied.
			l1 := o.safeExpr(n.X)
			l2 := ir.DeepCopy(src.NoXPos, l1)
			if l2.Op() == ir.OINDEXMAP {
				l2 := l2.(*ir.IndexExpr)
				l2.Assigned = false
			}
			l2 = o.copyExpr(l2)
			r := o.expr(typecheck.Expr(ir.NewBinaryExpr(n.Pos(), n.AsOp, l2, n.Y)), nil)
			as := typecheck.Stmt(ir.NewAssignStmt(n.Pos(), l1, r))
			o.mapAssign(as)
			o.popTemp(t)
			return
		}

		o.mapAssign(n)
		o.popTemp(t)

	case ir.OAS2:
		n := n.(*ir.AssignListStmt)
		t := o.markTemp()
		o.exprList(n.Lhs)
		o.exprList(n.Rhs)
		o.out = append(o.out, n)
		o.popTemp(t)

	// Special: avoid copy of func call n.Right
	case ir.OAS2FUNC:
		n := n.(*ir.AssignListStmt)
		t := o.markTemp()
		o.exprList(n.Lhs)
		call := n.Rhs[0]
		o.init(call)
		if ic, ok := call.(*ir.InlinedCallExpr); ok {
			o.stmtList(ic.Body)

			n.SetOp(ir.OAS2)
			n.Rhs = ic.ReturnVars

			o.exprList(n.Rhs)
			o.out = append(o.out, n)
		} else {
			o.call(call)
			o.as2func(n)
		}
		o.popTemp(t)

	// Special: use temporary variables to hold result,
	// so that runtime can take address of temporary.
	// No temporary for blank assignment.
	//
	// OAS2MAPR: make sure key is addressable if needed,
	//           and make sure OINDEXMAP is not copied out.
	case ir.OAS2DOTTYPE, ir.OAS2RECV, ir.OAS2MAPR:
		n := n.(*ir.AssignListStmt)
		t := o.markTemp()
		o.exprList(n.Lhs)

		switch r := n.Rhs[0]; r.Op() {
		case ir.ODOTTYPE2:
			r := r.(*ir.TypeAssertExpr)
			r.X = o.expr(r.X, nil)
		case ir.ODYNAMICDOTTYPE2:
			r := r.(*ir.DynamicTypeAssertExpr)
			r.X = o.expr(r.X, nil)
			r.RType = o.expr(r.RType, nil)
			r.ITab = o.expr(r.ITab, nil)
		case ir.ORECV:
			r := r.(*ir.UnaryExpr)
			r.X = o.expr(r.X, nil)
		case ir.OINDEXMAP:
			r := r.(*ir.IndexExpr)
			r.X = o.expr(r.X, nil)
			r.Index = o.expr(r.Index, nil)
			// See similar conversion for OINDEXMAP below.
			_ = mapKeyReplaceStrConv(r.Index)
			r.Index = o.mapKeyTemp(r.Pos(), r.X.Type(), r.Index)
		default:
			base.Fatalf("order.stmt: %v", r.Op())
		}

		o.as2ok(n)
		o.popTemp(t)

	// Special: does not save n onto out.
	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		o.stmtList(n.List)

	// Special: n->left is not an expression; save as is.
	case ir.OBREAK,
		ir.OCONTINUE,
		ir.ODCL,
		ir.OFALL,
		ir.OGOTO,
		ir.OLABEL,
		ir.OTAILCALL:
		o.out = append(o.out, n)

	// Special: handle call arguments.
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		t := o.markTemp()
		o.call(n)
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.OINLCALL:
		n := n.(*ir.InlinedCallExpr)
		o.stmtList(n.Body)

		// discard results; double-check for no side effects
		for _, result := range n.ReturnVars {
			if staticinit.AnySideEffects(result) {
				base.FatalfAt(result.Pos(), "inlined call result has side effects: %v", result)
			}
		}

	case ir.OCHECKNIL, ir.OCLEAR, ir.OCLOSE, ir.OPANIC, ir.ORECV:
		n := n.(*ir.UnaryExpr)
		t := o.markTemp()
		n.X = o.expr(n.X, nil)
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.OCOPY:
		n := n.(*ir.BinaryExpr)
		t := o.markTemp()
		n.X = o.expr(n.X, nil)
		n.Y = o.expr(n.Y, nil)
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.OPRINT, ir.OPRINTLN, ir.ORECOVERFP:
		n := n.(*ir.CallExpr)
		t := o.markTemp()
		o.call(n)
		o.out = append(o.out, n)
		o.popTemp(t)

	// Special: order arguments to inner call but not call itself.
	case ir.ODEFER, ir.OGO:
		n := n.(*ir.GoDeferStmt)
		t := o.markTemp()
		o.init(n.Call)
		o.call(n.Call)
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.ODELETE:
		n := n.(*ir.CallExpr)
		t := o.markTemp()
		n.Args[0] = o.expr(n.Args[0], nil)
		n.Args[1] = o.expr(n.Args[1], nil)
		n.Args[1] = o.mapKeyTemp(n.Pos(), n.Args[0].Type(), n.Args[1])
		o.out = append(o.out, n)
		o.popTemp(t)

	// Clean temporaries from condition evaluation at
	// beginning of loop body and after for statement.
	case ir.OFOR:
		n := n.(*ir.ForStmt)
		t := o.markTemp()
		n.Cond = o.exprInPlace(n.Cond)
		orderBlock(&n.Body, o.free)
		n.Post = orderStmtInPlace(n.Post, o.free)
		o.out = append(o.out, n)
		o.popTemp(t)

	// Clean temporaries from condition at
	// beginning of both branches.
	case ir.OIF:
		n := n.(*ir.IfStmt)
		t := o.markTemp()
		n.Cond = o.exprInPlace(n.Cond)
		o.popTemp(t)
		orderBlock(&n.Body, o.free)
		orderBlock(&n.Else, o.free)
		o.out = append(o.out, n)

	case ir.ORANGE:
		// n.Right is the expression being ranged over.
		// order it, and then make a copy if we need one.
		// We almost always do, to ensure that we don't
		// see any value changes made during the loop.
		// Usually the copy is cheap (e.g., array pointer,
		// chan, slice, string are all tiny).
		// The exception is ranging over an array value
		// (not a slice, not a pointer to array),
		// which must make a copy to avoid seeing updates made during
		// the range body. Ranging over an array value is uncommon though.

		// Mark []byte(str) range expression to reuse string backing storage.
		// It is safe because the storage cannot be mutated.
		n := n.(*ir.RangeStmt)
		if x, ok := n.X.(*ir.ConvExpr); ok {
			switch x.Op() {
			case ir.OSTR2BYTES:
				x.SetOp(ir.OSTR2BYTESTMP)
				fallthrough
			case ir.OSTR2BYTESTMP:
				x.MarkNonNil() // "range []byte(nil)" is fine
			}
		}

		t := o.markTemp()
		n.X = o.expr(n.X, nil)

		orderBody := true
		xt := typecheck.RangeExprType(n.X.Type())
		switch k := xt.Kind(); {
		default:
			base.Fatalf("order.stmt range %v", n.Type())

		case types.IsInt[k]:
			// Used only once, no need to copy.

		case k == types.TARRAY, k == types.TSLICE:
			if n.Value == nil || ir.IsBlank(n.Value) {
				// for i := range x will only use x once, to compute len(x).
				// No need to copy it.
				break
			}
			fallthrough

		case k == types.TCHAN, k == types.TSTRING:
			// chan, string, slice, array ranges use value multiple times.
			// make copy.
			r := n.X

			if r.Type().IsString() && r.Type() != types.Types[types.TSTRING] {
				r = ir.NewConvExpr(base.Pos, ir.OCONV, nil, r)
				r.SetType(types.Types[types.TSTRING])
				r = typecheck.Expr(r)
			}

			n.X = o.copyExpr(r)

		case k == types.TMAP:
			if isMapClear(n) {
				// Preserve the body of the map clear pattern so it can
				// be detected during walk. The loop body will not be used
				// when optimizing away the range loop to a runtime call.
				orderBody = false
				break
			}

			// copy the map value in case it is a map literal.
			// TODO(rsc): Make tmp = literal expressions reuse tmp.
			// For maps tmp is just one word so it hardly matters.
			r := n.X
			n.X = o.copyExpr(r)

			// n.Prealloc is the temp for the iterator.
			// MapIterType contains pointers and needs to be zeroed.
			n.Prealloc = o.newTemp(reflectdata.MapIterType(), true)
		}
		n.Key = o.exprInPlace(n.Key)
		n.Value = o.exprInPlace(n.Value)
		if orderBody {
			orderBlock(&n.Body, o.free)
		}
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		o.exprList(n.Results)
		o.out = append(o.out, n)

	// Special: clean case temporaries in each block entry.
	// Select must enter one of its blocks, so there is no
	// need for a cleaning at the end.
	// Doubly special: evaluation order for select is stricter
	// than ordinary expressions. Even something like p.c
	// has to be hoisted into a temporary, so that it cannot be
	// reordered after the channel evaluation for a different
	// case (if p were nil, then the timing of the fault would
	// give this away).
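	//
	// For example (illustrative):
	//
	//	select {
	//	case <-p.c:
	//	case <-q.c:
	//	}
	//
	// must evaluate p.c and q.c (faulting if p or q is nil) before
	// either channel operation runs, so both are hoisted into
	// temporaries regardless of which case is eventually chosen.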
	case ir.OSELECT:
		n := n.(*ir.SelectStmt)
		t := o.markTemp()
		for _, ncas := range n.Cases {
			r := ncas.Comm
			ir.SetPos(ncas)

			// Append any new body prologue to ninit.
			// The next loop will insert ninit into nbody.
			if len(ncas.Init()) != 0 {
				base.Fatalf("order select ninit")
			}
			if r == nil {
				continue
			}
			switch r.Op() {
			default:
				ir.Dump("select case", r)
				base.Fatalf("unknown op in select %v", r.Op())

			case ir.OSELRECV2:
				// case x, ok = <-c
				r := r.(*ir.AssignListStmt)
				recv := r.Rhs[0].(*ir.UnaryExpr)
				recv.X = o.expr(recv.X, nil)
				if !ir.IsAutoTmp(recv.X) {
					recv.X = o.copyExpr(recv.X)
				}
				init := ir.TakeInit(r)

				colas := r.Def
				do := func(i int, t *types.Type) {
					n := r.Lhs[i]
					if ir.IsBlank(n) {
						return
					}
					// If this is case x := <-ch or case x, y := <-ch, the case has
					// the ODCL nodes to declare x and y. We want to delay that
					// declaration (and possible allocation) until inside the case body.
					// Delete the ODCL nodes here and recreate them inside the body below.
					if colas {
						if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).X == n {
							init = init[1:]

							// iimport may have added a default initialization assignment,
							// due to how it handles ODCL statements.
							if len(init) > 0 && init[0].Op() == ir.OAS && init[0].(*ir.AssignStmt).X == n {
								init = init[1:]
							}
						}
						dcl := typecheck.Stmt(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
						ncas.PtrInit().Append(dcl)
					}
					tmp := o.newTemp(t, t.HasPointers())
					as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, n, typecheck.Conv(tmp, n.Type())))
					ncas.PtrInit().Append(as)
					r.Lhs[i] = tmp
				}
				do(0, recv.X.Type().Elem())
				do(1, types.Types[types.TBOOL])
				if len(init) != 0 {
					ir.DumpList("ninit", init)
					base.Fatalf("ninit on select recv")
				}
				orderBlock(ncas.PtrInit(), o.free)

			case ir.OSEND:
				r := r.(*ir.SendStmt)
				if len(r.Init()) != 0 {
					ir.DumpList("ninit", r.Init())
					base.Fatalf("ninit on select send")
				}

				// case c <- x
				// r->left is c, r->right is x, both are always evaluated.
				r.Chan = o.expr(r.Chan, nil)

				if !ir.IsAutoTmp(r.Chan) {
					r.Chan = o.copyExpr(r.Chan)
				}
				r.Value = o.expr(r.Value, nil)
				if !ir.IsAutoTmp(r.Value) {
					r.Value = o.copyExpr(r.Value)
				}
			}
		}
		// Now that we have accumulated all the temporaries, clean them.
		// Also insert any ninit queued during the previous loop.
		// (The temporary cleaning must follow that ninit work.)
		for _, cas := range n.Cases {
			orderBlock(&cas.Body, o.free)

			// TODO(mdempsky): Is this actually necessary?
			// walkSelect appears to walk Ninit.
			cas.Body.Prepend(ir.TakeInit(cas)...)
		}

		o.out = append(o.out, n)
		o.popTemp(t)

	// Special: value being sent is passed as a pointer; make it addressable.
	case ir.OSEND:
		n := n.(*ir.SendStmt)
		t := o.markTemp()
		n.Chan = o.expr(n.Chan, nil)
		n.Value = o.expr(n.Value, nil)
		if base.Flag.Cfg.Instrumenting {
			// Force copying to the stack so that (chan T)(nil) <- x
			// is still instrumented as a read of x.
			n.Value = o.copyExpr(n.Value)
		} else {
			n.Value = o.addrTemp(n.Value)
		}
		o.out = append(o.out, n)
		o.popTemp(t)

	// TODO(rsc): Clean temporaries more aggressively.
	// Note that because walkSwitch will rewrite some of the
	// switch into a binary search, this is not as easy as it looks.
	// (If we ran that code here we could invoke order.stmt on
	// the if-else chain instead.)
	// For now just clean all the temporaries at the end.
	// In practice that's fine.
	case ir.OSWITCH:
		n := n.(*ir.SwitchStmt)
		if base.Debug.Libfuzzer != 0 && !hasDefaultCase(n) {
			// Add empty "default:" case for instrumentation.
			n.Cases = append(n.Cases, ir.NewCaseStmt(base.Pos, nil, nil))
		}

		t := o.markTemp()
		n.Tag = o.expr(n.Tag, nil)
		for _, ncas := range n.Cases {
			o.exprListInPlace(ncas.List)
			orderBlock(&ncas.Body, o.free)
		}

		o.out = append(o.out, n)
		o.popTemp(t)
	}

	base.Pos = lno
}

func hasDefaultCase(n *ir.SwitchStmt) bool {
	for _, ncas := range n.Cases {
		if len(ncas.List) == 0 {
			return true
		}
	}
	return false
}

// exprList orders the expression list l into o.
func (o *orderState) exprList(l ir.Nodes) {
	s := l
	for i := range s {
		s[i] = o.expr(s[i], nil)
	}
}

// exprListInPlace orders the expression list l but saves
// the side effects on the individual expression ninit lists.
func (o *orderState) exprListInPlace(l ir.Nodes) {
	s := l
	for i := range s {
		s[i] = o.exprInPlace(s[i])
	}
}

func (o *orderState) exprNoLHS(n ir.Node) ir.Node {
	return o.expr(n, nil)
}

// expr orders a single expression, appending side
// effects to o.out as needed.
// If this is part of an assignment lhs = *np, lhs is given.
// Otherwise lhs == nil. (When lhs != nil it may be possible
// to avoid copying the result of the expression to a temporary.)
// The result of expr MUST be assigned back to n, e.g.
//
//	n.Left = o.expr(n.Left, lhs)
func (o *orderState) expr(n, lhs ir.Node) ir.Node {
	if n == nil {
		return n
	}
	lno := ir.SetPos(n)
	n = o.expr1(n, lhs)
	base.Pos = lno
	return n
}

func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
	o.init(n)

	switch n.Op() {
	default:
		if o.edit == nil {
			o.edit = o.exprNoLHS // create closure once
		}
		ir.EditChildren(n, o.edit)
		return n

	// Addition of strings turns into a function call.
	// Allocate a temporary to hold the strings.
	// Up to 5 strings use direct runtime helpers; more require
	// a preallocated array.
	case ir.OADDSTR:
		n := n.(*ir.AddStringExpr)
		o.exprList(n.List)

		if len(n.List) > 5 {
			t := types.NewArray(types.Types[types.TSTRING], int64(len(n.List)))
			n.Prealloc = o.newTemp(t, false)
		}

		// Mark string(byteSlice) arguments to reuse byteSlice backing
		// buffer during conversion. String concatenation does not
		// retain the strings for later use, so it is safe.
		// However, we can do it only if there is at least one non-empty string literal.
		// Otherwise if all other arguments are empty strings,
		// concatstrings will return the reference to the temp string
		// to the caller.
		hasbyte := false

		haslit := false
		for _, n1 := range n.List {
			hasbyte = hasbyte || n1.Op() == ir.OBYTES2STR
			haslit = haslit || n1.Op() == ir.OLITERAL && len(ir.StringVal(n1)) != 0
		}

		if haslit && hasbyte {
			for _, n2 := range n.List {
				if n2.Op() == ir.OBYTES2STR {
					n2 := n2.(*ir.ConvExpr)
					n2.SetOp(ir.OBYTES2STRTMP)
				}
			}
		}
		return n

	case ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		n.X = o.expr(n.X, nil)
		n.Index = o.expr(n.Index, nil)
		needCopy := false

		if !n.Assigned {
			// Enforce that any []byte slices we are not copying
			// can not be changed before the map index by forcing
			// the map index to happen immediately following the
			// conversions. See copyExpr a few lines below.
			needCopy = mapKeyReplaceStrConv(n.Index)

			if base.Flag.Cfg.Instrumenting {
				// Race detector needs the copy.
				needCopy = true
			}
		}

		// key may need to be addressable
		n.Index = o.mapKeyTemp(n.Pos(), n.X.Type(), n.Index)
		if needCopy {
			return o.copyExpr(n)
		}
		return n

	// concrete type (not interface) argument might need an addressable
	// temporary to pass to the runtime conversion routine.
	case ir.OCONVIFACE:
		n := n.(*ir.ConvExpr)
		n.X = o.expr(n.X, nil)
		if n.X.Type().IsInterface() {
			return n
		}
		if _, _, needsaddr := dataWordFuncName(n.X.Type()); needsaddr || isStaticCompositeLiteral(n.X) {
			// Need a temp if we need to pass the address to the conversion function.
			// We also process static composite literal node here, making a named static global
			// whose address we can put directly in an interface (see OCONVIFACE case in walk).
			n.X = o.addrTemp(n.X)
		}
		return n

	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.X.Op() == ir.OCALLMETH {
			base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck")
		}
		if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER) {
			call := n.X.(*ir.CallExpr)
			// When reordering unsafe.Pointer(f()) into a separate
			// statement, the conversion and function call must stay
			// together. See golang.org/issue/15329.
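			//
			// Illustrative sketch: for p := unsafe.Pointer(f()) with f
			// returning uintptr, splitting it into
			//
			//	tmp := f()
			//	p := unsafe.Pointer(tmp)
			//
			// would be unsafe: tmp holds a uintptr that the garbage
			// collector does not trace, so the conversion back to a
			// pointer must stay in the same expression as the call.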
			o.init(call)
			o.call(call)
			if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
				return o.copyExpr(n)
			}
		} else {
			n.X = o.expr(n.X, nil)
		}
		return n

	case ir.OANDAND, ir.OOROR:
		// ... = LHS && RHS
		//
		// var r bool
		// r = LHS
		// if r {       // or !r, for OROR
		//     r = RHS
		// }
		// ... = r

		n := n.(*ir.LogicalExpr)
		r := o.newTemp(n.Type(), false)

		// Evaluate left-hand side.
		lhs := o.expr(n.X, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, lhs)))

		// Evaluate right-hand side, save generated code.
		saveout := o.out
		o.out = nil
		t := o.markTemp()
		o.edge()
		rhs := o.expr(n.Y, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, rhs)))
		o.popTemp(t)
		gen := o.out
		o.out = saveout

		// If left-hand side doesn't cause a short-circuit, issue right-hand side.
		nif := ir.NewIfStmt(base.Pos, r, nil, nil)
		if n.Op() == ir.OANDAND {
			nif.Body = gen
		} else {
			nif.Else = gen
		}
		o.out = append(o.out, nif)
		return r

	case ir.OCALLMETH:
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
		panic("unreachable")

	case ir.OCALLFUNC,
		ir.OCALLINTER,
		ir.OCAP,
		ir.OCOMPLEX,
		ir.OCOPY,
		ir.OIMAG,
		ir.OLEN,
		ir.OMAKECHAN,
		ir.OMAKEMAP,
		ir.OMAKESLICE,
		ir.OMAKESLICECOPY,
		ir.OMAX,
		ir.OMIN,
		ir.ONEW,
		ir.OREAL,
		ir.ORECOVERFP,
		ir.OSTR2BYTES,
		ir.OSTR2BYTESTMP,
		ir.OSTR2RUNES:

		if isRuneCount(n) {
			// len([]rune(s)) is rewritten to runtime.countrunes(s) later.
			conv := n.(*ir.UnaryExpr).X.(*ir.ConvExpr)
			conv.X = o.expr(conv.X, nil)
		} else {
			o.call(n)
		}

		if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
			return o.copyExpr(n)
		}
		return n

	case ir.OINLCALL:
		n := n.(*ir.InlinedCallExpr)
		o.stmtList(n.Body)
		return n.SingleResult()

	case ir.OAPPEND:
		// Check for append(x, make([]T, y)...) .
		n := n.(*ir.CallExpr)
		if isAppendOfMake(n) {
			n.Args[0] = o.expr(n.Args[0], nil) // order x
			mk := n.Args[1].(*ir.MakeExpr)
			mk.Len = o.expr(mk.Len, nil) // order y
		} else {
			o.exprList(n.Args)
		}

		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.Args[0]) {
			return o.copyExpr(n)
		}
		return n

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		n.X = o.expr(n.X, nil)
		n.Low = o.cheapExpr(o.expr(n.Low, nil))
		n.High = o.cheapExpr(o.expr(n.High, nil))
		n.Max = o.cheapExpr(o.expr(n.Max, nil))
		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.X) {
			return o.copyExpr(n)
		}
		return n

	case ir.OCLOSURE:
		n := n.(*ir.ClosureExpr)
		if n.Transient() && len(n.Func.ClosureVars) > 0 {
			n.Prealloc = o.newTemp(typecheck.ClosureType(n), false)
		}
		return n

	case ir.OMETHVALUE:
		n := n.(*ir.SelectorExpr)
		n.X = o.expr(n.X, nil)
		if n.Transient() {
			t := typecheck.MethodValueType(n)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		o.exprList(n.List)
		if n.Transient() {
			t := types.NewArray(n.Type().Elem(), n.Len)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.ODOTTYPE, ir.ODOTTYPE2:
		n := n.(*ir.TypeAssertExpr)
		n.X = o.expr(n.X, nil)
		if !types.IsDirectIface(n.Type()) || base.Flag.Cfg.Instrumenting {
			return o.copyExprClear(n)
		}
		return n

	case ir.ORECV:
		n := n.(*ir.UnaryExpr)
		n.X = o.expr(n.X, nil)
		return o.copyExprClear(n)

	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
		n := n.(*ir.BinaryExpr)
		n.X = o.expr(n.X, nil)
		n.Y = o.expr(n.Y, nil)

		t := n.X.Type()
		switch {
		case t.IsString():
			// Mark string(byteSlice) arguments to reuse byteSlice backing
			// buffer during conversion. String comparison does not
			// retain the strings for later use, so it is safe.
			if n.X.Op() == ir.OBYTES2STR {
				n.X.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}
			if n.Y.Op() == ir.OBYTES2STR {
				n.Y.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}

		case t.IsStruct() || t.IsArray():
			// for complex comparisons, we need both args to be
			// addressable so we can pass them to the runtime.
			n.X = o.addrTemp(n.X)
			n.Y = o.addrTemp(n.Y)
		}
		return n

	case ir.OMAPLIT:
		// Order map by converting:
		//   map[int]int{
		//     a(): b(),
		//     c(): d(),
		//     e(): f(),
		//   }
		// to
		//   m := map[int]int{}
		//   m[a()] = b()
		//   m[c()] = d()
		//   m[e()] = f()
		// Then order the result.
		// Without this special case, order would otherwise compute all
		// the keys and values before storing any of them to the map.
		// See issue 26552.
		n := n.(*ir.CompLitExpr)
		entries := n.List
		statics := entries[:0]
		var dynamics []*ir.KeyExpr
		for _, r := range entries {
			r := r.(*ir.KeyExpr)

			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			// Recursively ordering some static entries can change them to dynamic;
			// e.g., OCONVIFACE nodes. See #31777.
			r = o.expr(r, nil).(*ir.KeyExpr)
			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			statics = append(statics, r)
		}
		n.List = statics

		if len(dynamics) == 0 {
			return n
		}

		// Emit the creation of the map (with all its static entries).
		m := o.newTemp(n.Type(), false)
		as := ir.NewAssignStmt(base.Pos, m, n)
		typecheck.Stmt(as)
		o.stmt(as)

		// Emit eval+insert of dynamic entries, one at a time.
		for _, r := range dynamics {
			lhs := typecheck.AssignExpr(ir.NewIndexExpr(base.Pos, m, r.Key)).(*ir.IndexExpr)
			base.AssertfAt(lhs.Op() == ir.OINDEXMAP, lhs.Pos(), "want OINDEXMAP, have %+v", lhs)
			lhs.RType = n.RType

			as := ir.NewAssignStmt(base.Pos, lhs, r.Value)
			typecheck.Stmt(as)
			o.stmt(as)
		}

		// Remember that we issued these assignments so we can include that count
		// in the map alloc hint.
		// We're assuming here that all the keys in the map literal are distinct.
		// If any are equal, this will be an overcount. Probably not worth accounting
		// for that, as equal keys in map literals are rare, and at worst we waste
		// a bit of space.
		n.Len += int64(len(dynamics))

		return m
	}

	// No return - type-assertions above. Each case must return for itself.
}
// as2func orders OAS2FUNC nodes. It creates temporaries to ensure left-to-right assignment.
// The caller should order the right-hand side of the assignment before calling order.as2func.
// It rewrites
//
//	a, b, a = ...
//
// as
//
//	tmp1, tmp2, tmp3 = ...
//	a, b, a = tmp1, tmp2, tmp3
//
// This is necessary to ensure left-to-right assignment order.
func (o *orderState) as2func(n *ir.AssignListStmt) {
	results := n.Rhs[0].Type()
	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)
	for i, nl := range n.Lhs {
		if !ir.IsBlank(nl) {
			typ := results.Field(i).Type
			tmp := o.newTemp(typ, typ.HasPointers())
			n.Lhs[i] = tmp
			as.Lhs = append(as.Lhs, nl)
			as.Rhs = append(as.Rhs, tmp)
		}
	}

	o.out = append(o.out, n)
	o.stmt(typecheck.Stmt(as))
}

// as2ok orders OAS2XXX with ok.
// Just like as2func, this also adds temporaries to ensure left-to-right assignment.
func (o *orderState) as2ok(n *ir.AssignListStmt) {
	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)

	do := func(i int, typ *types.Type) {
		if nl := n.Lhs[i]; !ir.IsBlank(nl) {
			var tmp ir.Node = o.newTemp(typ, typ.HasPointers())
			n.Lhs[i] = tmp
			as.Lhs = append(as.Lhs, nl)
			if i == 1 {
				// The "ok" result is an untyped boolean according to the Go
				// spec. We need to explicitly convert it to the LHS type in
				// case the latter is a defined boolean type (#8475).
				tmp = typecheck.Conv(tmp, nl.Type())
			}
			as.Rhs = append(as.Rhs, tmp)
		}
	}

	do(0, n.Rhs[0].Type())
	do(1, types.Types[types.TBOOL])

	o.out = append(o.out, n)
	o.stmt(typecheck.Stmt(as))
}