// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
	"unicode/utf8"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/src"
	"cmd/internal/sys"
)

// cheapComputableIndex reports whether indexing an array or slice whose
// elements have the given width is cheap on the target architecture,
// typically because scaled (register+register) addressing is available.
// When it returns false, walkRange iterates with an explicit element
// pointer instead of an index.
func cheapComputableIndex(width int64) bool {
	switch ssagen.Arch.LinkArch.Family {
	// MIPS does not have R+R addressing, so it is not listed in either
	// case and falls through to the default (return false).
	// ARM64 may lack the ability to generate this code in our assembler,
	// but the architecture supports it.
	case sys.PPC64, sys.S390X:
		return width == 1
	case sys.AMD64, sys.I386, sys.ARM64, sys.ARM:
		switch width {
		case 1, 2, 4, 8:
			return true
		}
	}
	return false
}

// walkRange transforms various forms of ORANGE into
// simpler forms. The caller must use the result in place of nrange;
// nrange may also be modified in place, and may itself be the
// returned node.
func walkRange(nrange *ir.RangeStmt) ir.Node {
	base.Assert(!nrange.DistinctVars) // Should all be rewritten before escape analysis
	if isMapClear(nrange) {
		return mapRangeClear(nrange)
	}

	nfor := ir.NewForStmt(nrange.Pos(), nil, nil, nil, nil, nrange.DistinctVars)
	nfor.SetInit(nrange.Init())
	nfor.Label = nrange.Label

	// variable name conventions:
	//	ohv1, hv1, hv2: hidden (old) val 1, 2
	//	ha, hit: hidden aggregate, iterator
	//	hn, hp: hidden len, pointer
	//	hb: hidden bool
	//	a, v1, v2: not hidden aggregate, val 1, 2

	a := nrange.X
	t := a.Type()
	lno := ir.SetPos(a)

	v1, v2 := nrange.Key, nrange.Value

	if ir.IsBlank(v2) {
		v2 = nil
	}

	if ir.IsBlank(v1) && v2 == nil {
		v1 = nil
	}

	if v1 == nil && v2 != nil {
		base.Fatalf("walkRange: v2 != nil while v1 == nil")
	}

	var body []ir.Node
	var init []ir.Node
	switch k := t.Kind(); {
	default:
		base.Fatalf("walkRange")

	case types.IsInt[k]:
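		// Illustrative sketch of the lowering built below, for
		// "for v1 := range a" where a has an integer type:
		//
		//	hv1 := 0
		//	hn := a
		//	for ; hv1 < hn; hv1 = hv1 + 1 {
		//		v1 = hv1
		//		... body ...
		//	}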
		hv1 := typecheck.TempAt(base.Pos, ir.CurFunc, t)
		hn := typecheck.TempAt(base.Pos, ir.CurFunc, t)

		init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
		init = append(init, ir.NewAssignStmt(base.Pos, hn, a))

		nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn)
		nfor.Post = ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, ir.NewInt(base.Pos, 1)))

		if v1 != nil {
			body = []ir.Node{rangeAssign(nrange, hv1)}
		}

	case k == types.TARRAY, k == types.TSLICE, k == types.TPTR: // TPTR is pointer-to-array
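		// "for i := range a { a[i] = zero }" over an array or slice can be
		// replaced by a call to the runtime memclr routines; arrayRangeClear
		// builds that replacement when the pattern applies.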
		if nn := arrayRangeClear(nrange, v1, v2, a); nn != nil {
			base.Pos = lno
			return nn
		}

		// Element type of the iteration
		var elem *types.Type
		switch t.Kind() {
		case types.TSLICE, types.TARRAY:
			elem = t.Elem()
		case types.TPTR:
			elem = t.Elem().Elem()
		}

		// order.stmt arranged for a copy of the array/slice variable if needed.
		ha := a

		hv1 := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
		hn := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])

		init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
		init = append(init, ir.NewAssignStmt(base.Pos, hn, ir.NewUnaryExpr(base.Pos, ir.OLEN, ha)))

		nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn)
		nfor.Post = ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, ir.NewInt(base.Pos, 1)))

		// for range ha { body }
		if v1 == nil {
			break
		}

		// for v1 := range ha { body }
		if v2 == nil {
			body = []ir.Node{rangeAssign(nrange, hv1)}
			break
		}

		// for v1, v2 := range ha { body }
		if cheapComputableIndex(elem.Size()) {
			// v1, v2 = hv1, ha[hv1]
			tmp := ir.NewIndexExpr(base.Pos, ha, hv1)
			tmp.SetBounded(true)
			body = []ir.Node{rangeAssign2(nrange, hv1, tmp)}
			break
		}

		// Slice to iterate over
		var hs ir.Node
		if t.IsSlice() {
			hs = ha
		} else {
			var arr ir.Node
			if t.IsPtr() {
				arr = ha
			} else {
				arr = typecheck.NodAddr(ha)
				arr.SetType(t.PtrTo())
				arr.SetTypecheck(1)
			}
			hs = ir.NewSliceExpr(base.Pos, ir.OSLICEARR, arr, nil, nil, nil)
			// old typechecker doesn't know OSLICEARR, so we set types explicitly
			hs.SetType(types.NewSlice(elem))
			hs.SetTypecheck(1)
		}

		// We use a "pointer" to keep track of where we are in the backing array
		// of the slice hs. This pointer starts at hs.ptr and gets incremented
		// by the element size each time through the loop.
		//
		// It's tricky, though, as on the last iteration this pointer gets
		// incremented to point past the end of the backing array. We can't
		// let the garbage collector see that final out-of-bounds pointer.
		//
		// To avoid this, we keep the "pointer" alternately in 2 variables, one
		// pointer typed and one uintptr typed. Most of the time it lives in the
		// regular pointer variable, but when it might be out of bounds (after it
		// has been incremented, but before the loop condition has been checked)
		// it lives briefly in the uintptr variable.
		//
		// hp contains the pointer version (of type *T, where T is the element type).
		// It is guaranteed to always be in range, keeps the backing store alive,
		// and is updated on stack copies. If a GC occurs when this function is
		// suspended at any safepoint, this variable ensures correct operation.
		//
		// hu contains the equivalent uintptr version. It may point past the
		// end, but doesn't keep the backing store alive and doesn't get updated
		// on a stack copy. If a GC occurs while this function is on the top of
		// the stack, then the last frame is scanned conservatively and hu will
		// act as a reference to the backing array to ensure it is not collected.
		//
		// The "pointer" we're moving across the backing array lives in one
		// or the other of hp and hu as the loop proceeds.
		//
		// hp is live during most of the body of the loop. But it isn't live
		// at the very top of the loop, when we haven't checked i<n yet, and
		// it could point off the end of the backing store.
		// hu is live only at the very top and very bottom of the loop.
		// In particular, only when it cannot possibly be live across a call.
		//
		// So we do
		//   hu = uintptr(unsafe.Pointer(hs.ptr))
		//   for i := 0; i < hs.len; i++ {
		//     hp = (*T)(unsafe.Pointer(hu))
		//     v1, v2 = i, *hp
		//     ... body of loop ...
		//     hu = uintptr(unsafe.Pointer(hp)) + elemsize
		//   }
		//
		// Between the assignments to hu and the assignment back to hp, there
		// must not be any calls.

		// Pointer to current iteration position. Start on entry to the loop
		// with the pointer in hu.
		ptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, hs)
		ptr.SetBounded(true)
		huVal := ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUNSAFEPTR], ptr)
		huVal = ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUINTPTR], huVal)
		hu := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR])
		init = append(init, ir.NewAssignStmt(base.Pos, hu, huVal))

		// Convert hu to hp at the top of the loop (after the condition has been checked).
		hpVal := ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUNSAFEPTR], hu)
		hpVal.SetCheckPtr(true) // disable checkptr on this conversion
		hpVal = ir.NewConvExpr(base.Pos, ir.OCONVNOP, elem.PtrTo(), hpVal)
		hp := typecheck.TempAt(base.Pos, ir.CurFunc, elem.PtrTo())
		body = append(body, ir.NewAssignStmt(base.Pos, hp, hpVal))

		// Assign variables on the LHS of the range statement. Use *hp to get the element.
		e := ir.NewStarExpr(base.Pos, hp)
		e.SetBounded(true)
		a := rangeAssign2(nrange, hv1, e)
		body = append(body, a)

		// Advance pointer for next iteration of the loop.
		// This reads from hp and writes to hu.
		huVal = ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUNSAFEPTR], hp)
		huVal = ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUINTPTR], huVal)
		as := ir.NewAssignStmt(base.Pos, hu, ir.NewBinaryExpr(base.Pos, ir.OADD, huVal, ir.NewInt(base.Pos, elem.Size())))
		nfor.Post = ir.NewBlockStmt(base.Pos, []ir.Node{nfor.Post, as})

	case k == types.TMAP:
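		// Illustrative sketch of the lowering built below, for
		// "for v1, v2 := range ha" where ha is a map (field names follow
		// the iterator layout noted below):
		//
		//	mapiterinit(maptype, ha, &hit)
		//	for ; hit.key != nil; mapiternext(&hit) {
		//		v1, v2 = *hit.key, *hit.elem
		//		... body ...
		//	}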
		// order.stmt allocated the iterator for us.
		// We only use a once, so no copy is needed.
		ha := a

		hit := nrange.Prealloc
		th := hit.Type()
		// The field indices depend on the layout of the iterator struct;
		// see cmd/compile/internal/reflectdata/reflect.go:MapIterType.
		keysym := th.Field(0).Sym
		elemsym := th.Field(1).Sym // ditto

		fn := typecheck.LookupRuntime("mapiterinit", t.Key(), t.Elem(), th)
		init = append(init, mkcallstmt1(fn, reflectdata.RangeMapRType(base.Pos, nrange), ha, typecheck.NodAddr(hit)))
		nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym), typecheck.NodNil())

		fn = typecheck.LookupRuntime("mapiternext", th)
		nfor.Post = mkcallstmt1(fn, typecheck.NodAddr(hit))

		key := ir.NewStarExpr(base.Pos, typecheck.ConvNop(ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym), types.NewPtr(t.Key())))
		if v1 == nil {
			body = nil
		} else if v2 == nil {
			body = []ir.Node{rangeAssign(nrange, key)}
		} else {
			elem := ir.NewStarExpr(base.Pos, typecheck.ConvNop(ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, elemsym), types.NewPtr(t.Elem())))
			body = []ir.Node{rangeAssign2(nrange, key, elem)}
		}

	case k == types.TCHAN:
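		// Illustrative sketch of the lowering built below, for
		// "for v1 := range ha" where ha is a channel (the receive is placed
		// in the loop condition's init, so it runs before every check):
		//
		//	for {
		//		hv1, hb = <-ha
		//		if !hb {
		//			break
		//		}
		//		v1 = hv1
		//		hv1 = <zero>	// see the note on issue 15281 below
		//		... body ...
		//	}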
		// order.stmt arranged for a copy of the channel variable.
		ha := a

		hv1 := typecheck.TempAt(base.Pos, ir.CurFunc, t.Elem())
		hv1.SetTypecheck(1)
		if t.Elem().HasPointers() {
			init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
		}
		hb := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TBOOL])

		nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, hb, ir.NewBool(base.Pos, false))
		lhs := []ir.Node{hv1, hb}
		rhs := []ir.Node{ir.NewUnaryExpr(base.Pos, ir.ORECV, ha)}
		a := ir.NewAssignListStmt(base.Pos, ir.OAS2RECV, lhs, rhs)
		a.SetTypecheck(1)
		nfor.Cond = ir.InitExpr([]ir.Node{a}, nfor.Cond)
		if v1 == nil {
			body = nil
		} else {
			body = []ir.Node{rangeAssign(nrange, hv1)}
		}
		// Zero hv1. This prevents hv1 from being the sole, inaccessible
		// reference to an otherwise GC-able value during the next channel receive.
		// See issue 15281.
		body = append(body, ir.NewAssignStmt(base.Pos, hv1, nil))

	case k == types.TSTRING:
		// Transform string range statements like "for v1, v2 = range a" into
		//
		// ha := a
		// for hv1 := 0; hv1 < len(ha); {
		//   hv1t := hv1
		//   hv2 := rune(ha[hv1])
		//   if hv2 < utf8.RuneSelf {
		//      hv1++
		//   } else {
		//      hv2, hv1 = decoderune(ha, hv1)
		//   }
		//   v1, v2 = hv1t, hv2
		//   // original body
		// }

		// order.stmt arranged for a copy of the string variable.
		ha := a

		hv1 := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
		hv1t := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
		hv2 := typecheck.TempAt(base.Pos, ir.CurFunc, types.RuneType)

		// hv1 := 0
		init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))

		// hv1 < len(ha)
		nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, ir.NewUnaryExpr(base.Pos, ir.OLEN, ha))

		if v1 != nil {
			// hv1t = hv1
			body = append(body, ir.NewAssignStmt(base.Pos, hv1t, hv1))
		}

		// hv2 := rune(ha[hv1])
		nind := ir.NewIndexExpr(base.Pos, ha, hv1)
		nind.SetBounded(true)
		body = append(body, ir.NewAssignStmt(base.Pos, hv2, typecheck.Conv(nind, types.RuneType)))

		// if hv2 < utf8.RuneSelf
		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv2, ir.NewInt(base.Pos, utf8.RuneSelf))

		// hv1++
		nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, ir.NewInt(base.Pos, 1)))}

		// } else {
		// hv2, hv1 = decoderune(ha, hv1)
		fn := typecheck.LookupRuntime("decoderune")
		call := mkcall1(fn, fn.Type().ResultsTuple(), &nif.Else, ha, hv1)
		a := ir.NewAssignListStmt(base.Pos, ir.OAS2, []ir.Node{hv2, hv1}, []ir.Node{call})
		nif.Else.Append(a)

		body = append(body, nif)

		if v1 != nil {
			if v2 != nil {
				// v1, v2 = hv1t, hv2
				body = append(body, rangeAssign2(nrange, hv1t, hv2))
			} else {
				// v1 = hv1t
				body = append(body, rangeAssign(nrange, hv1t))
			}
		}
	}

	// Typecheck the pieces built above and splice them into the loop:
	// init becomes the for statement's init list, and the generated body
	// statements are prepended to the original range body.
	typecheck.Stmts(init)

	nfor.PtrInit().Append(init...)

	typecheck.Stmts(nfor.Cond.Init())

	nfor.Cond = typecheck.Expr(nfor.Cond)
	nfor.Cond = typecheck.DefaultLit(nfor.Cond, nil)
	nfor.Post = typecheck.Stmt(nfor.Post)
	typecheck.Stmts(body)
	nfor.Body.Append(body...)
	nfor.Body.Append(nrange.Body...)

	var n ir.Node = nfor

	n = walkStmt(n)

	base.Pos = lno
	return n
}

// rangeAssign returns "n.Key = key".
func rangeAssign(n *ir.RangeStmt, key ir.Node) ir.Node {
	key = rangeConvert(n, n.Key.Type(), key, n.KeyTypeWord, n.KeySrcRType)
	return ir.NewAssignStmt(n.Pos(), n.Key, key)
}

// rangeAssign2 returns "n.Key, n.Value = key, value".
func rangeAssign2(n *ir.RangeStmt, key, value ir.Node) ir.Node {
	// Use OAS2 to correctly handle assignments
	// of the form "v1, a[v1] = range".
	key = rangeConvert(n, n.Key.Type(), key, n.KeyTypeWord, n.KeySrcRType)
	value = rangeConvert(n, n.Value.Type(), value, n.ValueTypeWord, n.ValueSrcRType)
	return ir.NewAssignListStmt(n.Pos(), ir.OAS2, []ir.Node{n.Key, n.Value}, []ir.Node{key, value})
}

// rangeConvert returns src, converted to dst if necessary. If a
// conversion is necessary, then typeWord and srcRType are copied to
// their respective ConvExpr fields.
func rangeConvert(nrange *ir.RangeStmt, dst *types.Type, src, typeWord, srcRType ir.Node) ir.Node {
	src = typecheck.Expr(src)
	if dst.Kind() == types.TBLANK || types.Identical(dst, src.Type()) {
		return src
	}

	n := ir.NewConvExpr(nrange.Pos(), ir.OCONV, dst, src)
	n.TypeWord = typeWord
	n.SrcRType = srcRType
	return typecheck.Expr(n)
}

// isMapClear checks if n is of the form:
//
//	for k := range m {
//		delete(m, k)
//	}
//
// where == for keys of map m is reflexive.
func isMapClear(n *ir.RangeStmt) bool {
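	// Don't apply this optimization when optimizations are disabled (-N)
	// or when the build is instrumented (e.g. for the race detector).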
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return false
	}

	t := n.X.Type()
	if n.Op() != ir.ORANGE || t.Kind() != types.TMAP || n.Key == nil || n.Value != nil {
		return false
	}

	k := n.Key
	// Require k to be a new variable name.
	if !ir.DeclaredBy(k, n) {
		return false
	}

	if len(n.Body) != 1 {
		return false
	}

	stmt := n.Body[0] // only stmt in body
	if stmt == nil || stmt.Op() != ir.ODELETE {
		return false
	}

	m := n.X
	if delete := stmt.(*ir.CallExpr); !ir.SameSafeExpr(delete.Args[0], m) || !ir.SameSafeExpr(delete.Args[1], k) {
		return false
	}

	// Keys where equality is not reflexive cannot be deleted from maps.
	if !types.IsReflexive(t.Key()) {
		return false
	}

	return true
}

// mapRangeClear constructs a call to runtime.mapclear for the map range idiom.
func mapRangeClear(nrange *ir.RangeStmt) ir.Node {
	m := nrange.X
	origPos := ir.SetPos(m)
	defer func() { base.Pos = origPos }()

	return mapClear(m, reflectdata.RangeMapRType(base.Pos, nrange))
}

// mapClear constructs a call to runtime.mapclear for the map m.
func mapClear(m, rtyp ir.Node) ir.Node {
	t := m.Type()

	// instantiate mapclear(typ *type, hmap map[any]any)
	fn := typecheck.LookupRuntime("mapclear", t.Key(), t.Elem())
	n := mkcallstmt1(fn, rtyp, m)
	return walkStmt(typecheck.Stmt(n))
}

// arrayRangeClear lowers the loop into a runtime memclr call if possible, for
// fast zeroing of slices and arrays (issue 5373).
// It looks for instances of
//
//	for i := range a {
//		a[i] = zero
//	}
//
// in which the evaluation of a is side-effect-free.
//
// Parameters are as in walkRange: "for v1, v2 = range a".
func arrayRangeClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return nil
	}

	if v1 == nil || v2 != nil {
		return nil
	}

	if len(loop.Body) != 1 || loop.Body[0] == nil {
		return nil
	}

	stmt1 := loop.Body[0] // only stmt in body
	if stmt1.Op() != ir.OAS {
		return nil
	}
	stmt := stmt1.(*ir.AssignStmt)
	if stmt.X.Op() != ir.OINDEX {
		return nil
	}
	lhs := stmt.X.(*ir.IndexExpr)
	x := lhs.X
	if a.Type().IsPtr() && a.Type().Elem().IsArray() {
		if s, ok := x.(*ir.StarExpr); ok && s.Op() == ir.ODEREF {
			x = s.X
		}
	}

	if !ir.SameSafeExpr(x, a) || !ir.SameSafeExpr(lhs.Index, v1) {
		return nil
	}

	if !ir.IsZero(stmt.Y) {
		return nil
	}

	return arrayClear(stmt.Pos(), a, loop)
}

// arrayClear constructs a call to runtime.memclr for fast zeroing of slices and arrays.
func arrayClear(wbPos src.XPos, a ir.Node, nrange *ir.RangeStmt) ir.Node {
	elemsize := typecheck.RangeExprType(a.Type()).Elem().Size()
	if elemsize <= 0 {
		return nil
	}

	// Convert to
	// if len(a) != 0 {
	// 	hp = &a[0]
	// 	hn = len(a)*sizeof(elem(a))
	// 	memclr{NoHeap,Has}Pointers(hp, hn)
	// 	i = len(a) - 1
	// }
	n := ir.NewIfStmt(base.Pos, nil, nil, nil)
	n.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(base.Pos, 0))

	// hp = &a[0]
	hp := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUNSAFEPTR])

	ix := ir.NewIndexExpr(base.Pos, a, ir.NewInt(base.Pos, 0))
	ix.SetBounded(true)
	addr := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])
	n.Body.Append(ir.NewAssignStmt(base.Pos, hp, addr))

	// hn = len(a) * sizeof(elem(a))
	hn := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR])
	mul := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(base.Pos, elemsize)), types.Types[types.TUINTPTR])
	n.Body.Append(ir.NewAssignStmt(base.Pos, hn, mul))

	var fn ir.Node
	if a.Type().Elem().HasPointers() {
		// memclrHasPointers(hp, hn)
		ir.CurFunc.SetWBPos(wbPos)
		fn = mkcallstmt("memclrHasPointers", hp, hn)
	} else {
		// memclrNoHeapPointers(hp, hn)
		fn = mkcallstmt("memclrNoHeapPointers", hp, hn)
	}

	n.Body.Append(fn)

	// For array range clear, also set "i = len(a) - 1"
	if nrange != nil {
		idx := ir.NewAssignStmt(base.Pos, nrange.Key, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(base.Pos, 1)))
		n.Body.Append(idx)
	}

	n.Cond = typecheck.Expr(n.Cond)
	n.Cond = typecheck.DefaultLit(n.Cond, nil)
	typecheck.Stmts(n.Body)
	return walkStmt(n)
}